From ecd71e8073fc7295ece96d8054415e64e7c0d304 Mon Sep 17 00:00:00 2001
From: manuel83
Date: Mon, 19 Aug 2019 22:54:59 +0200
Subject: [PATCH] python 3 migration

---
 .gitignore | 2 +-
 venv/bin/activate | 76 -
 venv/bin/activate.csh | 37 -
 venv/bin/activate.fish | 75 -
 venv/bin/chardetect | 11 -
 venv/bin/easy_install | 11 -
 venv/bin/easy_install-3.6 | 11 -
 venv/bin/flask | 11 -
 venv/bin/pip | 12 -
 venv/bin/pip3 | 12 -
 venv/bin/pip3.6 | 12 -
 venv/bin/python | Bin 13620 -> 0 bytes
 venv/bin/python3 | Bin 13620 -> 0 bytes
 venv/bin/python3.6 | Bin 13620 -> 0 bytes
 .../site/python3.6/greenlet/greenlet.h | 157 -
 .../Click-7.0.dist-info/INSTALLER | 1 -
 .../Click-7.0.dist-info/LICENSE.txt | 39 -
 .../Click-7.0.dist-info/METADATA | 121 -
 .../site-packages/Click-7.0.dist-info/RECORD | 40 -
 .../site-packages/Click-7.0.dist-info/WHEEL | 6 -
 .../Click-7.0.dist-info/top_level.txt | 1 -
 .../Flask-1.1.1.dist-info/INSTALLER | 1 -
 .../Flask-1.1.1.dist-info/LICENSE.rst | 28 -
 .../Flask-1.1.1.dist-info/METADATA | 134 -
 .../Flask-1.1.1.dist-info/RECORD | 48 -
 .../site-packages/Flask-1.1.1.dist-info/WHEEL | 6 -
 .../Flask-1.1.1.dist-info/entry_points.txt | 3 -
 .../Flask-1.1.1.dist-info/top_level.txt | 1 -
 .../PKG-INFO | 27 -
 .../SOURCES.txt | 10 -
 .../dependency_links.txt | 1 -
 .../installed-files.txt | 8 -
 .../not-zip-safe | 1 -
 .../requires.txt | 1 -
 .../top_level.txt | 1 -
 .../Flask_SocketIO-4.2.1.dist-info/INSTALLER | 1 -
 .../Flask_SocketIO-4.2.1.dist-info/LICENSE | 20 -
 .../Flask_SocketIO-4.2.1.dist-info/METADATA | 28 -
 .../Flask_SocketIO-4.2.1.dist-info/RECORD | 14 -
 .../Flask_SocketIO-4.2.1.dist-info/WHEEL | 6 -
 .../top_level.txt | 1 -
 .../GitPython-3.0.1.dist-info/AUTHORS | 38 -
 .../GitPython-3.0.1.dist-info/INSTALLER | 1 -
 .../GitPython-3.0.1.dist-info/LICENSE | 30 -
 .../GitPython-3.0.1.dist-info/METADATA | 30 -
 .../GitPython-3.0.1.dist-info/RECORD | 205 -
 .../GitPython-3.0.1.dist-info/WHEEL | 5 -
 .../GitPython-3.0.1.dist-info/top_level.txt | 1 -
 .../Jinja2-2.10.1.dist-info/INSTALLER | 1 -
 .../Jinja2-2.10.1.dist-info/LICENSE | 31 -
 .../Jinja2-2.10.1.dist-info/METADATA | 67 -
 .../Jinja2-2.10.1.dist-info/RECORD | 61 -
 .../Jinja2-2.10.1.dist-info/WHEEL | 6 -
 .../Jinja2-2.10.1.dist-info/entry_points.txt | 4 -
 .../Jinja2-2.10.1.dist-info/top_level.txt | 1 -
 .../MarkupSafe-1.1.1.dist-info/INSTALLER | 1 -
 .../MarkupSafe-1.1.1.dist-info/LICENSE.rst | 28 -
 .../MarkupSafe-1.1.1.dist-info/METADATA | 103 -
 .../MarkupSafe-1.1.1.dist-info/RECORD | 16 -
 .../MarkupSafe-1.1.1.dist-info/WHEEL | 5 -
 .../MarkupSafe-1.1.1.dist-info/top_level.txt | 1 -
 .../PyYAML-5.1.2-py3.6.egg-info/PKG-INFO | 38 -
 .../PyYAML-5.1.2-py3.6.egg-info/SOURCES.txt | 27 -
 .../dependency_links.txt | 1 -
 .../installed-files.txt | 38 -
 .../PyYAML-5.1.2-py3.6.egg-info/top_level.txt | 2 -
 .../Werkzeug-0.15.5.dist-info/INSTALLER | 1 -
 .../Werkzeug-0.15.5.dist-info/LICENSE.rst | 28 -
 .../Werkzeug-0.15.5.dist-info/METADATA | 128 -
 .../Werkzeug-0.15.5.dist-info/RECORD | 119 -
 .../Werkzeug-0.15.5.dist-info/WHEEL | 6 -
 .../Werkzeug-0.15.5.dist-info/top_level.txt | 1 -
 .../DESCRIPTION.rst | 50 -
 .../certifi-2019.6.16.dist-info/INSTALLER | 1 -
 .../certifi-2019.6.16.dist-info/LICENSE.txt | 21 -
 .../certifi-2019.6.16.dist-info/METADATA | 74 -
 .../certifi-2019.6.16.dist-info/RECORD | 15 -
 .../certifi-2019.6.16.dist-info/WHEEL | 6 -
 .../certifi-2019.6.16.dist-info/metadata.json | 1 -
 .../certifi-2019.6.16.dist-info/top_level.txt | 1 -
 .../site-packages/certifi/__init__.py | 3 -
 .../site-packages/certifi/__main__.py | 2 -
 .../site-packages/certifi/cacert.pem | 4618
---------- .../python3.6/site-packages/certifi/core.py | 15 - .../chardet-3.0.4.dist-info/DESCRIPTION.rst | 70 - .../chardet-3.0.4.dist-info/INSTALLER | 1 - .../chardet-3.0.4.dist-info/METADATA | 96 - .../chardet-3.0.4.dist-info/RECORD | 91 - .../chardet-3.0.4.dist-info/WHEEL | 6 - .../chardet-3.0.4.dist-info/entry_points.txt | 3 - .../chardet-3.0.4.dist-info/metadata.json | 1 - .../chardet-3.0.4.dist-info/top_level.txt | 1 - .../site-packages/chardet/__init__.py | 39 - .../site-packages/chardet/big5freq.py | 386 - .../site-packages/chardet/big5prober.py | 47 - .../site-packages/chardet/chardistribution.py | 233 - .../chardet/charsetgroupprober.py | 106 - .../site-packages/chardet/charsetprober.py | 145 - .../site-packages/chardet/cli/__init__.py | 1 - .../site-packages/chardet/cli/chardetect.py | 85 - .../chardet/codingstatemachine.py | 88 - .../python3.6/site-packages/chardet/compat.py | 34 - .../site-packages/chardet/cp949prober.py | 49 - .../python3.6/site-packages/chardet/enums.py | 76 - .../site-packages/chardet/escprober.py | 101 - .../python3.6/site-packages/chardet/escsm.py | 246 - .../site-packages/chardet/eucjpprober.py | 92 - .../site-packages/chardet/euckrfreq.py | 195 - .../site-packages/chardet/euckrprober.py | 47 - .../site-packages/chardet/euctwfreq.py | 387 - .../site-packages/chardet/euctwprober.py | 46 - .../site-packages/chardet/gb2312freq.py | 283 - .../site-packages/chardet/gb2312prober.py | 46 - .../site-packages/chardet/hebrewprober.py | 292 - .../site-packages/chardet/jisfreq.py | 325 - .../python3.6/site-packages/chardet/jpcntx.py | 233 - .../chardet/langbulgarianmodel.py | 228 - .../chardet/langcyrillicmodel.py | 333 - .../site-packages/chardet/langgreekmodel.py | 225 - .../site-packages/chardet/langhebrewmodel.py | 200 - .../chardet/langhungarianmodel.py | 225 - .../site-packages/chardet/langthaimodel.py | 199 - .../site-packages/chardet/langturkishmodel.py | 193 - .../site-packages/chardet/latin1prober.py | 145 - .../site-packages/chardet/mbcharsetprober.py | 91 - .../site-packages/chardet/mbcsgroupprober.py | 54 - .../python3.6/site-packages/chardet/mbcssm.py | 572 -- .../site-packages/chardet/sbcharsetprober.py | 132 - .../site-packages/chardet/sbcsgroupprober.py | 73 - .../site-packages/chardet/sjisprober.py | 92 - .../chardet/universaldetector.py | 286 - .../site-packages/chardet/utf8prober.py | 82 - .../site-packages/chardet/version.py | 9 - .../python3.6/site-packages/click/__init__.py | 97 - .../site-packages/click/_bashcomplete.py | 293 - .../python3.6/site-packages/click/_compat.py | 703 -- .../site-packages/click/_termui_impl.py | 621 -- .../site-packages/click/_textwrap.py | 38 - .../site-packages/click/_unicodefun.py | 125 - .../site-packages/click/_winconsole.py | 307 - .../lib/python3.6/site-packages/click/core.py | 1856 ---- .../site-packages/click/decorators.py | 311 - .../site-packages/click/exceptions.py | 235 - .../site-packages/click/formatting.py | 256 - .../python3.6/site-packages/click/globals.py | 48 - .../python3.6/site-packages/click/parser.py | 427 - .../python3.6/site-packages/click/termui.py | 606 -- .../python3.6/site-packages/click/testing.py | 374 - .../python3.6/site-packages/click/types.py | 668 -- .../python3.6/site-packages/click/utils.py | 440 - .../site-packages/dateutil/__init__.py | 8 - .../site-packages/dateutil/_common.py | 43 - .../site-packages/dateutil/_version.py | 4 - .../site-packages/dateutil/easter.py | 89 - .../site-packages/dateutil/parser/__init__.py | 60 - .../site-packages/dateutil/parser/_parser.py | 1580 
---- .../dateutil/parser/isoparser.py | 411 - .../site-packages/dateutil/relativedelta.py | 599 -- .../python3.6/site-packages/dateutil/rrule.py | 1736 ---- .../site-packages/dateutil/tz/__init__.py | 17 - .../site-packages/dateutil/tz/_common.py | 419 - .../site-packages/dateutil/tz/_factories.py | 73 - .../python3.6/site-packages/dateutil/tz/tz.py | 1836 ---- .../site-packages/dateutil/tz/win.py | 370 - .../python3.6/site-packages/dateutil/tzwin.py | 2 - .../python3.6/site-packages/dateutil/utils.py | 71 - .../dateutil/zoneinfo/__init__.py | 167 - .../zoneinfo/dateutil-zoneinfo.tar.gz | Bin 154405 -> 0 bytes .../dateutil/zoneinfo/rebuild.py | 53 - .../ddt-1.2.1.dist-info/INSTALLER | 1 - .../ddt-1.2.1.dist-info/LICENSE.md | 21 - .../ddt-1.2.1.dist-info/METADATA | 25 - .../site-packages/ddt-1.2.1.dist-info/RECORD | 8 - .../site-packages/ddt-1.2.1.dist-info/WHEEL | 6 - .../ddt-1.2.1.dist-info/top_level.txt | 1 - venv/lib/python3.6/site-packages/ddt.py | 310 - .../python3.6/site-packages/dns/__init__.py | 56 - .../python3.6/site-packages/dns/_compat.py | 59 - .../lib/python3.6/site-packages/dns/dnssec.py | 519 -- venv/lib/python3.6/site-packages/dns/e164.py | 105 - venv/lib/python3.6/site-packages/dns/edns.py | 269 - .../python3.6/site-packages/dns/entropy.py | 148 - .../python3.6/site-packages/dns/exception.py | 128 - venv/lib/python3.6/site-packages/dns/flags.py | 130 - .../lib/python3.6/site-packages/dns/grange.py | 69 - venv/lib/python3.6/site-packages/dns/hash.py | 37 - venv/lib/python3.6/site-packages/dns/inet.py | 124 - venv/lib/python3.6/site-packages/dns/ipv4.py | 63 - venv/lib/python3.6/site-packages/dns/ipv6.py | 181 - .../python3.6/site-packages/dns/message.py | 1175 --- venv/lib/python3.6/site-packages/dns/name.py | 994 -- .../python3.6/site-packages/dns/namedict.py | 108 - venv/lib/python3.6/site-packages/dns/node.py | 182 - .../lib/python3.6/site-packages/dns/opcode.py | 119 - venv/lib/python3.6/site-packages/dns/py.typed | 0 venv/lib/python3.6/site-packages/dns/query.py | 683 -- venv/lib/python3.6/site-packages/dns/rcode.py | 144 - venv/lib/python3.6/site-packages/dns/rdata.py | 456 - .../python3.6/site-packages/dns/rdataclass.py | 122 - .../python3.6/site-packages/dns/rdataset.py | 347 - .../python3.6/site-packages/dns/rdatatype.py | 287 - .../site-packages/dns/rdtypes/ANY/AFSDB.py | 55 - .../site-packages/dns/rdtypes/ANY/AVC.py | 25 - .../site-packages/dns/rdtypes/ANY/CAA.py | 75 - .../site-packages/dns/rdtypes/ANY/CDNSKEY.py | 27 - .../site-packages/dns/rdtypes/ANY/CDS.py | 23 - .../site-packages/dns/rdtypes/ANY/CERT.py | 123 - .../site-packages/dns/rdtypes/ANY/CNAME.py | 27 - .../site-packages/dns/rdtypes/ANY/CSYNC.py | 126 - .../site-packages/dns/rdtypes/ANY/DLV.py | 23 - .../site-packages/dns/rdtypes/ANY/DNAME.py | 26 - .../site-packages/dns/rdtypes/ANY/DNSKEY.py | 27 - .../site-packages/dns/rdtypes/ANY/DS.py | 23 - .../site-packages/dns/rdtypes/ANY/EUI48.py | 29 - .../site-packages/dns/rdtypes/ANY/EUI64.py | 29 - .../site-packages/dns/rdtypes/ANY/GPOS.py | 162 - .../site-packages/dns/rdtypes/ANY/HINFO.py | 86 - .../site-packages/dns/rdtypes/ANY/HIP.py | 115 - .../site-packages/dns/rdtypes/ANY/ISDN.py | 99 - .../site-packages/dns/rdtypes/ANY/LOC.py | 327 - .../site-packages/dns/rdtypes/ANY/MX.py | 23 - .../site-packages/dns/rdtypes/ANY/NS.py | 23 - .../site-packages/dns/rdtypes/ANY/NSEC.py | 128 - .../site-packages/dns/rdtypes/ANY/NSEC3.py | 196 - .../dns/rdtypes/ANY/NSEC3PARAM.py | 90 - .../dns/rdtypes/ANY/OPENPGPKEY.py | 60 - .../site-packages/dns/rdtypes/ANY/PTR.py | 
23 - .../site-packages/dns/rdtypes/ANY/RP.py | 82 - .../site-packages/dns/rdtypes/ANY/RRSIG.py | 158 - .../site-packages/dns/rdtypes/ANY/RT.py | 23 - .../site-packages/dns/rdtypes/ANY/SOA.py | 116 - .../site-packages/dns/rdtypes/ANY/SPF.py | 25 - .../site-packages/dns/rdtypes/ANY/SSHFP.py | 79 - .../site-packages/dns/rdtypes/ANY/TLSA.py | 84 - .../site-packages/dns/rdtypes/ANY/TXT.py | 23 - .../site-packages/dns/rdtypes/ANY/URI.py | 82 - .../site-packages/dns/rdtypes/ANY/X25.py | 66 - .../site-packages/dns/rdtypes/ANY/__init__.py | 57 - .../site-packages/dns/rdtypes/CH/A.py | 70 - .../site-packages/dns/rdtypes/CH/__init__.py | 22 - .../site-packages/dns/rdtypes/IN/A.py | 54 - .../site-packages/dns/rdtypes/IN/AAAA.py | 55 - .../site-packages/dns/rdtypes/IN/APL.py | 165 - .../site-packages/dns/rdtypes/IN/DHCID.py | 61 - .../site-packages/dns/rdtypes/IN/IPSECKEY.py | 150 - .../site-packages/dns/rdtypes/IN/KX.py | 23 - .../site-packages/dns/rdtypes/IN/NAPTR.py | 127 - .../site-packages/dns/rdtypes/IN/NSAP.py | 60 - .../site-packages/dns/rdtypes/IN/NSAP_PTR.py | 23 - .../site-packages/dns/rdtypes/IN/PX.py | 89 - .../site-packages/dns/rdtypes/IN/SRV.py | 83 - .../site-packages/dns/rdtypes/IN/WKS.py | 107 - .../site-packages/dns/rdtypes/IN/__init__.py | 33 - .../site-packages/dns/rdtypes/__init__.py | 27 - .../site-packages/dns/rdtypes/dnskeybase.py | 138 - .../site-packages/dns/rdtypes/dsbase.py | 85 - .../site-packages/dns/rdtypes/euibase.py | 71 - .../site-packages/dns/rdtypes/mxbase.py | 103 - .../site-packages/dns/rdtypes/nsbase.py | 83 - .../site-packages/dns/rdtypes/txtbase.py | 97 - .../python3.6/site-packages/dns/renderer.py | 291 - .../python3.6/site-packages/dns/resolver.py | 1383 --- .../site-packages/dns/reversename.py | 96 - venv/lib/python3.6/site-packages/dns/rrset.py | 189 - venv/lib/python3.6/site-packages/dns/set.py | 261 - .../python3.6/site-packages/dns/tokenizer.py | 571 -- venv/lib/python3.6/site-packages/dns/tsig.py | 236 - .../site-packages/dns/tsigkeyring.py | 50 - venv/lib/python3.6/site-packages/dns/ttl.py | 70 - .../lib/python3.6/site-packages/dns/update.py | 279 - .../python3.6/site-packages/dns/version.py | 43 - .../python3.6/site-packages/dns/wiredata.py | 103 - venv/lib/python3.6/site-packages/dns/zone.py | 1127 --- .../DESCRIPTION.rst | 9 - .../dnspython-1.16.0.dist-info/INSTALLER | 1 - .../dnspython-1.16.0.dist-info/LICENSE.txt | 35 - .../dnspython-1.16.0.dist-info/METADATA | 44 - .../dnspython-1.16.0.dist-info/RECORD | 201 - .../dnspython-1.16.0.dist-info/WHEEL | 6 - .../dnspython-1.16.0.dist-info/metadata.json | 1 - .../dnspython-1.16.0.dist-info/top_level.txt | 1 - .../python3.6/site-packages/easy-install.pth | 1 - .../python3.6/site-packages/easy_install.py | 5 - .../site-packages/engineio/__init__.py | 25 - .../site-packages/engineio/async_aiohttp.py | 129 - .../site-packages/engineio/async_asgi.py | 223 - .../engineio/async_drivers/__init__.py | 0 .../engineio/async_drivers/aiohttp.py | 128 - .../engineio/async_drivers/asgi.py | 214 - .../engineio/async_drivers/eventlet.py | 30 - .../engineio/async_drivers/gevent.py | 63 - .../engineio/async_drivers/gevent_uwsgi.py | 156 - .../engineio/async_drivers/sanic.py | 144 - .../engineio/async_drivers/threading.py | 17 - .../engineio/async_drivers/tornado.py | 184 - .../site-packages/engineio/async_eventlet.py | 30 - .../site-packages/engineio/async_gevent.py | 63 - .../engineio/async_gevent_uwsgi.py | 155 - .../site-packages/engineio/async_sanic.py | 145 - .../site-packages/engineio/async_threading.py | 17 - 
.../site-packages/engineio/async_tornado.py | 154 - .../site-packages/engineio/asyncio_client.py | 566 -- .../site-packages/engineio/asyncio_server.py | 463 - .../site-packages/engineio/asyncio_socket.py | 235 - .../site-packages/engineio/client.py | 651 -- .../site-packages/engineio/exceptions.py | 22 - .../site-packages/engineio/middleware.py | 87 - .../site-packages/engineio/packet.py | 92 - .../site-packages/engineio/payload.py | 80 - .../site-packages/engineio/server.py | 659 -- .../site-packages/engineio/socket.py | 247 - .../site-packages/engineio/static_files.py | 55 - .../eventlet-0.25.0.dist-info/AUTHORS | 174 - .../eventlet-0.25.0.dist-info/INSTALLER | 1 - .../eventlet-0.25.0.dist-info/LICENSE | 23 - .../eventlet-0.25.0.dist-info/METADATA | 106 - .../eventlet-0.25.0.dist-info/RECORD | 187 - .../eventlet-0.25.0.dist-info/WHEEL | 6 - .../eventlet-0.25.0.dist-info/top_level.txt | 1 - .../site-packages/eventlet/__init__.py | 68 - .../site-packages/eventlet/backdoor.py | 136 - .../site-packages/eventlet/convenience.py | 189 - .../site-packages/eventlet/corolocal.py | 53 - .../python3.6/site-packages/eventlet/coros.py | 61 - .../site-packages/eventlet/dagpool.py | 602 -- .../site-packages/eventlet/db_pool.py | 461 - .../python3.6/site-packages/eventlet/debug.py | 174 - .../python3.6/site-packages/eventlet/event.py | 220 - .../eventlet/green/BaseHTTPServer.py | 16 - .../eventlet/green/CGIHTTPServer.py | 19 - .../site-packages/eventlet/green/MySQLdb.py | 37 - .../eventlet/green/OpenSSL/SSL.py | 124 - .../eventlet/green/OpenSSL/__init__.py | 4 - .../eventlet/green/OpenSSL/crypto.py | 1 - .../eventlet/green/OpenSSL/tsafe.py | 1 - .../eventlet/green/OpenSSL/version.py | 1 - .../site-packages/eventlet/green/Queue.py | 32 - .../eventlet/green/SimpleHTTPServer.py | 14 - .../eventlet/green/SocketServer.py | 15 - .../site-packages/eventlet/green/__init__.py | 1 - .../eventlet/green/_socket_nodns.py | 33 - .../site-packages/eventlet/green/asynchat.py | 11 - .../site-packages/eventlet/green/asyncore.py | 13 - .../site-packages/eventlet/green/builtin.py | 47 - .../site-packages/eventlet/green/ftplib.py | 13 - .../eventlet/green/http/__init__.py | 191 - .../eventlet/green/http/client.py | 1567 ---- .../eventlet/green/http/cookiejar.py | 2152 ----- .../eventlet/green/http/cookies.py | 691 -- .../eventlet/green/http/server.py | 1266 --- .../site-packages/eventlet/green/httplib.py | 22 - .../site-packages/eventlet/green/os.py | 111 - .../site-packages/eventlet/green/profile.py | 257 - .../site-packages/eventlet/green/select.py | 86 - .../site-packages/eventlet/green/selectors.py | 34 - .../site-packages/eventlet/green/socket.py | 63 - .../site-packages/eventlet/green/ssl.py | 475 - .../eventlet/green/subprocess.py | 143 - .../site-packages/eventlet/green/thread.py | 114 - .../site-packages/eventlet/green/threading.py | 134 - .../site-packages/eventlet/green/time.py | 6 - .../eventlet/green/urllib/__init__.py | 40 - .../eventlet/green/urllib/error.py | 4 - .../eventlet/green/urllib/parse.py | 3 - .../eventlet/green/urllib/request.py | 50 - .../eventlet/green/urllib/response.py | 3 - .../site-packages/eventlet/green/urllib2.py | 20 - .../site-packages/eventlet/green/zmq.py | 465 - .../eventlet/greenio/__init__.py | 8 - .../site-packages/eventlet/greenio/base.py | 506 - .../site-packages/eventlet/greenio/py2.py | 227 - .../site-packages/eventlet/greenio/py3.py | 214 - .../site-packages/eventlet/greenpool.py | 257 - .../site-packages/eventlet/greenthread.py | 302 - 
.../site-packages/eventlet/hubs/__init__.py | 190 - .../site-packages/eventlet/hubs/epolls.py | 31 - .../site-packages/eventlet/hubs/hub.py | 486 - .../site-packages/eventlet/hubs/kqueue.py | 112 - .../site-packages/eventlet/hubs/poll.py | 119 - .../site-packages/eventlet/hubs/pyevent.py | 187 - .../site-packages/eventlet/hubs/selects.py | 64 - .../site-packages/eventlet/hubs/timer.py | 106 - .../site-packages/eventlet/patcher.py | 485 - .../python3.6/site-packages/eventlet/pools.py | 186 - .../python3.6/site-packages/eventlet/queue.py | 492 - .../site-packages/eventlet/semaphore.py | 315 - .../eventlet/support/__init__.py | 78 - .../eventlet/support/greendns.py | 841 -- .../eventlet/support/greenlets.py | 8 - .../eventlet/support/psycopg2_patcher.py | 55 - .../site-packages/eventlet/support/pylib.py | 12 - .../eventlet/support/stacklesspypys.py | 12 - .../eventlet/support/stacklesss.py | 84 - .../site-packages/eventlet/timeout.py | 179 - .../python3.6/site-packages/eventlet/tpool.py | 340 - .../site-packages/eventlet/websocket.py | 831 -- .../python3.6/site-packages/eventlet/wsgi.py | 1012 -- .../site-packages/eventlet/zipkin/__init__.py | 0 .../eventlet/zipkin/_thrift/__init__.py | 0 .../zipkin/_thrift/zipkinCore/__init__.py | 1 - .../zipkin/_thrift/zipkinCore/constants.py | 14 - .../zipkin/_thrift/zipkinCore/ttypes.py | 452 - .../site-packages/eventlet/zipkin/api.py | 186 - .../site-packages/eventlet/zipkin/client.py | 56 - .../eventlet/zipkin/greenthread.py | 33 - .../site-packages/eventlet/zipkin/http.py | 61 - .../site-packages/eventlet/zipkin/log.py | 19 - .../site-packages/eventlet/zipkin/patcher.py | 41 - .../site-packages/eventlet/zipkin/wsgi.py | 78 - .../python3.6/site-packages/flask/__init__.py | 60 - .../python3.6/site-packages/flask/__main__.py | 15 - .../python3.6/site-packages/flask/_compat.py | 145 - venv/lib/python3.6/site-packages/flask/app.py | 2466 ----- .../site-packages/flask/blueprints.py | 569 -- venv/lib/python3.6/site-packages/flask/cli.py | 970 -- .../python3.6/site-packages/flask/config.py | 269 - venv/lib/python3.6/site-packages/flask/ctx.py | 475 - .../site-packages/flask/debughelpers.py | 183 - .../python3.6/site-packages/flask/globals.py | 62 - .../python3.6/site-packages/flask/helpers.py | 1153 --- .../site-packages/flask/json/__init__.py | 376 - .../python3.6/site-packages/flask/json/tag.py | 309 - .../python3.6/site-packages/flask/logging.py | 109 - .../python3.6/site-packages/flask/sessions.py | 388 - .../python3.6/site-packages/flask/signals.py | 65 - .../site-packages/flask/templating.py | 155 - .../python3.6/site-packages/flask/testing.py | 283 - .../python3.6/site-packages/flask/views.py | 163 - .../python3.6/site-packages/flask/wrappers.py | 137 - .../python3.6/site-packages/flask_classy.py | 327 - .../site-packages/flask_socketio/__init__.py | 922 -- .../site-packages/flask_socketio/cli.py | 68 - .../site-packages/flask_socketio/namespace.py | 47 - .../flask_socketio/test_client.py | 205 - .../python3.6/site-packages/git/__init__.py | 86 - venv/lib/python3.6/site-packages/git/cmd.py | 1109 --- .../lib/python3.6/site-packages/git/compat.py | 316 - .../lib/python3.6/site-packages/git/config.py | 710 -- venv/lib/python3.6/site-packages/git/db.py | 60 - venv/lib/python3.6/site-packages/git/diff.py | 516 -- venv/lib/python3.6/site-packages/git/exc.py | 132 - .../site-packages/git/index/__init__.py | 6 - .../python3.6/site-packages/git/index/base.py | 1239 --- .../python3.6/site-packages/git/index/fun.py | 384 - 
.../python3.6/site-packages/git/index/typ.py | 176 - .../python3.6/site-packages/git/index/util.py | 99 - .../site-packages/git/objects/__init__.py | 26 - .../site-packages/git/objects/base.py | 182 - .../site-packages/git/objects/blob.py | 33 - .../site-packages/git/objects/commit.py | 533 -- .../site-packages/git/objects/fun.py | 207 - .../git/objects/submodule/__init__.py | 2 - .../git/objects/submodule/base.py | 1204 --- .../git/objects/submodule/root.py | 350 - .../git/objects/submodule/util.py | 94 - .../site-packages/git/objects/tag.py | 75 - .../site-packages/git/objects/tree.py | 341 - .../site-packages/git/objects/util.py | 363 - .../site-packages/git/refs/__init__.py | 10 - .../python3.6/site-packages/git/refs/head.py | 247 - .../python3.6/site-packages/git/refs/log.py | 317 - .../site-packages/git/refs/reference.py | 126 - .../site-packages/git/refs/remote.py | 52 - .../site-packages/git/refs/symbolic.py | 679 -- .../python3.6/site-packages/git/refs/tag.py | 93 - .../lib/python3.6/site-packages/git/remote.py | 876 -- .../site-packages/git/repo/__init__.py | 4 - .../python3.6/site-packages/git/repo/base.py | 1075 --- .../python3.6/site-packages/git/repo/fun.py | 344 - .../site-packages/git/test/__init__.py | 5 - .../site-packages/git/test/fixtures/blame | 131 - .../git/test/fixtures/blame_binary | Bin 14807 -> 0 bytes .../git/test/fixtures/blame_complex_revision | 177 - .../git/test/fixtures/blame_incremental | 30 - .../fixtures/blame_incremental_2.11.1_plus | 33 - .../git/test/fixtures/cat_file.py | 6 - .../git/test/fixtures/cat_file_blob | 1 - .../git/test/fixtures/cat_file_blob_nl | 1 - .../git/test/fixtures/cat_file_blob_size | 1 - .../git/test/fixtures/commit_invalid_data | 6 - .../git/test/fixtures/commit_with_gpgsig | 30 - .../site-packages/git/test/fixtures/diff_2 | 54 - .../site-packages/git/test/fixtures/diff_2f | 19 - .../diff_abbrev-40_full-index_M_raw_no-color | 1 - .../git/test/fixtures/diff_change_in_type | 10 - .../git/test/fixtures/diff_change_in_type_raw | 1 - .../site-packages/git/test/fixtures/diff_f | 15 - .../git/test/fixtures/diff_file_with_spaces | 7 - .../site-packages/git/test/fixtures/diff_i | 201 - .../git/test/fixtures/diff_index_patch | 100 - .../git/test/fixtures/diff_index_raw | 1 - .../git/test/fixtures/diff_initial | 8 - .../git/test/fixtures/diff_mode_only | 1152 --- .../git/test/fixtures/diff_new_mode | 14 - .../git/test/fixtures/diff_numstat | 2 - .../site-packages/git/test/fixtures/diff_p | 610 -- .../git/test/fixtures/diff_patch_binary | 3 - .../git/test/fixtures/diff_patch_unsafe_paths | 89 - .../git/test/fixtures/diff_raw_binary | 1 - .../git/test/fixtures/diff_rename | 12 - .../git/test/fixtures/diff_rename_raw | 1 - .../git/test/fixtures/diff_tree_numstat_root | 3 - .../fixtures/for_each_ref_with_path_component | Bin 84 -> 0 bytes .../git/test/fixtures/git_config | 46 - .../git/test/fixtures/git_config-inc.cfg | 5 - .../git/test/fixtures/git_config_global | 25 - .../git/test/fixtures/git_config_multiple | 7 - .../test/fixtures/git_config_with_comments | 183 - .../test/fixtures/git_config_with_empty_value | 4 - .../site-packages/git/test/fixtures/git_file | 1 - .../site-packages/git/test/fixtures/index | Bin 163616 -> 0 bytes .../git/test/fixtures/index_merge | Bin 9192 -> 0 bytes .../git/test/fixtures/issue-301_stderr | 5002 ---------- .../site-packages/git/test/fixtures/ls_tree_a | 7 - .../site-packages/git/test/fixtures/ls_tree_b | 2 - .../git/test/fixtures/ls_tree_commit | 3 - .../git/test/fixtures/ls_tree_empty | 0 
.../git/test/fixtures/reflog_HEAD | 460 - .../git/test/fixtures/reflog_invalid_date | 2 - .../git/test/fixtures/reflog_invalid_email | 2 - .../git/test/fixtures/reflog_invalid_newsha | 2 - .../git/test/fixtures/reflog_invalid_oldsha | 2 - .../git/test/fixtures/reflog_invalid_sep | 2 - .../git/test/fixtures/reflog_master | 124 - .../site-packages/git/test/fixtures/rev_list | 3 - .../git/test/fixtures/rev_list_bisect_all | 51 - .../git/test/fixtures/rev_list_commit_diffs | 8 - .../test/fixtures/rev_list_commit_idabbrev | 8 - .../git/test/fixtures/rev_list_commit_stats | 7 - .../git/test/fixtures/rev_list_count | 655 -- .../git/test/fixtures/rev_list_delta_a | 8 - .../git/test/fixtures/rev_list_delta_b | 11 - .../git/test/fixtures/rev_list_single | 7 - .../site-packages/git/test/fixtures/rev_parse | 1 - .../git/test/fixtures/show_empty_commit | 6 - .../uncommon_branch_prefix_FETCH_HEAD | 6 - .../fixtures/uncommon_branch_prefix_stderr | 6 - .../site-packages/git/test/lib/__init__.py | 13 - .../site-packages/git/test/lib/asserts.py | 71 - .../site-packages/git/test/lib/helper.py | 381 - .../git/test/performance/__init__.py | 0 .../site-packages/git/test/performance/lib.py | 94 - .../git/test/performance/test_commit.py | 109 - .../git/test/performance/test_odb.py | 74 - .../git/test/performance/test_streams.py | 149 - .../site-packages/git/test/test_actor.py | 37 - .../site-packages/git/test/test_base.py | 150 - .../site-packages/git/test/test_blob.py | 25 - .../site-packages/git/test/test_commit.py | 402 - .../site-packages/git/test/test_config.py | 374 - .../site-packages/git/test/test_db.py | 27 - .../site-packages/git/test/test_diff.py | 297 - .../site-packages/git/test/test_docs.py | 493 - .../site-packages/git/test/test_exc.py | 169 - .../site-packages/git/test/test_fun.py | 293 - .../site-packages/git/test/test_git.py | 296 - .../site-packages/git/test/test_index.py | 929 -- .../site-packages/git/test/test_reflog.py | 107 - .../site-packages/git/test/test_refs.py | 568 -- .../site-packages/git/test/test_remote.py | 648 -- .../site-packages/git/test/test_repo.py | 1047 --- .../site-packages/git/test/test_stats.py | 31 - .../site-packages/git/test/test_submodule.py | 938 -- .../site-packages/git/test/test_tree.py | 108 - .../site-packages/git/test/test_util.py | 282 - venv/lib/python3.6/site-packages/git/util.py | 956 -- .../python3.6/site-packages/gitdb/__init__.py | 39 - .../lib/python3.6/site-packages/gitdb/base.py | 315 - .../python3.6/site-packages/gitdb/const.py | 4 - .../site-packages/gitdb/db/__init__.py | 11 - .../python3.6/site-packages/gitdb/db/base.py | 273 - .../python3.6/site-packages/gitdb/db/git.py | 85 - .../python3.6/site-packages/gitdb/db/loose.py | 262 - .../python3.6/site-packages/gitdb/db/mem.py | 112 - .../python3.6/site-packages/gitdb/db/pack.py | 207 - .../python3.6/site-packages/gitdb/db/ref.py | 82 - venv/lib/python3.6/site-packages/gitdb/exc.py | 46 - venv/lib/python3.6/site-packages/gitdb/fun.py | 781 -- .../lib/python3.6/site-packages/gitdb/pack.py | 1033 --- .../python3.6/site-packages/gitdb/stream.py | 732 -- .../site-packages/gitdb/test/__init__.py | 4 - .../python3.6/site-packages/gitdb/test/lib.py | 208 - .../site-packages/gitdb/test/test_base.py | 105 - .../site-packages/gitdb/test/test_example.py | 43 - .../site-packages/gitdb/test/test_pack.py | 255 - .../site-packages/gitdb/test/test_stream.py | 164 - .../site-packages/gitdb/test/test_util.py | 100 - venv/lib/python3.6/site-packages/gitdb/typ.py | 10 - .../lib/python3.6/site-packages/gitdb/util.py | 
398 - .../site-packages/gitdb/utils/__init__.py | 0 .../site-packages/gitdb/utils/compat.py | 43 - .../site-packages/gitdb/utils/encoding.py | 31 - .../gitdb2-2.0.5.dist-info/INSTALLER | 1 - .../gitdb2-2.0.5.dist-info/METADATA | 30 - .../gitdb2-2.0.5.dist-info/RECORD | 57 - .../gitdb2-2.0.5.dist-info/WHEEL | 6 - .../gitdb2-2.0.5.dist-info/top_level.txt | 1 - .../greenlet-0.4.15-py3.6.egg-info/PKG-INFO | 88 - .../SOURCES.txt | 70 - .../dependency_links.txt | 1 - .../installed-files.txt | 7 - .../not-zip-safe | 1 - .../top_level.txt | 1 - .../greenlet.cpython-36m-darwin.so | Bin 30700 -> 0 bytes .../httplib2-0.13.1.dist-info/INSTALLER | 1 - .../httplib2-0.13.1.dist-info/LICENSE | 23 - .../httplib2-0.13.1.dist-info/METADATA | 70 - .../httplib2-0.13.1.dist-info/RECORD | 15 - .../httplib2-0.13.1.dist-info/WHEEL | 5 - .../httplib2-0.13.1.dist-info/top_level.txt | 1 - .../site-packages/httplib2/__init__.py | 2039 ---- .../site-packages/httplib2/cacerts.txt | 2197 ----- .../python3.6/site-packages/httplib2/certs.py | 42 - .../site-packages/httplib2/iri2uri.py | 124 - .../python3.6/site-packages/httplib2/socks.py | 516 -- .../idna-2.8.dist-info/INSTALLER | 1 - .../idna-2.8.dist-info/LICENSE.rst | 80 - .../site-packages/idna-2.8.dist-info/METADATA | 239 - .../site-packages/idna-2.8.dist-info/RECORD | 22 - .../site-packages/idna-2.8.dist-info/WHEEL | 6 - .../idna-2.8.dist-info/top_level.txt | 1 - .../python3.6/site-packages/idna/__init__.py | 2 - .../lib/python3.6/site-packages/idna/codec.py | 118 - .../python3.6/site-packages/idna/compat.py | 12 - venv/lib/python3.6/site-packages/idna/core.py | 396 - .../python3.6/site-packages/idna/idnadata.py | 1979 ---- .../python3.6/site-packages/idna/intranges.py | 53 - .../site-packages/idna/package_data.py | 2 - .../python3.6/site-packages/idna/uts46data.py | 8205 ----------------- .../itsdangerous-1.1.0.dist-info/INSTALLER | 1 - .../itsdangerous-1.1.0.dist-info/LICENSE.rst | 47 - .../itsdangerous-1.1.0.dist-info/METADATA | 98 - .../itsdangerous-1.1.0.dist-info/RECORD | 26 - .../itsdangerous-1.1.0.dist-info/WHEEL | 6 - .../top_level.txt | 1 - .../site-packages/itsdangerous/__init__.py | 22 - .../site-packages/itsdangerous/_compat.py | 46 - .../site-packages/itsdangerous/_json.py | 18 - .../site-packages/itsdangerous/encoding.py | 49 - .../site-packages/itsdangerous/exc.py | 98 - .../site-packages/itsdangerous/jws.py | 218 - .../site-packages/itsdangerous/serializer.py | 233 - .../site-packages/itsdangerous/signer.py | 179 - .../site-packages/itsdangerous/timed.py | 147 - .../site-packages/itsdangerous/url_safe.py | 65 - .../site-packages/jinja2/__init__.py | 83 - .../python3.6/site-packages/jinja2/_compat.py | 99 - .../site-packages/jinja2/_identifier.py | 2 - .../site-packages/jinja2/asyncfilters.py | 146 - .../site-packages/jinja2/asyncsupport.py | 256 - .../python3.6/site-packages/jinja2/bccache.py | 362 - .../site-packages/jinja2/compiler.py | 1721 ---- .../site-packages/jinja2/constants.py | 32 - .../python3.6/site-packages/jinja2/debug.py | 372 - .../site-packages/jinja2/defaults.py | 56 - .../site-packages/jinja2/environment.py | 1276 --- .../site-packages/jinja2/exceptions.py | 146 - .../lib/python3.6/site-packages/jinja2/ext.py | 627 -- .../python3.6/site-packages/jinja2/filters.py | 1190 --- .../site-packages/jinja2/idtracking.py | 286 - .../python3.6/site-packages/jinja2/lexer.py | 739 -- .../python3.6/site-packages/jinja2/loaders.py | 481 - .../python3.6/site-packages/jinja2/meta.py | 106 - .../site-packages/jinja2/nativetypes.py | 220 - 
.../python3.6/site-packages/jinja2/nodes.py | 999 -- .../site-packages/jinja2/optimizer.py | 49 - .../python3.6/site-packages/jinja2/parser.py | 903 -- .../python3.6/site-packages/jinja2/runtime.py | 813 -- .../python3.6/site-packages/jinja2/sandbox.py | 486 - .../python3.6/site-packages/jinja2/tests.py | 175 - .../python3.6/site-packages/jinja2/utils.py | 647 -- .../python3.6/site-packages/jinja2/visitor.py | 87 - .../site-packages/markupsafe/__init__.py | 327 - .../site-packages/markupsafe/_compat.py | 33 - .../site-packages/markupsafe/_constants.py | 264 - .../site-packages/markupsafe/_native.py | 69 - .../site-packages/markupsafe/_speedups.c | 423 - .../_speedups.cpython-36m-darwin.so | Bin 35080 -> 0 bytes venv/lib/python3.6/site-packages/mimeparse.py | 191 - .../monotonic-1.5.dist-info/DESCRIPTION.rst | 25 - .../monotonic-1.5.dist-info/INSTALLER | 1 - .../monotonic-1.5.dist-info/METADATA | 40 - .../monotonic-1.5.dist-info/RECORD | 9 - .../monotonic-1.5.dist-info/WHEEL | 6 - .../monotonic-1.5.dist-info/metadata.json | 1 - .../monotonic-1.5.dist-info/top_level.txt | 1 - venv/lib/python3.6/site-packages/monotonic.py | 169 - .../pip-10.0.1-py3.6.egg/EGG-INFO/PKG-INFO | 69 - .../pip-10.0.1-py3.6.egg/EGG-INFO/SOURCES.txt | 347 - .../EGG-INFO/dependency_links.txt | 1 - .../EGG-INFO/entry_points.txt | 5 - .../EGG-INFO/not-zip-safe | 1 - .../EGG-INFO/requires.txt | 8 - .../EGG-INFO/top_level.txt | 1 - .../pip-10.0.1-py3.6.egg/pip/__init__.py | 1 - .../pip-10.0.1-py3.6.egg/pip/__main__.py | 19 - .../pip/_internal/__init__.py | 246 - .../pip/_internal/basecommand.py | 373 - .../pip/_internal/baseparser.py | 240 - .../pip/_internal/build_env.py | 92 - .../pip/_internal/cache.py | 202 - .../pip/_internal/cmdoptions.py | 609 -- .../pip/_internal/commands/__init__.py | 79 - .../pip/_internal/commands/check.py | 42 - .../pip/_internal/commands/completion.py | 94 - .../pip/_internal/commands/configuration.py | 227 - .../pip/_internal/commands/download.py | 233 - .../pip/_internal/commands/freeze.py | 96 - .../pip/_internal/commands/hash.py | 57 - .../pip/_internal/commands/help.py | 36 - .../pip/_internal/commands/install.py | 502 - .../pip/_internal/commands/list.py | 343 - .../pip/_internal/commands/search.py | 135 - .../pip/_internal/commands/show.py | 164 - .../pip/_internal/commands/uninstall.py | 71 - .../pip/_internal/commands/wheel.py | 179 - .../pip/_internal/compat.py | 235 - .../pip/_internal/configuration.py | 378 - .../pip/_internal/download.py | 922 -- .../pip/_internal/exceptions.py | 249 - .../pip/_internal/index.py | 1117 --- .../pip/_internal/locations.py | 194 - .../pip/_internal/models/__init__.py | 4 - .../pip/_internal/models/index.py | 15 - .../pip/_internal/operations/__init__.py | 0 .../pip/_internal/operations/check.py | 106 - .../pip/_internal/operations/freeze.py | 252 - .../pip/_internal/operations/prepare.py | 380 - .../pip/_internal/pep425tags.py | 317 - .../pip/_internal/req/__init__.py | 69 - .../pip/_internal/req/req_file.py | 338 - .../pip/_internal/req/req_install.py | 1115 --- .../pip/_internal/req/req_set.py | 164 - .../pip/_internal/req/req_uninstall.py | 455 - .../pip/_internal/resolve.py | 354 - .../pip/_internal/status_codes.py | 8 - .../pip/_internal/utils/__init__.py | 0 .../pip/_internal/utils/appdirs.py | 258 - .../pip/_internal/utils/deprecation.py | 77 - .../pip/_internal/utils/encoding.py | 33 - .../pip/_internal/utils/filesystem.py | 28 - .../pip/_internal/utils/glibc.py | 84 - .../pip/_internal/utils/hashes.py | 94 - 
.../pip/_internal/utils/logging.py | 132 - .../pip/_internal/utils/misc.py | 851 -- .../pip/_internal/utils/outdated.py | 163 - .../pip/_internal/utils/packaging.py | 70 - .../pip/_internal/utils/setuptools_build.py | 8 - .../pip/_internal/utils/temp_dir.py | 82 - .../pip/_internal/utils/typing.py | 29 - .../pip/_internal/utils/ui.py | 421 - .../pip/_internal/vcs/__init__.py | 471 - .../pip/_internal/vcs/bazaar.py | 113 - .../pip/_internal/vcs/git.py | 311 - .../pip/_internal/vcs/mercurial.py | 105 - .../pip/_internal/vcs/subversion.py | 271 - .../pip/_internal/wheel.py | 817 -- .../pip/_vendor/__init__.py | 109 - .../pip/_vendor/appdirs.py | 604 -- .../pip/_vendor/cachecontrol/__init__.py | 11 - .../pip/_vendor/cachecontrol/_cmd.py | 60 - .../pip/_vendor/cachecontrol/adapter.py | 134 - .../pip/_vendor/cachecontrol/cache.py | 39 - .../_vendor/cachecontrol/caches/__init__.py | 2 - .../_vendor/cachecontrol/caches/file_cache.py | 133 - .../cachecontrol/caches/redis_cache.py | 43 - .../pip/_vendor/cachecontrol/compat.py | 29 - .../pip/_vendor/cachecontrol/controller.py | 373 - .../pip/_vendor/cachecontrol/filewrapper.py | 78 - .../pip/_vendor/cachecontrol/heuristics.py | 138 - .../pip/_vendor/cachecontrol/serialize.py | 194 - .../pip/_vendor/cachecontrol/wrapper.py | 27 - .../pip/_vendor/certifi/__init__.py | 3 - .../pip/_vendor/certifi/__main__.py | 2 - .../pip/_vendor/certifi/cacert.pem | 4433 --------- .../pip/_vendor/certifi/core.py | 37 - .../pip/_vendor/chardet/__init__.py | 39 - .../pip/_vendor/chardet/big5freq.py | 386 - .../pip/_vendor/chardet/big5prober.py | 47 - .../pip/_vendor/chardet/chardistribution.py | 233 - .../pip/_vendor/chardet/charsetgroupprober.py | 106 - .../pip/_vendor/chardet/charsetprober.py | 145 - .../pip/_vendor/chardet/cli/__init__.py | 1 - .../pip/_vendor/chardet/cli/chardetect.py | 85 - .../pip/_vendor/chardet/codingstatemachine.py | 88 - .../pip/_vendor/chardet/compat.py | 34 - .../pip/_vendor/chardet/cp949prober.py | 49 - .../pip/_vendor/chardet/enums.py | 76 - .../pip/_vendor/chardet/escprober.py | 101 - .../pip/_vendor/chardet/escsm.py | 246 - .../pip/_vendor/chardet/eucjpprober.py | 92 - .../pip/_vendor/chardet/euckrfreq.py | 195 - .../pip/_vendor/chardet/euckrprober.py | 47 - .../pip/_vendor/chardet/euctwfreq.py | 387 - .../pip/_vendor/chardet/euctwprober.py | 46 - .../pip/_vendor/chardet/gb2312freq.py | 283 - .../pip/_vendor/chardet/gb2312prober.py | 46 - .../pip/_vendor/chardet/hebrewprober.py | 292 - .../pip/_vendor/chardet/jisfreq.py | 325 - .../pip/_vendor/chardet/jpcntx.py | 233 - .../pip/_vendor/chardet/langbulgarianmodel.py | 228 - .../pip/_vendor/chardet/langcyrillicmodel.py | 333 - .../pip/_vendor/chardet/langgreekmodel.py | 225 - .../pip/_vendor/chardet/langhebrewmodel.py | 200 - .../pip/_vendor/chardet/langhungarianmodel.py | 225 - .../pip/_vendor/chardet/langthaimodel.py | 199 - .../pip/_vendor/chardet/langturkishmodel.py | 193 - .../pip/_vendor/chardet/latin1prober.py | 145 - .../pip/_vendor/chardet/mbcharsetprober.py | 91 - .../pip/_vendor/chardet/mbcsgroupprober.py | 54 - .../pip/_vendor/chardet/mbcssm.py | 572 -- .../pip/_vendor/chardet/sbcharsetprober.py | 132 - .../pip/_vendor/chardet/sbcsgroupprober.py | 73 - .../pip/_vendor/chardet/sjisprober.py | 92 - .../pip/_vendor/chardet/universaldetector.py | 286 - .../pip/_vendor/chardet/utf8prober.py | 82 - .../pip/_vendor/chardet/version.py | 9 - .../pip/_vendor/colorama/__init__.py | 7 - .../pip/_vendor/colorama/ansi.py | 102 - .../pip/_vendor/colorama/ansitowin32.py | 236 - 
.../pip/_vendor/colorama/initialise.py | 82 - .../pip/_vendor/colorama/win32.py | 156 - .../pip/_vendor/colorama/winterm.py | 162 - .../pip/_vendor/distlib/__init__.py | 23 - .../pip/_vendor/distlib/_backport/__init__.py | 6 - .../pip/_vendor/distlib/_backport/misc.py | 41 - .../pip/_vendor/distlib/_backport/shutil.py | 761 -- .../_vendor/distlib/_backport/sysconfig.cfg | 84 - .../_vendor/distlib/_backport/sysconfig.py | 788 -- .../pip/_vendor/distlib/_backport/tarfile.py | 2607 ------ .../pip/_vendor/distlib/compat.py | 1120 --- .../pip/_vendor/distlib/database.py | 1336 --- .../pip/_vendor/distlib/index.py | 516 -- .../pip/_vendor/distlib/locators.py | 1292 --- .../pip/_vendor/distlib/manifest.py | 393 - .../pip/_vendor/distlib/markers.py | 131 - .../pip/_vendor/distlib/metadata.py | 1091 --- .../pip/_vendor/distlib/resources.py | 355 - .../pip/_vendor/distlib/scripts.py | 415 - .../pip/_vendor/distlib/t32.exe | Bin 92672 -> 0 bytes .../pip/_vendor/distlib/t64.exe | Bin 102400 -> 0 bytes .../pip/_vendor/distlib/util.py | 1755 ---- .../pip/_vendor/distlib/version.py | 736 -- .../pip/_vendor/distlib/w32.exe | Bin 89088 -> 0 bytes .../pip/_vendor/distlib/w64.exe | Bin 99328 -> 0 bytes .../pip/_vendor/distlib/wheel.py | 984 -- .../pip/_vendor/distro.py | 1104 --- .../pip/_vendor/html5lib/__init__.py | 35 - .../pip/_vendor/html5lib/_ihatexml.py | 288 - .../pip/_vendor/html5lib/_inputstream.py | 923 -- .../pip/_vendor/html5lib/_tokenizer.py | 1721 ---- .../pip/_vendor/html5lib/_trie/__init__.py | 14 - .../pip/_vendor/html5lib/_trie/_base.py | 37 - .../pip/_vendor/html5lib/_trie/datrie.py | 44 - .../pip/_vendor/html5lib/_trie/py.py | 67 - .../pip/_vendor/html5lib/_utils.py | 124 - .../pip/_vendor/html5lib/constants.py | 2947 ------ .../pip/_vendor/html5lib/filters/__init__.py | 0 .../filters/alphabeticalattributes.py | 29 - .../pip/_vendor/html5lib/filters/base.py | 12 - .../html5lib/filters/inject_meta_charset.py | 73 - .../pip/_vendor/html5lib/filters/lint.py | 93 - .../_vendor/html5lib/filters/optionaltags.py | 207 - .../pip/_vendor/html5lib/filters/sanitizer.py | 896 -- .../_vendor/html5lib/filters/whitespace.py | 38 - .../pip/_vendor/html5lib/html5parser.py | 2791 ------ .../pip/_vendor/html5lib/serializer.py | 409 - .../_vendor/html5lib/treeadapters/__init__.py | 30 - .../_vendor/html5lib/treeadapters/genshi.py | 54 - .../pip/_vendor/html5lib/treeadapters/sax.py | 50 - .../_vendor/html5lib/treebuilders/__init__.py | 88 - .../pip/_vendor/html5lib/treebuilders/base.py | 417 - .../pip/_vendor/html5lib/treebuilders/dom.py | 236 - .../_vendor/html5lib/treebuilders/etree.py | 340 - .../html5lib/treebuilders/etree_lxml.py | 366 - .../_vendor/html5lib/treewalkers/__init__.py | 154 - .../pip/_vendor/html5lib/treewalkers/base.py | 252 - .../pip/_vendor/html5lib/treewalkers/dom.py | 43 - .../pip/_vendor/html5lib/treewalkers/etree.py | 130 - .../html5lib/treewalkers/etree_lxml.py | 213 - .../_vendor/html5lib/treewalkers/genshi.py | 69 - .../pip/_vendor/idna/__init__.py | 2 - .../pip/_vendor/idna/codec.py | 118 - .../pip/_vendor/idna/compat.py | 12 - .../pip/_vendor/idna/core.py | 387 - .../pip/_vendor/idna/idnadata.py | 1585 ---- .../pip/_vendor/idna/intranges.py | 53 - .../pip/_vendor/idna/package_data.py | 2 - .../pip/_vendor/idna/uts46data.py | 7634 --------------- .../pip/_vendor/ipaddress.py | 2419 ----- .../pip/_vendor/lockfile/__init__.py | 347 - .../pip/_vendor/lockfile/linklockfile.py | 73 - .../pip/_vendor/lockfile/mkdirlockfile.py | 84 - .../pip/_vendor/lockfile/pidlockfile.py | 190 - 
.../pip/_vendor/lockfile/sqlitelockfile.py | 156 - .../pip/_vendor/lockfile/symlinklockfile.py | 70 - .../pip/_vendor/msgpack/__init__.py | 66 - .../pip/_vendor/msgpack/_version.py | 1 - .../pip/_vendor/msgpack/exceptions.py | 41 - .../pip/_vendor/msgpack/fallback.py | 977 -- .../pip/_vendor/packaging/__about__.py | 21 - .../pip/_vendor/packaging/__init__.py | 14 - .../pip/_vendor/packaging/_compat.py | 30 - .../pip/_vendor/packaging/_structures.py | 70 - .../pip/_vendor/packaging/markers.py | 301 - .../pip/_vendor/packaging/requirements.py | 130 - .../pip/_vendor/packaging/specifiers.py | 774 -- .../pip/_vendor/packaging/utils.py | 63 - .../pip/_vendor/packaging/version.py | 441 - .../pip/_vendor/pkg_resources/__init__.py | 3125 ------- .../pip/_vendor/pkg_resources/py31compat.py | 22 - .../pip/_vendor/progress/__init__.py | 127 - .../pip/_vendor/progress/bar.py | 88 - .../pip/_vendor/progress/counter.py | 48 - .../pip/_vendor/progress/helpers.py | 91 - .../pip/_vendor/progress/spinner.py | 44 - .../pip/_vendor/pyparsing.py | 5720 ------------ .../pip/_vendor/pytoml/__init__.py | 3 - .../pip/_vendor/pytoml/core.py | 13 - .../pip/_vendor/pytoml/parser.py | 374 - .../pip/_vendor/pytoml/writer.py | 127 - .../pip/_vendor/requests/__init__.py | 123 - .../pip/_vendor/requests/__version__.py | 14 - .../pip/_vendor/requests/_internal_utils.py | 42 - .../pip/_vendor/requests/adapters.py | 525 -- .../pip/_vendor/requests/api.py | 152 - .../pip/_vendor/requests/auth.py | 293 - .../pip/_vendor/requests/certs.py | 18 - .../pip/_vendor/requests/compat.py | 73 - .../pip/_vendor/requests/cookies.py | 542 -- .../pip/_vendor/requests/exceptions.py | 122 - .../pip/_vendor/requests/help.py | 120 - .../pip/_vendor/requests/hooks.py | 34 - .../pip/_vendor/requests/models.py | 948 -- .../pip/_vendor/requests/packages.py | 16 - .../pip/_vendor/requests/sessions.py | 737 -- .../pip/_vendor/requests/status_codes.py | 91 - .../pip/_vendor/requests/structures.py | 105 - .../pip/_vendor/requests/utils.py | 904 -- .../pip/_vendor/retrying.py | 267 - .../pip-10.0.1-py3.6.egg/pip/_vendor/six.py | 891 -- .../pip/_vendor/urllib3/__init__.py | 97 - .../pip/_vendor/urllib3/_collections.py | 319 - .../pip/_vendor/urllib3/connection.py | 373 - .../pip/_vendor/urllib3/connectionpool.py | 905 -- .../pip/_vendor/urllib3/contrib/__init__.py | 0 .../contrib/_securetransport/__init__.py | 0 .../contrib/_securetransport/bindings.py | 593 -- .../contrib/_securetransport/low_level.py | 343 - .../pip/_vendor/urllib3/contrib/appengine.py | 296 - .../pip/_vendor/urllib3/contrib/ntlmpool.py | 112 - .../pip/_vendor/urllib3/contrib/pyopenssl.py | 455 - .../urllib3/contrib/securetransport.py | 810 -- .../pip/_vendor/urllib3/contrib/socks.py | 188 - .../pip/_vendor/urllib3/exceptions.py | 246 - .../pip/_vendor/urllib3/fields.py | 178 - .../pip/_vendor/urllib3/filepost.py | 94 - .../pip/_vendor/urllib3/packages/__init__.py | 5 - .../urllib3/packages/backports/__init__.py | 0 .../urllib3/packages/backports/makefile.py | 53 - .../_vendor/urllib3/packages/ordered_dict.py | 259 - .../pip/_vendor/urllib3/packages/six.py | 868 -- .../packages/ssl_match_hostname/__init__.py | 19 - .../ssl_match_hostname/_implementation.py | 157 - .../pip/_vendor/urllib3/poolmanager.py | 440 - .../pip/_vendor/urllib3/request.py | 148 - .../pip/_vendor/urllib3/response.py | 626 -- .../pip/_vendor/urllib3/util/__init__.py | 54 - .../pip/_vendor/urllib3/util/connection.py | 130 - .../pip/_vendor/urllib3/util/request.py | 118 - .../pip/_vendor/urllib3/util/response.py | 
81 - .../pip/_vendor/urllib3/util/retry.py | 401 - .../pip/_vendor/urllib3/util/selectors.py | 581 -- .../pip/_vendor/urllib3/util/ssl_.py | 341 - .../pip/_vendor/urllib3/util/timeout.py | 242 - .../pip/_vendor/urllib3/util/url.py | 230 - .../pip/_vendor/urllib3/util/wait.py | 40 - .../pip/_vendor/webencodings/__init__.py | 342 - .../pip/_vendor/webencodings/labels.py | 231 - .../pip/_vendor/webencodings/mklabels.py | 59 - .../pip/_vendor/webencodings/tests.py | 153 - .../_vendor/webencodings/x_user_defined.py | 325 - .../site-packages/pkg_resources/__init__.py | 3295 ------- .../pkg_resources/_vendor/__init__.py | 0 .../pkg_resources/_vendor/appdirs.py | 608 -- .../_vendor/packaging/__about__.py | 21 - .../_vendor/packaging/__init__.py | 14 - .../_vendor/packaging/_compat.py | 30 - .../_vendor/packaging/_structures.py | 68 - .../_vendor/packaging/markers.py | 301 - .../_vendor/packaging/requirements.py | 127 - .../_vendor/packaging/specifiers.py | 774 -- .../pkg_resources/_vendor/packaging/utils.py | 14 - .../_vendor/packaging/version.py | 393 - .../pkg_resources/_vendor/pyparsing.py | 5742 ------------ .../pkg_resources/_vendor/six.py | 868 -- .../pkg_resources/extern/__init__.py | 73 - .../site-packages/pkg_resources/py31compat.py | 23 - .../python_dateutil-2.8.0.dist-info/INSTALLER | 1 - .../python_dateutil-2.8.0.dist-info/LICENSE | 54 - .../python_dateutil-2.8.0.dist-info/METADATA | 195 - .../python_dateutil-2.8.0.dist-info/RECORD | 44 - .../python_dateutil-2.8.0.dist-info/WHEEL | 6 - .../top_level.txt | 1 - .../python_dateutil-2.8.0.dist-info/zip-safe | 1 - .../python_engineio-3.9.3.dist-info/INSTALLER | 1 - .../python_engineio-3.9.3.dist-info/LICENSE | 20 - .../python_engineio-3.9.3.dist-info/METADATA | 50 - .../python_engineio-3.9.3.dist-info/RECORD | 122 - .../python_engineio-3.9.3.dist-info/WHEEL | 6 - .../top_level.txt | 1 - .../DESCRIPTION.rst | 65 - .../INSTALLER | 1 - .../python_mimeparse-1.6.0.dist-info/METADATA | 85 - .../python_mimeparse-1.6.0.dist-info/RECORD | 9 - .../python_mimeparse-1.6.0.dist-info/WHEEL | 6 - .../metadata.json | 1 - .../top_level.txt | 1 - .../python_socketio-4.3.1.dist-info/INSTALLER | 1 - .../python_socketio-4.3.1.dist-info/LICENSE | 20 - .../python_socketio-4.3.1.dist-info/METADATA | 51 - .../python_socketio-4.3.1.dist-info/RECORD | 50 - .../python_socketio-4.3.1.dist-info/WHEEL | 6 - .../top_level.txt | 1 - .../requests-2.22.0.dist-info/INSTALLER | 1 - .../requests-2.22.0.dist-info/LICENSE | 13 - .../requests-2.22.0.dist-info/METADATA | 145 - .../requests-2.22.0.dist-info/RECORD | 42 - .../requests-2.22.0.dist-info/WHEEL | 6 - .../requests-2.22.0.dist-info/top_level.txt | 1 - .../site-packages/requests/__init__.py | 131 - .../site-packages/requests/__version__.py | 14 - .../site-packages/requests/_internal_utils.py | 42 - .../site-packages/requests/adapters.py | 533 -- .../python3.6/site-packages/requests/api.py | 158 - .../python3.6/site-packages/requests/auth.py | 305 - .../python3.6/site-packages/requests/certs.py | 18 - .../site-packages/requests/compat.py | 70 - .../site-packages/requests/cookies.py | 549 -- .../site-packages/requests/exceptions.py | 126 - .../python3.6/site-packages/requests/help.py | 119 - .../python3.6/site-packages/requests/hooks.py | 34 - .../site-packages/requests/models.py | 953 -- .../site-packages/requests/packages.py | 14 - .../site-packages/requests/sessions.py | 770 -- .../site-packages/requests/status_codes.py | 120 - .../site-packages/requests/structures.py | 103 - .../python3.6/site-packages/requests/utils.py | 
977 -- .../setuptools-41.1.0.dist-info/INSTALLER | 1 - .../setuptools-41.1.0.dist-info/LICENSE | 19 - .../setuptools-41.1.0.dist-info/METADATA | 77 - .../setuptools-41.1.0.dist-info/RECORD | 186 - .../setuptools-41.1.0.dist-info/WHEEL | 6 - .../dependency_links.txt | 2 - .../entry_points.txt | 65 - .../setuptools-41.1.0.dist-info/top_level.txt | 3 - .../setuptools-41.1.0.dist-info/zip-safe | 1 - .../python3.6/site-packages/setuptools.pth | 1 - .../site-packages/setuptools/__init__.py | 228 - .../setuptools/_deprecation_warning.py | 7 - .../setuptools/_vendor/__init__.py | 0 .../setuptools/_vendor/packaging/__about__.py | 21 - .../setuptools/_vendor/packaging/__init__.py | 14 - .../setuptools/_vendor/packaging/_compat.py | 30 - .../_vendor/packaging/_structures.py | 68 - .../setuptools/_vendor/packaging/markers.py | 301 - .../_vendor/packaging/requirements.py | 127 - .../_vendor/packaging/specifiers.py | 774 -- .../setuptools/_vendor/packaging/utils.py | 14 - .../setuptools/_vendor/packaging/version.py | 393 - .../setuptools/_vendor/pyparsing.py | 5742 ------------ .../site-packages/setuptools/_vendor/six.py | 868 -- .../site-packages/setuptools/archive_util.py | 173 - .../site-packages/setuptools/build_meta.py | 257 - .../site-packages/setuptools/cli-32.exe | Bin 65536 -> 0 bytes .../site-packages/setuptools/cli-64.exe | Bin 74752 -> 0 bytes .../site-packages/setuptools/cli.exe | Bin 65536 -> 0 bytes .../setuptools/command/__init__.py | 18 - .../site-packages/setuptools/command/alias.py | 80 - .../setuptools/command/bdist_egg.py | 502 - .../setuptools/command/bdist_rpm.py | 43 - .../setuptools/command/bdist_wininst.py | 21 - .../setuptools/command/build_clib.py | 98 - .../setuptools/command/build_ext.py | 321 - .../setuptools/command/build_py.py | 270 - .../setuptools/command/develop.py | 221 - .../setuptools/command/dist_info.py | 36 - .../setuptools/command/easy_install.py | 2342 ----- .../setuptools/command/egg_info.py | 717 -- .../setuptools/command/install.py | 125 - .../setuptools/command/install_egg_info.py | 62 - .../setuptools/command/install_lib.py | 121 - .../setuptools/command/install_scripts.py | 65 - .../setuptools/command/launcher manifest.xml | 15 - .../setuptools/command/py36compat.py | 136 - .../setuptools/command/register.py | 18 - .../setuptools/command/rotate.py | 66 - .../setuptools/command/saveopts.py | 22 - .../site-packages/setuptools/command/sdist.py | 221 - .../setuptools/command/setopt.py | 149 - .../site-packages/setuptools/command/test.py | 271 - .../setuptools/command/upload.py | 196 - .../setuptools/command/upload_docs.py | 206 - .../site-packages/setuptools/config.py | 656 -- .../site-packages/setuptools/dep_util.py | 23 - .../site-packages/setuptools/depends.py | 186 - .../site-packages/setuptools/dist.py | 1280 --- .../site-packages/setuptools/extension.py | 57 - .../setuptools/extern/__init__.py | 73 - .../site-packages/setuptools/glibc.py | 86 - .../site-packages/setuptools/glob.py | 174 - .../site-packages/setuptools/gui-32.exe | Bin 65536 -> 0 bytes .../site-packages/setuptools/gui-64.exe | Bin 75264 -> 0 bytes .../site-packages/setuptools/gui.exe | Bin 65536 -> 0 bytes .../site-packages/setuptools/launch.py | 35 - .../site-packages/setuptools/lib2to3_ex.py | 62 - .../site-packages/setuptools/monkey.py | 179 - .../site-packages/setuptools/msvc.py | 1301 --- .../site-packages/setuptools/namespaces.py | 107 - .../site-packages/setuptools/package_index.py | 1136 --- .../site-packages/setuptools/pep425tags.py | 319 - 
.../site-packages/setuptools/py27compat.py | 28 - .../site-packages/setuptools/py31compat.py | 32 - .../site-packages/setuptools/py33compat.py | 59 - .../site-packages/setuptools/sandbox.py | 491 - .../setuptools/script (dev).tmpl | 6 - .../site-packages/setuptools/script.tmpl | 3 - .../site-packages/setuptools/site-patch.py | 74 - .../site-packages/setuptools/ssl_support.py | 260 - .../site-packages/setuptools/unicode_utils.py | 44 - .../site-packages/setuptools/version.py | 6 - .../site-packages/setuptools/wheel.py | 211 - .../setuptools/windows_support.py | 29 - .../six-1.12.0.dist-info/INSTALLER | 1 - .../six-1.12.0.dist-info/LICENSE | 18 - .../six-1.12.0.dist-info/METADATA | 52 - .../site-packages/six-1.12.0.dist-info/RECORD | 8 - .../site-packages/six-1.12.0.dist-info/WHEEL | 6 - .../six-1.12.0.dist-info/top_level.txt | 1 - venv/lib/python3.6/site-packages/six.py | 952 -- .../python3.6/site-packages/smmap/__init__.py | 11 - venv/lib/python3.6/site-packages/smmap/buf.py | 151 - venv/lib/python3.6/site-packages/smmap/exc.py | 11 - .../lib/python3.6/site-packages/smmap/mman.py | 590 -- .../site-packages/smmap/test/__init__.py | 0 .../python3.6/site-packages/smmap/test/lib.py | 72 - .../site-packages/smmap/test/test_buf.py | 128 - .../site-packages/smmap/test/test_mman.py | 226 - .../site-packages/smmap/test/test_tutorial.py | 81 - .../site-packages/smmap/test/test_util.py | 105 - .../lib/python3.6/site-packages/smmap/util.py | 240 - .../smmap2-2.0.5.dist-info/INSTALLER | 1 - .../smmap2-2.0.5.dist-info/METADATA | 113 - .../smmap2-2.0.5.dist-info/RECORD | 28 - .../smmap2-2.0.5.dist-info/WHEEL | 6 - .../smmap2-2.0.5.dist-info/top_level.txt | 1 - .../smmap2-2.0.5.dist-info/zip-safe | 1 - .../site-packages/socketio/__init__.py | 38 - .../python3.6/site-packages/socketio/asgi.py | 36 - .../socketio/asyncio_aiopika_manager.py | 105 - .../site-packages/socketio/asyncio_client.py | 464 - .../site-packages/socketio/asyncio_manager.py | 58 - .../socketio/asyncio_namespace.py | 204 - .../socketio/asyncio_pubsub_manager.py | 163 - .../socketio/asyncio_redis_manager.py | 107 - .../site-packages/socketio/asyncio_server.py | 523 -- .../site-packages/socketio/base_manager.py | 178 - .../site-packages/socketio/client.py | 609 -- .../site-packages/socketio/exceptions.py | 30 - .../site-packages/socketio/kafka_manager.py | 63 - .../site-packages/socketio/kombu_manager.py | 122 - .../site-packages/socketio/middleware.py | 42 - .../site-packages/socketio/namespace.py | 191 - .../site-packages/socketio/packet.py | 179 - .../site-packages/socketio/pubsub_manager.py | 154 - .../site-packages/socketio/redis_manager.py | 115 - .../site-packages/socketio/server.py | 727 -- .../site-packages/socketio/tornado.py | 11 - .../site-packages/socketio/zmq_manager.py | 111 - .../python3.6/site-packages/tests/__init__.py | 0 .../site-packages/tests/asyncio/__init__.py | 0 .../tests/asyncio/test_async_aiohttp.py | 57 - .../tests/asyncio/test_async_asgi.py | 279 - .../tests/asyncio/test_async_tornado.py | 76 - .../tests/asyncio/test_asyncio_client.py | 1215 --- .../tests/asyncio/test_asyncio_server.py | 919 -- .../tests/asyncio/test_asyncio_socket.py | 441 - .../site-packages/tests/common/__init__.py | 0 .../tests/common/test_async_eventlet.py | 30 - .../site-packages/tests/common/test_client.py | 1155 --- .../tests/common/test_middleware.py | 162 - .../site-packages/tests/common/test_packet.py | 103 - .../tests/common/test_payload.py | 70 - .../site-packages/tests/common/test_server.py | 952 -- 
.../site-packages/tests/common/test_socket.py | 424 - .../site-packages/tests/test_async_aiohttp.py | 57 - .../site-packages/tests/test_async_asgi.py | 244 - .../tests/test_async_eventlet.py | 30 - .../site-packages/tests/test_async_tornado.py | 76 - .../tests/test_asyncio_client.py | 1156 --- .../tests/test_asyncio_server.py | 895 -- .../tests/test_asyncio_socket.py | 421 - .../site-packages/tests/test_client.py | 1080 --- .../site-packages/tests/test_middleware.py | 126 - .../site-packages/tests/test_packet.py | 103 - .../site-packages/tests/test_payload.py | 57 - .../site-packages/tests/test_server.py | 890 -- .../site-packages/tests/test_socket.py | 401 - .../urllib3-1.25.3.dist-info/INSTALLER | 1 - .../urllib3-1.25.3.dist-info/LICENSE.txt | 21 - .../urllib3-1.25.3.dist-info/METADATA | 1206 --- .../urllib3-1.25.3.dist-info/RECORD | 104 - .../urllib3-1.25.3.dist-info/WHEEL | 6 - .../urllib3-1.25.3.dist-info/top_level.txt | 1 - .../site-packages/urllib3/__init__.py | 91 - .../site-packages/urllib3/_collections.py | 329 - .../site-packages/urllib3/connection.py | 417 - .../site-packages/urllib3/connectionpool.py | 897 -- .../site-packages/urllib3/contrib/__init__.py | 0 .../urllib3/contrib/_appengine_environ.py | 30 - .../contrib/_securetransport/__init__.py | 0 .../contrib/_securetransport/bindings.py | 593 -- .../contrib/_securetransport/low_level.py | 346 - .../urllib3/contrib/appengine.py | 289 - .../site-packages/urllib3/contrib/ntlmpool.py | 111 - .../urllib3/contrib/pyopenssl.py | 485 - .../urllib3/contrib/securetransport.py | 853 -- .../site-packages/urllib3/contrib/socks.py | 205 - .../site-packages/urllib3/exceptions.py | 246 - .../python3.6/site-packages/urllib3/fields.py | 272 - .../site-packages/urllib3/filepost.py | 98 - .../urllib3/packages/__init__.py | 5 - .../urllib3/packages/backports/__init__.py | 0 .../urllib3/packages/backports/makefile.py | 53 - .../urllib3/packages/rfc3986/__init__.py | 56 - .../urllib3/packages/rfc3986/_mixin.py | 353 - .../urllib3/packages/rfc3986/abnf_regexp.py | 267 - .../urllib3/packages/rfc3986/api.py | 106 - .../urllib3/packages/rfc3986/builder.py | 298 - .../urllib3/packages/rfc3986/compat.py | 54 - .../urllib3/packages/rfc3986/exceptions.py | 118 - .../urllib3/packages/rfc3986/iri.py | 147 - .../urllib3/packages/rfc3986/misc.py | 124 - .../urllib3/packages/rfc3986/normalizers.py | 167 - .../urllib3/packages/rfc3986/parseresult.py | 385 - .../urllib3/packages/rfc3986/uri.py | 153 - .../urllib3/packages/rfc3986/validators.py | 450 - .../site-packages/urllib3/packages/six.py | 868 -- .../packages/ssl_match_hostname/__init__.py | 19 - .../ssl_match_hostname/_implementation.py | 156 - .../site-packages/urllib3/poolmanager.py | 455 - .../site-packages/urllib3/request.py | 150 - .../site-packages/urllib3/response.py | 760 -- .../site-packages/urllib3/util/__init__.py | 56 - .../site-packages/urllib3/util/connection.py | 134 - .../site-packages/urllib3/util/queue.py | 21 - .../site-packages/urllib3/util/request.py | 125 - .../site-packages/urllib3/util/response.py | 87 - .../site-packages/urllib3/util/retry.py | 412 - .../site-packages/urllib3/util/ssl_.py | 392 - .../site-packages/urllib3/util/timeout.py | 243 - .../site-packages/urllib3/util/url.py | 289 - .../site-packages/urllib3/util/wait.py | 150 - .../site-packages/werkzeug/__init__.py | 233 - .../site-packages/werkzeug/_compat.py | 219 - .../site-packages/werkzeug/_internal.py | 484 - .../site-packages/werkzeug/_reloader.py | 335 - .../werkzeug/contrib/__init__.py | 16 - 
.../site-packages/werkzeug/contrib/atom.py | 362 - .../site-packages/werkzeug/contrib/cache.py | 933 -- .../site-packages/werkzeug/contrib/fixers.py | 262 - .../site-packages/werkzeug/contrib/iterio.py | 358 - .../site-packages/werkzeug/contrib/lint.py | 11 - .../werkzeug/contrib/profiler.py | 42 - .../werkzeug/contrib/securecookie.py | 362 - .../werkzeug/contrib/sessions.py | 389 - .../werkzeug/contrib/wrappers.py | 385 - .../site-packages/werkzeug/datastructures.py | 2852 ------ .../site-packages/werkzeug/debug/__init__.py | 524 -- .../site-packages/werkzeug/debug/console.py | 216 - .../site-packages/werkzeug/debug/repr.py | 297 - .../werkzeug/debug/shared/FONT_LICENSE | 96 - .../werkzeug/debug/shared/console.png | Bin 507 -> 0 bytes .../werkzeug/debug/shared/less.png | Bin 191 -> 0 bytes .../werkzeug/debug/shared/more.png | Bin 200 -> 0 bytes .../werkzeug/debug/shared/source.png | Bin 818 -> 0 bytes .../werkzeug/debug/shared/ubuntu.ttf | Bin 70220 -> 0 bytes .../site-packages/werkzeug/debug/tbtools.py | 629 -- .../site-packages/werkzeug/exceptions.py | 786 -- .../site-packages/werkzeug/filesystem.py | 64 - .../site-packages/werkzeug/formparser.py | 586 -- .../python3.6/site-packages/werkzeug/http.py | 1303 --- .../python3.6/site-packages/werkzeug/local.py | 421 - .../werkzeug/middleware/__init__.py | 25 - .../werkzeug/middleware/dispatcher.py | 66 - .../werkzeug/middleware/http_proxy.py | 219 - .../site-packages/werkzeug/middleware/lint.py | 408 - .../werkzeug/middleware/profiler.py | 132 - .../werkzeug/middleware/proxy_fix.py | 228 - .../werkzeug/middleware/shared_data.py | 253 - .../site-packages/werkzeug/posixemulation.py | 117 - .../site-packages/werkzeug/routing.py | 2039 ---- .../site-packages/werkzeug/security.py | 249 - .../site-packages/werkzeug/serving.py | 1074 --- .../python3.6/site-packages/werkzeug/test.py | 1146 --- .../site-packages/werkzeug/testapp.py | 241 - .../python3.6/site-packages/werkzeug/urls.py | 1134 --- .../site-packages/werkzeug/useragents.py | 220 - .../python3.6/site-packages/werkzeug/utils.py | 836 -- .../werkzeug/wrappers/__init__.py | 36 - .../site-packages/werkzeug/wrappers/accept.py | 50 - .../site-packages/werkzeug/wrappers/auth.py | 33 - .../werkzeug/wrappers/base_request.py | 695 -- .../werkzeug/wrappers/base_response.py | 702 -- .../werkzeug/wrappers/common_descriptors.py | 322 - .../site-packages/werkzeug/wrappers/etag.py | 304 - .../site-packages/werkzeug/wrappers/json.py | 145 - .../werkzeug/wrappers/request.py | 44 - .../werkzeug/wrappers/response.py | 78 - .../werkzeug/wrappers/user_agent.py | 15 - .../python3.6/site-packages/werkzeug/wsgi.py | 1067 --- .../python3.6/site-packages/yaml/__init__.py | 402 - .../python3.6/site-packages/yaml/composer.py | 139 - .../site-packages/yaml/constructor.py | 720 -- .../lib/python3.6/site-packages/yaml/cyaml.py | 101 - .../python3.6/site-packages/yaml/dumper.py | 62 - .../python3.6/site-packages/yaml/emitter.py | 1137 --- .../lib/python3.6/site-packages/yaml/error.py | 75 - .../python3.6/site-packages/yaml/events.py | 86 - .../python3.6/site-packages/yaml/loader.py | 63 - .../lib/python3.6/site-packages/yaml/nodes.py | 49 - .../python3.6/site-packages/yaml/parser.py | 589 -- .../python3.6/site-packages/yaml/reader.py | 185 - .../site-packages/yaml/representer.py | 389 - .../python3.6/site-packages/yaml/resolver.py | 227 - .../python3.6/site-packages/yaml/scanner.py | 1435 --- .../site-packages/yaml/serializer.py | 111 - .../python3.6/site-packages/yaml/tokens.py | 104 - venv/pip-selfcheck.json | 1 - 
venv/pyvenv.cfg | 3 - 1340 files changed, 1 insertion(+), 350962 deletions(-) delete mode 100644 venv/bin/activate delete mode 100644 venv/bin/activate.csh delete mode 100644 venv/bin/activate.fish delete mode 100755 venv/bin/chardetect delete mode 100755 venv/bin/easy_install delete mode 100755 venv/bin/easy_install-3.6 delete mode 100755 venv/bin/flask delete mode 100755 venv/bin/pip delete mode 100755 venv/bin/pip3 delete mode 100755 venv/bin/pip3.6 delete mode 100755 venv/bin/python delete mode 100755 venv/bin/python3 delete mode 100755 venv/bin/python3.6 delete mode 100644 venv/include/site/python3.6/greenlet/greenlet.h delete mode 100644 venv/lib/python3.6/site-packages/Click-7.0.dist-info/INSTALLER delete mode 100644 venv/lib/python3.6/site-packages/Click-7.0.dist-info/LICENSE.txt delete mode 100644 venv/lib/python3.6/site-packages/Click-7.0.dist-info/METADATA delete mode 100644 venv/lib/python3.6/site-packages/Click-7.0.dist-info/RECORD delete mode 100644 venv/lib/python3.6/site-packages/Click-7.0.dist-info/WHEEL delete mode 100644 venv/lib/python3.6/site-packages/Click-7.0.dist-info/top_level.txt delete mode 100644 venv/lib/python3.6/site-packages/Flask-1.1.1.dist-info/INSTALLER delete mode 100644 venv/lib/python3.6/site-packages/Flask-1.1.1.dist-info/LICENSE.rst delete mode 100644 venv/lib/python3.6/site-packages/Flask-1.1.1.dist-info/METADATA delete mode 100644 venv/lib/python3.6/site-packages/Flask-1.1.1.dist-info/RECORD delete mode 100644 venv/lib/python3.6/site-packages/Flask-1.1.1.dist-info/WHEEL delete mode 100644 venv/lib/python3.6/site-packages/Flask-1.1.1.dist-info/entry_points.txt delete mode 100644 venv/lib/python3.6/site-packages/Flask-1.1.1.dist-info/top_level.txt delete mode 100644 venv/lib/python3.6/site-packages/Flask_Classy-0.6.10-py3.6.egg-info/PKG-INFO delete mode 100644 venv/lib/python3.6/site-packages/Flask_Classy-0.6.10-py3.6.egg-info/SOURCES.txt delete mode 100644 venv/lib/python3.6/site-packages/Flask_Classy-0.6.10-py3.6.egg-info/dependency_links.txt delete mode 100644 venv/lib/python3.6/site-packages/Flask_Classy-0.6.10-py3.6.egg-info/installed-files.txt delete mode 100644 venv/lib/python3.6/site-packages/Flask_Classy-0.6.10-py3.6.egg-info/not-zip-safe delete mode 100644 venv/lib/python3.6/site-packages/Flask_Classy-0.6.10-py3.6.egg-info/requires.txt delete mode 100644 venv/lib/python3.6/site-packages/Flask_Classy-0.6.10-py3.6.egg-info/top_level.txt delete mode 100644 venv/lib/python3.6/site-packages/Flask_SocketIO-4.2.1.dist-info/INSTALLER delete mode 100644 venv/lib/python3.6/site-packages/Flask_SocketIO-4.2.1.dist-info/LICENSE delete mode 100644 venv/lib/python3.6/site-packages/Flask_SocketIO-4.2.1.dist-info/METADATA delete mode 100644 venv/lib/python3.6/site-packages/Flask_SocketIO-4.2.1.dist-info/RECORD delete mode 100644 venv/lib/python3.6/site-packages/Flask_SocketIO-4.2.1.dist-info/WHEEL delete mode 100644 venv/lib/python3.6/site-packages/Flask_SocketIO-4.2.1.dist-info/top_level.txt delete mode 100644 venv/lib/python3.6/site-packages/GitPython-3.0.1.dist-info/AUTHORS delete mode 100644 venv/lib/python3.6/site-packages/GitPython-3.0.1.dist-info/INSTALLER delete mode 100644 venv/lib/python3.6/site-packages/GitPython-3.0.1.dist-info/LICENSE delete mode 100644 venv/lib/python3.6/site-packages/GitPython-3.0.1.dist-info/METADATA delete mode 100644 venv/lib/python3.6/site-packages/GitPython-3.0.1.dist-info/RECORD delete mode 100644 venv/lib/python3.6/site-packages/GitPython-3.0.1.dist-info/WHEEL delete mode 100644 
venv/lib/python3.6/site-packages/GitPython-3.0.1.dist-info/top_level.txt delete mode 100644 venv/lib/python3.6/site-packages/Jinja2-2.10.1.dist-info/INSTALLER delete mode 100644 venv/lib/python3.6/site-packages/Jinja2-2.10.1.dist-info/LICENSE delete mode 100644 venv/lib/python3.6/site-packages/Jinja2-2.10.1.dist-info/METADATA delete mode 100644 venv/lib/python3.6/site-packages/Jinja2-2.10.1.dist-info/RECORD delete mode 100644 venv/lib/python3.6/site-packages/Jinja2-2.10.1.dist-info/WHEEL delete mode 100644 venv/lib/python3.6/site-packages/Jinja2-2.10.1.dist-info/entry_points.txt delete mode 100644 venv/lib/python3.6/site-packages/Jinja2-2.10.1.dist-info/top_level.txt delete mode 100644 venv/lib/python3.6/site-packages/MarkupSafe-1.1.1.dist-info/INSTALLER delete mode 100644 venv/lib/python3.6/site-packages/MarkupSafe-1.1.1.dist-info/LICENSE.rst delete mode 100644 venv/lib/python3.6/site-packages/MarkupSafe-1.1.1.dist-info/METADATA delete mode 100644 venv/lib/python3.6/site-packages/MarkupSafe-1.1.1.dist-info/RECORD delete mode 100644 venv/lib/python3.6/site-packages/MarkupSafe-1.1.1.dist-info/WHEEL delete mode 100644 venv/lib/python3.6/site-packages/MarkupSafe-1.1.1.dist-info/top_level.txt delete mode 100644 venv/lib/python3.6/site-packages/PyYAML-5.1.2-py3.6.egg-info/PKG-INFO delete mode 100644 venv/lib/python3.6/site-packages/PyYAML-5.1.2-py3.6.egg-info/SOURCES.txt delete mode 100644 venv/lib/python3.6/site-packages/PyYAML-5.1.2-py3.6.egg-info/dependency_links.txt delete mode 100644 venv/lib/python3.6/site-packages/PyYAML-5.1.2-py3.6.egg-info/installed-files.txt delete mode 100644 venv/lib/python3.6/site-packages/PyYAML-5.1.2-py3.6.egg-info/top_level.txt delete mode 100644 venv/lib/python3.6/site-packages/Werkzeug-0.15.5.dist-info/INSTALLER delete mode 100644 venv/lib/python3.6/site-packages/Werkzeug-0.15.5.dist-info/LICENSE.rst delete mode 100644 venv/lib/python3.6/site-packages/Werkzeug-0.15.5.dist-info/METADATA delete mode 100644 venv/lib/python3.6/site-packages/Werkzeug-0.15.5.dist-info/RECORD delete mode 100644 venv/lib/python3.6/site-packages/Werkzeug-0.15.5.dist-info/WHEEL delete mode 100644 venv/lib/python3.6/site-packages/Werkzeug-0.15.5.dist-info/top_level.txt delete mode 100644 venv/lib/python3.6/site-packages/certifi-2019.6.16.dist-info/DESCRIPTION.rst delete mode 100644 venv/lib/python3.6/site-packages/certifi-2019.6.16.dist-info/INSTALLER delete mode 100644 venv/lib/python3.6/site-packages/certifi-2019.6.16.dist-info/LICENSE.txt delete mode 100644 venv/lib/python3.6/site-packages/certifi-2019.6.16.dist-info/METADATA delete mode 100644 venv/lib/python3.6/site-packages/certifi-2019.6.16.dist-info/RECORD delete mode 100644 venv/lib/python3.6/site-packages/certifi-2019.6.16.dist-info/WHEEL delete mode 100644 venv/lib/python3.6/site-packages/certifi-2019.6.16.dist-info/metadata.json delete mode 100644 venv/lib/python3.6/site-packages/certifi-2019.6.16.dist-info/top_level.txt delete mode 100644 venv/lib/python3.6/site-packages/certifi/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/certifi/__main__.py delete mode 100644 venv/lib/python3.6/site-packages/certifi/cacert.pem delete mode 100644 venv/lib/python3.6/site-packages/certifi/core.py delete mode 100644 venv/lib/python3.6/site-packages/chardet-3.0.4.dist-info/DESCRIPTION.rst delete mode 100644 venv/lib/python3.6/site-packages/chardet-3.0.4.dist-info/INSTALLER delete mode 100644 venv/lib/python3.6/site-packages/chardet-3.0.4.dist-info/METADATA delete mode 100644 
venv/lib/python3.6/site-packages/chardet-3.0.4.dist-info/RECORD delete mode 100644 venv/lib/python3.6/site-packages/chardet-3.0.4.dist-info/WHEEL delete mode 100644 venv/lib/python3.6/site-packages/chardet-3.0.4.dist-info/entry_points.txt delete mode 100644 venv/lib/python3.6/site-packages/chardet-3.0.4.dist-info/metadata.json delete mode 100644 venv/lib/python3.6/site-packages/chardet-3.0.4.dist-info/top_level.txt delete mode 100644 venv/lib/python3.6/site-packages/chardet/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/chardet/big5freq.py delete mode 100644 venv/lib/python3.6/site-packages/chardet/big5prober.py delete mode 100644 venv/lib/python3.6/site-packages/chardet/chardistribution.py delete mode 100644 venv/lib/python3.6/site-packages/chardet/charsetgroupprober.py delete mode 100644 venv/lib/python3.6/site-packages/chardet/charsetprober.py delete mode 100644 venv/lib/python3.6/site-packages/chardet/cli/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/chardet/cli/chardetect.py delete mode 100644 venv/lib/python3.6/site-packages/chardet/codingstatemachine.py delete mode 100644 venv/lib/python3.6/site-packages/chardet/compat.py delete mode 100644 venv/lib/python3.6/site-packages/chardet/cp949prober.py delete mode 100644 venv/lib/python3.6/site-packages/chardet/enums.py delete mode 100644 venv/lib/python3.6/site-packages/chardet/escprober.py delete mode 100644 venv/lib/python3.6/site-packages/chardet/escsm.py delete mode 100644 venv/lib/python3.6/site-packages/chardet/eucjpprober.py delete mode 100644 venv/lib/python3.6/site-packages/chardet/euckrfreq.py delete mode 100644 venv/lib/python3.6/site-packages/chardet/euckrprober.py delete mode 100644 venv/lib/python3.6/site-packages/chardet/euctwfreq.py delete mode 100644 venv/lib/python3.6/site-packages/chardet/euctwprober.py delete mode 100644 venv/lib/python3.6/site-packages/chardet/gb2312freq.py delete mode 100644 venv/lib/python3.6/site-packages/chardet/gb2312prober.py delete mode 100644 venv/lib/python3.6/site-packages/chardet/hebrewprober.py delete mode 100644 venv/lib/python3.6/site-packages/chardet/jisfreq.py delete mode 100644 venv/lib/python3.6/site-packages/chardet/jpcntx.py delete mode 100644 venv/lib/python3.6/site-packages/chardet/langbulgarianmodel.py delete mode 100644 venv/lib/python3.6/site-packages/chardet/langcyrillicmodel.py delete mode 100644 venv/lib/python3.6/site-packages/chardet/langgreekmodel.py delete mode 100644 venv/lib/python3.6/site-packages/chardet/langhebrewmodel.py delete mode 100644 venv/lib/python3.6/site-packages/chardet/langhungarianmodel.py delete mode 100644 venv/lib/python3.6/site-packages/chardet/langthaimodel.py delete mode 100644 venv/lib/python3.6/site-packages/chardet/langturkishmodel.py delete mode 100644 venv/lib/python3.6/site-packages/chardet/latin1prober.py delete mode 100644 venv/lib/python3.6/site-packages/chardet/mbcharsetprober.py delete mode 100644 venv/lib/python3.6/site-packages/chardet/mbcsgroupprober.py delete mode 100644 venv/lib/python3.6/site-packages/chardet/mbcssm.py delete mode 100644 venv/lib/python3.6/site-packages/chardet/sbcharsetprober.py delete mode 100644 venv/lib/python3.6/site-packages/chardet/sbcsgroupprober.py delete mode 100644 venv/lib/python3.6/site-packages/chardet/sjisprober.py delete mode 100644 venv/lib/python3.6/site-packages/chardet/universaldetector.py delete mode 100644 venv/lib/python3.6/site-packages/chardet/utf8prober.py delete mode 100644 venv/lib/python3.6/site-packages/chardet/version.py delete mode 100644 
venv/lib/python3.6/site-packages/click/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/click/_bashcomplete.py delete mode 100644 venv/lib/python3.6/site-packages/click/_compat.py delete mode 100644 venv/lib/python3.6/site-packages/click/_termui_impl.py delete mode 100644 venv/lib/python3.6/site-packages/click/_textwrap.py delete mode 100644 venv/lib/python3.6/site-packages/click/_unicodefun.py delete mode 100644 venv/lib/python3.6/site-packages/click/_winconsole.py delete mode 100644 venv/lib/python3.6/site-packages/click/core.py delete mode 100644 venv/lib/python3.6/site-packages/click/decorators.py delete mode 100644 venv/lib/python3.6/site-packages/click/exceptions.py delete mode 100644 venv/lib/python3.6/site-packages/click/formatting.py delete mode 100644 venv/lib/python3.6/site-packages/click/globals.py delete mode 100644 venv/lib/python3.6/site-packages/click/parser.py delete mode 100644 venv/lib/python3.6/site-packages/click/termui.py delete mode 100644 venv/lib/python3.6/site-packages/click/testing.py delete mode 100644 venv/lib/python3.6/site-packages/click/types.py delete mode 100644 venv/lib/python3.6/site-packages/click/utils.py delete mode 100644 venv/lib/python3.6/site-packages/dateutil/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/dateutil/_common.py delete mode 100644 venv/lib/python3.6/site-packages/dateutil/_version.py delete mode 100644 venv/lib/python3.6/site-packages/dateutil/easter.py delete mode 100644 venv/lib/python3.6/site-packages/dateutil/parser/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/dateutil/parser/_parser.py delete mode 100644 venv/lib/python3.6/site-packages/dateutil/parser/isoparser.py delete mode 100644 venv/lib/python3.6/site-packages/dateutil/relativedelta.py delete mode 100644 venv/lib/python3.6/site-packages/dateutil/rrule.py delete mode 100644 venv/lib/python3.6/site-packages/dateutil/tz/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/dateutil/tz/_common.py delete mode 100644 venv/lib/python3.6/site-packages/dateutil/tz/_factories.py delete mode 100644 venv/lib/python3.6/site-packages/dateutil/tz/tz.py delete mode 100644 venv/lib/python3.6/site-packages/dateutil/tz/win.py delete mode 100644 venv/lib/python3.6/site-packages/dateutil/tzwin.py delete mode 100644 venv/lib/python3.6/site-packages/dateutil/utils.py delete mode 100644 venv/lib/python3.6/site-packages/dateutil/zoneinfo/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/dateutil/zoneinfo/dateutil-zoneinfo.tar.gz delete mode 100644 venv/lib/python3.6/site-packages/dateutil/zoneinfo/rebuild.py delete mode 100644 venv/lib/python3.6/site-packages/ddt-1.2.1.dist-info/INSTALLER delete mode 100644 venv/lib/python3.6/site-packages/ddt-1.2.1.dist-info/LICENSE.md delete mode 100644 venv/lib/python3.6/site-packages/ddt-1.2.1.dist-info/METADATA delete mode 100644 venv/lib/python3.6/site-packages/ddt-1.2.1.dist-info/RECORD delete mode 100644 venv/lib/python3.6/site-packages/ddt-1.2.1.dist-info/WHEEL delete mode 100644 venv/lib/python3.6/site-packages/ddt-1.2.1.dist-info/top_level.txt delete mode 100644 venv/lib/python3.6/site-packages/ddt.py delete mode 100644 venv/lib/python3.6/site-packages/dns/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/dns/_compat.py delete mode 100644 venv/lib/python3.6/site-packages/dns/dnssec.py delete mode 100644 venv/lib/python3.6/site-packages/dns/e164.py delete mode 100644 venv/lib/python3.6/site-packages/dns/edns.py delete mode 100644 
venv/lib/python3.6/site-packages/dns/entropy.py delete mode 100644 venv/lib/python3.6/site-packages/dns/exception.py delete mode 100644 venv/lib/python3.6/site-packages/dns/flags.py delete mode 100644 venv/lib/python3.6/site-packages/dns/grange.py delete mode 100644 venv/lib/python3.6/site-packages/dns/hash.py delete mode 100644 venv/lib/python3.6/site-packages/dns/inet.py delete mode 100644 venv/lib/python3.6/site-packages/dns/ipv4.py delete mode 100644 venv/lib/python3.6/site-packages/dns/ipv6.py delete mode 100644 venv/lib/python3.6/site-packages/dns/message.py delete mode 100644 venv/lib/python3.6/site-packages/dns/name.py delete mode 100644 venv/lib/python3.6/site-packages/dns/namedict.py delete mode 100644 venv/lib/python3.6/site-packages/dns/node.py delete mode 100644 venv/lib/python3.6/site-packages/dns/opcode.py delete mode 100644 venv/lib/python3.6/site-packages/dns/py.typed delete mode 100644 venv/lib/python3.6/site-packages/dns/query.py delete mode 100644 venv/lib/python3.6/site-packages/dns/rcode.py delete mode 100644 venv/lib/python3.6/site-packages/dns/rdata.py delete mode 100644 venv/lib/python3.6/site-packages/dns/rdataclass.py delete mode 100644 venv/lib/python3.6/site-packages/dns/rdataset.py delete mode 100644 venv/lib/python3.6/site-packages/dns/rdatatype.py delete mode 100644 venv/lib/python3.6/site-packages/dns/rdtypes/ANY/AFSDB.py delete mode 100644 venv/lib/python3.6/site-packages/dns/rdtypes/ANY/AVC.py delete mode 100644 venv/lib/python3.6/site-packages/dns/rdtypes/ANY/CAA.py delete mode 100644 venv/lib/python3.6/site-packages/dns/rdtypes/ANY/CDNSKEY.py delete mode 100644 venv/lib/python3.6/site-packages/dns/rdtypes/ANY/CDS.py delete mode 100644 venv/lib/python3.6/site-packages/dns/rdtypes/ANY/CERT.py delete mode 100644 venv/lib/python3.6/site-packages/dns/rdtypes/ANY/CNAME.py delete mode 100644 venv/lib/python3.6/site-packages/dns/rdtypes/ANY/CSYNC.py delete mode 100644 venv/lib/python3.6/site-packages/dns/rdtypes/ANY/DLV.py delete mode 100644 venv/lib/python3.6/site-packages/dns/rdtypes/ANY/DNAME.py delete mode 100644 venv/lib/python3.6/site-packages/dns/rdtypes/ANY/DNSKEY.py delete mode 100644 venv/lib/python3.6/site-packages/dns/rdtypes/ANY/DS.py delete mode 100644 venv/lib/python3.6/site-packages/dns/rdtypes/ANY/EUI48.py delete mode 100644 venv/lib/python3.6/site-packages/dns/rdtypes/ANY/EUI64.py delete mode 100644 venv/lib/python3.6/site-packages/dns/rdtypes/ANY/GPOS.py delete mode 100644 venv/lib/python3.6/site-packages/dns/rdtypes/ANY/HINFO.py delete mode 100644 venv/lib/python3.6/site-packages/dns/rdtypes/ANY/HIP.py delete mode 100644 venv/lib/python3.6/site-packages/dns/rdtypes/ANY/ISDN.py delete mode 100644 venv/lib/python3.6/site-packages/dns/rdtypes/ANY/LOC.py delete mode 100644 venv/lib/python3.6/site-packages/dns/rdtypes/ANY/MX.py delete mode 100644 venv/lib/python3.6/site-packages/dns/rdtypes/ANY/NS.py delete mode 100644 venv/lib/python3.6/site-packages/dns/rdtypes/ANY/NSEC.py delete mode 100644 venv/lib/python3.6/site-packages/dns/rdtypes/ANY/NSEC3.py delete mode 100644 venv/lib/python3.6/site-packages/dns/rdtypes/ANY/NSEC3PARAM.py delete mode 100644 venv/lib/python3.6/site-packages/dns/rdtypes/ANY/OPENPGPKEY.py delete mode 100644 venv/lib/python3.6/site-packages/dns/rdtypes/ANY/PTR.py delete mode 100644 venv/lib/python3.6/site-packages/dns/rdtypes/ANY/RP.py delete mode 100644 venv/lib/python3.6/site-packages/dns/rdtypes/ANY/RRSIG.py delete mode 100644 venv/lib/python3.6/site-packages/dns/rdtypes/ANY/RT.py delete mode 100644 
venv/lib/python3.6/site-packages/dns/rdtypes/ANY/SOA.py delete mode 100644 venv/lib/python3.6/site-packages/dns/rdtypes/ANY/SPF.py delete mode 100644 venv/lib/python3.6/site-packages/dns/rdtypes/ANY/SSHFP.py delete mode 100644 venv/lib/python3.6/site-packages/dns/rdtypes/ANY/TLSA.py delete mode 100644 venv/lib/python3.6/site-packages/dns/rdtypes/ANY/TXT.py delete mode 100644 venv/lib/python3.6/site-packages/dns/rdtypes/ANY/URI.py delete mode 100644 venv/lib/python3.6/site-packages/dns/rdtypes/ANY/X25.py delete mode 100644 venv/lib/python3.6/site-packages/dns/rdtypes/ANY/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/dns/rdtypes/CH/A.py delete mode 100644 venv/lib/python3.6/site-packages/dns/rdtypes/CH/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/dns/rdtypes/IN/A.py delete mode 100644 venv/lib/python3.6/site-packages/dns/rdtypes/IN/AAAA.py delete mode 100644 venv/lib/python3.6/site-packages/dns/rdtypes/IN/APL.py delete mode 100644 venv/lib/python3.6/site-packages/dns/rdtypes/IN/DHCID.py delete mode 100644 venv/lib/python3.6/site-packages/dns/rdtypes/IN/IPSECKEY.py delete mode 100644 venv/lib/python3.6/site-packages/dns/rdtypes/IN/KX.py delete mode 100644 venv/lib/python3.6/site-packages/dns/rdtypes/IN/NAPTR.py delete mode 100644 venv/lib/python3.6/site-packages/dns/rdtypes/IN/NSAP.py delete mode 100644 venv/lib/python3.6/site-packages/dns/rdtypes/IN/NSAP_PTR.py delete mode 100644 venv/lib/python3.6/site-packages/dns/rdtypes/IN/PX.py delete mode 100644 venv/lib/python3.6/site-packages/dns/rdtypes/IN/SRV.py delete mode 100644 venv/lib/python3.6/site-packages/dns/rdtypes/IN/WKS.py delete mode 100644 venv/lib/python3.6/site-packages/dns/rdtypes/IN/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/dns/rdtypes/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/dns/rdtypes/dnskeybase.py delete mode 100644 venv/lib/python3.6/site-packages/dns/rdtypes/dsbase.py delete mode 100644 venv/lib/python3.6/site-packages/dns/rdtypes/euibase.py delete mode 100644 venv/lib/python3.6/site-packages/dns/rdtypes/mxbase.py delete mode 100644 venv/lib/python3.6/site-packages/dns/rdtypes/nsbase.py delete mode 100644 venv/lib/python3.6/site-packages/dns/rdtypes/txtbase.py delete mode 100644 venv/lib/python3.6/site-packages/dns/renderer.py delete mode 100644 venv/lib/python3.6/site-packages/dns/resolver.py delete mode 100644 venv/lib/python3.6/site-packages/dns/reversename.py delete mode 100644 venv/lib/python3.6/site-packages/dns/rrset.py delete mode 100644 venv/lib/python3.6/site-packages/dns/set.py delete mode 100644 venv/lib/python3.6/site-packages/dns/tokenizer.py delete mode 100644 venv/lib/python3.6/site-packages/dns/tsig.py delete mode 100644 venv/lib/python3.6/site-packages/dns/tsigkeyring.py delete mode 100644 venv/lib/python3.6/site-packages/dns/ttl.py delete mode 100644 venv/lib/python3.6/site-packages/dns/update.py delete mode 100644 venv/lib/python3.6/site-packages/dns/version.py delete mode 100644 venv/lib/python3.6/site-packages/dns/wiredata.py delete mode 100644 venv/lib/python3.6/site-packages/dns/zone.py delete mode 100644 venv/lib/python3.6/site-packages/dnspython-1.16.0.dist-info/DESCRIPTION.rst delete mode 100644 venv/lib/python3.6/site-packages/dnspython-1.16.0.dist-info/INSTALLER delete mode 100644 venv/lib/python3.6/site-packages/dnspython-1.16.0.dist-info/LICENSE.txt delete mode 100644 venv/lib/python3.6/site-packages/dnspython-1.16.0.dist-info/METADATA delete mode 100644 
venv/lib/python3.6/site-packages/dnspython-1.16.0.dist-info/RECORD delete mode 100644 venv/lib/python3.6/site-packages/dnspython-1.16.0.dist-info/WHEEL delete mode 100644 venv/lib/python3.6/site-packages/dnspython-1.16.0.dist-info/metadata.json delete mode 100644 venv/lib/python3.6/site-packages/dnspython-1.16.0.dist-info/top_level.txt delete mode 100644 venv/lib/python3.6/site-packages/easy-install.pth delete mode 100644 venv/lib/python3.6/site-packages/easy_install.py delete mode 100644 venv/lib/python3.6/site-packages/engineio/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/engineio/async_aiohttp.py delete mode 100644 venv/lib/python3.6/site-packages/engineio/async_asgi.py delete mode 100644 venv/lib/python3.6/site-packages/engineio/async_drivers/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/engineio/async_drivers/aiohttp.py delete mode 100644 venv/lib/python3.6/site-packages/engineio/async_drivers/asgi.py delete mode 100644 venv/lib/python3.6/site-packages/engineio/async_drivers/eventlet.py delete mode 100644 venv/lib/python3.6/site-packages/engineio/async_drivers/gevent.py delete mode 100644 venv/lib/python3.6/site-packages/engineio/async_drivers/gevent_uwsgi.py delete mode 100644 venv/lib/python3.6/site-packages/engineio/async_drivers/sanic.py delete mode 100644 venv/lib/python3.6/site-packages/engineio/async_drivers/threading.py delete mode 100644 venv/lib/python3.6/site-packages/engineio/async_drivers/tornado.py delete mode 100644 venv/lib/python3.6/site-packages/engineio/async_eventlet.py delete mode 100644 venv/lib/python3.6/site-packages/engineio/async_gevent.py delete mode 100644 venv/lib/python3.6/site-packages/engineio/async_gevent_uwsgi.py delete mode 100644 venv/lib/python3.6/site-packages/engineio/async_sanic.py delete mode 100644 venv/lib/python3.6/site-packages/engineio/async_threading.py delete mode 100644 venv/lib/python3.6/site-packages/engineio/async_tornado.py delete mode 100644 venv/lib/python3.6/site-packages/engineio/asyncio_client.py delete mode 100644 venv/lib/python3.6/site-packages/engineio/asyncio_server.py delete mode 100644 venv/lib/python3.6/site-packages/engineio/asyncio_socket.py delete mode 100644 venv/lib/python3.6/site-packages/engineio/client.py delete mode 100644 venv/lib/python3.6/site-packages/engineio/exceptions.py delete mode 100644 venv/lib/python3.6/site-packages/engineio/middleware.py delete mode 100644 venv/lib/python3.6/site-packages/engineio/packet.py delete mode 100644 venv/lib/python3.6/site-packages/engineio/payload.py delete mode 100644 venv/lib/python3.6/site-packages/engineio/server.py delete mode 100644 venv/lib/python3.6/site-packages/engineio/socket.py delete mode 100644 venv/lib/python3.6/site-packages/engineio/static_files.py delete mode 100644 venv/lib/python3.6/site-packages/eventlet-0.25.0.dist-info/AUTHORS delete mode 100644 venv/lib/python3.6/site-packages/eventlet-0.25.0.dist-info/INSTALLER delete mode 100644 venv/lib/python3.6/site-packages/eventlet-0.25.0.dist-info/LICENSE delete mode 100644 venv/lib/python3.6/site-packages/eventlet-0.25.0.dist-info/METADATA delete mode 100644 venv/lib/python3.6/site-packages/eventlet-0.25.0.dist-info/RECORD delete mode 100644 venv/lib/python3.6/site-packages/eventlet-0.25.0.dist-info/WHEEL delete mode 100644 venv/lib/python3.6/site-packages/eventlet-0.25.0.dist-info/top_level.txt delete mode 100644 venv/lib/python3.6/site-packages/eventlet/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/eventlet/backdoor.py delete mode 100644 
venv/lib/python3.6/site-packages/eventlet/convenience.py delete mode 100644 venv/lib/python3.6/site-packages/eventlet/corolocal.py delete mode 100644 venv/lib/python3.6/site-packages/eventlet/coros.py delete mode 100644 venv/lib/python3.6/site-packages/eventlet/dagpool.py delete mode 100644 venv/lib/python3.6/site-packages/eventlet/db_pool.py delete mode 100644 venv/lib/python3.6/site-packages/eventlet/debug.py delete mode 100644 venv/lib/python3.6/site-packages/eventlet/event.py delete mode 100644 venv/lib/python3.6/site-packages/eventlet/green/BaseHTTPServer.py delete mode 100644 venv/lib/python3.6/site-packages/eventlet/green/CGIHTTPServer.py delete mode 100644 venv/lib/python3.6/site-packages/eventlet/green/MySQLdb.py delete mode 100644 venv/lib/python3.6/site-packages/eventlet/green/OpenSSL/SSL.py delete mode 100644 venv/lib/python3.6/site-packages/eventlet/green/OpenSSL/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/eventlet/green/OpenSSL/crypto.py delete mode 100644 venv/lib/python3.6/site-packages/eventlet/green/OpenSSL/tsafe.py delete mode 100644 venv/lib/python3.6/site-packages/eventlet/green/OpenSSL/version.py delete mode 100644 venv/lib/python3.6/site-packages/eventlet/green/Queue.py delete mode 100644 venv/lib/python3.6/site-packages/eventlet/green/SimpleHTTPServer.py delete mode 100644 venv/lib/python3.6/site-packages/eventlet/green/SocketServer.py delete mode 100644 venv/lib/python3.6/site-packages/eventlet/green/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/eventlet/green/_socket_nodns.py delete mode 100644 venv/lib/python3.6/site-packages/eventlet/green/asynchat.py delete mode 100644 venv/lib/python3.6/site-packages/eventlet/green/asyncore.py delete mode 100644 venv/lib/python3.6/site-packages/eventlet/green/builtin.py delete mode 100644 venv/lib/python3.6/site-packages/eventlet/green/ftplib.py delete mode 100644 venv/lib/python3.6/site-packages/eventlet/green/http/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/eventlet/green/http/client.py delete mode 100644 venv/lib/python3.6/site-packages/eventlet/green/http/cookiejar.py delete mode 100644 venv/lib/python3.6/site-packages/eventlet/green/http/cookies.py delete mode 100644 venv/lib/python3.6/site-packages/eventlet/green/http/server.py delete mode 100644 venv/lib/python3.6/site-packages/eventlet/green/httplib.py delete mode 100644 venv/lib/python3.6/site-packages/eventlet/green/os.py delete mode 100644 venv/lib/python3.6/site-packages/eventlet/green/profile.py delete mode 100644 venv/lib/python3.6/site-packages/eventlet/green/select.py delete mode 100644 venv/lib/python3.6/site-packages/eventlet/green/selectors.py delete mode 100644 venv/lib/python3.6/site-packages/eventlet/green/socket.py delete mode 100644 venv/lib/python3.6/site-packages/eventlet/green/ssl.py delete mode 100644 venv/lib/python3.6/site-packages/eventlet/green/subprocess.py delete mode 100644 venv/lib/python3.6/site-packages/eventlet/green/thread.py delete mode 100644 venv/lib/python3.6/site-packages/eventlet/green/threading.py delete mode 100644 venv/lib/python3.6/site-packages/eventlet/green/time.py delete mode 100644 venv/lib/python3.6/site-packages/eventlet/green/urllib/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/eventlet/green/urllib/error.py delete mode 100644 venv/lib/python3.6/site-packages/eventlet/green/urllib/parse.py delete mode 100644 venv/lib/python3.6/site-packages/eventlet/green/urllib/request.py delete mode 100644 
venv/lib/python3.6/site-packages/eventlet/green/urllib/response.py delete mode 100644 venv/lib/python3.6/site-packages/eventlet/green/urllib2.py delete mode 100644 venv/lib/python3.6/site-packages/eventlet/green/zmq.py delete mode 100644 venv/lib/python3.6/site-packages/eventlet/greenio/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/eventlet/greenio/base.py delete mode 100644 venv/lib/python3.6/site-packages/eventlet/greenio/py2.py delete mode 100644 venv/lib/python3.6/site-packages/eventlet/greenio/py3.py delete mode 100644 venv/lib/python3.6/site-packages/eventlet/greenpool.py delete mode 100644 venv/lib/python3.6/site-packages/eventlet/greenthread.py delete mode 100644 venv/lib/python3.6/site-packages/eventlet/hubs/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/eventlet/hubs/epolls.py delete mode 100644 venv/lib/python3.6/site-packages/eventlet/hubs/hub.py delete mode 100644 venv/lib/python3.6/site-packages/eventlet/hubs/kqueue.py delete mode 100644 venv/lib/python3.6/site-packages/eventlet/hubs/poll.py delete mode 100644 venv/lib/python3.6/site-packages/eventlet/hubs/pyevent.py delete mode 100644 venv/lib/python3.6/site-packages/eventlet/hubs/selects.py delete mode 100644 venv/lib/python3.6/site-packages/eventlet/hubs/timer.py delete mode 100644 venv/lib/python3.6/site-packages/eventlet/patcher.py delete mode 100644 venv/lib/python3.6/site-packages/eventlet/pools.py delete mode 100644 venv/lib/python3.6/site-packages/eventlet/queue.py delete mode 100644 venv/lib/python3.6/site-packages/eventlet/semaphore.py delete mode 100644 venv/lib/python3.6/site-packages/eventlet/support/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/eventlet/support/greendns.py delete mode 100644 venv/lib/python3.6/site-packages/eventlet/support/greenlets.py delete mode 100644 venv/lib/python3.6/site-packages/eventlet/support/psycopg2_patcher.py delete mode 100644 venv/lib/python3.6/site-packages/eventlet/support/pylib.py delete mode 100644 venv/lib/python3.6/site-packages/eventlet/support/stacklesspypys.py delete mode 100644 venv/lib/python3.6/site-packages/eventlet/support/stacklesss.py delete mode 100644 venv/lib/python3.6/site-packages/eventlet/timeout.py delete mode 100644 venv/lib/python3.6/site-packages/eventlet/tpool.py delete mode 100644 venv/lib/python3.6/site-packages/eventlet/websocket.py delete mode 100644 venv/lib/python3.6/site-packages/eventlet/wsgi.py delete mode 100644 venv/lib/python3.6/site-packages/eventlet/zipkin/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/eventlet/zipkin/_thrift/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/eventlet/zipkin/_thrift/zipkinCore/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/eventlet/zipkin/_thrift/zipkinCore/constants.py delete mode 100644 venv/lib/python3.6/site-packages/eventlet/zipkin/_thrift/zipkinCore/ttypes.py delete mode 100644 venv/lib/python3.6/site-packages/eventlet/zipkin/api.py delete mode 100644 venv/lib/python3.6/site-packages/eventlet/zipkin/client.py delete mode 100644 venv/lib/python3.6/site-packages/eventlet/zipkin/greenthread.py delete mode 100644 venv/lib/python3.6/site-packages/eventlet/zipkin/http.py delete mode 100644 venv/lib/python3.6/site-packages/eventlet/zipkin/log.py delete mode 100644 venv/lib/python3.6/site-packages/eventlet/zipkin/patcher.py delete mode 100644 venv/lib/python3.6/site-packages/eventlet/zipkin/wsgi.py delete mode 100644 venv/lib/python3.6/site-packages/flask/__init__.py delete mode 100644 
venv/lib/python3.6/site-packages/flask/__main__.py delete mode 100644 venv/lib/python3.6/site-packages/flask/_compat.py delete mode 100644 venv/lib/python3.6/site-packages/flask/app.py delete mode 100644 venv/lib/python3.6/site-packages/flask/blueprints.py delete mode 100644 venv/lib/python3.6/site-packages/flask/cli.py delete mode 100644 venv/lib/python3.6/site-packages/flask/config.py delete mode 100644 venv/lib/python3.6/site-packages/flask/ctx.py delete mode 100644 venv/lib/python3.6/site-packages/flask/debughelpers.py delete mode 100644 venv/lib/python3.6/site-packages/flask/globals.py delete mode 100644 venv/lib/python3.6/site-packages/flask/helpers.py delete mode 100644 venv/lib/python3.6/site-packages/flask/json/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/flask/json/tag.py delete mode 100644 venv/lib/python3.6/site-packages/flask/logging.py delete mode 100644 venv/lib/python3.6/site-packages/flask/sessions.py delete mode 100644 venv/lib/python3.6/site-packages/flask/signals.py delete mode 100644 venv/lib/python3.6/site-packages/flask/templating.py delete mode 100644 venv/lib/python3.6/site-packages/flask/testing.py delete mode 100644 venv/lib/python3.6/site-packages/flask/views.py delete mode 100644 venv/lib/python3.6/site-packages/flask/wrappers.py delete mode 100644 venv/lib/python3.6/site-packages/flask_classy.py delete mode 100644 venv/lib/python3.6/site-packages/flask_socketio/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/flask_socketio/cli.py delete mode 100644 venv/lib/python3.6/site-packages/flask_socketio/namespace.py delete mode 100644 venv/lib/python3.6/site-packages/flask_socketio/test_client.py delete mode 100644 venv/lib/python3.6/site-packages/git/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/git/cmd.py delete mode 100644 venv/lib/python3.6/site-packages/git/compat.py delete mode 100644 venv/lib/python3.6/site-packages/git/config.py delete mode 100644 venv/lib/python3.6/site-packages/git/db.py delete mode 100644 venv/lib/python3.6/site-packages/git/diff.py delete mode 100644 venv/lib/python3.6/site-packages/git/exc.py delete mode 100644 venv/lib/python3.6/site-packages/git/index/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/git/index/base.py delete mode 100644 venv/lib/python3.6/site-packages/git/index/fun.py delete mode 100644 venv/lib/python3.6/site-packages/git/index/typ.py delete mode 100644 venv/lib/python3.6/site-packages/git/index/util.py delete mode 100644 venv/lib/python3.6/site-packages/git/objects/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/git/objects/base.py delete mode 100644 venv/lib/python3.6/site-packages/git/objects/blob.py delete mode 100644 venv/lib/python3.6/site-packages/git/objects/commit.py delete mode 100644 venv/lib/python3.6/site-packages/git/objects/fun.py delete mode 100644 venv/lib/python3.6/site-packages/git/objects/submodule/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/git/objects/submodule/base.py delete mode 100644 venv/lib/python3.6/site-packages/git/objects/submodule/root.py delete mode 100644 venv/lib/python3.6/site-packages/git/objects/submodule/util.py delete mode 100644 venv/lib/python3.6/site-packages/git/objects/tag.py delete mode 100644 venv/lib/python3.6/site-packages/git/objects/tree.py delete mode 100644 venv/lib/python3.6/site-packages/git/objects/util.py delete mode 100644 venv/lib/python3.6/site-packages/git/refs/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/git/refs/head.py delete mode 
100644 venv/lib/python3.6/site-packages/git/refs/log.py delete mode 100644 venv/lib/python3.6/site-packages/git/refs/reference.py delete mode 100644 venv/lib/python3.6/site-packages/git/refs/remote.py delete mode 100644 venv/lib/python3.6/site-packages/git/refs/symbolic.py delete mode 100644 venv/lib/python3.6/site-packages/git/refs/tag.py delete mode 100644 venv/lib/python3.6/site-packages/git/remote.py delete mode 100644 venv/lib/python3.6/site-packages/git/repo/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/git/repo/base.py delete mode 100644 venv/lib/python3.6/site-packages/git/repo/fun.py delete mode 100644 venv/lib/python3.6/site-packages/git/test/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/git/test/fixtures/blame delete mode 100644 venv/lib/python3.6/site-packages/git/test/fixtures/blame_binary delete mode 100644 venv/lib/python3.6/site-packages/git/test/fixtures/blame_complex_revision delete mode 100644 venv/lib/python3.6/site-packages/git/test/fixtures/blame_incremental delete mode 100644 venv/lib/python3.6/site-packages/git/test/fixtures/blame_incremental_2.11.1_plus delete mode 100644 venv/lib/python3.6/site-packages/git/test/fixtures/cat_file.py delete mode 100644 venv/lib/python3.6/site-packages/git/test/fixtures/cat_file_blob delete mode 100644 venv/lib/python3.6/site-packages/git/test/fixtures/cat_file_blob_nl delete mode 100644 venv/lib/python3.6/site-packages/git/test/fixtures/cat_file_blob_size delete mode 100644 venv/lib/python3.6/site-packages/git/test/fixtures/commit_invalid_data delete mode 100644 venv/lib/python3.6/site-packages/git/test/fixtures/commit_with_gpgsig delete mode 100644 venv/lib/python3.6/site-packages/git/test/fixtures/diff_2 delete mode 100644 venv/lib/python3.6/site-packages/git/test/fixtures/diff_2f delete mode 100644 venv/lib/python3.6/site-packages/git/test/fixtures/diff_abbrev-40_full-index_M_raw_no-color delete mode 100644 venv/lib/python3.6/site-packages/git/test/fixtures/diff_change_in_type delete mode 100644 venv/lib/python3.6/site-packages/git/test/fixtures/diff_change_in_type_raw delete mode 100644 venv/lib/python3.6/site-packages/git/test/fixtures/diff_f delete mode 100644 venv/lib/python3.6/site-packages/git/test/fixtures/diff_file_with_spaces delete mode 100644 venv/lib/python3.6/site-packages/git/test/fixtures/diff_i delete mode 100644 venv/lib/python3.6/site-packages/git/test/fixtures/diff_index_patch delete mode 100644 venv/lib/python3.6/site-packages/git/test/fixtures/diff_index_raw delete mode 100644 venv/lib/python3.6/site-packages/git/test/fixtures/diff_initial delete mode 100644 venv/lib/python3.6/site-packages/git/test/fixtures/diff_mode_only delete mode 100644 venv/lib/python3.6/site-packages/git/test/fixtures/diff_new_mode delete mode 100644 venv/lib/python3.6/site-packages/git/test/fixtures/diff_numstat delete mode 100644 venv/lib/python3.6/site-packages/git/test/fixtures/diff_p delete mode 100644 venv/lib/python3.6/site-packages/git/test/fixtures/diff_patch_binary delete mode 100644 venv/lib/python3.6/site-packages/git/test/fixtures/diff_patch_unsafe_paths delete mode 100644 venv/lib/python3.6/site-packages/git/test/fixtures/diff_raw_binary delete mode 100644 venv/lib/python3.6/site-packages/git/test/fixtures/diff_rename delete mode 100644 venv/lib/python3.6/site-packages/git/test/fixtures/diff_rename_raw delete mode 100644 venv/lib/python3.6/site-packages/git/test/fixtures/diff_tree_numstat_root delete mode 100644 
venv/lib/python3.6/site-packages/git/test/fixtures/for_each_ref_with_path_component delete mode 100644 venv/lib/python3.6/site-packages/git/test/fixtures/git_config delete mode 100644 venv/lib/python3.6/site-packages/git/test/fixtures/git_config-inc.cfg delete mode 100644 venv/lib/python3.6/site-packages/git/test/fixtures/git_config_global delete mode 100644 venv/lib/python3.6/site-packages/git/test/fixtures/git_config_multiple delete mode 100644 venv/lib/python3.6/site-packages/git/test/fixtures/git_config_with_comments delete mode 100644 venv/lib/python3.6/site-packages/git/test/fixtures/git_config_with_empty_value delete mode 100644 venv/lib/python3.6/site-packages/git/test/fixtures/git_file delete mode 100644 venv/lib/python3.6/site-packages/git/test/fixtures/index delete mode 100644 venv/lib/python3.6/site-packages/git/test/fixtures/index_merge delete mode 100644 venv/lib/python3.6/site-packages/git/test/fixtures/issue-301_stderr delete mode 100644 venv/lib/python3.6/site-packages/git/test/fixtures/ls_tree_a delete mode 100644 venv/lib/python3.6/site-packages/git/test/fixtures/ls_tree_b delete mode 100644 venv/lib/python3.6/site-packages/git/test/fixtures/ls_tree_commit delete mode 100644 venv/lib/python3.6/site-packages/git/test/fixtures/ls_tree_empty delete mode 100644 venv/lib/python3.6/site-packages/git/test/fixtures/reflog_HEAD delete mode 100644 venv/lib/python3.6/site-packages/git/test/fixtures/reflog_invalid_date delete mode 100644 venv/lib/python3.6/site-packages/git/test/fixtures/reflog_invalid_email delete mode 100644 venv/lib/python3.6/site-packages/git/test/fixtures/reflog_invalid_newsha delete mode 100644 venv/lib/python3.6/site-packages/git/test/fixtures/reflog_invalid_oldsha delete mode 100644 venv/lib/python3.6/site-packages/git/test/fixtures/reflog_invalid_sep delete mode 100644 venv/lib/python3.6/site-packages/git/test/fixtures/reflog_master delete mode 100644 venv/lib/python3.6/site-packages/git/test/fixtures/rev_list delete mode 100644 venv/lib/python3.6/site-packages/git/test/fixtures/rev_list_bisect_all delete mode 100644 venv/lib/python3.6/site-packages/git/test/fixtures/rev_list_commit_diffs delete mode 100644 venv/lib/python3.6/site-packages/git/test/fixtures/rev_list_commit_idabbrev delete mode 100644 venv/lib/python3.6/site-packages/git/test/fixtures/rev_list_commit_stats delete mode 100644 venv/lib/python3.6/site-packages/git/test/fixtures/rev_list_count delete mode 100644 venv/lib/python3.6/site-packages/git/test/fixtures/rev_list_delta_a delete mode 100644 venv/lib/python3.6/site-packages/git/test/fixtures/rev_list_delta_b delete mode 100644 venv/lib/python3.6/site-packages/git/test/fixtures/rev_list_single delete mode 100644 venv/lib/python3.6/site-packages/git/test/fixtures/rev_parse delete mode 100644 venv/lib/python3.6/site-packages/git/test/fixtures/show_empty_commit delete mode 100644 venv/lib/python3.6/site-packages/git/test/fixtures/uncommon_branch_prefix_FETCH_HEAD delete mode 100644 venv/lib/python3.6/site-packages/git/test/fixtures/uncommon_branch_prefix_stderr delete mode 100644 venv/lib/python3.6/site-packages/git/test/lib/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/git/test/lib/asserts.py delete mode 100644 venv/lib/python3.6/site-packages/git/test/lib/helper.py delete mode 100644 venv/lib/python3.6/site-packages/git/test/performance/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/git/test/performance/lib.py delete mode 100644 venv/lib/python3.6/site-packages/git/test/performance/test_commit.py delete 
mode 100644 venv/lib/python3.6/site-packages/git/test/performance/test_odb.py delete mode 100644 venv/lib/python3.6/site-packages/git/test/performance/test_streams.py delete mode 100644 venv/lib/python3.6/site-packages/git/test/test_actor.py delete mode 100644 venv/lib/python3.6/site-packages/git/test/test_base.py delete mode 100644 venv/lib/python3.6/site-packages/git/test/test_blob.py delete mode 100644 venv/lib/python3.6/site-packages/git/test/test_commit.py delete mode 100644 venv/lib/python3.6/site-packages/git/test/test_config.py delete mode 100644 venv/lib/python3.6/site-packages/git/test/test_db.py delete mode 100644 venv/lib/python3.6/site-packages/git/test/test_diff.py delete mode 100644 venv/lib/python3.6/site-packages/git/test/test_docs.py delete mode 100644 venv/lib/python3.6/site-packages/git/test/test_exc.py delete mode 100644 venv/lib/python3.6/site-packages/git/test/test_fun.py delete mode 100644 venv/lib/python3.6/site-packages/git/test/test_git.py delete mode 100644 venv/lib/python3.6/site-packages/git/test/test_index.py delete mode 100644 venv/lib/python3.6/site-packages/git/test/test_reflog.py delete mode 100644 venv/lib/python3.6/site-packages/git/test/test_refs.py delete mode 100644 venv/lib/python3.6/site-packages/git/test/test_remote.py delete mode 100644 venv/lib/python3.6/site-packages/git/test/test_repo.py delete mode 100644 venv/lib/python3.6/site-packages/git/test/test_stats.py delete mode 100644 venv/lib/python3.6/site-packages/git/test/test_submodule.py delete mode 100644 venv/lib/python3.6/site-packages/git/test/test_tree.py delete mode 100644 venv/lib/python3.6/site-packages/git/test/test_util.py delete mode 100644 venv/lib/python3.6/site-packages/git/util.py delete mode 100644 venv/lib/python3.6/site-packages/gitdb/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/gitdb/base.py delete mode 100644 venv/lib/python3.6/site-packages/gitdb/const.py delete mode 100644 venv/lib/python3.6/site-packages/gitdb/db/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/gitdb/db/base.py delete mode 100644 venv/lib/python3.6/site-packages/gitdb/db/git.py delete mode 100644 venv/lib/python3.6/site-packages/gitdb/db/loose.py delete mode 100644 venv/lib/python3.6/site-packages/gitdb/db/mem.py delete mode 100644 venv/lib/python3.6/site-packages/gitdb/db/pack.py delete mode 100644 venv/lib/python3.6/site-packages/gitdb/db/ref.py delete mode 100644 venv/lib/python3.6/site-packages/gitdb/exc.py delete mode 100644 venv/lib/python3.6/site-packages/gitdb/fun.py delete mode 100644 venv/lib/python3.6/site-packages/gitdb/pack.py delete mode 100644 venv/lib/python3.6/site-packages/gitdb/stream.py delete mode 100644 venv/lib/python3.6/site-packages/gitdb/test/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/gitdb/test/lib.py delete mode 100644 venv/lib/python3.6/site-packages/gitdb/test/test_base.py delete mode 100644 venv/lib/python3.6/site-packages/gitdb/test/test_example.py delete mode 100644 venv/lib/python3.6/site-packages/gitdb/test/test_pack.py delete mode 100644 venv/lib/python3.6/site-packages/gitdb/test/test_stream.py delete mode 100644 venv/lib/python3.6/site-packages/gitdb/test/test_util.py delete mode 100644 venv/lib/python3.6/site-packages/gitdb/typ.py delete mode 100644 venv/lib/python3.6/site-packages/gitdb/util.py delete mode 100644 venv/lib/python3.6/site-packages/gitdb/utils/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/gitdb/utils/compat.py delete mode 100644 
venv/lib/python3.6/site-packages/gitdb/utils/encoding.py delete mode 100644 venv/lib/python3.6/site-packages/gitdb2-2.0.5.dist-info/INSTALLER delete mode 100644 venv/lib/python3.6/site-packages/gitdb2-2.0.5.dist-info/METADATA delete mode 100644 venv/lib/python3.6/site-packages/gitdb2-2.0.5.dist-info/RECORD delete mode 100644 venv/lib/python3.6/site-packages/gitdb2-2.0.5.dist-info/WHEEL delete mode 100644 venv/lib/python3.6/site-packages/gitdb2-2.0.5.dist-info/top_level.txt delete mode 100644 venv/lib/python3.6/site-packages/greenlet-0.4.15-py3.6.egg-info/PKG-INFO delete mode 100644 venv/lib/python3.6/site-packages/greenlet-0.4.15-py3.6.egg-info/SOURCES.txt delete mode 100644 venv/lib/python3.6/site-packages/greenlet-0.4.15-py3.6.egg-info/dependency_links.txt delete mode 100644 venv/lib/python3.6/site-packages/greenlet-0.4.15-py3.6.egg-info/installed-files.txt delete mode 100644 venv/lib/python3.6/site-packages/greenlet-0.4.15-py3.6.egg-info/not-zip-safe delete mode 100644 venv/lib/python3.6/site-packages/greenlet-0.4.15-py3.6.egg-info/top_level.txt delete mode 100755 venv/lib/python3.6/site-packages/greenlet.cpython-36m-darwin.so delete mode 100644 venv/lib/python3.6/site-packages/httplib2-0.13.1.dist-info/INSTALLER delete mode 100644 venv/lib/python3.6/site-packages/httplib2-0.13.1.dist-info/LICENSE delete mode 100644 venv/lib/python3.6/site-packages/httplib2-0.13.1.dist-info/METADATA delete mode 100644 venv/lib/python3.6/site-packages/httplib2-0.13.1.dist-info/RECORD delete mode 100644 venv/lib/python3.6/site-packages/httplib2-0.13.1.dist-info/WHEEL delete mode 100644 venv/lib/python3.6/site-packages/httplib2-0.13.1.dist-info/top_level.txt delete mode 100644 venv/lib/python3.6/site-packages/httplib2/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/httplib2/cacerts.txt delete mode 100644 venv/lib/python3.6/site-packages/httplib2/certs.py delete mode 100644 venv/lib/python3.6/site-packages/httplib2/iri2uri.py delete mode 100644 venv/lib/python3.6/site-packages/httplib2/socks.py delete mode 100644 venv/lib/python3.6/site-packages/idna-2.8.dist-info/INSTALLER delete mode 100644 venv/lib/python3.6/site-packages/idna-2.8.dist-info/LICENSE.rst delete mode 100644 venv/lib/python3.6/site-packages/idna-2.8.dist-info/METADATA delete mode 100644 venv/lib/python3.6/site-packages/idna-2.8.dist-info/RECORD delete mode 100644 venv/lib/python3.6/site-packages/idna-2.8.dist-info/WHEEL delete mode 100644 venv/lib/python3.6/site-packages/idna-2.8.dist-info/top_level.txt delete mode 100644 venv/lib/python3.6/site-packages/idna/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/idna/codec.py delete mode 100644 venv/lib/python3.6/site-packages/idna/compat.py delete mode 100644 venv/lib/python3.6/site-packages/idna/core.py delete mode 100644 venv/lib/python3.6/site-packages/idna/idnadata.py delete mode 100644 venv/lib/python3.6/site-packages/idna/intranges.py delete mode 100644 venv/lib/python3.6/site-packages/idna/package_data.py delete mode 100644 venv/lib/python3.6/site-packages/idna/uts46data.py delete mode 100644 venv/lib/python3.6/site-packages/itsdangerous-1.1.0.dist-info/INSTALLER delete mode 100644 venv/lib/python3.6/site-packages/itsdangerous-1.1.0.dist-info/LICENSE.rst delete mode 100644 venv/lib/python3.6/site-packages/itsdangerous-1.1.0.dist-info/METADATA delete mode 100644 venv/lib/python3.6/site-packages/itsdangerous-1.1.0.dist-info/RECORD delete mode 100644 venv/lib/python3.6/site-packages/itsdangerous-1.1.0.dist-info/WHEEL delete mode 100644 
venv/lib/python3.6/site-packages/itsdangerous-1.1.0.dist-info/top_level.txt delete mode 100644 venv/lib/python3.6/site-packages/itsdangerous/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/itsdangerous/_compat.py delete mode 100644 venv/lib/python3.6/site-packages/itsdangerous/_json.py delete mode 100644 venv/lib/python3.6/site-packages/itsdangerous/encoding.py delete mode 100644 venv/lib/python3.6/site-packages/itsdangerous/exc.py delete mode 100644 venv/lib/python3.6/site-packages/itsdangerous/jws.py delete mode 100644 venv/lib/python3.6/site-packages/itsdangerous/serializer.py delete mode 100644 venv/lib/python3.6/site-packages/itsdangerous/signer.py delete mode 100644 venv/lib/python3.6/site-packages/itsdangerous/timed.py delete mode 100644 venv/lib/python3.6/site-packages/itsdangerous/url_safe.py delete mode 100644 venv/lib/python3.6/site-packages/jinja2/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/jinja2/_compat.py delete mode 100644 venv/lib/python3.6/site-packages/jinja2/_identifier.py delete mode 100644 venv/lib/python3.6/site-packages/jinja2/asyncfilters.py delete mode 100644 venv/lib/python3.6/site-packages/jinja2/asyncsupport.py delete mode 100644 venv/lib/python3.6/site-packages/jinja2/bccache.py delete mode 100644 venv/lib/python3.6/site-packages/jinja2/compiler.py delete mode 100644 venv/lib/python3.6/site-packages/jinja2/constants.py delete mode 100644 venv/lib/python3.6/site-packages/jinja2/debug.py delete mode 100644 venv/lib/python3.6/site-packages/jinja2/defaults.py delete mode 100644 venv/lib/python3.6/site-packages/jinja2/environment.py delete mode 100644 venv/lib/python3.6/site-packages/jinja2/exceptions.py delete mode 100644 venv/lib/python3.6/site-packages/jinja2/ext.py delete mode 100644 venv/lib/python3.6/site-packages/jinja2/filters.py delete mode 100644 venv/lib/python3.6/site-packages/jinja2/idtracking.py delete mode 100644 venv/lib/python3.6/site-packages/jinja2/lexer.py delete mode 100644 venv/lib/python3.6/site-packages/jinja2/loaders.py delete mode 100644 venv/lib/python3.6/site-packages/jinja2/meta.py delete mode 100644 venv/lib/python3.6/site-packages/jinja2/nativetypes.py delete mode 100644 venv/lib/python3.6/site-packages/jinja2/nodes.py delete mode 100644 venv/lib/python3.6/site-packages/jinja2/optimizer.py delete mode 100644 venv/lib/python3.6/site-packages/jinja2/parser.py delete mode 100644 venv/lib/python3.6/site-packages/jinja2/runtime.py delete mode 100644 venv/lib/python3.6/site-packages/jinja2/sandbox.py delete mode 100644 venv/lib/python3.6/site-packages/jinja2/tests.py delete mode 100644 venv/lib/python3.6/site-packages/jinja2/utils.py delete mode 100644 venv/lib/python3.6/site-packages/jinja2/visitor.py delete mode 100644 venv/lib/python3.6/site-packages/markupsafe/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/markupsafe/_compat.py delete mode 100644 venv/lib/python3.6/site-packages/markupsafe/_constants.py delete mode 100644 venv/lib/python3.6/site-packages/markupsafe/_native.py delete mode 100644 venv/lib/python3.6/site-packages/markupsafe/_speedups.c delete mode 100644 venv/lib/python3.6/site-packages/markupsafe/_speedups.cpython-36m-darwin.so delete mode 100644 venv/lib/python3.6/site-packages/mimeparse.py delete mode 100644 venv/lib/python3.6/site-packages/monotonic-1.5.dist-info/DESCRIPTION.rst delete mode 100644 venv/lib/python3.6/site-packages/monotonic-1.5.dist-info/INSTALLER delete mode 100644 venv/lib/python3.6/site-packages/monotonic-1.5.dist-info/METADATA delete mode 100644 
venv/lib/python3.6/site-packages/monotonic-1.5.dist-info/RECORD delete mode 100644 venv/lib/python3.6/site-packages/monotonic-1.5.dist-info/WHEEL delete mode 100644 venv/lib/python3.6/site-packages/monotonic-1.5.dist-info/metadata.json delete mode 100644 venv/lib/python3.6/site-packages/monotonic-1.5.dist-info/top_level.txt delete mode 100644 venv/lib/python3.6/site-packages/monotonic.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/EGG-INFO/PKG-INFO delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/EGG-INFO/SOURCES.txt delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/EGG-INFO/dependency_links.txt delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/EGG-INFO/entry_points.txt delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/EGG-INFO/not-zip-safe delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/EGG-INFO/requires.txt delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/EGG-INFO/top_level.txt delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/__main__.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/basecommand.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/baseparser.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/build_env.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/cache.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/cmdoptions.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/commands/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/commands/check.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/commands/completion.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/commands/configuration.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/commands/download.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/commands/freeze.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/commands/hash.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/commands/help.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/commands/install.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/commands/list.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/commands/search.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/commands/show.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/commands/uninstall.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/commands/wheel.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/compat.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/configuration.py delete mode 100644 
venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/download.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/exceptions.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/index.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/locations.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/models/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/models/index.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/operations/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/operations/check.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/operations/freeze.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/operations/prepare.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/pep425tags.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/req/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/req/req_file.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/req/req_install.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/req/req_set.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/req/req_uninstall.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/resolve.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/status_codes.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/utils/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/utils/appdirs.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/utils/deprecation.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/utils/encoding.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/utils/filesystem.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/utils/glibc.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/utils/hashes.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/utils/logging.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/utils/misc.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/utils/outdated.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/utils/packaging.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/utils/setuptools_build.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/utils/temp_dir.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/utils/typing.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/utils/ui.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/vcs/__init__.py delete mode 100644 
venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/vcs/bazaar.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/vcs/git.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/vcs/mercurial.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/vcs/subversion.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_internal/wheel.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/appdirs.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/cachecontrol/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/cachecontrol/_cmd.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/cachecontrol/adapter.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/cachecontrol/cache.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/cachecontrol/caches/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/cachecontrol/caches/file_cache.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/cachecontrol/caches/redis_cache.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/cachecontrol/compat.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/cachecontrol/controller.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/cachecontrol/filewrapper.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/cachecontrol/heuristics.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/cachecontrol/serialize.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/cachecontrol/wrapper.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/certifi/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/certifi/__main__.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/certifi/cacert.pem delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/certifi/core.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/chardet/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/chardet/big5freq.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/chardet/big5prober.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/chardet/chardistribution.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/chardet/charsetgroupprober.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/chardet/charsetprober.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/chardet/cli/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/chardet/cli/chardetect.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/chardet/codingstatemachine.py delete mode 100644 
venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/chardet/compat.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/chardet/cp949prober.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/chardet/enums.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/chardet/escprober.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/chardet/escsm.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/chardet/eucjpprober.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/chardet/euckrfreq.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/chardet/euckrprober.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/chardet/euctwfreq.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/chardet/euctwprober.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/chardet/gb2312freq.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/chardet/gb2312prober.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/chardet/hebrewprober.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/chardet/jisfreq.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/chardet/jpcntx.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/chardet/langbulgarianmodel.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/chardet/langcyrillicmodel.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/chardet/langgreekmodel.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/chardet/langhebrewmodel.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/chardet/langhungarianmodel.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/chardet/langthaimodel.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/chardet/langturkishmodel.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/chardet/latin1prober.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/chardet/mbcharsetprober.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/chardet/mbcsgroupprober.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/chardet/mbcssm.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/chardet/sbcharsetprober.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/chardet/sbcsgroupprober.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/chardet/sjisprober.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/chardet/universaldetector.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/chardet/utf8prober.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/chardet/version.py delete mode 100644 
venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/colorama/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/colorama/ansi.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/colorama/ansitowin32.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/colorama/initialise.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/colorama/win32.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/colorama/winterm.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/distlib/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/distlib/_backport/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/distlib/_backport/misc.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/distlib/_backport/shutil.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/distlib/_backport/sysconfig.cfg delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/distlib/_backport/sysconfig.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/distlib/_backport/tarfile.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/distlib/compat.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/distlib/database.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/distlib/index.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/distlib/locators.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/distlib/manifest.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/distlib/markers.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/distlib/metadata.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/distlib/resources.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/distlib/scripts.py delete mode 100755 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/distlib/t32.exe delete mode 100755 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/distlib/t64.exe delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/distlib/util.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/distlib/version.py delete mode 100755 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/distlib/w32.exe delete mode 100755 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/distlib/w64.exe delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/distlib/wheel.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/distro.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/html5lib/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/html5lib/_ihatexml.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/html5lib/_inputstream.py delete mode 100644 
venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/html5lib/_tokenizer.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/html5lib/_trie/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/html5lib/_trie/_base.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/html5lib/_trie/datrie.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/html5lib/_trie/py.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/html5lib/_utils.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/html5lib/constants.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/html5lib/filters/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/html5lib/filters/alphabeticalattributes.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/html5lib/filters/base.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/html5lib/filters/inject_meta_charset.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/html5lib/filters/lint.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/html5lib/filters/optionaltags.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/html5lib/filters/sanitizer.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/html5lib/filters/whitespace.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/html5lib/html5parser.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/html5lib/serializer.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/html5lib/treeadapters/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/html5lib/treeadapters/genshi.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/html5lib/treeadapters/sax.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/html5lib/treebuilders/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/html5lib/treebuilders/base.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/html5lib/treebuilders/dom.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/html5lib/treebuilders/etree.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/html5lib/treebuilders/etree_lxml.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/html5lib/treewalkers/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/html5lib/treewalkers/base.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/html5lib/treewalkers/dom.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/html5lib/treewalkers/etree.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/html5lib/treewalkers/etree_lxml.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/html5lib/treewalkers/genshi.py delete mode 100644 
venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/idna/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/idna/codec.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/idna/compat.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/idna/core.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/idna/idnadata.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/idna/intranges.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/idna/package_data.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/idna/uts46data.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/ipaddress.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/lockfile/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/lockfile/linklockfile.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/lockfile/mkdirlockfile.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/lockfile/pidlockfile.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/lockfile/sqlitelockfile.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/lockfile/symlinklockfile.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/msgpack/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/msgpack/_version.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/msgpack/exceptions.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/msgpack/fallback.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/packaging/__about__.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/packaging/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/packaging/_compat.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/packaging/_structures.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/packaging/markers.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/packaging/requirements.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/packaging/specifiers.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/packaging/utils.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/packaging/version.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/pkg_resources/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/pkg_resources/py31compat.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/progress/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/progress/bar.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/progress/counter.py delete mode 100644 
venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/progress/helpers.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/progress/spinner.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/pyparsing.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/pytoml/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/pytoml/core.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/pytoml/parser.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/pytoml/writer.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/requests/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/requests/__version__.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/requests/_internal_utils.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/requests/adapters.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/requests/api.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/requests/auth.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/requests/certs.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/requests/compat.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/requests/cookies.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/requests/exceptions.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/requests/help.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/requests/hooks.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/requests/models.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/requests/packages.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/requests/sessions.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/requests/status_codes.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/requests/structures.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/requests/utils.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/retrying.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/six.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/urllib3/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/urllib3/_collections.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/urllib3/connection.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/urllib3/connectionpool.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/urllib3/contrib/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/urllib3/contrib/_securetransport/__init__.py delete mode 100644 
venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/urllib3/contrib/_securetransport/bindings.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/urllib3/contrib/_securetransport/low_level.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/urllib3/contrib/appengine.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/urllib3/contrib/ntlmpool.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/urllib3/contrib/pyopenssl.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/urllib3/contrib/securetransport.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/urllib3/contrib/socks.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/urllib3/exceptions.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/urllib3/fields.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/urllib3/filepost.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/urllib3/packages/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/urllib3/packages/backports/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/urllib3/packages/backports/makefile.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/urllib3/packages/ordered_dict.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/urllib3/packages/six.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/urllib3/packages/ssl_match_hostname/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/urllib3/packages/ssl_match_hostname/_implementation.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/urllib3/poolmanager.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/urllib3/request.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/urllib3/response.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/urllib3/util/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/urllib3/util/connection.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/urllib3/util/request.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/urllib3/util/response.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/urllib3/util/retry.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/urllib3/util/selectors.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/urllib3/util/ssl_.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/urllib3/util/timeout.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/urllib3/util/url.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/urllib3/util/wait.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/webencodings/__init__.py delete mode 100644 
venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/webencodings/labels.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/webencodings/mklabels.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/webencodings/tests.py delete mode 100644 venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/webencodings/x_user_defined.py delete mode 100644 venv/lib/python3.6/site-packages/pkg_resources/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/pkg_resources/_vendor/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/pkg_resources/_vendor/appdirs.py delete mode 100644 venv/lib/python3.6/site-packages/pkg_resources/_vendor/packaging/__about__.py delete mode 100644 venv/lib/python3.6/site-packages/pkg_resources/_vendor/packaging/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/pkg_resources/_vendor/packaging/_compat.py delete mode 100644 venv/lib/python3.6/site-packages/pkg_resources/_vendor/packaging/_structures.py delete mode 100644 venv/lib/python3.6/site-packages/pkg_resources/_vendor/packaging/markers.py delete mode 100644 venv/lib/python3.6/site-packages/pkg_resources/_vendor/packaging/requirements.py delete mode 100644 venv/lib/python3.6/site-packages/pkg_resources/_vendor/packaging/specifiers.py delete mode 100644 venv/lib/python3.6/site-packages/pkg_resources/_vendor/packaging/utils.py delete mode 100644 venv/lib/python3.6/site-packages/pkg_resources/_vendor/packaging/version.py delete mode 100644 venv/lib/python3.6/site-packages/pkg_resources/_vendor/pyparsing.py delete mode 100644 venv/lib/python3.6/site-packages/pkg_resources/_vendor/six.py delete mode 100644 venv/lib/python3.6/site-packages/pkg_resources/extern/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/pkg_resources/py31compat.py delete mode 100644 venv/lib/python3.6/site-packages/python_dateutil-2.8.0.dist-info/INSTALLER delete mode 100644 venv/lib/python3.6/site-packages/python_dateutil-2.8.0.dist-info/LICENSE delete mode 100644 venv/lib/python3.6/site-packages/python_dateutil-2.8.0.dist-info/METADATA delete mode 100644 venv/lib/python3.6/site-packages/python_dateutil-2.8.0.dist-info/RECORD delete mode 100644 venv/lib/python3.6/site-packages/python_dateutil-2.8.0.dist-info/WHEEL delete mode 100644 venv/lib/python3.6/site-packages/python_dateutil-2.8.0.dist-info/top_level.txt delete mode 100644 venv/lib/python3.6/site-packages/python_dateutil-2.8.0.dist-info/zip-safe delete mode 100644 venv/lib/python3.6/site-packages/python_engineio-3.9.3.dist-info/INSTALLER delete mode 100644 venv/lib/python3.6/site-packages/python_engineio-3.9.3.dist-info/LICENSE delete mode 100644 venv/lib/python3.6/site-packages/python_engineio-3.9.3.dist-info/METADATA delete mode 100644 venv/lib/python3.6/site-packages/python_engineio-3.9.3.dist-info/RECORD delete mode 100644 venv/lib/python3.6/site-packages/python_engineio-3.9.3.dist-info/WHEEL delete mode 100644 venv/lib/python3.6/site-packages/python_engineio-3.9.3.dist-info/top_level.txt delete mode 100644 venv/lib/python3.6/site-packages/python_mimeparse-1.6.0.dist-info/DESCRIPTION.rst delete mode 100644 venv/lib/python3.6/site-packages/python_mimeparse-1.6.0.dist-info/INSTALLER delete mode 100644 venv/lib/python3.6/site-packages/python_mimeparse-1.6.0.dist-info/METADATA delete mode 100644 venv/lib/python3.6/site-packages/python_mimeparse-1.6.0.dist-info/RECORD delete mode 100644 venv/lib/python3.6/site-packages/python_mimeparse-1.6.0.dist-info/WHEEL 
delete mode 100644 venv/lib/python3.6/site-packages/python_mimeparse-1.6.0.dist-info/metadata.json delete mode 100644 venv/lib/python3.6/site-packages/python_mimeparse-1.6.0.dist-info/top_level.txt delete mode 100644 venv/lib/python3.6/site-packages/python_socketio-4.3.1.dist-info/INSTALLER delete mode 100644 venv/lib/python3.6/site-packages/python_socketio-4.3.1.dist-info/LICENSE delete mode 100644 venv/lib/python3.6/site-packages/python_socketio-4.3.1.dist-info/METADATA delete mode 100644 venv/lib/python3.6/site-packages/python_socketio-4.3.1.dist-info/RECORD delete mode 100644 venv/lib/python3.6/site-packages/python_socketio-4.3.1.dist-info/WHEEL delete mode 100644 venv/lib/python3.6/site-packages/python_socketio-4.3.1.dist-info/top_level.txt delete mode 100644 venv/lib/python3.6/site-packages/requests-2.22.0.dist-info/INSTALLER delete mode 100644 venv/lib/python3.6/site-packages/requests-2.22.0.dist-info/LICENSE delete mode 100644 venv/lib/python3.6/site-packages/requests-2.22.0.dist-info/METADATA delete mode 100644 venv/lib/python3.6/site-packages/requests-2.22.0.dist-info/RECORD delete mode 100644 venv/lib/python3.6/site-packages/requests-2.22.0.dist-info/WHEEL delete mode 100644 venv/lib/python3.6/site-packages/requests-2.22.0.dist-info/top_level.txt delete mode 100644 venv/lib/python3.6/site-packages/requests/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/requests/__version__.py delete mode 100644 venv/lib/python3.6/site-packages/requests/_internal_utils.py delete mode 100644 venv/lib/python3.6/site-packages/requests/adapters.py delete mode 100644 venv/lib/python3.6/site-packages/requests/api.py delete mode 100644 venv/lib/python3.6/site-packages/requests/auth.py delete mode 100644 venv/lib/python3.6/site-packages/requests/certs.py delete mode 100644 venv/lib/python3.6/site-packages/requests/compat.py delete mode 100644 venv/lib/python3.6/site-packages/requests/cookies.py delete mode 100644 venv/lib/python3.6/site-packages/requests/exceptions.py delete mode 100644 venv/lib/python3.6/site-packages/requests/help.py delete mode 100644 venv/lib/python3.6/site-packages/requests/hooks.py delete mode 100644 venv/lib/python3.6/site-packages/requests/models.py delete mode 100644 venv/lib/python3.6/site-packages/requests/packages.py delete mode 100644 venv/lib/python3.6/site-packages/requests/sessions.py delete mode 100644 venv/lib/python3.6/site-packages/requests/status_codes.py delete mode 100644 venv/lib/python3.6/site-packages/requests/structures.py delete mode 100644 venv/lib/python3.6/site-packages/requests/utils.py delete mode 100644 venv/lib/python3.6/site-packages/setuptools-41.1.0.dist-info/INSTALLER delete mode 100644 venv/lib/python3.6/site-packages/setuptools-41.1.0.dist-info/LICENSE delete mode 100644 venv/lib/python3.6/site-packages/setuptools-41.1.0.dist-info/METADATA delete mode 100644 venv/lib/python3.6/site-packages/setuptools-41.1.0.dist-info/RECORD delete mode 100644 venv/lib/python3.6/site-packages/setuptools-41.1.0.dist-info/WHEEL delete mode 100644 venv/lib/python3.6/site-packages/setuptools-41.1.0.dist-info/dependency_links.txt delete mode 100644 venv/lib/python3.6/site-packages/setuptools-41.1.0.dist-info/entry_points.txt delete mode 100644 venv/lib/python3.6/site-packages/setuptools-41.1.0.dist-info/top_level.txt delete mode 100644 venv/lib/python3.6/site-packages/setuptools-41.1.0.dist-info/zip-safe delete mode 100644 venv/lib/python3.6/site-packages/setuptools.pth delete mode 100644 venv/lib/python3.6/site-packages/setuptools/__init__.py delete 
mode 100644 venv/lib/python3.6/site-packages/setuptools/_deprecation_warning.py delete mode 100644 venv/lib/python3.6/site-packages/setuptools/_vendor/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/setuptools/_vendor/packaging/__about__.py delete mode 100644 venv/lib/python3.6/site-packages/setuptools/_vendor/packaging/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/setuptools/_vendor/packaging/_compat.py delete mode 100644 venv/lib/python3.6/site-packages/setuptools/_vendor/packaging/_structures.py delete mode 100644 venv/lib/python3.6/site-packages/setuptools/_vendor/packaging/markers.py delete mode 100644 venv/lib/python3.6/site-packages/setuptools/_vendor/packaging/requirements.py delete mode 100644 venv/lib/python3.6/site-packages/setuptools/_vendor/packaging/specifiers.py delete mode 100644 venv/lib/python3.6/site-packages/setuptools/_vendor/packaging/utils.py delete mode 100644 venv/lib/python3.6/site-packages/setuptools/_vendor/packaging/version.py delete mode 100644 venv/lib/python3.6/site-packages/setuptools/_vendor/pyparsing.py delete mode 100644 venv/lib/python3.6/site-packages/setuptools/_vendor/six.py delete mode 100644 venv/lib/python3.6/site-packages/setuptools/archive_util.py delete mode 100644 venv/lib/python3.6/site-packages/setuptools/build_meta.py delete mode 100644 venv/lib/python3.6/site-packages/setuptools/cli-32.exe delete mode 100644 venv/lib/python3.6/site-packages/setuptools/cli-64.exe delete mode 100644 venv/lib/python3.6/site-packages/setuptools/cli.exe delete mode 100644 venv/lib/python3.6/site-packages/setuptools/command/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/setuptools/command/alias.py delete mode 100644 venv/lib/python3.6/site-packages/setuptools/command/bdist_egg.py delete mode 100644 venv/lib/python3.6/site-packages/setuptools/command/bdist_rpm.py delete mode 100644 venv/lib/python3.6/site-packages/setuptools/command/bdist_wininst.py delete mode 100644 venv/lib/python3.6/site-packages/setuptools/command/build_clib.py delete mode 100644 venv/lib/python3.6/site-packages/setuptools/command/build_ext.py delete mode 100644 venv/lib/python3.6/site-packages/setuptools/command/build_py.py delete mode 100644 venv/lib/python3.6/site-packages/setuptools/command/develop.py delete mode 100644 venv/lib/python3.6/site-packages/setuptools/command/dist_info.py delete mode 100644 venv/lib/python3.6/site-packages/setuptools/command/easy_install.py delete mode 100644 venv/lib/python3.6/site-packages/setuptools/command/egg_info.py delete mode 100644 venv/lib/python3.6/site-packages/setuptools/command/install.py delete mode 100644 venv/lib/python3.6/site-packages/setuptools/command/install_egg_info.py delete mode 100644 venv/lib/python3.6/site-packages/setuptools/command/install_lib.py delete mode 100644 venv/lib/python3.6/site-packages/setuptools/command/install_scripts.py delete mode 100644 venv/lib/python3.6/site-packages/setuptools/command/launcher manifest.xml delete mode 100644 venv/lib/python3.6/site-packages/setuptools/command/py36compat.py delete mode 100644 venv/lib/python3.6/site-packages/setuptools/command/register.py delete mode 100644 venv/lib/python3.6/site-packages/setuptools/command/rotate.py delete mode 100644 venv/lib/python3.6/site-packages/setuptools/command/saveopts.py delete mode 100644 venv/lib/python3.6/site-packages/setuptools/command/sdist.py delete mode 100644 venv/lib/python3.6/site-packages/setuptools/command/setopt.py delete mode 100644 
venv/lib/python3.6/site-packages/setuptools/command/test.py delete mode 100644 venv/lib/python3.6/site-packages/setuptools/command/upload.py delete mode 100644 venv/lib/python3.6/site-packages/setuptools/command/upload_docs.py delete mode 100644 venv/lib/python3.6/site-packages/setuptools/config.py delete mode 100644 venv/lib/python3.6/site-packages/setuptools/dep_util.py delete mode 100644 venv/lib/python3.6/site-packages/setuptools/depends.py delete mode 100644 venv/lib/python3.6/site-packages/setuptools/dist.py delete mode 100644 venv/lib/python3.6/site-packages/setuptools/extension.py delete mode 100644 venv/lib/python3.6/site-packages/setuptools/extern/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/setuptools/glibc.py delete mode 100644 venv/lib/python3.6/site-packages/setuptools/glob.py delete mode 100644 venv/lib/python3.6/site-packages/setuptools/gui-32.exe delete mode 100644 venv/lib/python3.6/site-packages/setuptools/gui-64.exe delete mode 100644 venv/lib/python3.6/site-packages/setuptools/gui.exe delete mode 100644 venv/lib/python3.6/site-packages/setuptools/launch.py delete mode 100644 venv/lib/python3.6/site-packages/setuptools/lib2to3_ex.py delete mode 100644 venv/lib/python3.6/site-packages/setuptools/monkey.py delete mode 100644 venv/lib/python3.6/site-packages/setuptools/msvc.py delete mode 100644 venv/lib/python3.6/site-packages/setuptools/namespaces.py delete mode 100644 venv/lib/python3.6/site-packages/setuptools/package_index.py delete mode 100644 venv/lib/python3.6/site-packages/setuptools/pep425tags.py delete mode 100644 venv/lib/python3.6/site-packages/setuptools/py27compat.py delete mode 100644 venv/lib/python3.6/site-packages/setuptools/py31compat.py delete mode 100644 venv/lib/python3.6/site-packages/setuptools/py33compat.py delete mode 100644 venv/lib/python3.6/site-packages/setuptools/sandbox.py delete mode 100644 venv/lib/python3.6/site-packages/setuptools/script (dev).tmpl delete mode 100644 venv/lib/python3.6/site-packages/setuptools/script.tmpl delete mode 100644 venv/lib/python3.6/site-packages/setuptools/site-patch.py delete mode 100644 venv/lib/python3.6/site-packages/setuptools/ssl_support.py delete mode 100644 venv/lib/python3.6/site-packages/setuptools/unicode_utils.py delete mode 100644 venv/lib/python3.6/site-packages/setuptools/version.py delete mode 100644 venv/lib/python3.6/site-packages/setuptools/wheel.py delete mode 100644 venv/lib/python3.6/site-packages/setuptools/windows_support.py delete mode 100644 venv/lib/python3.6/site-packages/six-1.12.0.dist-info/INSTALLER delete mode 100644 venv/lib/python3.6/site-packages/six-1.12.0.dist-info/LICENSE delete mode 100644 venv/lib/python3.6/site-packages/six-1.12.0.dist-info/METADATA delete mode 100644 venv/lib/python3.6/site-packages/six-1.12.0.dist-info/RECORD delete mode 100644 venv/lib/python3.6/site-packages/six-1.12.0.dist-info/WHEEL delete mode 100644 venv/lib/python3.6/site-packages/six-1.12.0.dist-info/top_level.txt delete mode 100644 venv/lib/python3.6/site-packages/six.py delete mode 100644 venv/lib/python3.6/site-packages/smmap/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/smmap/buf.py delete mode 100644 venv/lib/python3.6/site-packages/smmap/exc.py delete mode 100644 venv/lib/python3.6/site-packages/smmap/mman.py delete mode 100644 venv/lib/python3.6/site-packages/smmap/test/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/smmap/test/lib.py delete mode 100644 venv/lib/python3.6/site-packages/smmap/test/test_buf.py delete mode 100644 
venv/lib/python3.6/site-packages/smmap/test/test_mman.py delete mode 100644 venv/lib/python3.6/site-packages/smmap/test/test_tutorial.py delete mode 100644 venv/lib/python3.6/site-packages/smmap/test/test_util.py delete mode 100644 venv/lib/python3.6/site-packages/smmap/util.py delete mode 100644 venv/lib/python3.6/site-packages/smmap2-2.0.5.dist-info/INSTALLER delete mode 100644 venv/lib/python3.6/site-packages/smmap2-2.0.5.dist-info/METADATA delete mode 100644 venv/lib/python3.6/site-packages/smmap2-2.0.5.dist-info/RECORD delete mode 100644 venv/lib/python3.6/site-packages/smmap2-2.0.5.dist-info/WHEEL delete mode 100644 venv/lib/python3.6/site-packages/smmap2-2.0.5.dist-info/top_level.txt delete mode 100644 venv/lib/python3.6/site-packages/smmap2-2.0.5.dist-info/zip-safe delete mode 100644 venv/lib/python3.6/site-packages/socketio/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/socketio/asgi.py delete mode 100644 venv/lib/python3.6/site-packages/socketio/asyncio_aiopika_manager.py delete mode 100644 venv/lib/python3.6/site-packages/socketio/asyncio_client.py delete mode 100644 venv/lib/python3.6/site-packages/socketio/asyncio_manager.py delete mode 100644 venv/lib/python3.6/site-packages/socketio/asyncio_namespace.py delete mode 100644 venv/lib/python3.6/site-packages/socketio/asyncio_pubsub_manager.py delete mode 100644 venv/lib/python3.6/site-packages/socketio/asyncio_redis_manager.py delete mode 100644 venv/lib/python3.6/site-packages/socketio/asyncio_server.py delete mode 100644 venv/lib/python3.6/site-packages/socketio/base_manager.py delete mode 100644 venv/lib/python3.6/site-packages/socketio/client.py delete mode 100644 venv/lib/python3.6/site-packages/socketio/exceptions.py delete mode 100644 venv/lib/python3.6/site-packages/socketio/kafka_manager.py delete mode 100644 venv/lib/python3.6/site-packages/socketio/kombu_manager.py delete mode 100644 venv/lib/python3.6/site-packages/socketio/middleware.py delete mode 100644 venv/lib/python3.6/site-packages/socketio/namespace.py delete mode 100644 venv/lib/python3.6/site-packages/socketio/packet.py delete mode 100644 venv/lib/python3.6/site-packages/socketio/pubsub_manager.py delete mode 100644 venv/lib/python3.6/site-packages/socketio/redis_manager.py delete mode 100644 venv/lib/python3.6/site-packages/socketio/server.py delete mode 100644 venv/lib/python3.6/site-packages/socketio/tornado.py delete mode 100644 venv/lib/python3.6/site-packages/socketio/zmq_manager.py delete mode 100644 venv/lib/python3.6/site-packages/tests/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/tests/asyncio/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/tests/asyncio/test_async_aiohttp.py delete mode 100644 venv/lib/python3.6/site-packages/tests/asyncio/test_async_asgi.py delete mode 100644 venv/lib/python3.6/site-packages/tests/asyncio/test_async_tornado.py delete mode 100644 venv/lib/python3.6/site-packages/tests/asyncio/test_asyncio_client.py delete mode 100644 venv/lib/python3.6/site-packages/tests/asyncio/test_asyncio_server.py delete mode 100644 venv/lib/python3.6/site-packages/tests/asyncio/test_asyncio_socket.py delete mode 100644 venv/lib/python3.6/site-packages/tests/common/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/tests/common/test_async_eventlet.py delete mode 100644 venv/lib/python3.6/site-packages/tests/common/test_client.py delete mode 100644 venv/lib/python3.6/site-packages/tests/common/test_middleware.py delete mode 100644 
venv/lib/python3.6/site-packages/tests/common/test_packet.py delete mode 100644 venv/lib/python3.6/site-packages/tests/common/test_payload.py delete mode 100644 venv/lib/python3.6/site-packages/tests/common/test_server.py delete mode 100644 venv/lib/python3.6/site-packages/tests/common/test_socket.py delete mode 100644 venv/lib/python3.6/site-packages/tests/test_async_aiohttp.py delete mode 100644 venv/lib/python3.6/site-packages/tests/test_async_asgi.py delete mode 100644 venv/lib/python3.6/site-packages/tests/test_async_eventlet.py delete mode 100644 venv/lib/python3.6/site-packages/tests/test_async_tornado.py delete mode 100644 venv/lib/python3.6/site-packages/tests/test_asyncio_client.py delete mode 100644 venv/lib/python3.6/site-packages/tests/test_asyncio_server.py delete mode 100644 venv/lib/python3.6/site-packages/tests/test_asyncio_socket.py delete mode 100644 venv/lib/python3.6/site-packages/tests/test_client.py delete mode 100644 venv/lib/python3.6/site-packages/tests/test_middleware.py delete mode 100644 venv/lib/python3.6/site-packages/tests/test_packet.py delete mode 100644 venv/lib/python3.6/site-packages/tests/test_payload.py delete mode 100644 venv/lib/python3.6/site-packages/tests/test_server.py delete mode 100644 venv/lib/python3.6/site-packages/tests/test_socket.py delete mode 100644 venv/lib/python3.6/site-packages/urllib3-1.25.3.dist-info/INSTALLER delete mode 100644 venv/lib/python3.6/site-packages/urllib3-1.25.3.dist-info/LICENSE.txt delete mode 100644 venv/lib/python3.6/site-packages/urllib3-1.25.3.dist-info/METADATA delete mode 100644 venv/lib/python3.6/site-packages/urllib3-1.25.3.dist-info/RECORD delete mode 100644 venv/lib/python3.6/site-packages/urllib3-1.25.3.dist-info/WHEEL delete mode 100644 venv/lib/python3.6/site-packages/urllib3-1.25.3.dist-info/top_level.txt delete mode 100644 venv/lib/python3.6/site-packages/urllib3/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/urllib3/_collections.py delete mode 100644 venv/lib/python3.6/site-packages/urllib3/connection.py delete mode 100644 venv/lib/python3.6/site-packages/urllib3/connectionpool.py delete mode 100644 venv/lib/python3.6/site-packages/urllib3/contrib/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/urllib3/contrib/_appengine_environ.py delete mode 100644 venv/lib/python3.6/site-packages/urllib3/contrib/_securetransport/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/urllib3/contrib/_securetransport/bindings.py delete mode 100644 venv/lib/python3.6/site-packages/urllib3/contrib/_securetransport/low_level.py delete mode 100644 venv/lib/python3.6/site-packages/urllib3/contrib/appengine.py delete mode 100644 venv/lib/python3.6/site-packages/urllib3/contrib/ntlmpool.py delete mode 100644 venv/lib/python3.6/site-packages/urllib3/contrib/pyopenssl.py delete mode 100644 venv/lib/python3.6/site-packages/urllib3/contrib/securetransport.py delete mode 100644 venv/lib/python3.6/site-packages/urllib3/contrib/socks.py delete mode 100644 venv/lib/python3.6/site-packages/urllib3/exceptions.py delete mode 100644 venv/lib/python3.6/site-packages/urllib3/fields.py delete mode 100644 venv/lib/python3.6/site-packages/urllib3/filepost.py delete mode 100644 venv/lib/python3.6/site-packages/urllib3/packages/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/urllib3/packages/backports/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/urllib3/packages/backports/makefile.py delete mode 100644 
venv/lib/python3.6/site-packages/urllib3/packages/rfc3986/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/urllib3/packages/rfc3986/_mixin.py delete mode 100644 venv/lib/python3.6/site-packages/urllib3/packages/rfc3986/abnf_regexp.py delete mode 100644 venv/lib/python3.6/site-packages/urllib3/packages/rfc3986/api.py delete mode 100644 venv/lib/python3.6/site-packages/urllib3/packages/rfc3986/builder.py delete mode 100644 venv/lib/python3.6/site-packages/urllib3/packages/rfc3986/compat.py delete mode 100644 venv/lib/python3.6/site-packages/urllib3/packages/rfc3986/exceptions.py delete mode 100644 venv/lib/python3.6/site-packages/urllib3/packages/rfc3986/iri.py delete mode 100644 venv/lib/python3.6/site-packages/urllib3/packages/rfc3986/misc.py delete mode 100644 venv/lib/python3.6/site-packages/urllib3/packages/rfc3986/normalizers.py delete mode 100644 venv/lib/python3.6/site-packages/urllib3/packages/rfc3986/parseresult.py delete mode 100644 venv/lib/python3.6/site-packages/urllib3/packages/rfc3986/uri.py delete mode 100644 venv/lib/python3.6/site-packages/urllib3/packages/rfc3986/validators.py delete mode 100644 venv/lib/python3.6/site-packages/urllib3/packages/six.py delete mode 100644 venv/lib/python3.6/site-packages/urllib3/packages/ssl_match_hostname/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/urllib3/packages/ssl_match_hostname/_implementation.py delete mode 100644 venv/lib/python3.6/site-packages/urllib3/poolmanager.py delete mode 100644 venv/lib/python3.6/site-packages/urllib3/request.py delete mode 100644 venv/lib/python3.6/site-packages/urllib3/response.py delete mode 100644 venv/lib/python3.6/site-packages/urllib3/util/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/urllib3/util/connection.py delete mode 100644 venv/lib/python3.6/site-packages/urllib3/util/queue.py delete mode 100644 venv/lib/python3.6/site-packages/urllib3/util/request.py delete mode 100644 venv/lib/python3.6/site-packages/urllib3/util/response.py delete mode 100644 venv/lib/python3.6/site-packages/urllib3/util/retry.py delete mode 100644 venv/lib/python3.6/site-packages/urllib3/util/ssl_.py delete mode 100644 venv/lib/python3.6/site-packages/urllib3/util/timeout.py delete mode 100644 venv/lib/python3.6/site-packages/urllib3/util/url.py delete mode 100644 venv/lib/python3.6/site-packages/urllib3/util/wait.py delete mode 100644 venv/lib/python3.6/site-packages/werkzeug/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/werkzeug/_compat.py delete mode 100644 venv/lib/python3.6/site-packages/werkzeug/_internal.py delete mode 100644 venv/lib/python3.6/site-packages/werkzeug/_reloader.py delete mode 100644 venv/lib/python3.6/site-packages/werkzeug/contrib/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/werkzeug/contrib/atom.py delete mode 100644 venv/lib/python3.6/site-packages/werkzeug/contrib/cache.py delete mode 100644 venv/lib/python3.6/site-packages/werkzeug/contrib/fixers.py delete mode 100644 venv/lib/python3.6/site-packages/werkzeug/contrib/iterio.py delete mode 100644 venv/lib/python3.6/site-packages/werkzeug/contrib/lint.py delete mode 100644 venv/lib/python3.6/site-packages/werkzeug/contrib/profiler.py delete mode 100644 venv/lib/python3.6/site-packages/werkzeug/contrib/securecookie.py delete mode 100644 venv/lib/python3.6/site-packages/werkzeug/contrib/sessions.py delete mode 100644 venv/lib/python3.6/site-packages/werkzeug/contrib/wrappers.py delete mode 100644 venv/lib/python3.6/site-packages/werkzeug/datastructures.py 
delete mode 100644 venv/lib/python3.6/site-packages/werkzeug/debug/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/werkzeug/debug/console.py delete mode 100644 venv/lib/python3.6/site-packages/werkzeug/debug/repr.py delete mode 100644 venv/lib/python3.6/site-packages/werkzeug/debug/shared/FONT_LICENSE delete mode 100644 venv/lib/python3.6/site-packages/werkzeug/debug/shared/console.png delete mode 100644 venv/lib/python3.6/site-packages/werkzeug/debug/shared/less.png delete mode 100644 venv/lib/python3.6/site-packages/werkzeug/debug/shared/more.png delete mode 100644 venv/lib/python3.6/site-packages/werkzeug/debug/shared/source.png delete mode 100644 venv/lib/python3.6/site-packages/werkzeug/debug/shared/ubuntu.ttf delete mode 100644 venv/lib/python3.6/site-packages/werkzeug/debug/tbtools.py delete mode 100644 venv/lib/python3.6/site-packages/werkzeug/exceptions.py delete mode 100644 venv/lib/python3.6/site-packages/werkzeug/filesystem.py delete mode 100644 venv/lib/python3.6/site-packages/werkzeug/formparser.py delete mode 100644 venv/lib/python3.6/site-packages/werkzeug/http.py delete mode 100644 venv/lib/python3.6/site-packages/werkzeug/local.py delete mode 100644 venv/lib/python3.6/site-packages/werkzeug/middleware/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/werkzeug/middleware/dispatcher.py delete mode 100644 venv/lib/python3.6/site-packages/werkzeug/middleware/http_proxy.py delete mode 100644 venv/lib/python3.6/site-packages/werkzeug/middleware/lint.py delete mode 100644 venv/lib/python3.6/site-packages/werkzeug/middleware/profiler.py delete mode 100644 venv/lib/python3.6/site-packages/werkzeug/middleware/proxy_fix.py delete mode 100644 venv/lib/python3.6/site-packages/werkzeug/middleware/shared_data.py delete mode 100644 venv/lib/python3.6/site-packages/werkzeug/posixemulation.py delete mode 100644 venv/lib/python3.6/site-packages/werkzeug/routing.py delete mode 100644 venv/lib/python3.6/site-packages/werkzeug/security.py delete mode 100644 venv/lib/python3.6/site-packages/werkzeug/serving.py delete mode 100644 venv/lib/python3.6/site-packages/werkzeug/test.py delete mode 100644 venv/lib/python3.6/site-packages/werkzeug/testapp.py delete mode 100644 venv/lib/python3.6/site-packages/werkzeug/urls.py delete mode 100644 venv/lib/python3.6/site-packages/werkzeug/useragents.py delete mode 100644 venv/lib/python3.6/site-packages/werkzeug/utils.py delete mode 100644 venv/lib/python3.6/site-packages/werkzeug/wrappers/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/werkzeug/wrappers/accept.py delete mode 100644 venv/lib/python3.6/site-packages/werkzeug/wrappers/auth.py delete mode 100644 venv/lib/python3.6/site-packages/werkzeug/wrappers/base_request.py delete mode 100644 venv/lib/python3.6/site-packages/werkzeug/wrappers/base_response.py delete mode 100644 venv/lib/python3.6/site-packages/werkzeug/wrappers/common_descriptors.py delete mode 100644 venv/lib/python3.6/site-packages/werkzeug/wrappers/etag.py delete mode 100644 venv/lib/python3.6/site-packages/werkzeug/wrappers/json.py delete mode 100644 venv/lib/python3.6/site-packages/werkzeug/wrappers/request.py delete mode 100644 venv/lib/python3.6/site-packages/werkzeug/wrappers/response.py delete mode 100644 venv/lib/python3.6/site-packages/werkzeug/wrappers/user_agent.py delete mode 100644 venv/lib/python3.6/site-packages/werkzeug/wsgi.py delete mode 100644 venv/lib/python3.6/site-packages/yaml/__init__.py delete mode 100644 venv/lib/python3.6/site-packages/yaml/composer.py delete mode 
100644 venv/lib/python3.6/site-packages/yaml/constructor.py delete mode 100644 venv/lib/python3.6/site-packages/yaml/cyaml.py delete mode 100644 venv/lib/python3.6/site-packages/yaml/dumper.py delete mode 100644 venv/lib/python3.6/site-packages/yaml/emitter.py delete mode 100644 venv/lib/python3.6/site-packages/yaml/error.py delete mode 100644 venv/lib/python3.6/site-packages/yaml/events.py delete mode 100644 venv/lib/python3.6/site-packages/yaml/loader.py delete mode 100644 venv/lib/python3.6/site-packages/yaml/nodes.py delete mode 100644 venv/lib/python3.6/site-packages/yaml/parser.py delete mode 100644 venv/lib/python3.6/site-packages/yaml/reader.py delete mode 100644 venv/lib/python3.6/site-packages/yaml/representer.py delete mode 100644 venv/lib/python3.6/site-packages/yaml/resolver.py delete mode 100644 venv/lib/python3.6/site-packages/yaml/scanner.py delete mode 100644 venv/lib/python3.6/site-packages/yaml/serializer.py delete mode 100644 venv/lib/python3.6/site-packages/yaml/tokens.py delete mode 100644 venv/pip-selfcheck.json delete mode 100644 venv/pyvenv.cfg
diff --git a/.gitignore b/.gitignore
index 53b31a6..bfd1002 100644
--- a/.gitignore
+++ b/.gitignore
@@ -4,7 +4,7 @@ craftbeerpi.db
 **/node_modules/*
 *.pyc
 *.templog
-venv/
+venv2/
 modules/plugins/*
 !modules/plugins/__init__.py
 *.pyc
diff --git a/venv/bin/activate b/venv/bin/activate
deleted file mode 100644
index 465303c..0000000
--- a/venv/bin/activate
+++ /dev/null
@@ -1,76 +0,0 @@
-# This file must be used with "source bin/activate" *from bash*
-# you cannot run it directly
-
-deactivate () {
-    # reset old environment variables
-    if [ -n "$_OLD_VIRTUAL_PATH" ] ; then
-        PATH="$_OLD_VIRTUAL_PATH"
-        export PATH
-        unset _OLD_VIRTUAL_PATH
-    fi
-    if [ -n "$_OLD_VIRTUAL_PYTHONHOME" ] ; then
-        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
-        export PYTHONHOME
-        unset _OLD_VIRTUAL_PYTHONHOME
-    fi
-
-    # This should detect bash and zsh, which have a hash command that must
-    # be called to get it to forget past commands. Without forgetting
-    # past commands the $PATH changes we made may not be respected
-    if [ -n "$BASH" -o -n "$ZSH_VERSION" ] ; then
-        hash -r
-    fi
-
-    if [ -n "$_OLD_VIRTUAL_PS1" ] ; then
-        PS1="$_OLD_VIRTUAL_PS1"
-        export PS1
-        unset _OLD_VIRTUAL_PS1
-    fi
-
-    unset VIRTUAL_ENV
-    if [ ! "$1" = "nondestructive" ] ; then
-        # Self destruct!
-        unset -f deactivate
-    fi
-}
-
-# unset irrelevant variables
-deactivate nondestructive
-
-VIRTUAL_ENV="/Users/manuelfritsch/cbp3_py3/venv"
-export VIRTUAL_ENV
-
-_OLD_VIRTUAL_PATH="$PATH"
-PATH="$VIRTUAL_ENV/bin:$PATH"
-export PATH
-
-# unset PYTHONHOME if set
-# this will fail if PYTHONHOME is set to the empty string (which is bad anyway)
-# could use `if (set -u; : $PYTHONHOME) ;` in bash
-if [ -n "$PYTHONHOME" ] ; then
-    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
-    unset PYTHONHOME
-fi
-
-if [ -z "$VIRTUAL_ENV_DISABLE_PROMPT" ] ; then
-    _OLD_VIRTUAL_PS1="$PS1"
-    if [ "x(venv) " != x ] ; then
-        PS1="(venv) $PS1"
-    else
-    if [ "`basename \"$VIRTUAL_ENV\"`" = "__" ] ; then
-        # special case for Aspen magic directories
-        # see http://www.zetadev.com/software/aspen/
-        PS1="[`basename \`dirname \"$VIRTUAL_ENV\"\``] $PS1"
-    else
-        PS1="(`basename \"$VIRTUAL_ENV\"`)$PS1"
-    fi
-    fi
-    export PS1
-fi
-
-# This should detect bash and zsh, which have a hash command that must
-# be called to get it to forget past commands. Without forgetting
-# past commands the $PATH changes we made may not be respected
-if [ -n "$BASH" -o -n "$ZSH_VERSION" ] ; then
-    hash -r
-fi
diff --git a/venv/bin/activate.csh b/venv/bin/activate.csh
deleted file mode 100644
index dce34da..0000000
--- a/venv/bin/activate.csh
+++ /dev/null
@@ -1,37 +0,0 @@
-# This file must be used with "source bin/activate.csh" *from csh*.
-# You cannot run it directly.
-# Created by Davide Di Blasi .
-# Ported to Python 3.3 venv by Andrew Svetlov
-
-alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; test "\!:*" != "nondestructive" && unalias deactivate'
-
-# Unset irrelevant variables.
-deactivate nondestructive
-
-setenv VIRTUAL_ENV "/Users/manuelfritsch/cbp3_py3/venv"
-
-set _OLD_VIRTUAL_PATH="$PATH"
-setenv PATH "$VIRTUAL_ENV/bin:$PATH"
-
-
-set _OLD_VIRTUAL_PROMPT="$prompt"
-
-if (! "$?VIRTUAL_ENV_DISABLE_PROMPT") then
-    if ("venv" != "") then
-        set env_name = "venv"
-    else
-        if (`basename "VIRTUAL_ENV"` == "__") then
-            # special case for Aspen magic directories
-            # see http://www.zetadev.com/software/aspen/
-            set env_name = `basename \`dirname "$VIRTUAL_ENV"\``
-        else
-            set env_name = `basename "$VIRTUAL_ENV"`
-        endif
-    endif
-    set prompt = "[$env_name] $prompt"
-    unset env_name
-endif
-
-alias pydoc python -m pydoc
-
-rehash
diff --git a/venv/bin/activate.fish b/venv/bin/activate.fish
deleted file mode 100644
index fc1a53b..0000000
--- a/venv/bin/activate.fish
+++ /dev/null
@@ -1,75 +0,0 @@
-# This file must be used with ". bin/activate.fish" *from fish* (http://fishshell.org)
-# you cannot run it directly
-
-function deactivate -d "Exit virtualenv and return to normal shell environment"
-    # reset old environment variables
-    if test -n "$_OLD_VIRTUAL_PATH"
-        set -gx PATH $_OLD_VIRTUAL_PATH
-        set -e _OLD_VIRTUAL_PATH
-    end
-    if test -n "$_OLD_VIRTUAL_PYTHONHOME"
-        set -gx PYTHONHOME $_OLD_VIRTUAL_PYTHONHOME
-        set -e _OLD_VIRTUAL_PYTHONHOME
-    end
-
-    if test -n "$_OLD_FISH_PROMPT_OVERRIDE"
-        functions -e fish_prompt
-        set -e _OLD_FISH_PROMPT_OVERRIDE
-        functions -c _old_fish_prompt fish_prompt
-        functions -e _old_fish_prompt
-    end
-
-    set -e VIRTUAL_ENV
-    if test "$argv[1]" != "nondestructive"
-        # Self destruct!
-        functions -e deactivate
-    end
-end
-
-# unset irrelevant variables
-deactivate nondestructive
-
-set -gx VIRTUAL_ENV "/Users/manuelfritsch/cbp3_py3/venv"
-
-set -gx _OLD_VIRTUAL_PATH $PATH
-set -gx PATH "$VIRTUAL_ENV/bin" $PATH
-
-# unset PYTHONHOME if set
-if set -q PYTHONHOME
-    set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME
-    set -e PYTHONHOME
-end
-
-if test -z "$VIRTUAL_ENV_DISABLE_PROMPT"
-    # fish uses a function instead of an env var to generate the prompt.
-
-    # save the current fish_prompt function as the function _old_fish_prompt
-    functions -c fish_prompt _old_fish_prompt
-
-    # with the original prompt function renamed, we can override with our own.
-    function fish_prompt
-        # Save the return status of the last command
-        set -l old_status $status
-
-        # Prompt override?
-        if test -n "(venv) "
-            printf "%s%s" "(venv) " (set_color normal)
-        else
-            # ...Otherwise, prepend env
-            set -l _checkbase (basename "$VIRTUAL_ENV")
-            if test $_checkbase = "__"
-                # special case for Aspen magic directories
-                # see http://www.zetadev.com/software/aspen/
-                printf "%s[%s]%s " (set_color -b blue white) (basename (dirname "$VIRTUAL_ENV")) (set_color normal)
-            else
-                printf "%s(%s)%s" (set_color -b blue white) (basename "$VIRTUAL_ENV") (set_color normal)
-            end
-        end
-
-        # Restore the return status of the previous command.
-        echo "exit $old_status" | .
-        _old_fish_prompt
-    end
-
-    set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV"
-end
diff --git a/venv/bin/chardetect b/venv/bin/chardetect
deleted file mode 100755
index 946f893..0000000
--- a/venv/bin/chardetect
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/Users/manuelfritsch/cbp3_py3/venv/bin/python
-
-# -*- coding: utf-8 -*-
-import re
-import sys
-
-from chardet.cli.chardetect import main
-
-if __name__ == '__main__':
-    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
-    sys.exit(main())
diff --git a/venv/bin/easy_install b/venv/bin/easy_install
deleted file mode 100755
index 304835c..0000000
--- a/venv/bin/easy_install
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/Users/manuelfritsch/cbp3_py3/venv/bin/python
-
-# -*- coding: utf-8 -*-
-import re
-import sys
-
-from setuptools.command.easy_install import main
-
-if __name__ == '__main__':
-    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
-    sys.exit(main())
diff --git a/venv/bin/easy_install-3.6 b/venv/bin/easy_install-3.6
deleted file mode 100755
index 304835c..0000000
--- a/venv/bin/easy_install-3.6
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/Users/manuelfritsch/cbp3_py3/venv/bin/python
-
-# -*- coding: utf-8 -*-
-import re
-import sys
-
-from setuptools.command.easy_install import main
-
-if __name__ == '__main__':
-    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
-    sys.exit(main())
diff --git a/venv/bin/flask b/venv/bin/flask
deleted file mode 100755
index 6d42e9b..0000000
--- a/venv/bin/flask
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/Users/manuelfritsch/cbp3_py3/venv/bin/python
-
-# -*- coding: utf-8 -*-
-import re
-import sys
-
-from flask.cli import main
-
-if __name__ == '__main__':
-    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
-    sys.exit(main())
diff --git a/venv/bin/pip b/venv/bin/pip
deleted file mode 100755
index 55533fe..0000000
--- a/venv/bin/pip
+++ /dev/null
@@ -1,12 +0,0 @@
-#!/Users/manuelfritsch/cbp3_py3/venv/bin/python
-# EASY-INSTALL-ENTRY-SCRIPT: 'pip==10.0.1','console_scripts','pip'
-__requires__ = 'pip==10.0.1'
-import re
-import sys
-from pkg_resources import load_entry_point
-
-if __name__ == '__main__':
-    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
-    sys.exit(
-        load_entry_point('pip==10.0.1', 'console_scripts', 'pip')()
-    )
diff --git a/venv/bin/pip3 b/venv/bin/pip3
deleted file mode 100755
index e68222c..0000000
--- a/venv/bin/pip3
+++ /dev/null
@@ -1,12 +0,0 @@
-#!/Users/manuelfritsch/cbp3_py3/venv/bin/python
-# EASY-INSTALL-ENTRY-SCRIPT: 'pip==10.0.1','console_scripts','pip3'
-__requires__ = 'pip==10.0.1'
-import re
-import sys
-from pkg_resources import load_entry_point
-
-if __name__ == '__main__':
-    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
-    sys.exit(
-        load_entry_point('pip==10.0.1', 'console_scripts', 'pip3')()
-    )
diff --git a/venv/bin/pip3.6 b/venv/bin/pip3.6
deleted file mode 100755
index c271dfa..0000000
--- a/venv/bin/pip3.6
+++ /dev/null
@@ -1,12 +0,0 @@
-#!/Users/manuelfritsch/cbp3_py3/venv/bin/python -# EASY-INSTALL-ENTRY-SCRIPT: 'pip==10.0.1','console_scripts','pip3.6' -__requires__ = 'pip==10.0.1' -import re -import sys -from pkg_resources import load_entry_point - -if __name__ == '__main__': - sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) - sys.exit( - load_entry_point('pip==10.0.1', 'console_scripts', 'pip3.6')() - ) diff --git a/venv/bin/python b/venv/bin/python deleted file mode 100755 index 48fcd067fb3083ba608067154479df35e3432186..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 13620 zcmeHOZ){uD6~FG%l0mwp6xQxfhsBzb6)kSs1{Kt3c3Zd4!wI2DLja+h7yG$3yx89J zbCa;P%GPFz7c4~sp&{)9(xhpBkcbc;ntli!MGgB>Ha4c9{SjiNya+aJ)5KV{)%%_M z?u+dggOK)d_e%Gk^E>C~?hJJ26W78}Mjr1dRO4;v!#vkXJd2cyG-^(JC|uwEvYXV@FVc0?x&&TV7DCeCes ze^)+_!$9>%It&`&y4J*|YBA>pyEs}{75V+Wyv_BOk`^BJCNxWb8>GPX$J2(HH!R5f z{yM(o`a3V}3Cpdzk~RBF6l^P#J-+I{K7MaYf4`P`!nLi=Hx!%~v*Vd;l4r81oWS+| z&fVtjkDfU-!hRpF#vSRo&WPuFC^8tS(F9#T-g8`~^aDeq=ZfDPYa)QgdDi5G$+6L# z$@8`a+}NL-&d49l_Zr22lLtUKKc1`knXK{G7IgQwRn8&o_qSQx9au`u*Ef%oQ- zf3K7Mt>_=shB5z+2FlMs7DR7B4;2em$jpp}l9Ofp!1z)YC1+@j+$l8TauMV_T zTj6NPLUvc(X1CoJ~9)H_`%J`BZm+H<-49E#h{M&jw*bMB-cLI~X-AU;pN z7ie&P2G3eg0Bm6d4oZ)t~C zKDDeZ(S-(cwn0?7=v)O~+B*Ha;#aluv0pB&gA)6IS-E$9QOvyti!&cqinn3O!*IOv z3{)_s9QXHLxrihVSBF%0Z1VZaJ!iO>pB%NLV6sBl2rRPu3FVgjdd#D&VVeZ`R&i_kwz7vEf+I3~+ zZ~v)OD(xfiQE49)V^WMMF{Z`%s2EK#X2qBnqa{XLj3>l6A;!nWcv6g~#W;mAUf%X5 zHV_%8wEqHhE{z?qm8)1j{YBz_Pu!ivy-r-1xL1kmBW{tn2Z?);ID@#K5=U2HE6)=5 zE#jUg?kVD)Aa0(xZxiw4^hVeoxXL zNctm5=OsNa=_`_6mULOtPqbNOq#~dqpdz3mpdz3mpdz3mpdz3mpdz3mpdz3mP#1ws zw|9(ef@D*e_fPUzHe+XWGxNB??zh_JZVq$$o;_(;R-Vm;xsg4Qu~71u3)7dk;>&rC zx(4)YY8j;#(E?OZWL5`F?i)B_*wG0iQMC0@)96PThRrc+mv9WPIg8*vxsGKfbekvA zkC8A8Vbau-NnxS^!JZWT#7<;v;)lVf#;uHP5E=m*(@is%Ae08GDeI6ZUoVp78rRiC z16{Rj!?Oqgv!()#t=zH<3}(o{t2k{x6gm1ptdHXl4+DKPeU7wniZ~MdzJRus$WIb{ zRzlN2gqihY39Uq$&h51T=VNIEfMROke+D{5e+!j;8n!FluDA=8p8a4CM0N2cILfN5 zHP9xMR0N{$oIzLXLG%qaz^dOKA3p5E_xtdO4^zFc*55aL_=FFi^kJ&A*3N&zhpB8^ ztAE~yFZ;0L!xbOC0nTduwfgWD9}fC3)!Hb=^uGiuNq-t0U!aTc;!y=>8`0^T_EvOy z3Y3F4qi;sP1)aW;m;V=_BEI*ZzupZ-B<>i#LLsxHYc^!B(@^ z%iGoWg-BgDX}xsy616TxYe6+R*GTKrvXR=&$B#%vF9Y%Z5~)X&a*n7-wlyy|OHW$& PghJhSb?xomyPN$Nx>Ha4c9{SjiNya+aJ)5KV{)%%_M z?u+dggOK)d_e%Gk^E>C~?hJJ26W78}Mjr1dRO4;v!#vkXJd2cyG-^(JC|uwEvYXV@FVc0?x&&TV7DCeCes ze^)+_!$9>%It&`&y4J*|YBA>pyEs}{75V+Wyv_BOk`^BJCNxWb8>GPX$J2(HH!R5f z{yM(o`a3V}3Cpdzk~RBF6l^P#J-+I{K7MaYf4`P`!nLi=Hx!%~v*Vd;l4r81oWS+| z&fVtjkDfU-!hRpF#vSRo&WPuFC^8tS(F9#T-g8`~^aDeq=ZfDPYa)QgdDi5G$+6L# z$@8`a+}NL-&d49l_Zr22lLtUKKc1`knXK{G7IgQwRn8&o_qSQx9au`u*Ef%oQ- zf3K7Mt>_=shB5z+2FlMs7DR7B4;2em$jpp}l9Ofp!1z)YC1+@j+$l8TauMV_T zTj6NPLUvc(X1CoJ~9)H_`%J`BZm+H<-49E#h{M&jw*bMB-cLI~X-AU;pN z7ie&P2G3eg0Bm6d4oZ)t~C zKDDeZ(S-(cwn0?7=v)O~+B*Ha;#aluv0pB&gA)6IS-E$9QOvyti!&cqinn3O!*IOv z3{)_s9QXHLxrihVSBF%0Z1VZaJ!iO>pB%NLV6sBl2rRPu3FVgjdd#D&VVeZ`R&i_kwz7vEf+I3~+ zZ~v)OD(xfiQE49)V^WMMF{Z`%s2EK#X2qBnqa{XLj3>l6A;!nWcv6g~#W;mAUf%X5 zHV_%8wEqHhE{z?qm8)1j{YBz_Pu!ivy-r-1xL1kmBW{tn2Z?);ID@#K5=U2HE6)=5 zE#jUg?kVD)Aa0(xZxiw4^hVeoxXL zNctm5=OsNa=_`_6mULOtPqbNOq#~dqpdz3mpdz3mpdz3mpdz3mpdz3mpdz3mP#1ws zw|9(ef@D*e_fPUzHe+XWGxNB??zh_JZVq$$o;_(;R-Vm;xsg4Qu~71u3)7dk;>&rC zx(4)YY8j;#(E?OZWL5`F?i)B_*wG0iQMC0@)96PThRrc+mv9WPIg8*vxsGKfbekvA zkC8A8Vbau-NnxS^!JZWT#7<;v;)lVf#;uHP5E=m*(@is%Ae08GDeI6ZUoVp78rRiC z16{Rj!?Oqgv!()#t=zH<3}(o{t2k{x6gm1ptdHXl4+DKPeU7wniZ~MdzJRus$WIb{ zRzlN2gqihY39Uq$&h51T=VNIEfMROke+D{5e+!j;8n!FluDA=8p8a4CM0N2cILfN5 zHP9xMR0N{$oIzLXLG%qaz^dOKA3p5E_xtdO4^zFc*55aL_=FFi^kJ&A*3N&zhpB8^ 
ztAE~yFZ;0L!xbOC0nTduwfgWD9}fC3)!Hb=^uGiuNq-t0U!aTc;!y=>8`0^T_EvOy z3Y3F4qi;sP1)aW;m;V=_BEI*ZzupZ-B<>i#LLsxHYc^!B(@^ z%iGoWg-BgDX}xsy616TxYe6+R*GTKrvXR=&$B#%vF9Y%Z5~)X&a*n7-wlyy|OHW$& PghJhSb?xomyPN$Nx>Ha4c9{SjiNya+aJ)5KV{)%%_M z?u+dggOK)d_e%Gk^E>C~?hJJ26W78}Mjr1dRO4;v!#vkXJd2cyG-^(JC|uwEvYXV@FVc0?x&&TV7DCeCes ze^)+_!$9>%It&`&y4J*|YBA>pyEs}{75V+Wyv_BOk`^BJCNxWb8>GPX$J2(HH!R5f z{yM(o`a3V}3Cpdzk~RBF6l^P#J-+I{K7MaYf4`P`!nLi=Hx!%~v*Vd;l4r81oWS+| z&fVtjkDfU-!hRpF#vSRo&WPuFC^8tS(F9#T-g8`~^aDeq=ZfDPYa)QgdDi5G$+6L# z$@8`a+}NL-&d49l_Zr22lLtUKKc1`knXK{G7IgQwRn8&o_qSQx9au`u*Ef%oQ- zf3K7Mt>_=shB5z+2FlMs7DR7B4;2em$jpp}l9Ofp!1z)YC1+@j+$l8TauMV_T zTj6NPLUvc(X1CoJ~9)H_`%J`BZm+H<-49E#h{M&jw*bMB-cLI~X-AU;pN z7ie&P2G3eg0Bm6d4oZ)t~C zKDDeZ(S-(cwn0?7=v)O~+B*Ha;#aluv0pB&gA)6IS-E$9QOvyti!&cqinn3O!*IOv z3{)_s9QXHLxrihVSBF%0Z1VZaJ!iO>pB%NLV6sBl2rRPu3FVgjdd#D&VVeZ`R&i_kwz7vEf+I3~+ zZ~v)OD(xfiQE49)V^WMMF{Z`%s2EK#X2qBnqa{XLj3>l6A;!nWcv6g~#W;mAUf%X5 zHV_%8wEqHhE{z?qm8)1j{YBz_Pu!ivy-r-1xL1kmBW{tn2Z?);ID@#K5=U2HE6)=5 zE#jUg?kVD)Aa0(xZxiw4^hVeoxXL zNctm5=OsNa=_`_6mULOtPqbNOq#~dqpdz3mpdz3mpdz3mpdz3mpdz3mpdz3mP#1ws zw|9(ef@D*e_fPUzHe+XWGxNB??zh_JZVq$$o;_(;R-Vm;xsg4Qu~71u3)7dk;>&rC zx(4)YY8j;#(E?OZWL5`F?i)B_*wG0iQMC0@)96PThRrc+mv9WPIg8*vxsGKfbekvA zkC8A8Vbau-NnxS^!JZWT#7<;v;)lVf#;uHP5E=m*(@is%Ae08GDeI6ZUoVp78rRiC z16{Rj!?Oqgv!()#t=zH<3}(o{t2k{x6gm1ptdHXl4+DKPeU7wniZ~MdzJRus$WIb{ zRzlN2gqihY39Uq$&h51T=VNIEfMROke+D{5e+!j;8n!FluDA=8p8a4CM0N2cILfN5 zHP9xMR0N{$oIzLXLG%qaz^dOKA3p5E_xtdO4^zFc*55aL_=FFi^kJ&A*3N&zhpB8^ ztAE~yFZ;0L!xbOC0nTduwfgWD9}fC3)!Hb=^uGiuNq-t0U!aTc;!y=>8`0^T_EvOy z3Y3F4qi;sP1)aW;m;V=_BEI*ZzupZ-B<>i#LLsxHYc^!B(@^ z%iGoWg-BgDX}xsy616TxYe6+R*GTKrvXR=&$B#%vF9Y%Z5~)X&a*n7-wlyy|OHW$& PghJhSb?xomyPN$Nx> - -#ifdef __cplusplus -extern "C" { -#endif - -#define GREENLET_VERSION "0.4.15" - -#if PY_VERSION_HEX >= 0x030700A3 -# define GREENLET_USE_EXC_INFO -#endif - -typedef struct _greenlet { - PyObject_HEAD - char* stack_start; - char* stack_stop; - char* stack_copy; - intptr_t stack_saved; - struct _greenlet* stack_prev; - struct _greenlet* parent; - PyObject* run_info; - struct _frame* top_frame; - int recursion_depth; - PyObject* weakreflist; -#ifdef GREENLET_USE_EXC_INFO - _PyErr_StackItem* exc_info; - _PyErr_StackItem exc_state; -#else - PyObject* exc_type; - PyObject* exc_value; - PyObject* exc_traceback; -#endif - PyObject* dict; -} PyGreenlet; - -#define PyGreenlet_Check(op) PyObject_TypeCheck(op, &PyGreenlet_Type) -#define PyGreenlet_MAIN(op) (((PyGreenlet*)(op))->stack_stop == (char*) -1) -#define PyGreenlet_STARTED(op) (((PyGreenlet*)(op))->stack_stop != NULL) -#define PyGreenlet_ACTIVE(op) (((PyGreenlet*)(op))->stack_start != NULL) -#define PyGreenlet_GET_PARENT(op) (((PyGreenlet*)(op))->parent) - -#if (PY_MAJOR_VERSION == 2 && PY_MINOR_VERSION >= 7) || (PY_MAJOR_VERSION == 3 && PY_MINOR_VERSION >= 1) || PY_MAJOR_VERSION > 3 -#define GREENLET_USE_PYCAPSULE -#endif - -/* C API functions */ - -/* Total number of symbols that are exported */ -#define PyGreenlet_API_pointers 8 - -#define PyGreenlet_Type_NUM 0 -#define PyExc_GreenletError_NUM 1 -#define PyExc_GreenletExit_NUM 2 - -#define PyGreenlet_New_NUM 3 -#define PyGreenlet_GetCurrent_NUM 4 -#define PyGreenlet_Throw_NUM 5 -#define PyGreenlet_Switch_NUM 6 -#define PyGreenlet_SetParent_NUM 7 - -#ifndef GREENLET_MODULE -/* This section is used by modules that uses the greenlet C API */ -static void **_PyGreenlet_API = NULL; - -#define PyGreenlet_Type (*(PyTypeObject *) _PyGreenlet_API[PyGreenlet_Type_NUM]) - -#define PyExc_GreenletError \ - ((PyObject *) 
_PyGreenlet_API[PyExc_GreenletError_NUM]) - -#define PyExc_GreenletExit \ - ((PyObject *) _PyGreenlet_API[PyExc_GreenletExit_NUM]) - -/* - * PyGreenlet_New(PyObject *args) - * - * greenlet.greenlet(run, parent=None) - */ -#define PyGreenlet_New \ - (* (PyGreenlet * (*)(PyObject *run, PyGreenlet *parent)) \ - _PyGreenlet_API[PyGreenlet_New_NUM]) - -/* - * PyGreenlet_GetCurrent(void) - * - * greenlet.getcurrent() - */ -#define PyGreenlet_GetCurrent \ - (* (PyGreenlet * (*)(void)) _PyGreenlet_API[PyGreenlet_GetCurrent_NUM]) - -/* - * PyGreenlet_Throw( - * PyGreenlet *greenlet, - * PyObject *typ, - * PyObject *val, - * PyObject *tb) - * - * g.throw(...) - */ -#define PyGreenlet_Throw \ - (* (PyObject * (*) \ - (PyGreenlet *self, PyObject *typ, PyObject *val, PyObject *tb)) \ - _PyGreenlet_API[PyGreenlet_Throw_NUM]) - -/* - * PyGreenlet_Switch(PyGreenlet *greenlet, PyObject *args) - * - * g.switch(*args, **kwargs) - */ -#define PyGreenlet_Switch \ - (* (PyObject * (*)(PyGreenlet *greenlet, PyObject *args, PyObject *kwargs)) \ - _PyGreenlet_API[PyGreenlet_Switch_NUM]) - -/* - * PyGreenlet_SetParent(PyObject *greenlet, PyObject *new_parent) - * - * g.parent = new_parent - */ -#define PyGreenlet_SetParent \ - (* (int (*)(PyGreenlet *greenlet, PyGreenlet *nparent)) \ - _PyGreenlet_API[PyGreenlet_SetParent_NUM]) - -/* Macro that imports greenlet and initializes C API */ -#ifdef GREENLET_USE_PYCAPSULE -#define PyGreenlet_Import() \ -{ \ - _PyGreenlet_API = (void**)PyCapsule_Import("greenlet._C_API", 0); \ -} -#else -#define PyGreenlet_Import() \ -{ \ - PyObject *module = PyImport_ImportModule("greenlet"); \ - if (module != NULL) { \ - PyObject *c_api_object = PyObject_GetAttrString( \ - module, "_C_API"); \ - if (c_api_object != NULL && PyCObject_Check(c_api_object)) { \ - _PyGreenlet_API = \ - (void **) PyCObject_AsVoidPtr(c_api_object); \ - Py_DECREF(c_api_object); \ - } \ - Py_DECREF(module); \ - } \ -} -#endif - -#endif /* GREENLET_MODULE */ - -#ifdef __cplusplus -} -#endif -#endif /* !Py_GREENLETOBJECT_H */ diff --git a/venv/lib/python3.6/site-packages/Click-7.0.dist-info/INSTALLER b/venv/lib/python3.6/site-packages/Click-7.0.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/venv/lib/python3.6/site-packages/Click-7.0.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/venv/lib/python3.6/site-packages/Click-7.0.dist-info/LICENSE.txt b/venv/lib/python3.6/site-packages/Click-7.0.dist-info/LICENSE.txt deleted file mode 100644 index 87ce152..0000000 --- a/venv/lib/python3.6/site-packages/Click-7.0.dist-info/LICENSE.txt +++ /dev/null @@ -1,39 +0,0 @@ -Copyright © 2014 by the Pallets team. - -Some rights reserved. - -Redistribution and use in source and binary forms of the software as -well as documentation, with or without modification, are permitted -provided that the following conditions are met: - -- Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - -- Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - -- Neither the name of the copyright holder nor the names of its - contributors may be used to endorse or promote products derived from - this software without specific prior written permission. 
- -THIS SOFTWARE AND DOCUMENTATION IS PROVIDED BY THE COPYRIGHT HOLDERS AND -CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, -BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND -FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT -NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF -USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON -ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF -THIS SOFTWARE AND DOCUMENTATION, EVEN IF ADVISED OF THE POSSIBILITY OF -SUCH DAMAGE. - ----- - -Click uses parts of optparse written by Gregory P. Ward and maintained -by the Python Software Foundation. This is limited to code in parser.py. - -Copyright © 2001-2006 Gregory P. Ward. All rights reserved. -Copyright © 2002-2006 Python Software Foundation. All rights reserved. diff --git a/venv/lib/python3.6/site-packages/Click-7.0.dist-info/METADATA b/venv/lib/python3.6/site-packages/Click-7.0.dist-info/METADATA deleted file mode 100644 index 625bdad..0000000 --- a/venv/lib/python3.6/site-packages/Click-7.0.dist-info/METADATA +++ /dev/null @@ -1,121 +0,0 @@ -Metadata-Version: 2.1 -Name: Click -Version: 7.0 -Summary: Composable command line interface toolkit -Home-page: https://palletsprojects.com/p/click/ -Author: Armin Ronacher -Author-email: armin.ronacher@active-4.com -Maintainer: Pallets Team -Maintainer-email: contact@palletsprojects.com -License: BSD -Project-URL: Documentation, https://click.palletsprojects.com/ -Project-URL: Code, https://github.com/pallets/click -Project-URL: Issue tracker, https://github.com/pallets/click/issues -Platform: UNKNOWN -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: BSD License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.* - -\$ click\_ -========== - -Click is a Python package for creating beautiful command line interfaces -in a composable way with as little code as necessary. It's the "Command -Line Interface Creation Kit". It's highly configurable but comes with -sensible defaults out of the box. - -It aims to make the process of writing command line tools quick and fun -while also preventing any frustration caused by the inability to -implement an intended CLI API. - -Click in three points: - -- Arbitrary nesting of commands -- Automatic help page generation -- Supports lazy loading of subcommands at runtime - - -Installing ----------- - -Install and update using `pip`_: - -.. code-block:: text - - $ pip install click - -Click supports Python 3.4 and newer, Python 2.7, and PyPy. - -.. _pip: https://pip.pypa.io/en/stable/quickstart/ - - -A Simple Example ----------------- - -What does it look like? Here is an example of a simple Click program: - -.. 
code-block:: python - - import click - - @click.command() - @click.option("--count", default=1, help="Number of greetings.") - @click.option("--name", prompt="Your name", - help="The person to greet.") - def hello(count, name): - """Simple program that greets NAME for a total of COUNT times.""" - for _ in range(count): - click.echo("Hello, %s!" % name) - - if __name__ == '__main__': - hello() - -And what it looks like when run: - -.. code-block:: text - - $ python hello.py --count=3 - Your name: Click - Hello, Click! - Hello, Click! - Hello, Click! - - -Donate ------- - -The Pallets organization develops and supports Click and other popular -packages. In order to grow the community of contributors and users, and -allow the maintainers to devote more time to the projects, `please -donate today`_. - -.. _please donate today: https://palletsprojects.com/donate - - -Links ------ - -* Website: https://palletsprojects.com/p/click/ -* Documentation: https://click.palletsprojects.com/ -* License: `BSD `_ -* Releases: https://pypi.org/project/click/ -* Code: https://github.com/pallets/click -* Issue tracker: https://github.com/pallets/click/issues -* Test status: - - * Linux, Mac: https://travis-ci.org/pallets/click - * Windows: https://ci.appveyor.com/project/pallets/click - -* Test coverage: https://codecov.io/gh/pallets/click - - diff --git a/venv/lib/python3.6/site-packages/Click-7.0.dist-info/RECORD b/venv/lib/python3.6/site-packages/Click-7.0.dist-info/RECORD deleted file mode 100644 index f3c834e..0000000 --- a/venv/lib/python3.6/site-packages/Click-7.0.dist-info/RECORD +++ /dev/null @@ -1,40 +0,0 @@ -Click-7.0.dist-info/LICENSE.txt,sha256=4hIxn676T0Wcisk3_chVcECjyrivKTZsoqSNI5AlIlw,1876 -Click-7.0.dist-info/METADATA,sha256=-r8jeke3Zer4diRvT1MjFZuiJ6yTT_qFP39svLqdaLI,3516 -Click-7.0.dist-info/RECORD,, -Click-7.0.dist-info/WHEEL,sha256=gduuPyBvFJQSQ0zdyxF7k0zynDXbIbvg5ZBHoXum5uk,110 -Click-7.0.dist-info/top_level.txt,sha256=J1ZQogalYS4pphY_lPECoNMfw0HzTSrZglC4Yfwo4xA,6 -click/__init__.py,sha256=HjGThQ7tef9kkwCV371TBnrf0SAi6fKfU_jtEnbYTvQ,2789 -click/_bashcomplete.py,sha256=iaNUmtxag0YPfxba3TDYCNietiTMQIrvhRLj-H8okFU,11014 -click/_compat.py,sha256=vYmvoj4opPxo-c-2GMQQjYT_r_QkOKybkfGoeVrt0dA,23399 -click/_termui_impl.py,sha256=xHmLtOJhKUCVD6168yucJ9fknUJPAMs0eUTPgVUO-GQ,19611 -click/_textwrap.py,sha256=gwS4m7bdQiJnzaDG8osFcRb-5vn4t4l2qSCy-5csCEc,1198 -click/_unicodefun.py,sha256=QHy2_5jYlX-36O-JVrTHNnHOqg8tquUR0HmQFev7Ics,4364 -click/_winconsole.py,sha256=PPWVak8Iikm_gAPsxMrzwsVFCvHgaW3jPaDWZ1JBl3U,8965 -click/core.py,sha256=q8FLcDZsagBGSRe5Y9Hi_FGvAeZvusNfoO5EkhkSQ8Y,75305 -click/decorators.py,sha256=idKt6duLUUfAFftrHoREi8MJSd39XW36pUVHthdglwk,11226 -click/exceptions.py,sha256=CNpAjBAE7qjaV4WChxQeak95e5yUOau8AsvT-8m6wss,7663 -click/formatting.py,sha256=eh-cypTUAhpI3HD-K4ZpR3vCiURIO62xXvKkR3tNUTM,8889 -click/globals.py,sha256=oQkou3ZQ5DgrbVM6BwIBirwiqozbjfirzsLGAlLRRdg,1514 -click/parser.py,sha256=m-nGZz4VwprM42_qtFlWFGo7yRJQxkBlRcZodoH593Y,15510 -click/termui.py,sha256=o_ZXB2jyvL2Rce7P_bFGq452iyBq9ykJyRApIPMCZO0,23207 -click/testing.py,sha256=aYGqY_iWLu2p4k7lkuJ6t3fqpf6aPGqTsyLzNY_ngKg,13062 -click/types.py,sha256=2Q929p-aBP_ZYuMFJqJR-Ipucofv3fmDc5JzBDPmzJU,23287 -click/utils.py,sha256=6-D0WkAxvv9FkgHXSHwDIv0l9Gdx9Mm6Z5vuKNLIfZI,15763 -Click-7.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -click/__pycache__/parser.cpython-36.pyc,, -click/__pycache__/exceptions.cpython-36.pyc,, -click/__pycache__/formatting.cpython-36.pyc,, 
-click/__pycache__/testing.cpython-36.pyc,, -click/__pycache__/types.cpython-36.pyc,, -click/__pycache__/_bashcomplete.cpython-36.pyc,, -click/__pycache__/_compat.cpython-36.pyc,, -click/__pycache__/_winconsole.cpython-36.pyc,, -click/__pycache__/_unicodefun.cpython-36.pyc,, -click/__pycache__/utils.cpython-36.pyc,, -click/__pycache__/_textwrap.cpython-36.pyc,, -click/__pycache__/core.cpython-36.pyc,, -click/__pycache__/termui.cpython-36.pyc,, -click/__pycache__/__init__.cpython-36.pyc,, -click/__pycache__/globals.cpython-36.pyc,, -click/__pycache__/decorators.cpython-36.pyc,, -click/__pycache__/_termui_impl.cpython-36.pyc,, diff --git a/venv/lib/python3.6/site-packages/Click-7.0.dist-info/WHEEL b/venv/lib/python3.6/site-packages/Click-7.0.dist-info/WHEEL deleted file mode 100644 index 1316c41..0000000 --- a/venv/lib/python3.6/site-packages/Click-7.0.dist-info/WHEEL +++ /dev/null @@ -1,6 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.31.1) -Root-Is-Purelib: true -Tag: py2-none-any -Tag: py3-none-any - diff --git a/venv/lib/python3.6/site-packages/Click-7.0.dist-info/top_level.txt b/venv/lib/python3.6/site-packages/Click-7.0.dist-info/top_level.txt deleted file mode 100644 index dca9a90..0000000 --- a/venv/lib/python3.6/site-packages/Click-7.0.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -click diff --git a/venv/lib/python3.6/site-packages/Flask-1.1.1.dist-info/INSTALLER b/venv/lib/python3.6/site-packages/Flask-1.1.1.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/venv/lib/python3.6/site-packages/Flask-1.1.1.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/venv/lib/python3.6/site-packages/Flask-1.1.1.dist-info/LICENSE.rst b/venv/lib/python3.6/site-packages/Flask-1.1.1.dist-info/LICENSE.rst deleted file mode 100644 index 9d227a0..0000000 --- a/venv/lib/python3.6/site-packages/Flask-1.1.1.dist-info/LICENSE.rst +++ /dev/null @@ -1,28 +0,0 @@ -Copyright 2010 Pallets - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - -1. Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - -2. Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - -3. Neither the name of the copyright holder nor the names of its - contributors may be used to endorse or promote products derived from - this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A -PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED -TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR -PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF -LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING -NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/venv/lib/python3.6/site-packages/Flask-1.1.1.dist-info/METADATA b/venv/lib/python3.6/site-packages/Flask-1.1.1.dist-info/METADATA deleted file mode 100644 index 08fcc91..0000000 --- a/venv/lib/python3.6/site-packages/Flask-1.1.1.dist-info/METADATA +++ /dev/null @@ -1,134 +0,0 @@ -Metadata-Version: 2.1 -Name: Flask -Version: 1.1.1 -Summary: A simple framework for building complex web applications. -Home-page: https://palletsprojects.com/p/flask/ -Author: Armin Ronacher -Author-email: armin.ronacher@active-4.com -Maintainer: Pallets -Maintainer-email: contact@palletsprojects.com -License: BSD-3-Clause -Project-URL: Documentation, https://flask.palletsprojects.com/ -Project-URL: Code, https://github.com/pallets/flask -Project-URL: Issue tracker, https://github.com/pallets/flask/issues -Platform: UNKNOWN -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Web Environment -Classifier: Framework :: Flask -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: BSD License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content -Classifier: Topic :: Internet :: WWW/HTTP :: WSGI :: Application -Classifier: Topic :: Software Development :: Libraries :: Application Frameworks -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.* -Requires-Dist: Werkzeug (>=0.15) -Requires-Dist: Jinja2 (>=2.10.1) -Requires-Dist: itsdangerous (>=0.24) -Requires-Dist: click (>=5.1) -Provides-Extra: dev -Requires-Dist: pytest ; extra == 'dev' -Requires-Dist: coverage ; extra == 'dev' -Requires-Dist: tox ; extra == 'dev' -Requires-Dist: sphinx ; extra == 'dev' -Requires-Dist: pallets-sphinx-themes ; extra == 'dev' -Requires-Dist: sphinxcontrib-log-cabinet ; extra == 'dev' -Requires-Dist: sphinx-issues ; extra == 'dev' -Provides-Extra: docs -Requires-Dist: sphinx ; extra == 'docs' -Requires-Dist: pallets-sphinx-themes ; extra == 'docs' -Requires-Dist: sphinxcontrib-log-cabinet ; extra == 'docs' -Requires-Dist: sphinx-issues ; extra == 'docs' -Provides-Extra: dotenv -Requires-Dist: python-dotenv ; extra == 'dotenv' - -Flask -===== - -Flask is a lightweight `WSGI`_ web application framework. It is designed -to make getting started quick and easy, with the ability to scale up to -complex applications. It began as a simple wrapper around `Werkzeug`_ -and `Jinja`_ and has become one of the most popular Python web -application frameworks. - -Flask offers suggestions, but doesn't enforce any dependencies or -project layout. It is up to the developer to choose the tools and -libraries they want to use. There are many extensions provided by the -community that make adding new functionality easy. - - -Installing ----------- - -Install and update using `pip`_: - -.. code-block:: text - - pip install -U Flask - - -A Simple Example ----------------- - -.. code-block:: python - - from flask import Flask - - app = Flask(__name__) - - @app.route("/") - def hello(): - return "Hello, World!" - -.. 
code-block:: text - - $ env FLASK_APP=hello.py flask run - * Serving Flask app "hello" - * Running on http://127.0.0.1:5000/ (Press CTRL+C to quit) - - -Contributing ------------- - -For guidance on setting up a development environment and how to make a -contribution to Flask, see the `contributing guidelines`_. - -.. _contributing guidelines: https://github.com/pallets/flask/blob/master/CONTRIBUTING.rst - - -Donate ------- - -The Pallets organization develops and supports Flask and the libraries -it uses. In order to grow the community of contributors and users, and -allow the maintainers to devote more time to the projects, `please -donate today`_. - -.. _please donate today: https://psfmember.org/civicrm/contribute/transact?reset=1&id=20 - - -Links ------ - -* Website: https://palletsprojects.com/p/flask/ -* Documentation: https://flask.palletsprojects.com/ -* Releases: https://pypi.org/project/Flask/ -* Code: https://github.com/pallets/flask -* Issue tracker: https://github.com/pallets/flask/issues -* Test status: https://dev.azure.com/pallets/flask/_build -* Official chat: https://discord.gg/t6rrQZH - -.. _WSGI: https://wsgi.readthedocs.io -.. _Werkzeug: https://www.palletsprojects.com/p/werkzeug/ -.. _Jinja: https://www.palletsprojects.com/p/jinja/ -.. _pip: https://pip.pypa.io/en/stable/quickstart/ - - diff --git a/venv/lib/python3.6/site-packages/Flask-1.1.1.dist-info/RECORD b/venv/lib/python3.6/site-packages/Flask-1.1.1.dist-info/RECORD deleted file mode 100644 index d815cdc..0000000 --- a/venv/lib/python3.6/site-packages/Flask-1.1.1.dist-info/RECORD +++ /dev/null @@ -1,48 +0,0 @@ -flask/__init__.py,sha256=qaBW4gy9Xxmdc3ygYO0_H214H1VpF7fq8xRR4XbqRjE,1894 -flask/__main__.py,sha256=fjVtt3QTANXlpJCOv3Ha7d5H-76MwzSIOab7SFD9TEk,254 -flask/_compat.py,sha256=8KPT54Iig96TuLipdogLRHNYToIcg-xPhnSV5VRERnw,4099 -flask/app.py,sha256=gLZInxueeQ9dkBo1wrntZ-bZqiDT4rYxy_AQ1xraFDc,98066 -flask/blueprints.py,sha256=vkdm8NusGsfZUeIfPdCluj733QFmiQcT4Sk1tuZLUjw,21400 -flask/cli.py,sha256=_WhPG1bggNdrP0QO95Vex6VJpDqTsVK0z54Y5poljKU,30933 -flask/config.py,sha256=3dejvQRYfNHw_V7dCLMxU8UNFpL34xIKemN7gHZIZ8Y,10052 -flask/ctx.py,sha256=cks-omGedkxawHFo6bKIrdOHsJCAgg1i_NWw_htxb5U,16724 -flask/debughelpers.py,sha256=-whvPKuAoU8AZ9c1z_INuOeBgfYDqE1J2xNBsoriugU,6475 -flask/globals.py,sha256=OgcHb6_NCyX6-TldciOdKcyj4PNfyQwClxdMhvov6aA,1637 -flask/helpers.py,sha256=x2Pa85R5dV6uA5f5423JTb6x4u6ZaMGf8sfosUZ76dQ,43004 -flask/logging.py,sha256=WcY5UkqTysGfmosyygSlXyZYGwOp3y-VsE6ehoJ48dk,3250 -flask/sessions.py,sha256=G0KsEkr_i1LG_wOINwFSOW3ts7Xbv4bNgEZKc7TRloc,14360 -flask/signals.py,sha256=yYLOed2x8WnQ7pirGalQYfpYpCILJ0LJhmNSrnWvjqw,2212 -flask/templating.py,sha256=F8E_IZXn9BGsjMzUJ5N_ACMyZdiFBp_SSEaUunvfZ7g,4939 -flask/testing.py,sha256=b0QaEejx0UcXqfSFP43k5W57bTVeDyrNK3uPD8JUpCk,10146 -flask/views.py,sha256=eeWnadLAj0QdQPLtjKipDetRZyG62CT2y7fNOFDJz0g,5802 -flask/wrappers.py,sha256=kgsvtZuMM6RQaDqhRbc5Pcj9vqTnaERl2pmXcdGL7LU,4736 -flask/json/__init__.py,sha256=6nITbZYiYOPB8Qfi1-dvsblwn01KRz8VOsMBIZyaYek,11988 -flask/json/tag.py,sha256=vq9GOllg_0kTWKuVFrwmkeOQzR-jdBD23x-89JyCCQI,8306 -Flask-1.1.1.dist-info/LICENSE.rst,sha256=SJqOEQhQntmKN7uYPhHg9-HTHwvY-Zp5yESOf_N9B-o,1475 -Flask-1.1.1.dist-info/METADATA,sha256=Ht4R6TpTKOaXOmmQHhEF3A0Obpzde2Ai0kzNdu6-VWQ,4400 -Flask-1.1.1.dist-info/WHEEL,sha256=h_aVn5OB2IERUjMbi2pucmR_zzWJtk303YXvhh60NJ8,110 -Flask-1.1.1.dist-info/entry_points.txt,sha256=gBLA1aKg0OYR8AhbAfg8lnburHtKcgJLDU52BBctN0k,42 
-Flask-1.1.1.dist-info/top_level.txt,sha256=dvi65F6AeGWVU0TBpYiC04yM60-FX1gJFkK31IKQr5c,6 -Flask-1.1.1.dist-info/RECORD,, -../../../bin/flask,sha256=-cLOd7vTAhTID_opTyZhKLiDYYDhP22ZZi5nO-mvSGg,241 -Flask-1.1.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -flask/__pycache__/blueprints.cpython-36.pyc,, -flask/__pycache__/logging.cpython-36.pyc,, -flask/__pycache__/sessions.cpython-36.pyc,, -flask/__pycache__/templating.cpython-36.pyc,, -flask/__pycache__/cli.cpython-36.pyc,, -flask/__pycache__/config.cpython-36.pyc,, -flask/__pycache__/signals.cpython-36.pyc,, -flask/__pycache__/testing.cpython-36.pyc,, -flask/__pycache__/views.cpython-36.pyc,, -flask/__pycache__/ctx.cpython-36.pyc,, -flask/__pycache__/app.cpython-36.pyc,, -flask/__pycache__/__main__.cpython-36.pyc,, -flask/__pycache__/_compat.cpython-36.pyc,, -flask/__pycache__/helpers.cpython-36.pyc,, -flask/__pycache__/wrappers.cpython-36.pyc,, -flask/__pycache__/__init__.cpython-36.pyc,, -flask/__pycache__/debughelpers.cpython-36.pyc,, -flask/__pycache__/globals.cpython-36.pyc,, -flask/json/__pycache__/tag.cpython-36.pyc,, -flask/json/__pycache__/__init__.cpython-36.pyc,, diff --git a/venv/lib/python3.6/site-packages/Flask-1.1.1.dist-info/WHEEL b/venv/lib/python3.6/site-packages/Flask-1.1.1.dist-info/WHEEL deleted file mode 100644 index 78e6f69..0000000 --- a/venv/lib/python3.6/site-packages/Flask-1.1.1.dist-info/WHEEL +++ /dev/null @@ -1,6 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.4) -Root-Is-Purelib: true -Tag: py2-none-any -Tag: py3-none-any - diff --git a/venv/lib/python3.6/site-packages/Flask-1.1.1.dist-info/entry_points.txt b/venv/lib/python3.6/site-packages/Flask-1.1.1.dist-info/entry_points.txt deleted file mode 100644 index 1eb0252..0000000 --- a/venv/lib/python3.6/site-packages/Flask-1.1.1.dist-info/entry_points.txt +++ /dev/null @@ -1,3 +0,0 @@ -[console_scripts] -flask = flask.cli:main - diff --git a/venv/lib/python3.6/site-packages/Flask-1.1.1.dist-info/top_level.txt b/venv/lib/python3.6/site-packages/Flask-1.1.1.dist-info/top_level.txt deleted file mode 100644 index 7e10602..0000000 --- a/venv/lib/python3.6/site-packages/Flask-1.1.1.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -flask diff --git a/venv/lib/python3.6/site-packages/Flask_Classy-0.6.10-py3.6.egg-info/PKG-INFO b/venv/lib/python3.6/site-packages/Flask_Classy-0.6.10-py3.6.egg-info/PKG-INFO deleted file mode 100644 index 0ae1dc7..0000000 --- a/venv/lib/python3.6/site-packages/Flask_Classy-0.6.10-py3.6.egg-info/PKG-INFO +++ /dev/null @@ -1,27 +0,0 @@ -Metadata-Version: 1.1 -Name: Flask-Classy -Version: 0.6.10 -Summary: Class based views for Flask -Home-page: https://github.com/apiguy/flask-classy -Author: Freedom Dumlao -Author-email: freedomdumlao@gmail.com -License: BSD -Description: - Flask-Classy - ------------- - - Class based views for Flask - -Platform: any -Classifier: Environment :: Web Environment -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: BSD License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.6 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.3 -Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content -Classifier: Topic :: Software Development :: Libraries :: Python Modules diff --git 
a/venv/lib/python3.6/site-packages/Flask_Classy-0.6.10-py3.6.egg-info/SOURCES.txt b/venv/lib/python3.6/site-packages/Flask_Classy-0.6.10-py3.6.egg-info/SOURCES.txt deleted file mode 100644 index 5c46de7..0000000 --- a/venv/lib/python3.6/site-packages/Flask_Classy-0.6.10-py3.6.egg-info/SOURCES.txt +++ /dev/null @@ -1,10 +0,0 @@ -README.rst -flask_classy.py -setup.cfg -setup.py -Flask_Classy.egg-info/PKG-INFO -Flask_Classy.egg-info/SOURCES.txt -Flask_Classy.egg-info/dependency_links.txt -Flask_Classy.egg-info/not-zip-safe -Flask_Classy.egg-info/requires.txt -Flask_Classy.egg-info/top_level.txt \ No newline at end of file diff --git a/venv/lib/python3.6/site-packages/Flask_Classy-0.6.10-py3.6.egg-info/dependency_links.txt b/venv/lib/python3.6/site-packages/Flask_Classy-0.6.10-py3.6.egg-info/dependency_links.txt deleted file mode 100644 index 8b13789..0000000 --- a/venv/lib/python3.6/site-packages/Flask_Classy-0.6.10-py3.6.egg-info/dependency_links.txt +++ /dev/null @@ -1 +0,0 @@ - diff --git a/venv/lib/python3.6/site-packages/Flask_Classy-0.6.10-py3.6.egg-info/installed-files.txt b/venv/lib/python3.6/site-packages/Flask_Classy-0.6.10-py3.6.egg-info/installed-files.txt deleted file mode 100644 index fd0a4b0..0000000 --- a/venv/lib/python3.6/site-packages/Flask_Classy-0.6.10-py3.6.egg-info/installed-files.txt +++ /dev/null @@ -1,8 +0,0 @@ -../__pycache__/flask_classy.cpython-36.pyc -../flask_classy.py -PKG-INFO -SOURCES.txt -dependency_links.txt -not-zip-safe -requires.txt -top_level.txt diff --git a/venv/lib/python3.6/site-packages/Flask_Classy-0.6.10-py3.6.egg-info/not-zip-safe b/venv/lib/python3.6/site-packages/Flask_Classy-0.6.10-py3.6.egg-info/not-zip-safe deleted file mode 100644 index 8b13789..0000000 --- a/venv/lib/python3.6/site-packages/Flask_Classy-0.6.10-py3.6.egg-info/not-zip-safe +++ /dev/null @@ -1 +0,0 @@ - diff --git a/venv/lib/python3.6/site-packages/Flask_Classy-0.6.10-py3.6.egg-info/requires.txt b/venv/lib/python3.6/site-packages/Flask_Classy-0.6.10-py3.6.egg-info/requires.txt deleted file mode 100644 index af05533..0000000 --- a/venv/lib/python3.6/site-packages/Flask_Classy-0.6.10-py3.6.egg-info/requires.txt +++ /dev/null @@ -1 +0,0 @@ -Flask>=0.9 diff --git a/venv/lib/python3.6/site-packages/Flask_Classy-0.6.10-py3.6.egg-info/top_level.txt b/venv/lib/python3.6/site-packages/Flask_Classy-0.6.10-py3.6.egg-info/top_level.txt deleted file mode 100644 index bf3aed1..0000000 --- a/venv/lib/python3.6/site-packages/Flask_Classy-0.6.10-py3.6.egg-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -flask_classy diff --git a/venv/lib/python3.6/site-packages/Flask_SocketIO-4.2.1.dist-info/INSTALLER b/venv/lib/python3.6/site-packages/Flask_SocketIO-4.2.1.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/venv/lib/python3.6/site-packages/Flask_SocketIO-4.2.1.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/venv/lib/python3.6/site-packages/Flask_SocketIO-4.2.1.dist-info/LICENSE b/venv/lib/python3.6/site-packages/Flask_SocketIO-4.2.1.dist-info/LICENSE deleted file mode 100644 index f5c10ab..0000000 --- a/venv/lib/python3.6/site-packages/Flask_SocketIO-4.2.1.dist-info/LICENSE +++ /dev/null @@ -1,20 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2014 Miguel Grinberg - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, 
sublicense, and/or sell copies of -the Software, and to permit persons to whom the Software is furnished to do so, -subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS -FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR -COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER -IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/venv/lib/python3.6/site-packages/Flask_SocketIO-4.2.1.dist-info/METADATA b/venv/lib/python3.6/site-packages/Flask_SocketIO-4.2.1.dist-info/METADATA deleted file mode 100644 index c432d2d..0000000 --- a/venv/lib/python3.6/site-packages/Flask_SocketIO-4.2.1.dist-info/METADATA +++ /dev/null @@ -1,28 +0,0 @@ -Metadata-Version: 2.1 -Name: Flask-SocketIO -Version: 4.2.1 -Summary: Socket.IO integration for Flask applications -Home-page: http://github.com/miguelgrinberg/Flask-SocketIO/ -Author: Miguel Grinberg -Author-email: miguelgrinberg50@gmail.com -License: MIT -Platform: any -Classifier: Environment :: Web Environment -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 3 -Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Requires-Dist: Flask (>=0.9) -Requires-Dist: python-socketio (>=4.3.0) - - -Flask-SocketIO --------------- - -Socket.IO integration for Flask applications. 
- - diff --git a/venv/lib/python3.6/site-packages/Flask_SocketIO-4.2.1.dist-info/RECORD b/venv/lib/python3.6/site-packages/Flask_SocketIO-4.2.1.dist-info/RECORD deleted file mode 100644 index a08ca89..0000000 --- a/venv/lib/python3.6/site-packages/Flask_SocketIO-4.2.1.dist-info/RECORD +++ /dev/null @@ -1,14 +0,0 @@ -flask_socketio/__init__.py,sha256=aburhZcXf3Ubm5HWqg7nWOFNDCV4yXLsfHHo2u0iDfM,44245 -flask_socketio/cli.py,sha256=4ymPf4kmrmiFgr60Wt0ZDDm2QWPnrHtkx7xcHjo_Irg,2916 -flask_socketio/namespace.py,sha256=kXy10sQFl3nbQr2CWgxM9IxzcHzKTWTPLDYl3vzPLLQ,2020 -flask_socketio/test_client.py,sha256=t7cDFgkb2hVaKbbBZm2Vnzz8OEwNtyeXds7NUKxUIzM,9695 -Flask_SocketIO-4.2.1.dist-info/LICENSE,sha256=aNCWbkgKjS_T1cJtACyZbvCM36KxWnfQ0LWTuavuYKQ,1082 -Flask_SocketIO-4.2.1.dist-info/METADATA,sha256=L-PmXGiiDVZHu80GBY0KoXMP06EDnspJEJEadfpXP4w,873 -Flask_SocketIO-4.2.1.dist-info/WHEEL,sha256=h_aVn5OB2IERUjMbi2pucmR_zzWJtk303YXvhh60NJ8,110 -Flask_SocketIO-4.2.1.dist-info/top_level.txt,sha256=C1ugzQBJ3HHUJsWGzyt70XRVOX-y4CUAR8MWKjwJOQ8,15 -Flask_SocketIO-4.2.1.dist-info/RECORD,, -Flask_SocketIO-4.2.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -flask_socketio/__pycache__/cli.cpython-36.pyc,, -flask_socketio/__pycache__/namespace.cpython-36.pyc,, -flask_socketio/__pycache__/__init__.cpython-36.pyc,, -flask_socketio/__pycache__/test_client.cpython-36.pyc,, diff --git a/venv/lib/python3.6/site-packages/Flask_SocketIO-4.2.1.dist-info/WHEEL b/venv/lib/python3.6/site-packages/Flask_SocketIO-4.2.1.dist-info/WHEEL deleted file mode 100644 index 78e6f69..0000000 --- a/venv/lib/python3.6/site-packages/Flask_SocketIO-4.2.1.dist-info/WHEEL +++ /dev/null @@ -1,6 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.4) -Root-Is-Purelib: true -Tag: py2-none-any -Tag: py3-none-any - diff --git a/venv/lib/python3.6/site-packages/Flask_SocketIO-4.2.1.dist-info/top_level.txt b/venv/lib/python3.6/site-packages/Flask_SocketIO-4.2.1.dist-info/top_level.txt deleted file mode 100644 index ba82ec3..0000000 --- a/venv/lib/python3.6/site-packages/Flask_SocketIO-4.2.1.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -flask_socketio diff --git a/venv/lib/python3.6/site-packages/GitPython-3.0.1.dist-info/AUTHORS b/venv/lib/python3.6/site-packages/GitPython-3.0.1.dist-info/AUTHORS deleted file mode 100644 index e91fd78..0000000 --- a/venv/lib/python3.6/site-packages/GitPython-3.0.1.dist-info/AUTHORS +++ /dev/null @@ -1,38 +0,0 @@ -GitPython was originally written by Michael Trier. -GitPython 0.2 was partially (re)written by Sebastian Thiel, based on 0.1.6 and git-dulwich. - -Contributors are: - --Michael Trier --Alan Briolat --Florian Apolloner --David Aguilar --Jelmer Vernooij --Steve Frécinaux --Kai Lautaportti --Paul Sowden --Sebastian Thiel --Jonathan Chu --Vincent Driessen --Phil Elson --Bernard `Guyzmo` Pratz --Timothy B. Hartman --Konstantin Popov --Peter Jones --Anson Mansfield --Ken Odegard --Alexis Horgix Chotard --Piotr Babij --Mikuláš Poul --Charles Bouchard-Légaré --Yaroslav Halchenko --Tim Swast --William Luc Ritchie --David Host --A. Jesse Jiryu Davis --Steven Whitman --Stefan Stancu --César Izurieta --Arthur Milchior - -Portions derived from other open source works and are clearly marked. 
diff --git a/venv/lib/python3.6/site-packages/GitPython-3.0.1.dist-info/INSTALLER b/venv/lib/python3.6/site-packages/GitPython-3.0.1.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/venv/lib/python3.6/site-packages/GitPython-3.0.1.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/venv/lib/python3.6/site-packages/GitPython-3.0.1.dist-info/LICENSE b/venv/lib/python3.6/site-packages/GitPython-3.0.1.dist-info/LICENSE deleted file mode 100644 index 5a9a6f8..0000000 --- a/venv/lib/python3.6/site-packages/GitPython-3.0.1.dist-info/LICENSE +++ /dev/null @@ -1,30 +0,0 @@ -Copyright (C) 2008, 2009 Michael Trier and contributors -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions -are met: - -* Redistributions of source code must retain the above copyright -notice, this list of conditions and the following disclaimer. - -* Redistributions in binary form must reproduce the above copyright -notice, this list of conditions and the following disclaimer in the -documentation and/or other materials provided with the distribution. - -* Neither the name of the GitPython project nor the names of -its contributors may be used to endorse or promote products derived -from this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED -TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR -PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF -LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING -NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
- diff --git a/venv/lib/python3.6/site-packages/GitPython-3.0.1.dist-info/METADATA b/venv/lib/python3.6/site-packages/GitPython-3.0.1.dist-info/METADATA deleted file mode 100644 index 00903f0..0000000 --- a/venv/lib/python3.6/site-packages/GitPython-3.0.1.dist-info/METADATA +++ /dev/null @@ -1,30 +0,0 @@ -Metadata-Version: 2.1 -Name: GitPython -Version: 3.0.1 -Summary: Python Git Library -Home-page: https://github.com/gitpython-developers/GitPython -Author: Sebastian Thiel, Michael Trier -Author-email: byronimo@gmail.com, mtrier@gmail.com -License: BSD License -Platform: UNKNOWN -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Console -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: BSD License -Classifier: Operating System :: OS Independent -Classifier: Operating System :: POSIX -Classifier: Operating System :: Microsoft :: Windows -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Requires-Python: >=3.0, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.* -Requires-Dist: gitdb2 (>=2.0.0) -Requires-Dist: ddt (>=1.1.1) - -GitPython is a python library used to interact with Git repositories - - diff --git a/venv/lib/python3.6/site-packages/GitPython-3.0.1.dist-info/RECORD b/venv/lib/python3.6/site-packages/GitPython-3.0.1.dist-info/RECORD deleted file mode 100644 index 1228ee8..0000000 --- a/venv/lib/python3.6/site-packages/GitPython-3.0.1.dist-info/RECORD +++ /dev/null @@ -1,205 +0,0 @@ -git/__init__.py,sha256=IlCyl1whk7laDXftgI_2KSrBpWcb22ceo1XDTW70sM4,2334 -git/cmd.py,sha256=LMQPoBNZyOioob082qOySDtEGJxQF1SdV9eHMQ-KWAQ,42624 -git/compat.py,sha256=DF_FNA4u3PFpcKbvw9aG9TTA0gcyrgBPcKKi64-bIWc,9478 -git/config.py,sha256=EEtOissM9JvGGTK9dlb0Lq6MJ7TBAdQgSjr6xS9rI9c,27011 -git/db.py,sha256=pgC0B4wCqRN7xrvLuMQhLwM252ilkZyFL6qq0cPA2j4,1980 -git/diff.py,sha256=n-NVLLUSWSOvznnA6BqwCly1k9tqn5MPCuoDIPgCYMg,18661 -git/exc.py,sha256=U01S3wgpn9gGfGG8fIvL6p127Uu2LI_Ho7Jcc24xOYI,4878 -git/remote.py,sha256=MfJvyq3WQkLiIkDSk6JivGUPDW23YTBiaN9JMAnNkoE,35689 -git/util.py,sha256=bDO_vxWSwjnr7eedM9906xSxIlOb7UbQ2b1TjR_KMgs,32300 -git/index/__init__.py,sha256=Wj5zgJggZkEXueEDXdijxXahzxhextC08k70n0lHRN0,129 -git/index/base.py,sha256=ACRqf_rarJULDQwl5G8QWCbMv4JmT0E9Dg_uVfaLQdo,52048 -git/index/fun.py,sha256=ZptIr1Wr7Zh1RlI1LliRxxGaxxwLsLHd94GSpk9ClGw,14312 -git/index/typ.py,sha256=GLBZbDS3yScHJs0U18CX-heLTDjjGu6fN7T2L_NQr4A,4976 -git/index/util.py,sha256=l6oh9_1KU1v5GQdpxqCOqs6WLt5xN1uWvkVHQqcCToA,2902 -git/objects/__init__.py,sha256=6C02LlMygiFwTYtncz3GxEQfzHZr2WvUId0fnJ8HfLo,683 -git/objects/base.py,sha256=UZiyzyzx4_OJ3bWnwqb3mqh0LXT7oo0biYaTm-sLuAw,6689 -git/objects/blob.py,sha256=evI3ptPmlln6gLpoQRvbIKjK4v59nT8ipd1vk1dGYtc,927 -git/objects/commit.py,sha256=9Q5BupWXf5mkvoF5v8xrK4Z-3mFsksoII-SL7fT4iNI,20743 -git/objects/fun.py,sha256=kEUFE2Q5kXeoxbjALRNl_jzRP_m9KKqQyxIcW-h6XGM,7415 -git/objects/tag.py,sha256=mDkbxAiyZJBf8LR8uwzXroufj0TWZPEO4z_V6lasO-E,3201 -git/objects/tree.py,sha256=Ta1qAkuwzn7lk54_d7knqF2WL6DOc2MQG1k8mKLel1s,11069 -git/objects/util.py,sha256=GRIAWLR1gaK2QE3s5TtmNmN_LqOjEBlUkZWiouJMVe8,12355 -git/objects/submodule/__init__.py,sha256=OsMeiex7cG6ev2f35IaJ5csH-eXchSoNKCt4HXUG5Ws,93 
-git/objects/submodule/base.py,sha256=MH_U61iMnPTIlc9c9ZowRy59VTE_XWT5FWSOqGtXH50,53899 -git/objects/submodule/root.py,sha256=N2i0PjRcw5bNLLIDAkviQjXhf9RvGSfVnbav4FNzkXo,17656 -git/objects/submodule/util.py,sha256=VdgIG-cBo47b_7JcolAvjWaIMU0X5oImLjJ4wluc_iw,2745 -git/refs/__init__.py,sha256=3CRfAyE-Z78rJ3kSdKR1PNiXHEjHLw2VkU2JyDviNDU,242 -git/refs/head.py,sha256=MbO65f5SU0X3DvzmHpU0dY4h4SyvwqNaW2d8bUu3YSc,8724 -git/refs/log.py,sha256=_4JC--cqZc49Gm0ZLkE66ePsaxwP4TagiCQi53_xI88,10881 -git/refs/reference.py,sha256=OcQMwHJuelR1yKe1EF0IBfxeQZYv2kf0xunNSVwZV-M,4408 -git/refs/remote.py,sha256=6JOyIurnomM3tNXdKRXfMK_V75gJNgr9_2sdevKU_tI,1670 -git/refs/symbolic.py,sha256=2YA20sTqEP5IOqZi7TR9AxIBHrwAPRnR62wm9q8k5ko,26858 -git/refs/tag.py,sha256=qoHwJ9suHx8u8NNg-6GvNftK36RnCNkpElRjh2r9wcI,2964 -git/repo/__init__.py,sha256=ssUH4IVCoua5shI5h1l46P0X1kp82ydxVcH3PIVCnzg,108 -git/repo/base.py,sha256=l-nfQEbQl1rlNQ56dfV4u8jnUJkBxe0ccYOZ7mNsTtc,44229 -git/repo/fun.py,sha256=SuguBZs4sZE_SvAcfvn7yxXdoxKmgQdwUhgKAkeyISQ,11396 -git/test/__init__.py,sha256=q-WCITGqFKTHnRFjUvJz5hUJBi8SP4InaAZRXZ8qj8k,220 -git/test/test_actor.py,sha256=1bYmrTwWAYT_Qj9l9chbvuI8nNtHY6yGlDRJDDEq9A0,1242 -git/test/test_base.py,sha256=k6I5nG7ZeBCYpXwi3HX_mvURFelgvQFys5pWVQR6kjw,5649 -git/test/test_blob.py,sha256=Bs4FWke9Sjzx06EJuG9hh1T5qBgJEEz4aBCcr3cW9L0,878 -git/test/test_commit.py,sha256=IhFvgXZ05tT0MTdheIHjNCHjVXpWytORBdcgaunJ2HY,15460 -git/test/test_config.py,sha256=FDJi5_OEoVT_G9LbPh-xglCqtC0HdK1ZE7dKnfF3M9k,16604 -git/test/test_db.py,sha256=e9UNddyQfoa-kzZo-XyrwVuYiq887NUkYrK8wZkTu9M,939 -git/test/test_diff.py,sha256=D6JROrJiEmpvju_WZxkiO64ryU4yfA4viUdMN_3eBcg,12598 -git/test/test_docs.py,sha256=HZC6I1W4yF8YAGveIurEiTsWinDtnDP8lnwwIjLoKZw,25357 -git/test/test_exc.py,sha256=0DBYNiYVfPVlFKYRzqsoZUJnf0lQiUDmdrRIIHWeSlE,5123 -git/test/test_fun.py,sha256=a91XgGk-YPwlgJEc-gy2tI_ilSq29XSQEywwc-kDnG0,10456 -git/test/test_git.py,sha256=QiBmUVH1aro-GB_fIKoTFNB6t2Omdbdaut5W6AQ6U4g,11162 -git/test/test_index.py,sha256=WiHPJNgD5Y00YKJoklTZkPJH0LE-0XEOTrA_2O9rsRs,37014 -git/test/test_reflog.py,sha256=vfI-NQCtnGlJEUtYR0_k7Y1Hc4pZQ5F_T4T49hxSnNU,3505 -git/test/test_refs.py,sha256=2rNm9HdJZTWXx775JHG_R9Pd5X022IQ9C2CbP_9vDoE,23357 -git/test/test_remote.py,sha256=kVvdzfI2eQj-_9JUYY-s5Hak-S4_mu0y5bkOn8JNCs8,26706 -git/test/test_repo.py,sha256=pI82U2tAfAdOe7tLgJnu08lR1vM2ehkmBMdXO8jMiks,39594 -git/test/test_stats.py,sha256=qmF2lL1wW0tEd17E-tkjmpPFVXzjREf7KW5JMCTQ4Zg,971 -git/test/test_submodule.py,sha256=yyMisD-6UH0Im4sAKGgG1XTNMIBTbs5bRAz-3iZivOw,41981 -git/test/test_tree.py,sha256=nR5OAQZLhv7kISoL3RO8ppkXAbKFYz3XlPAxABU1b4o,4046 -git/test/test_util.py,sha256=gYdPEwowPzEdES7JPgCozIAac2PU-Fa8N3cf97ir__k,9092 -git/test/fixtures/blame,sha256=4EDRSXdgbRtxHU_2lASFXC7eNShL2cVq3IU43tLWlD4,3663 -git/test/fixtures/blame_binary,sha256=YLzoHqTAuv2Uv8IILh4ndQxJ_A1c09176E-3d5FMQsM,14807 -git/test/fixtures/blame_complex_revision,sha256=tPguLsqmLxjuZWg5nRcdZCZeaBi-LOeVQEHfTX6X_B0,7645 -git/test/fixtures/blame_incremental,sha256=3VXtrk8LVqfS5f2vsP5DTzFU3opeevUbENQUq22vTdw,982 -git/test/fixtures/blame_incremental_2.11.1_plus,sha256=JDA_xCevOrOMDeKW-U8svYeA0E8Pa3sI7G8GALpxOHw,1154 -git/test/fixtures/cat_file.py,sha256=7RDIymGyByw8I1OibenXM-DVsZ0_7gpazeYYG4C5GDM,136 -git/test/fixtures/cat_file_blob,sha256=ZOyIygCyaOW6GjVnihtTFtIS9PNmskdyMlNKiuyjfzw,11 -git/test/fixtures/cat_file_blob_nl,sha256=GJShnIW6FTrL90OsTkP8AEyJFgSyb4xp4eg-oq_HxI8,12 -git/test/fixtures/cat_file_blob_size,sha256=JdTyqG3rXiV0uzIQtnuyT8xK-xn5OntloFfaqHSp0Y4,3 
-git/test/fixtures/commit_invalid_data,sha256=QlV-Pw5mw1Vhp6qivAQY5kcBP_BMJ_OIdLCinmes5Sw,242 -git/test/fixtures/commit_with_gpgsig,sha256=3in_tJPkQv2K1wFx-PGqaCZQe40liMnl9cMYOJ8krTA,1387 -git/test/fixtures/diff_2,sha256=sxE-xkV5lQrUEbpllp2X_AcFfPUmUr2wvSsc9qkZQLc,1994 -git/test/fixtures/diff_2f,sha256=na11T8R1dhJUOKeO-fEeHymOxhXNrjvzzmA_r7x6oJM,732 -git/test/fixtures/diff_abbrev-40_full-index_M_raw_no-color,sha256=AW-YEfutyH_RVyaP2nCTPhtjvkfqWi7NVL4s9Ab3Qww,109 -git/test/fixtures/diff_change_in_type,sha256=Wo1iCaT1YBfGn5ZSJ40H7iVeqXKm-v-qJnsBUBKrpsI,319 -git/test/fixtures/diff_change_in_type_raw,sha256=67KYtwIlQdTSwesABnIYTZxFgiwPhVyBXaDFoPXRFt4,108 -git/test/fixtures/diff_f,sha256=sNsG26bYvqU4pK_RwahaO-Lya8O9Xonwlyth8do_ptY,504 -git/test/fixtures/diff_file_with_spaces,sha256=BOvQkq4AjQ_cR1e0iLYDQdNq2BLa-P5xhI4Xal7hYcE,216 -git/test/fixtures/diff_i,sha256=792rEQvP9Q-MNxZ3_FsvhG5emE_q1nT9jpmQ_A1hFWE,5705 -git/test/fixtures/diff_index_patch,sha256=qd9jD_eAQY5I9OLsbqdz3-lm_ncL2ALJhVLyj3enAfk,4598 -git/test/fixtures/diff_index_raw,sha256=odNXPZQ4rlBnqYfJvvTKGS8QvfJE33WN_X-lIRMT8NI,101 -git/test/fixtures/diff_initial,sha256=1RJTg7QSTdMGlqLDvjFUhKtV0bAV2NFW8rHBgzlVfyg,76 -git/test/fixtures/diff_mode_only,sha256=pqDOHBLm09TWZ0orff-S7pCkQktD2sooW5mURG0vqLQ,46005 -git/test/fixtures/diff_new_mode,sha256=b70EDNoC_gfq_P_fVFCIqT3WHU_P0l-1jhuR2cSEJFg,546 -git/test/fixtures/diff_numstat,sha256=_Ls171vvsERXlRiJ1i1tA5vHyoYCzt3hKorFmic7UyE,22 -git/test/fixtures/diff_p,sha256=3YlhR3UNFIPDv90Zn1vCXC46kQCVDuepUZIzwzD8xmk,19273 -git/test/fixtures/diff_patch_binary,sha256=CLWigD0x0z3n_fpdh8LlkEyRUy7oDiWM-CJpGrqWPiM,155 -git/test/fixtures/diff_patch_unsafe_paths,sha256=jsc2GM8j56puEDnMEhlBHG4jIhziN0uY8cuzGTTtHmw,3145 -git/test/fixtures/diff_raw_binary,sha256=-PUPqf5wop8KkmubHnPK6RAVinlJuQf9Lqo4VBff23I,103 -git/test/fixtures/diff_rename,sha256=-f4kqw0Zt1lRZZOmt5I0w9Jenbr3PngyTH2QeUQfv8g,415 -git/test/fixtures/diff_rename_raw,sha256=VVBUjGEoXWWMYQFq-dyE708DijCnG974Qn79plVT39Q,112 -git/test/fixtures/diff_tree_numstat_root,sha256=NbBofQm3wGm-1hyz8XKIoxMtC_bzz4x8TlxxuF8LLDU,63 -git/test/fixtures/for_each_ref_with_path_component,sha256=hHVSiVHNEW5PKSPP4zFxxpYs4EYlPSJ9y-yykzkpWjk,84 -git/test/fixtures/git_config,sha256=_Igi3In2TsksvwUdn7YcusMv-069ftMdlV1G7ZCs8nU,1517 -git/test/fixtures/git_config-inc.cfg,sha256=jYjjNgfYBBkEAXYj5wLy7en-ISXbvVyOOfOmKsURYdc,92 -git/test/fixtures/git_config_global,sha256=_tFDHYTW1Hxue2WXqjafVm_b9eM-OjTV6WTD2yZ3aqM,366 -git/test/fixtures/git_config_multiple,sha256=xhyn_df95CrbWfA_YWV_Y1eR9bpbc-xZxWAnzCJTUU4,121 -git/test/fixtures/git_config_with_comments,sha256=Q9IHrB4KE3l15iXoYD9-4TIMyd_rFczQ1CPAu-CI8bU,3997 -git/test/fixtures/git_config_with_empty_value,sha256=686iisjxnex4YeT4qWdjsQh22X8UDw5yzKSerefFSTM,35 -git/test/fixtures/git_file,sha256=44Qr9_8TluxWGPiPjDT4dEyF8x3fvnA9W7moDNiFAKo,16 -git/test/fixtures/index,sha256=OBeM4XodizcBFgK_7S92fdjNTaitNxGzSBkcHXXWQvs,163616 -git/test/fixtures/index_merge,sha256=IdtRRV85gi9dGFC4LNuGrZU2yttGAAANeS0_qvNO85w,9192 -git/test/fixtures/issue-301_stderr,sha256=z6QL_UgCKQ1MMviNQNdhM22hOgp00zfJyc5LCm7Jl64,302879 -git/test/fixtures/ls_tree_a,sha256=uBvIY8-7HnaBvSsVYigYJdsbeslxrtfeXh-tWXKtOnc,429 -git/test/fixtures/ls_tree_b,sha256=pW3aIRcXMA1ZSE36049fJWeiVQl95qk_31U8Eh3Tc1c,119 -git/test/fixtures/ls_tree_commit,sha256=cOgzX5Qcqvy4LU4dIBkcc63ccrOPBLab5DsCQPVpz_E,173 -git/test/fixtures/ls_tree_empty,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -git/test/fixtures/reflog_HEAD,sha256=8J5zwsJRmdb8xdWyQoRUzJYJnDSaeo0rLa5uthBovyQ,114210 
-git/test/fixtures/reflog_invalid_date,sha256=VlAYk2kGs3CySphJV0OmWwpWZK_uB9FxICTICZuKwSM,409 -git/test/fixtures/reflog_invalid_email,sha256=1OoNdoGKNcNKWVQAv5ZKSgVEt0zSkigvHOTs3MMhVW0,411 -git/test/fixtures/reflog_invalid_newsha,sha256=i-xph-C12dZT-dEKWS4VTDtX4AzQVUcCF3KXfMp9Gu0,404 -git/test/fixtures/reflog_invalid_oldsha,sha256=guzXH-wQOfz3yQJFMChzhuXcgQ6G6rGTSwlIdBVX8Wg,398 -git/test/fixtures/reflog_invalid_sep,sha256=0D9WHWpIGE2tQXD8utDcq-bbxdgVnWWCAMK_vwI3-zA,415 -git/test/fixtures/reflog_master,sha256=K1-VX1oQ3gM_23qTjVV-8yQOXeXuRtePgUXAE6D1TVo,31286 -git/test/fixtures/rev_list,sha256=pJPFZuJGwLzQ6m4P2d7VNaRLdMefGxxtztgU9fQfCCU,123 -git/test/fixtures/rev_list_bisect_all,sha256=r0gnyZwq-IVHxNss4qE6zMv29PEcLyE0t_fV4MKISHc,2172 -git/test/fixtures/rev_list_commit_diffs,sha256=n8qhU8FHEqr7Z8z8PvRGEODveuPbFIuaXB8UYGTqTPc,306 -git/test/fixtures/rev_list_commit_idabbrev,sha256=W_cHcxor5sFGeS8-nmIpWNim-wtFY7636Hwh04Sfve8,271 -git/test/fixtures/rev_list_commit_stats,sha256=1bZgYDN3iqjdIiZtYUuPNZXcyJYlDiusy3dw5utnr3M,244 -git/test/fixtures/rev_list_count,sha256=wyBmlaA46bFntXaF6nx28phdDPwTZVW5kJr71pRrmb0,26855 -git/test/fixtures/rev_list_delta_a,sha256=ikrcoYkO311vbCS_xoeyKE6myYKlKP5by88KU4oG6qI,328 -git/test/fixtures/rev_list_delta_b,sha256=iiTGJRF2nzZrsHLXB1oOcZaoLvnSGAB3B9PLt5acmno,451 -git/test/fixtures/rev_list_single,sha256=YqAJowQ_ujS8kUnNfBlm8ibKY7ki5vu2nXc_vt-4nq0,293 -git/test/fixtures/rev_parse,sha256=y9iM5H6QPxDLEoGO9D4qSMBuDw4nz196c5VMflC1rak,8 -git/test/fixtures/show_empty_commit,sha256=xeKoNCOFUPZcSztV3olKSs6u14fVdHwjnkGYLsEcZn8,252 -git/test/fixtures/uncommon_branch_prefix_FETCH_HEAD,sha256=NO36DB4HWl4sOisR6EdFroTDakA-4XOx2kk4lFQIsiQ,603 -git/test/fixtures/uncommon_branch_prefix_stderr,sha256=4-rJlXvPu-1ByjZzsUUJXFruPRxan7C5ssNtM7qZbeo,324 -git/test/lib/__init__.py,sha256=k2xMRT9FC0m3yX_iMKaDcyuuZe0tGSr95ork3VOaeWk,414 -git/test/lib/asserts.py,sha256=_9sOUHopeO-3PZOkxMXfTWaTxxPaWwmpnAVaDxpcaWk,2273 -git/test/lib/helper.py,sha256=TI69pdx0xIMhfzOzBDB3BwqPvPsykp9bUXiyw2B0Xd8,13592 -git/test/performance/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -git/test/performance/lib.py,sha256=qSicSiyRI30rP3EFeVoevC_sBDgXDFtZKIFr_Ikz84g,2427 -git/test/performance/test_commit.py,sha256=ws8ORcvg3h0eXkI2G7a4OEl5QFG-9s2Agf0ut_8sUqU,3732 -git/test/performance/test_odb.py,sha256=knbDhq2sRagwyGHKQ7uNZLWN8bzYt_VF6bNucoON6dI,2651 -git/test/performance/test_streams.py,sha256=w5mqPX6Yjo-j3mAz9GGa0O_pzYCBgGhhdoKCAz-iMD0,5850 -GitPython-3.0.1.dist-info/AUTHORS,sha256=03VRYpqdYOsIlgdOybv95yCNoISZ512MXZqdz2TeAio,1523 -GitPython-3.0.1.dist-info/LICENSE,sha256=_WV__CzvY9JceMq3gI1BTdA6KC5jiTSR_RHDL5i-Z_s,1521 -GitPython-3.0.1.dist-info/METADATA,sha256=8fNgCOWWxPpu5L_dQKqX2rHeifR5tDoc3cg_NCoyDMo,1127 -GitPython-3.0.1.dist-info/WHEEL,sha256=S8S5VL-stOTSZDYxHyf0KP7eds0J72qrK0Evu3TfyAY,92 -GitPython-3.0.1.dist-info/top_level.txt,sha256=0hzDuIp8obv624V3GmbqsagBWkk8ohtGU-Bc1PmTT0o,4 -GitPython-3.0.1.dist-info/RECORD,, -GitPython-3.0.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -git/test/__pycache__/test_commit.cpython-36.pyc,, -git/test/__pycache__/test_repo.cpython-36.pyc,, -git/test/__pycache__/test_exc.cpython-36.pyc,, -git/test/__pycache__/test_docs.cpython-36.pyc,, -git/test/__pycache__/test_fun.cpython-36.pyc,, -git/test/__pycache__/test_refs.cpython-36.pyc,, -git/test/__pycache__/test_stats.cpython-36.pyc,, -git/test/__pycache__/test_base.cpython-36.pyc,, -git/test/__pycache__/test_git.cpython-36.pyc,, -git/test/__pycache__/test_config.cpython-36.pyc,, 
-git/test/__pycache__/test_diff.cpython-36.pyc,, -git/test/__pycache__/test_db.cpython-36.pyc,, -git/test/__pycache__/test_blob.cpython-36.pyc,, -git/test/__pycache__/test_remote.cpython-36.pyc,, -git/test/__pycache__/test_tree.cpython-36.pyc,, -git/test/__pycache__/test_util.cpython-36.pyc,, -git/test/__pycache__/test_index.cpython-36.pyc,, -git/test/__pycache__/test_reflog.cpython-36.pyc,, -git/test/__pycache__/__init__.cpython-36.pyc,, -git/test/__pycache__/test_actor.cpython-36.pyc,, -git/test/__pycache__/test_submodule.cpython-36.pyc,, -git/test/lib/__pycache__/helper.cpython-36.pyc,, -git/test/lib/__pycache__/asserts.cpython-36.pyc,, -git/test/lib/__pycache__/__init__.cpython-36.pyc,, -git/test/fixtures/__pycache__/cat_file.cpython-36.pyc,, -git/test/performance/__pycache__/test_odb.cpython-36.pyc,, -git/test/performance/__pycache__/test_commit.cpython-36.pyc,, -git/test/performance/__pycache__/test_streams.cpython-36.pyc,, -git/test/performance/__pycache__/lib.cpython-36.pyc,, -git/test/performance/__pycache__/__init__.cpython-36.pyc,, -git/objects/submodule/__pycache__/util.cpython-36.pyc,, -git/objects/submodule/__pycache__/root.cpython-36.pyc,, -git/objects/submodule/__pycache__/base.cpython-36.pyc,, -git/objects/submodule/__pycache__/__init__.cpython-36.pyc,, -git/objects/__pycache__/tree.cpython-36.pyc,, -git/objects/__pycache__/util.cpython-36.pyc,, -git/objects/__pycache__/tag.cpython-36.pyc,, -git/objects/__pycache__/blob.cpython-36.pyc,, -git/objects/__pycache__/base.cpython-36.pyc,, -git/objects/__pycache__/commit.cpython-36.pyc,, -git/objects/__pycache__/__init__.cpython-36.pyc,, -git/objects/__pycache__/fun.cpython-36.pyc,, -git/__pycache__/util.cpython-36.pyc,, -git/__pycache__/cmd.cpython-36.pyc,, -git/__pycache__/config.cpython-36.pyc,, -git/__pycache__/db.cpython-36.pyc,, -git/__pycache__/remote.cpython-36.pyc,, -git/__pycache__/diff.cpython-36.pyc,, -git/__pycache__/compat.cpython-36.pyc,, -git/__pycache__/exc.cpython-36.pyc,, -git/__pycache__/__init__.cpython-36.pyc,, -git/refs/__pycache__/tag.cpython-36.pyc,, -git/refs/__pycache__/head.cpython-36.pyc,, -git/refs/__pycache__/reference.cpython-36.pyc,, -git/refs/__pycache__/remote.cpython-36.pyc,, -git/refs/__pycache__/symbolic.cpython-36.pyc,, -git/refs/__pycache__/log.cpython-36.pyc,, -git/refs/__pycache__/__init__.cpython-36.pyc,, -git/index/__pycache__/util.cpython-36.pyc,, -git/index/__pycache__/typ.cpython-36.pyc,, -git/index/__pycache__/base.cpython-36.pyc,, -git/index/__pycache__/__init__.cpython-36.pyc,, -git/index/__pycache__/fun.cpython-36.pyc,, -git/repo/__pycache__/base.cpython-36.pyc,, -git/repo/__pycache__/__init__.cpython-36.pyc,, -git/repo/__pycache__/fun.cpython-36.pyc,, diff --git a/venv/lib/python3.6/site-packages/GitPython-3.0.1.dist-info/WHEEL b/venv/lib/python3.6/site-packages/GitPython-3.0.1.dist-info/WHEEL deleted file mode 100644 index c57a597..0000000 --- a/venv/lib/python3.6/site-packages/GitPython-3.0.1.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.4) -Root-Is-Purelib: true -Tag: py3-none-any - diff --git a/venv/lib/python3.6/site-packages/GitPython-3.0.1.dist-info/top_level.txt b/venv/lib/python3.6/site-packages/GitPython-3.0.1.dist-info/top_level.txt deleted file mode 100644 index 5664e30..0000000 --- a/venv/lib/python3.6/site-packages/GitPython-3.0.1.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -git diff --git a/venv/lib/python3.6/site-packages/Jinja2-2.10.1.dist-info/INSTALLER 
b/venv/lib/python3.6/site-packages/Jinja2-2.10.1.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/venv/lib/python3.6/site-packages/Jinja2-2.10.1.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/venv/lib/python3.6/site-packages/Jinja2-2.10.1.dist-info/LICENSE b/venv/lib/python3.6/site-packages/Jinja2-2.10.1.dist-info/LICENSE deleted file mode 100644 index 31bf900..0000000 --- a/venv/lib/python3.6/site-packages/Jinja2-2.10.1.dist-info/LICENSE +++ /dev/null @@ -1,31 +0,0 @@ -Copyright (c) 2009 by the Jinja Team, see AUTHORS for more details. - -Some rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - - * Redistributions in binary form must reproduce the above - copyright notice, this list of conditions and the following - disclaimer in the documentation and/or other materials provided - with the distribution. - - * The names of the contributors may not be used to endorse or - promote products derived from this software without specific - prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/venv/lib/python3.6/site-packages/Jinja2-2.10.1.dist-info/METADATA b/venv/lib/python3.6/site-packages/Jinja2-2.10.1.dist-info/METADATA deleted file mode 100644 index fb4a867..0000000 --- a/venv/lib/python3.6/site-packages/Jinja2-2.10.1.dist-info/METADATA +++ /dev/null @@ -1,67 +0,0 @@ -Metadata-Version: 2.1 -Name: Jinja2 -Version: 2.10.1 -Summary: A small but fast and easy to use stand-alone template engine written in pure python. 
-Home-page: http://jinja.pocoo.org/ -Author: Armin Ronacher -Author-email: armin.ronacher@active-4.com -License: BSD -Platform: UNKNOWN -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Web Environment -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: BSD License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.6 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Text Processing :: Markup :: HTML -Requires-Dist: MarkupSafe (>=0.23) -Provides-Extra: i18n -Requires-Dist: Babel (>=0.8) ; extra == 'i18n' - - -Jinja2 -~~~~~~ - -Jinja2 is a template engine written in pure Python. It provides a -`Django`_ inspired non-XML syntax but supports inline expressions and -an optional `sandboxed`_ environment. - -Nutshell -------- - -Here a small example of a Jinja template:: - - {% extends 'base.html' %} - {% block title %}Memberlist{% endblock %} - {% block content %} - <ul> - {% for user in users %} - <li><a href="{{ user.url }}">{{ user.username }}</a></li> - {% endfor %} - </ul> - {% endblock %} - -Philosophy ---------- - -Application logic is for the controller but don't try to make the life -for the template designer too hard by giving him too few functionality. - -For more informations visit the new `Jinja2 webpage`_ and `documentation`_. - -.. _sandboxed: https://en.wikipedia.org/wiki/Sandbox_(computer_security) -.. _Django: https://www.djangoproject.com/ -.. _Jinja2 webpage: http://jinja.pocoo.org/ -.. 
_documentation: http://jinja.pocoo.org/2/documentation/ - - diff --git a/venv/lib/python3.6/site-packages/Jinja2-2.10.1.dist-info/RECORD b/venv/lib/python3.6/site-packages/Jinja2-2.10.1.dist-info/RECORD deleted file mode 100644 index 7231bdc..0000000 --- a/venv/lib/python3.6/site-packages/Jinja2-2.10.1.dist-info/RECORD +++ /dev/null @@ -1,61 +0,0 @@ -jinja2/__init__.py,sha256=V1D-JHQKklZseXOMA-uAW7-BeKe_TfPpOFi9-dV04ZA,2616 -jinja2/_compat.py,sha256=xP60CE5Qr8FTYcDE1f54tbZLKGvMwYml4-8T7Q4KG9k,2596 -jinja2/_identifier.py,sha256=W1QBSY-iJsyt6oR_nKSuNNCzV95vLIOYgUNPUI1d5gU,1726 -jinja2/asyncfilters.py,sha256=cTDPvrS8Hp_IkwsZ1m9af_lr5nHysw7uTa5gV0NmZVE,4144 -jinja2/asyncsupport.py,sha256=UErQ3YlTLaSjFb94P4MVn08-aVD9jJxty2JVfMRb-1M,7878 -jinja2/bccache.py,sha256=nQldx0ZRYANMyfvOihRoYFKSlUdd5vJkS7BjxNwlOZM,12794 -jinja2/compiler.py,sha256=BqC5U6JxObSRhblyT_a6Tp5GtEU5z3US1a4jLQaxxgo,65386 -jinja2/constants.py,sha256=uwwV8ZUhHhacAuz5PTwckfsbqBaqM7aKfyJL7kGX5YQ,1626 -jinja2/debug.py,sha256=WTVeUFGUa4v6ReCsYv-iVPa3pkNB75OinJt3PfxNdXs,12045 -jinja2/defaults.py,sha256=Em-95hmsJxIenDCZFB1YSvf9CNhe9rBmytN3yUrBcWA,1400 -jinja2/environment.py,sha256=VnkAkqw8JbjZct4tAyHlpBrka2vqB-Z58RAP-32P1ZY,50849 -jinja2/exceptions.py,sha256=_Rj-NVi98Q6AiEjYQOsP8dEIdu5AlmRHzcSNOPdWix4,4428 -jinja2/ext.py,sha256=atMQydEC86tN1zUsdQiHw5L5cF62nDbqGue25Yiu3N4,24500 -jinja2/filters.py,sha256=yOAJk0MsH-_gEC0i0U6NweVQhbtYaC-uE8xswHFLF4w,36528 -jinja2/idtracking.py,sha256=2GbDSzIvGArEBGLkovLkqEfmYxmWsEf8c3QZwM4uNsw,9197 -jinja2/lexer.py,sha256=ySEPoXd1g7wRjsuw23uimS6nkGN5aqrYwcOKxCaVMBQ,28559 -jinja2/loaders.py,sha256=xiTuURKAEObyym0nU8PCIXu_Qp8fn0AJ5oIADUUm-5Q,17382 -jinja2/meta.py,sha256=fmKHxkmZYAOm9QyWWy8EMd6eefAIh234rkBMW2X4ZR8,4340 -jinja2/nativetypes.py,sha256=_sJhS8f-8Q0QMIC0dm1YEdLyxEyoO-kch8qOL5xUDfE,7308 -jinja2/nodes.py,sha256=L10L_nQDfubLhO3XjpF9qz46FSh2clL-3e49ogVlMmA,30853 -jinja2/optimizer.py,sha256=MsdlFACJ0FRdPtjmCAdt7JQ9SGrXFaDNUaslsWQaG3M,1722 -jinja2/parser.py,sha256=lPzTEbcpTRBLw8ii6OYyExHeAhaZLMA05Hpv4ll3ULk,35875 -jinja2/runtime.py,sha256=DHdD38Pq8gj7uWQC5usJyWFoNWL317A9AvXOW_CLB34,27755 -jinja2/sandbox.py,sha256=UmX8hVjdaERCbA3RXBwrV1f-beA23KmijG5kzPJyU4A,17106 -jinja2/tests.py,sha256=iJQLwbapZr-EKquTG_fVOVdwHUUKf3SX9eNkjQDF8oU,4237 -jinja2/utils.py,sha256=q24VupGZotQ-uOyrJxCaXtDWhZC1RgsQG7kcdmjck2Q,20629 -jinja2/visitor.py,sha256=JD1H1cANA29JcntFfN5fPyqQxB4bI4wC00BzZa-XHks,3316 -Jinja2-2.10.1.dist-info/LICENSE,sha256=JvzUNv3Io51EiWrAPm8d_SXjhJnEjyDYvB3Tvwqqils,1554 -Jinja2-2.10.1.dist-info/METADATA,sha256=rx0eN8lX8iq8-YVppmCzV1Qx4y3Pj9IWi08mXUCrewI,2227 -Jinja2-2.10.1.dist-info/WHEEL,sha256=HX-v9-noUkyUoxyZ1PMSuS7auUxDAR4VBdoYLqD0xws,110 -Jinja2-2.10.1.dist-info/entry_points.txt,sha256=NdzVcOrqyNyKDxD09aERj__3bFx2paZhizFDsKmVhiA,72 -Jinja2-2.10.1.dist-info/top_level.txt,sha256=PkeVWtLb3-CqjWi1fO29OCbj55EhX_chhKrCdrVe_zs,7 -Jinja2-2.10.1.dist-info/RECORD,, -Jinja2-2.10.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -jinja2/__pycache__/lexer.cpython-36.pyc,, -jinja2/__pycache__/sandbox.cpython-36.pyc,, -jinja2/__pycache__/debug.cpython-36.pyc,, -jinja2/__pycache__/constants.cpython-36.pyc,, -jinja2/__pycache__/idtracking.cpython-36.pyc,, -jinja2/__pycache__/parser.cpython-36.pyc,, -jinja2/__pycache__/exceptions.cpython-36.pyc,, -jinja2/__pycache__/ext.cpython-36.pyc,, -jinja2/__pycache__/meta.cpython-36.pyc,, -jinja2/__pycache__/environment.cpython-36.pyc,, -jinja2/__pycache__/loaders.cpython-36.pyc,, -jinja2/__pycache__/asyncsupport.cpython-36.pyc,, 
-jinja2/__pycache__/asyncfilters.cpython-36.pyc,, -jinja2/__pycache__/tests.cpython-36.pyc,, -jinja2/__pycache__/optimizer.cpython-36.pyc,, -jinja2/__pycache__/compiler.cpython-36.pyc,, -jinja2/__pycache__/bccache.cpython-36.pyc,, -jinja2/__pycache__/_identifier.cpython-36.pyc,, -jinja2/__pycache__/_compat.cpython-36.pyc,, -jinja2/__pycache__/defaults.cpython-36.pyc,, -jinja2/__pycache__/utils.cpython-36.pyc,, -jinja2/__pycache__/nodes.cpython-36.pyc,, -jinja2/__pycache__/filters.cpython-36.pyc,, -jinja2/__pycache__/runtime.cpython-36.pyc,, -jinja2/__pycache__/__init__.cpython-36.pyc,, -jinja2/__pycache__/nativetypes.cpython-36.pyc,, -jinja2/__pycache__/visitor.cpython-36.pyc,, diff --git a/venv/lib/python3.6/site-packages/Jinja2-2.10.1.dist-info/WHEEL b/venv/lib/python3.6/site-packages/Jinja2-2.10.1.dist-info/WHEEL deleted file mode 100644 index c8240f0..0000000 --- a/venv/lib/python3.6/site-packages/Jinja2-2.10.1.dist-info/WHEEL +++ /dev/null @@ -1,6 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.1) -Root-Is-Purelib: true -Tag: py2-none-any -Tag: py3-none-any - diff --git a/venv/lib/python3.6/site-packages/Jinja2-2.10.1.dist-info/entry_points.txt b/venv/lib/python3.6/site-packages/Jinja2-2.10.1.dist-info/entry_points.txt deleted file mode 100644 index 32e6b75..0000000 --- a/venv/lib/python3.6/site-packages/Jinja2-2.10.1.dist-info/entry_points.txt +++ /dev/null @@ -1,4 +0,0 @@ - - [babel.extractors] - jinja2 = jinja2.ext:babel_extract[i18n] - \ No newline at end of file diff --git a/venv/lib/python3.6/site-packages/Jinja2-2.10.1.dist-info/top_level.txt b/venv/lib/python3.6/site-packages/Jinja2-2.10.1.dist-info/top_level.txt deleted file mode 100644 index 7f7afbf..0000000 --- a/venv/lib/python3.6/site-packages/Jinja2-2.10.1.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -jinja2 diff --git a/venv/lib/python3.6/site-packages/MarkupSafe-1.1.1.dist-info/INSTALLER b/venv/lib/python3.6/site-packages/MarkupSafe-1.1.1.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/venv/lib/python3.6/site-packages/MarkupSafe-1.1.1.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/venv/lib/python3.6/site-packages/MarkupSafe-1.1.1.dist-info/LICENSE.rst b/venv/lib/python3.6/site-packages/MarkupSafe-1.1.1.dist-info/LICENSE.rst deleted file mode 100644 index 9d227a0..0000000 --- a/venv/lib/python3.6/site-packages/MarkupSafe-1.1.1.dist-info/LICENSE.rst +++ /dev/null @@ -1,28 +0,0 @@ -Copyright 2010 Pallets - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - -1. Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - -2. Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - -3. Neither the name of the copyright holder nor the names of its - contributors may be used to endorse or promote products derived from - this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A -PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED -TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR -PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF -LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING -NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/venv/lib/python3.6/site-packages/MarkupSafe-1.1.1.dist-info/METADATA b/venv/lib/python3.6/site-packages/MarkupSafe-1.1.1.dist-info/METADATA deleted file mode 100644 index b208d93..0000000 --- a/venv/lib/python3.6/site-packages/MarkupSafe-1.1.1.dist-info/METADATA +++ /dev/null @@ -1,103 +0,0 @@ -Metadata-Version: 2.1 -Name: MarkupSafe -Version: 1.1.1 -Summary: Safely add untrusted strings to HTML/XML markup. -Home-page: https://palletsprojects.com/p/markupsafe/ -Author: Armin Ronacher -Author-email: armin.ronacher@active-4.com -Maintainer: The Pallets Team -Maintainer-email: contact@palletsprojects.com -License: BSD-3-Clause -Project-URL: Documentation, https://markupsafe.palletsprojects.com/ -Project-URL: Code, https://github.com/pallets/markupsafe -Project-URL: Issue tracker, https://github.com/pallets/markupsafe/issues -Platform: UNKNOWN -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Web Environment -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: BSD License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Text Processing :: Markup :: HTML -Requires-Python: >=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.* - -MarkupSafe -========== - -MarkupSafe implements a text object that escapes characters so it is -safe to use in HTML and XML. Characters that have special meanings are -replaced so that they display as the actual characters. This mitigates -injection attacks, meaning untrusted user input can safely be displayed -on a page. - - -Installing ----------- - -Install and update using `pip`_: - -.. code-block:: text - - pip install -U MarkupSafe - -.. _pip: https://pip.pypa.io/en/stable/quickstart/ - - -Examples --------- - -.. code-block:: pycon - - >>> from markupsafe import Markup, escape - >>> # escape replaces special characters and wraps in Markup - >>> escape('<script>alert(document.cookie);</script>') - Markup(u'&lt;script&gt;alert(document.cookie);&lt;/script&gt;') - >>> # wrap in Markup to mark text "safe" and prevent escaping - >>> Markup('<strong>Hello</strong>') - Markup('<strong>hello</strong>') - >>> escape(Markup('<strong>Hello</strong>')) - Markup('<strong>hello</strong>') - >>> # Markup is a text subclass (str on Python 3, unicode on Python 2) - >>> # methods and operators escape their arguments - >>> template = Markup("Hello <em>%s</em>") - >>> template % '"World"' - Markup('Hello <em>&#34;World&#34;</em>') - - -Donate ------- - -The Pallets organization develops and supports MarkupSafe and other -libraries that use it. 
In order to grow the community of contributors -and users, and allow the maintainers to devote more time to the -projects, `please donate today`_. - -.. _please donate today: https://palletsprojects.com/donate - - -Links ------ - -* Website: https://palletsprojects.com/p/markupsafe/ -* Documentation: https://markupsafe.palletsprojects.com/ -* License: `BSD-3-Clause `_ -* Releases: https://pypi.org/project/MarkupSafe/ -* Code: https://github.com/pallets/markupsafe -* Issue tracker: https://github.com/pallets/markupsafe/issues -* Test status: - - * Linux, Mac: https://travis-ci.org/pallets/markupsafe - * Windows: https://ci.appveyor.com/project/pallets/markupsafe - -* Test coverage: https://codecov.io/gh/pallets/markupsafe - - diff --git a/venv/lib/python3.6/site-packages/MarkupSafe-1.1.1.dist-info/RECORD b/venv/lib/python3.6/site-packages/MarkupSafe-1.1.1.dist-info/RECORD deleted file mode 100644 index 58ac68e..0000000 --- a/venv/lib/python3.6/site-packages/MarkupSafe-1.1.1.dist-info/RECORD +++ /dev/null @@ -1,16 +0,0 @@ -markupsafe/__init__.py,sha256=oTblO5f9KFM-pvnq9bB0HgElnqkJyqHnFN1Nx2NIvnY,10126 -markupsafe/_compat.py,sha256=uEW1ybxEjfxIiuTbRRaJpHsPFf4yQUMMKaPgYEC5XbU,558 -markupsafe/_constants.py,sha256=zo2ajfScG-l1Sb_52EP3MlDCqO7Y1BVHUXXKRsVDRNk,4690 -markupsafe/_native.py,sha256=d-8S_zzYt2y512xYcuSxq0NeG2DUUvG80wVdTn-4KI8,1873 -markupsafe/_speedups.c,sha256=k0fzEIK3CP6MmMqeY0ob43TP90mVN0DTyn7BAl3RqSg,9884 -markupsafe/_speedups.cpython-36m-darwin.so,sha256=gAFPy56Sic0xoW6ZX3vNlSIudH37VfkTL5p5huZTCKQ,35080 -MarkupSafe-1.1.1.dist-info/LICENSE.rst,sha256=SJqOEQhQntmKN7uYPhHg9-HTHwvY-Zp5yESOf_N9B-o,1475 -MarkupSafe-1.1.1.dist-info/METADATA,sha256=nJHwJ4_4ka-V39QH883jPrslj6inNdyyNASBXbYgHXQ,3570 -MarkupSafe-1.1.1.dist-info/WHEEL,sha256=jbwwsYxUMSTI4YtVtq_YJIxL7IgPdHeCKUMmQlt2K6U,109 -MarkupSafe-1.1.1.dist-info/top_level.txt,sha256=qy0Plje5IJuvsCBjejJyhDCjEAdcDLK_2agVcex8Z6U,11 -MarkupSafe-1.1.1.dist-info/RECORD,, -MarkupSafe-1.1.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -markupsafe/__pycache__/_native.cpython-36.pyc,, -markupsafe/__pycache__/_constants.cpython-36.pyc,, -markupsafe/__pycache__/_compat.cpython-36.pyc,, -markupsafe/__pycache__/__init__.cpython-36.pyc,, diff --git a/venv/lib/python3.6/site-packages/MarkupSafe-1.1.1.dist-info/WHEEL b/venv/lib/python3.6/site-packages/MarkupSafe-1.1.1.dist-info/WHEEL deleted file mode 100644 index 368d7aa..0000000 --- a/venv/lib/python3.6/site-packages/MarkupSafe-1.1.1.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.1) -Root-Is-Purelib: false -Tag: cp36-cp36m-macosx_10_6_intel - diff --git a/venv/lib/python3.6/site-packages/MarkupSafe-1.1.1.dist-info/top_level.txt b/venv/lib/python3.6/site-packages/MarkupSafe-1.1.1.dist-info/top_level.txt deleted file mode 100644 index 75bf729..0000000 --- a/venv/lib/python3.6/site-packages/MarkupSafe-1.1.1.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -markupsafe diff --git a/venv/lib/python3.6/site-packages/PyYAML-5.1.2-py3.6.egg-info/PKG-INFO b/venv/lib/python3.6/site-packages/PyYAML-5.1.2-py3.6.egg-info/PKG-INFO deleted file mode 100644 index 03d9586..0000000 --- a/venv/lib/python3.6/site-packages/PyYAML-5.1.2-py3.6.egg-info/PKG-INFO +++ /dev/null @@ -1,38 +0,0 @@ -Metadata-Version: 1.2 -Name: PyYAML -Version: 5.1.2 -Summary: YAML parser and emitter for Python -Home-page: https://github.com/yaml/pyyaml -Author: Kirill Simonov -Author-email: xi@resolvent.net -License: MIT -Download-URL: https://pypi.org/project/PyYAML/ 
-Description: YAML is a data serialization format designed for human readability - and interaction with scripting languages. PyYAML is a YAML parser - and emitter for Python. - - PyYAML features a complete YAML 1.1 parser, Unicode support, pickle - support, capable extension API, and sensible error messages. PyYAML - supports standard YAML tags and provides Python-specific tags that - allow to represent an arbitrary Python object. - - PyYAML is applicable for a broad range of tasks from complex - configuration files to object serialization and persistence. -Platform: Any -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Python :: Implementation :: PyPy -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Text Processing :: Markup -Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.* diff --git a/venv/lib/python3.6/site-packages/PyYAML-5.1.2-py3.6.egg-info/SOURCES.txt b/venv/lib/python3.6/site-packages/PyYAML-5.1.2-py3.6.egg-info/SOURCES.txt deleted file mode 100644 index e1a8a21..0000000 --- a/venv/lib/python3.6/site-packages/PyYAML-5.1.2-py3.6.egg-info/SOURCES.txt +++ /dev/null @@ -1,27 +0,0 @@ -README -setup.cfg -ext/_yaml.c -ext/_yaml.h -ext/_yaml.pxd -ext/_yaml.pyx -lib3/PyYAML.egg-info/PKG-INFO -lib3/PyYAML.egg-info/SOURCES.txt -lib3/PyYAML.egg-info/dependency_links.txt -lib3/PyYAML.egg-info/top_level.txt -lib3/yaml/__init__.py -lib3/yaml/composer.py -lib3/yaml/constructor.py -lib3/yaml/cyaml.py -lib3/yaml/dumper.py -lib3/yaml/emitter.py -lib3/yaml/error.py -lib3/yaml/events.py -lib3/yaml/loader.py -lib3/yaml/nodes.py -lib3/yaml/parser.py -lib3/yaml/reader.py -lib3/yaml/representer.py -lib3/yaml/resolver.py -lib3/yaml/scanner.py -lib3/yaml/serializer.py -lib3/yaml/tokens.py \ No newline at end of file diff --git a/venv/lib/python3.6/site-packages/PyYAML-5.1.2-py3.6.egg-info/dependency_links.txt b/venv/lib/python3.6/site-packages/PyYAML-5.1.2-py3.6.egg-info/dependency_links.txt deleted file mode 100644 index 8b13789..0000000 --- a/venv/lib/python3.6/site-packages/PyYAML-5.1.2-py3.6.egg-info/dependency_links.txt +++ /dev/null @@ -1 +0,0 @@ - diff --git a/venv/lib/python3.6/site-packages/PyYAML-5.1.2-py3.6.egg-info/installed-files.txt b/venv/lib/python3.6/site-packages/PyYAML-5.1.2-py3.6.egg-info/installed-files.txt deleted file mode 100644 index 810f0cb..0000000 --- a/venv/lib/python3.6/site-packages/PyYAML-5.1.2-py3.6.egg-info/installed-files.txt +++ /dev/null @@ -1,38 +0,0 @@ -../yaml/__init__.py -../yaml/__pycache__/__init__.cpython-36.pyc -../yaml/__pycache__/composer.cpython-36.pyc -../yaml/__pycache__/constructor.cpython-36.pyc -../yaml/__pycache__/cyaml.cpython-36.pyc -../yaml/__pycache__/dumper.cpython-36.pyc -../yaml/__pycache__/emitter.cpython-36.pyc -../yaml/__pycache__/error.cpython-36.pyc -../yaml/__pycache__/events.cpython-36.pyc -../yaml/__pycache__/loader.cpython-36.pyc 
-../yaml/__pycache__/nodes.cpython-36.pyc -../yaml/__pycache__/parser.cpython-36.pyc -../yaml/__pycache__/reader.cpython-36.pyc -../yaml/__pycache__/representer.cpython-36.pyc -../yaml/__pycache__/resolver.cpython-36.pyc -../yaml/__pycache__/scanner.cpython-36.pyc -../yaml/__pycache__/serializer.cpython-36.pyc -../yaml/__pycache__/tokens.cpython-36.pyc -../yaml/composer.py -../yaml/constructor.py -../yaml/cyaml.py -../yaml/dumper.py -../yaml/emitter.py -../yaml/error.py -../yaml/events.py -../yaml/loader.py -../yaml/nodes.py -../yaml/parser.py -../yaml/reader.py -../yaml/representer.py -../yaml/resolver.py -../yaml/scanner.py -../yaml/serializer.py -../yaml/tokens.py -PKG-INFO -SOURCES.txt -dependency_links.txt -top_level.txt diff --git a/venv/lib/python3.6/site-packages/PyYAML-5.1.2-py3.6.egg-info/top_level.txt b/venv/lib/python3.6/site-packages/PyYAML-5.1.2-py3.6.egg-info/top_level.txt deleted file mode 100644 index e6475e9..0000000 --- a/venv/lib/python3.6/site-packages/PyYAML-5.1.2-py3.6.egg-info/top_level.txt +++ /dev/null @@ -1,2 +0,0 @@ -_yaml -yaml diff --git a/venv/lib/python3.6/site-packages/Werkzeug-0.15.5.dist-info/INSTALLER b/venv/lib/python3.6/site-packages/Werkzeug-0.15.5.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/venv/lib/python3.6/site-packages/Werkzeug-0.15.5.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/venv/lib/python3.6/site-packages/Werkzeug-0.15.5.dist-info/LICENSE.rst b/venv/lib/python3.6/site-packages/Werkzeug-0.15.5.dist-info/LICENSE.rst deleted file mode 100644 index c37cae4..0000000 --- a/venv/lib/python3.6/site-packages/Werkzeug-0.15.5.dist-info/LICENSE.rst +++ /dev/null @@ -1,28 +0,0 @@ -Copyright 2007 Pallets - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - -1. Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - -2. Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - -3. Neither the name of the copyright holder nor the names of its - contributors may be used to endorse or promote products derived from - this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A -PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED -TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR -PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF -LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING -NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/venv/lib/python3.6/site-packages/Werkzeug-0.15.5.dist-info/METADATA b/venv/lib/python3.6/site-packages/Werkzeug-0.15.5.dist-info/METADATA deleted file mode 100644 index 34ce7ea..0000000 --- a/venv/lib/python3.6/site-packages/Werkzeug-0.15.5.dist-info/METADATA +++ /dev/null @@ -1,128 +0,0 @@ -Metadata-Version: 2.1 -Name: Werkzeug -Version: 0.15.5 -Summary: The comprehensive WSGI web application library. -Home-page: https://palletsprojects.com/p/werkzeug/ -Author: Armin Ronacher -Author-email: armin.ronacher@active-4.com -Maintainer: Pallets -Maintainer-email: contact@palletsprojects.com -License: BSD-3-Clause -Project-URL: Documentation, https://werkzeug.palletsprojects.com/ -Project-URL: Code, https://github.com/pallets/werkzeug -Project-URL: Issue tracker, https://github.com/pallets/werkzeug/issues -Platform: UNKNOWN -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Web Environment -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: BSD License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Python :: Implementation :: PyPy -Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content -Classifier: Topic :: Internet :: WWW/HTTP :: WSGI -Classifier: Topic :: Internet :: WWW/HTTP :: WSGI :: Application -Classifier: Topic :: Internet :: WWW/HTTP :: WSGI :: Middleware -Classifier: Topic :: Software Development :: Libraries :: Application Frameworks -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.* -Provides-Extra: dev -Requires-Dist: pytest ; extra == 'dev' -Requires-Dist: coverage ; extra == 'dev' -Requires-Dist: tox ; extra == 'dev' -Requires-Dist: sphinx ; extra == 'dev' -Requires-Dist: pallets-sphinx-themes ; extra == 'dev' -Requires-Dist: sphinx-issues ; extra == 'dev' -Provides-Extra: termcolor -Requires-Dist: termcolor ; extra == 'termcolor' -Provides-Extra: watchdog -Requires-Dist: watchdog ; extra == 'watchdog' - -Werkzeug -======== - -*werkzeug* German noun: "tool". Etymology: *werk* ("work"), *zeug* ("stuff") - -Werkzeug is a comprehensive `WSGI`_ web application library. It began as -a simple collection of various utilities for WSGI applications and has -become one of the most advanced WSGI utility libraries. - -It includes: - -- An interactive debugger that allows inspecting stack traces and - source code in the browser with an interactive interpreter for any - frame in the stack. -- A full-featured request object with objects to interact with - headers, query args, form data, files, and cookies. -- A response object that can wrap other WSGI applications and handle - streaming data. -- A routing system for matching URLs to endpoints and generating URLs - for endpoints, with an extensible system for capturing variables - from URLs. -- HTTP utilities to handle entity tags, cache control, dates, user - agents, cookies, files, and more. -- A threaded WSGI server for use while developing applications - locally. 
-- A test client for simulating HTTP requests during testing without - requiring running a server. - -Werkzeug is Unicode aware and doesn't enforce any dependencies. It is up -to the developer to choose a template engine, database adapter, and even -how to handle requests. It can be used to build all sorts of end user -applications such as blogs, wikis, or bulletin boards. - -`Flask`_ wraps Werkzeug, using it to handle the details of WSGI while -providing more structure and patterns for defining powerful -applications. - - -Installing ----------- - -Install and update using `pip`_: - -.. code-block:: text - - pip install -U Werkzeug - - -A Simple Example ----------------- - -.. code-block:: python - - from werkzeug.wrappers import Request, Response - - @Request.application - def application(request): - return Response('Hello, World!') - - if __name__ == '__main__': - from werkzeug.serving import run_simple - run_simple('localhost', 4000, application) - - -Links ------ - -- Website: https://palletsprojects.com/p/werkzeug/ -- Documentation: https://werkzeug.palletsprojects.com/ -- Releases: https://pypi.org/project/Werkzeug/ -- Code: https://github.com/pallets/werkzeug -- Issue tracker: https://github.com/pallets/werkzeug/issues -- Test status: https://dev.azure.com/pallets/werkzeug/_build -- Official chat: https://discord.gg/t6rrQZH - -.. _WSGI: https://wsgi.readthedocs.io/en/latest/ -.. _Flask: https://www.palletsprojects.com/p/flask/ -.. _pip: https://pip.pypa.io/en/stable/quickstart/ - - diff --git a/venv/lib/python3.6/site-packages/Werkzeug-0.15.5.dist-info/RECORD b/venv/lib/python3.6/site-packages/Werkzeug-0.15.5.dist-info/RECORD deleted file mode 100644 index 1f1233c..0000000 --- a/venv/lib/python3.6/site-packages/Werkzeug-0.15.5.dist-info/RECORD +++ /dev/null @@ -1,119 +0,0 @@ -werkzeug/__init__.py,sha256=TqqguxCOVD9JRfiTjo_JNHcEmtQsdK1B9FxwDNchE18,6805 -werkzeug/_compat.py,sha256=oBEVVrJT4sqYdIZbUWmgV9T9w257RhTSDBlTjh0Zbb0,6431 -werkzeug/_internal.py,sha256=Wx7cpTRWqeBd0LAqobo0lCO4pNUW4oav6XKf7Taumgk,14590 -werkzeug/_reloader.py,sha256=J0EYq3YrGtC3bNDRqWCHf4yBPuVYoZBy8qp69eHO3YQ,11281 -werkzeug/datastructures.py,sha256=8HoA4Gu9i7ZWi5OBjx244OLWvDEE4JTQQUUTRoAYKog,91761 -werkzeug/exceptions.py,sha256=UzmMCkt5PCn5-TDNI0iGxGz07d3sKHQoArJrvurqVBE,23638 -werkzeug/filesystem.py,sha256=HzKl-j0Hd8Jl66j778UbPTAYNnY6vUZgYLlBZ0e7uw0,2101 -werkzeug/formparser.py,sha256=tN6SO4mn6RUsxRZq4qVBWXbNWNuasn2KaBznTieMaVk,21790 -werkzeug/http.py,sha256=t0ET2tySAf9ZWdEelVWJoLaZzFViYpjoUmiYHPz10-E,43304 -werkzeug/local.py,sha256=USVEcgIg-oCiUJFPIecFIW9jkIejfw4Fjf1u5yN-Np4,14456 -werkzeug/posixemulation.py,sha256=gSSiv1SCmOyzOM_nq1ZaZCtxP__C5MeDJl_4yXJmi4Q,3541 -werkzeug/routing.py,sha256=BSgjrYNwj2j5dAHQtK4INEp2TOf4OJP8hBncYSRO2ps,73410 -werkzeug/security.py,sha256=81149MplFq7-hD4RK4sKp9kzXXejjV9D4lWBzaRyeQ8,8106 -werkzeug/serving.py,sha256=tUFUMg7Bj9iw3nA8ZgC_czMDJJKN7vFskajEmgEFhzE,36597 -werkzeug/test.py,sha256=Cnb5xa3vLDL0hzFCH1fkG_YRpndViGQgCh4D744iSQk,40645 -werkzeug/testapp.py,sha256=hcKBzorVlSHC-uGvGXXjCm3FzCwGWq4yjbTG3Pr7MV8,9301 -werkzeug/urls.py,sha256=8yHdYI99N__-isoTwvGqvuj9QhOh66dd1Xh1DIp0q0g,39261 -werkzeug/useragents.py,sha256=FIonyUF790Ro8OG8cJqG1zixhg5YzXdHmkZbrnK0QRo,5965 -werkzeug/utils.py,sha256=O20Y0qWk5O1IWamC_A5gkmzR5cgBd3yDIHviwBTfNB0,27387 -werkzeug/wsgi.py,sha256=h-zyAeInwE6X6ciSnHI14ImA85adV-F861PmR7UGtRk,36681 -werkzeug/contrib/__init__.py,sha256=EvNyiiCF49j5P0fZYJ3ZGe82ofXdSBvUNqWFwwBMibQ,553 -werkzeug/contrib/atom.py,sha256=KpPJcTfzNW1J0VNQckCbVtVGBe3V8s451tOUya4qByI,15415 
-werkzeug/contrib/cache.py,sha256=AEh5UIw-Ui7sHZnlpvrD7ueOKUhCaAD55FXiPtXbbRs,32115 -werkzeug/contrib/fixers.py,sha256=peEtAiIWYT5bh00EWEPOGKzGZXivOzVhhzKPvvzk1RM,9193 -werkzeug/contrib/iterio.py,sha256=KKHa_8aCF_uhoeQVyPGUwrivuB6y6nNdXYo2D2vzOA8,10928 -werkzeug/contrib/lint.py,sha256=NdIxP0E2kVt1xDIxoaIz3Rcl8ZdgmHaFbGTOaybGpN4,296 -werkzeug/contrib/profiler.py,sha256=k_oMLU-AtsVvQ9TxNdermY6FuzSTYr-WE-ZmWb_DMyU,1229 -werkzeug/contrib/securecookie.py,sha256=xbtElskGmtbiApgOJ5WhGgqGDs_68_PcWzqDIAY_QZY,13076 -werkzeug/contrib/sessions.py,sha256=oVXh_7-6_CWOMxDKqcaK05H8RpYoWqAd3al-KzMFPYs,13042 -werkzeug/contrib/wrappers.py,sha256=ZmNk0wpzD66yomPnQxapndZQs4c0kNJaRzqI-BVxeQk,13199 -werkzeug/debug/__init__.py,sha256=Bo3HvgTNY4NQ_2jROTSk3r1ScZcT_g_4EnuHTjKyrKM,18275 -werkzeug/debug/console.py,sha256=HoBL21bbcmtiCLqiLDJLZi1LYnWMZxjoXYH5WaZB1XY,5469 -werkzeug/debug/repr.py,sha256=lIwuhbyrMwVe3P_cFqNyqzHL7P93TLKod7lw9clydEw,9621 -werkzeug/debug/tbtools.py,sha256=SkAAA4KKfwsXJinUbf-AEP4GqONTsR4uU7WPUloXcSE,20318 -werkzeug/debug/shared/FONT_LICENSE,sha256=LwAVEI1oYnvXiNMT9SnCH_TaLCxCpeHziDrMg0gPkAI,4673 -werkzeug/debug/shared/console.png,sha256=bxax6RXXlvOij_KeqvSNX0ojJf83YbnZ7my-3Gx9w2A,507 -werkzeug/debug/shared/debugger.js,sha256=rOhqZMRfpZnnu6_XCGn6wMWPhtfwRAcyZKksdIxPJas,6400 -werkzeug/debug/shared/jquery.js,sha256=CSXorXvZcTkaix6Yvo6HppcZGetbYMGWSFlBw8HfCJo,88145 -werkzeug/debug/shared/less.png,sha256=-4-kNRaXJSONVLahrQKUxMwXGm9R4OnZ9SxDGpHlIR4,191 -werkzeug/debug/shared/more.png,sha256=GngN7CioHQoV58rH6ojnkYi8c_qED2Aka5FO5UXrReY,200 -werkzeug/debug/shared/source.png,sha256=RoGcBTE4CyCB85GBuDGTFlAnUqxwTBiIfDqW15EpnUQ,818 -werkzeug/debug/shared/style.css,sha256=gZ9uhmb5zj3XLuT9RvnMp6jMINgQ-VVBCp-2AZbG3YQ,6604 -werkzeug/debug/shared/ubuntu.ttf,sha256=1eaHFyepmy4FyDvjLVzpITrGEBu_CZYY94jE0nED1c0,70220 -werkzeug/middleware/__init__.py,sha256=f1SFZo67IlW4k1uqKzNHxYQlsakUS-D6KK_j0e3jjwQ,549 -werkzeug/middleware/dispatcher.py,sha256=_-KoMzHtcISHS7ouWKAOraqlCLprdh83YOAn_8DjLp8,2240 -werkzeug/middleware/http_proxy.py,sha256=lRjTdMmghHiZuZrS7_UJ3gZc-vlFizhBbFZ-XZPLwIA,7117 -werkzeug/middleware/lint.py,sha256=ItTwuWJnflF8xMT1uqU_Ty1ryhux-CjeUfskqaUpxsw,12967 -werkzeug/middleware/profiler.py,sha256=8B_s23d6BGrU_q54gJsm6kcCbOJbTSqrXCsioHON0Xs,4471 -werkzeug/middleware/proxy_fix.py,sha256=Y86VcU2oAQ--x0mi4iFVJyEFMzp3Ao8q0zvr_SsrpNw,8506 -werkzeug/middleware/shared_data.py,sha256=WtSphPrsUdpEk4E-_09CAILhfOBJ1YtcX1LrxcQfIzw,8224 -werkzeug/wrappers/__init__.py,sha256=S4VioKAmF_av9Ec9zQvG71X1EOkYfPx1TYck9jyDiyY,1384 -werkzeug/wrappers/accept.py,sha256=TIvjUc0g73fhTWX54wg_D9NNzKvpnG1X8u1w26tK1o8,1760 -werkzeug/wrappers/auth.py,sha256=Pmn6iaGHBrUyHbJpW0lZhO_q9RVoAa5QalaTqcavdAI,1158 -werkzeug/wrappers/base_request.py,sha256=aknREwqVT7WJUxm4weUGdBj90H6rDR3DvsIvmYhaC8A,26943 -werkzeug/wrappers/base_response.py,sha256=ZA1XlxtsbvG4SpbdOEMT5--z7aZM0w6C5y33W8wOXa4,27906 -werkzeug/wrappers/common_descriptors.py,sha256=OJ8jOwMun4L-BxCuFPkK1vaefx_-Y5IndVXvvn_ems4,12089 -werkzeug/wrappers/etag.py,sha256=TwMO1fvluXbBqnFTj2DvrCNa3mYhbHYe1UZAVzfXvuU,12533 -werkzeug/wrappers/json.py,sha256=HvK_A4NpO0sLqgb10sTJcoZydYOwyNiPCJPV7SVgcgE,4343 -werkzeug/wrappers/request.py,sha256=qPo2zmmBv4HxboywtWZb2pJL8OPXo07BUXBKw2j9Fi8,1338 -werkzeug/wrappers/response.py,sha256=vDZFEGzDOG0jjmS0uVVjeT3hqRt1hFaf15npnx7RD28,2329 -werkzeug/wrappers/user_agent.py,sha256=4bTgQKTLQmGUyxOREYOzbeiFP2VwIOE7E14AhUB5NqM,444 -Werkzeug-0.15.5.dist-info/LICENSE.rst,sha256=O0nc7kEF6ze6wQ-vG-JgQI_oXSUrjp3y4JefweCUQ3s,1475 
-Werkzeug-0.15.5.dist-info/METADATA,sha256=QMmXGaI5L-K5NH-S1q3ATVwWxqU0tQgUCdrJ7_OzqwU,4712 -Werkzeug-0.15.5.dist-info/WHEEL,sha256=h_aVn5OB2IERUjMbi2pucmR_zzWJtk303YXvhh60NJ8,110 -Werkzeug-0.15.5.dist-info/top_level.txt,sha256=QRyj2VjwJoQkrwjwFIOlB8Xg3r9un0NtqVHQF-15xaw,9 -Werkzeug-0.15.5.dist-info/RECORD,, -Werkzeug-0.15.5.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -werkzeug/middleware/__pycache__/http_proxy.cpython-36.pyc,, -werkzeug/middleware/__pycache__/lint.cpython-36.pyc,, -werkzeug/middleware/__pycache__/dispatcher.cpython-36.pyc,, -werkzeug/middleware/__pycache__/shared_data.cpython-36.pyc,, -werkzeug/middleware/__pycache__/profiler.cpython-36.pyc,, -werkzeug/middleware/__pycache__/proxy_fix.cpython-36.pyc,, -werkzeug/middleware/__pycache__/__init__.cpython-36.pyc,, -werkzeug/wrappers/__pycache__/response.cpython-36.pyc,, -werkzeug/wrappers/__pycache__/base_response.cpython-36.pyc,, -werkzeug/wrappers/__pycache__/request.cpython-36.pyc,, -werkzeug/wrappers/__pycache__/base_request.cpython-36.pyc,, -werkzeug/wrappers/__pycache__/auth.cpython-36.pyc,, -werkzeug/wrappers/__pycache__/user_agent.cpython-36.pyc,, -werkzeug/wrappers/__pycache__/etag.cpython-36.pyc,, -werkzeug/wrappers/__pycache__/__init__.cpython-36.pyc,, -werkzeug/wrappers/__pycache__/json.cpython-36.pyc,, -werkzeug/wrappers/__pycache__/accept.cpython-36.pyc,, -werkzeug/wrappers/__pycache__/common_descriptors.cpython-36.pyc,, -werkzeug/__pycache__/posixemulation.cpython-36.pyc,, -werkzeug/__pycache__/exceptions.cpython-36.pyc,, -werkzeug/__pycache__/datastructures.cpython-36.pyc,, -werkzeug/__pycache__/filesystem.cpython-36.pyc,, -werkzeug/__pycache__/useragents.cpython-36.pyc,, -werkzeug/__pycache__/serving.cpython-36.pyc,, -werkzeug/__pycache__/testapp.cpython-36.pyc,, -werkzeug/__pycache__/_compat.cpython-36.pyc,, -werkzeug/__pycache__/security.cpython-36.pyc,, -werkzeug/__pycache__/_internal.cpython-36.pyc,, -werkzeug/__pycache__/routing.cpython-36.pyc,, -werkzeug/__pycache__/_reloader.cpython-36.pyc,, -werkzeug/__pycache__/local.cpython-36.pyc,, -werkzeug/__pycache__/http.cpython-36.pyc,, -werkzeug/__pycache__/utils.cpython-36.pyc,, -werkzeug/__pycache__/wsgi.cpython-36.pyc,, -werkzeug/__pycache__/formparser.cpython-36.pyc,, -werkzeug/__pycache__/__init__.cpython-36.pyc,, -werkzeug/__pycache__/test.cpython-36.pyc,, -werkzeug/__pycache__/urls.cpython-36.pyc,, -werkzeug/contrib/__pycache__/sessions.cpython-36.pyc,, -werkzeug/contrib/__pycache__/lint.cpython-36.pyc,, -werkzeug/contrib/__pycache__/iterio.cpython-36.pyc,, -werkzeug/contrib/__pycache__/securecookie.cpython-36.pyc,, -werkzeug/contrib/__pycache__/fixers.cpython-36.pyc,, -werkzeug/contrib/__pycache__/profiler.cpython-36.pyc,, -werkzeug/contrib/__pycache__/cache.cpython-36.pyc,, -werkzeug/contrib/__pycache__/wrappers.cpython-36.pyc,, -werkzeug/contrib/__pycache__/__init__.cpython-36.pyc,, -werkzeug/contrib/__pycache__/atom.cpython-36.pyc,, -werkzeug/debug/__pycache__/repr.cpython-36.pyc,, -werkzeug/debug/__pycache__/console.cpython-36.pyc,, -werkzeug/debug/__pycache__/tbtools.cpython-36.pyc,, -werkzeug/debug/__pycache__/__init__.cpython-36.pyc,, diff --git a/venv/lib/python3.6/site-packages/Werkzeug-0.15.5.dist-info/WHEEL b/venv/lib/python3.6/site-packages/Werkzeug-0.15.5.dist-info/WHEEL deleted file mode 100644 index 78e6f69..0000000 --- a/venv/lib/python3.6/site-packages/Werkzeug-0.15.5.dist-info/WHEEL +++ /dev/null @@ -1,6 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.4) -Root-Is-Purelib: true -Tag: py2-none-any 
-Tag: py3-none-any - diff --git a/venv/lib/python3.6/site-packages/Werkzeug-0.15.5.dist-info/top_level.txt b/venv/lib/python3.6/site-packages/Werkzeug-0.15.5.dist-info/top_level.txt deleted file mode 100644 index 6fe8da8..0000000 --- a/venv/lib/python3.6/site-packages/Werkzeug-0.15.5.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -werkzeug diff --git a/venv/lib/python3.6/site-packages/certifi-2019.6.16.dist-info/DESCRIPTION.rst b/venv/lib/python3.6/site-packages/certifi-2019.6.16.dist-info/DESCRIPTION.rst deleted file mode 100644 index 0b0953d..0000000 --- a/venv/lib/python3.6/site-packages/certifi-2019.6.16.dist-info/DESCRIPTION.rst +++ /dev/null @@ -1,50 +0,0 @@ -Certifi: Python SSL Certificates -================================ - -`Certifi`_ is a carefully curated collection of Root Certificates for -validating the trustworthiness of SSL certificates while verifying the identity -of TLS hosts. It has been extracted from the `Requests`_ project. - -Installation ------------- - -``certifi`` is available on PyPI. Simply install it with ``pip``:: - - $ pip install certifi - -Usage ------ - -To reference the installed certificate authority (CA) bundle, you can use the -built-in function:: - - >>> import certifi - - >>> certifi.where() - '/usr/local/lib/python2.7/site-packages/certifi/cacert.pem' - -Or from the command line:: - - $ python -m certifi - /usr/local/lib/python2.7/site-packages/certifi/cacert.pem - -Enjoy! - -1024-bit Root Certificates -~~~~~~~~~~~~~~~~~~~~~~~~~~ - -Browsers and certificate authorities have concluded that 1024-bit keys are -unacceptably weak for certificates, particularly root certificates. For this -reason, Mozilla has removed any weak (i.e. 1024-bit key) certificate from its -bundle, replacing it with an equivalent strong (i.e. 2048-bit or greater key) -certificate from the same CA. Because Mozilla removed these certificates from -its bundle, ``certifi`` removed them as well. - -In previous versions, ``certifi`` provided the ``certifi.old_where()`` function -to intentionally re-add the 1024-bit roots back into your bundle. This was not -recommended in production and therefore was removed at the end of 2018. - -.. _`Certifi`: https://certifi.io/en/latest/ -.. _`Requests`: http://docs.python-requests.org/en/latest/ - - diff --git a/venv/lib/python3.6/site-packages/certifi-2019.6.16.dist-info/INSTALLER b/venv/lib/python3.6/site-packages/certifi-2019.6.16.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/venv/lib/python3.6/site-packages/certifi-2019.6.16.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/venv/lib/python3.6/site-packages/certifi-2019.6.16.dist-info/LICENSE.txt b/venv/lib/python3.6/site-packages/certifi-2019.6.16.dist-info/LICENSE.txt deleted file mode 100644 index 802b53f..0000000 --- a/venv/lib/python3.6/site-packages/certifi-2019.6.16.dist-info/LICENSE.txt +++ /dev/null @@ -1,21 +0,0 @@ -This packge contains a modified version of ca-bundle.crt: - -ca-bundle.crt -- Bundle of CA Root Certificates - -Certificate data from Mozilla as of: Thu Nov 3 19:04:19 2011# -This is a bundle of X.509 certificates of public Certificate Authorities -(CA). These were automatically extracted from Mozilla's root certificates -file (certdata.txt). 
This file can be found in the mozilla source tree: -http://mxr.mozilla.org/mozilla/source/security/nss/lib/ckfw/builtins/certdata.txt?raw=1# -It contains the certificates in PEM format and therefore -can be directly used with curl / libcurl / php_curl, or with -an Apache+mod_ssl webserver for SSL client authentication. -Just configure this file as the SSLCACertificateFile.# - -***** BEGIN LICENSE BLOCK ***** -This Source Code Form is subject to the terms of the Mozilla Public License, -v. 2.0. If a copy of the MPL was not distributed with this file, You can obtain -one at http://mozilla.org/MPL/2.0/. - -***** END LICENSE BLOCK ***** -@(#) $RCSfile: certdata.txt,v $ $Revision: 1.80 $ $Date: 2011/11/03 15:11:58 $ diff --git a/venv/lib/python3.6/site-packages/certifi-2019.6.16.dist-info/METADATA b/venv/lib/python3.6/site-packages/certifi-2019.6.16.dist-info/METADATA deleted file mode 100644 index 29540e3..0000000 --- a/venv/lib/python3.6/site-packages/certifi-2019.6.16.dist-info/METADATA +++ /dev/null @@ -1,74 +0,0 @@ -Metadata-Version: 2.0 -Name: certifi -Version: 2019.6.16 -Summary: Python package for providing Mozilla's CA Bundle. -Home-page: https://certifi.io/ -Author: Kenneth Reitz -Author-email: me@kennethreitz.com -License: MPL-2.0 -Platform: UNKNOWN -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0) -Classifier: Natural Language :: English -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.6 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 - -Certifi: Python SSL Certificates -================================ - -`Certifi`_ is a carefully curated collection of Root Certificates for -validating the trustworthiness of SSL certificates while verifying the identity -of TLS hosts. It has been extracted from the `Requests`_ project. - -Installation ------------- - -``certifi`` is available on PyPI. Simply install it with ``pip``:: - - $ pip install certifi - -Usage ------ - -To reference the installed certificate authority (CA) bundle, you can use the -built-in function:: - - >>> import certifi - - >>> certifi.where() - '/usr/local/lib/python2.7/site-packages/certifi/cacert.pem' - -Or from the command line:: - - $ python -m certifi - /usr/local/lib/python2.7/site-packages/certifi/cacert.pem - -Enjoy! - -1024-bit Root Certificates -~~~~~~~~~~~~~~~~~~~~~~~~~~ - -Browsers and certificate authorities have concluded that 1024-bit keys are -unacceptably weak for certificates, particularly root certificates. For this -reason, Mozilla has removed any weak (i.e. 1024-bit key) certificate from its -bundle, replacing it with an equivalent strong (i.e. 2048-bit or greater key) -certificate from the same CA. Because Mozilla removed these certificates from -its bundle, ``certifi`` removed them as well. - -In previous versions, ``certifi`` provided the ``certifi.old_where()`` function -to intentionally re-add the 1024-bit roots back into your bundle. This was not -recommended in production and therefore was removed at the end of 2018. - -.. _`Certifi`: https://certifi.io/en/latest/ -.. 
_`Requests`: http://docs.python-requests.org/en/latest/ - - diff --git a/venv/lib/python3.6/site-packages/certifi-2019.6.16.dist-info/RECORD b/venv/lib/python3.6/site-packages/certifi-2019.6.16.dist-info/RECORD deleted file mode 100644 index 79798dd..0000000 --- a/venv/lib/python3.6/site-packages/certifi-2019.6.16.dist-info/RECORD +++ /dev/null @@ -1,15 +0,0 @@ -certifi/__init__.py,sha256=phsMyKTQP7MMe1wAHfhXPbQVxL3wXixOomxzNh5Cwa4,52 -certifi/__main__.py,sha256=FiOYt1Fltst7wk9DRa6GCoBr8qBUxlNQu_MKJf04E6s,41 -certifi/cacert.pem,sha256=DddOv7pQyMB8zNNgiXSSFrPVn7EN8qbe7P6h_IYyuek,282085 -certifi/core.py,sha256=EuFc2BsToG5O1-qsx4BSjQ1r1-7WRtH87b1WflZOWhI,218 -certifi-2019.6.16.dist-info/DESCRIPTION.rst,sha256=aLNHONztn2ZiBpSTivVFy6EDIWmuNYSsEQwx4NWbvB4,1580 -certifi-2019.6.16.dist-info/LICENSE.txt,sha256=anCkv2sBABbVmmS4rkrY3H9e8W8ftFPMLs13HFo0ETE,1048 -certifi-2019.6.16.dist-info/METADATA,sha256=bmxei5fIjQJCT_5_2k2ReQ1IDvrguA5Qan26BXRcbN0,2522 -certifi-2019.6.16.dist-info/RECORD,, -certifi-2019.6.16.dist-info/WHEEL,sha256=5wvfB7GvgZAbKBSE9uX9Zbi6LCL-_KgezgHblXhCRnM,113 -certifi-2019.6.16.dist-info/metadata.json,sha256=soH2Ke2dIXqmSFFz1swsK3Uno_9ed57OqPIXuOxFXYA,1048 -certifi-2019.6.16.dist-info/top_level.txt,sha256=KMu4vUCfsjLrkPbSNdgdekS-pVJzBAJFO__nI8NF6-U,8 -certifi-2019.6.16.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -certifi/__pycache__/__main__.cpython-36.pyc,, -certifi/__pycache__/core.cpython-36.pyc,, -certifi/__pycache__/__init__.cpython-36.pyc,, diff --git a/venv/lib/python3.6/site-packages/certifi-2019.6.16.dist-info/WHEEL b/venv/lib/python3.6/site-packages/certifi-2019.6.16.dist-info/WHEEL deleted file mode 100644 index 7bf9daa..0000000 --- a/venv/lib/python3.6/site-packages/certifi-2019.6.16.dist-info/WHEEL +++ /dev/null @@ -1,6 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.30.0.a0) -Root-Is-Purelib: true -Tag: py2-none-any -Tag: py3-none-any - diff --git a/venv/lib/python3.6/site-packages/certifi-2019.6.16.dist-info/metadata.json b/venv/lib/python3.6/site-packages/certifi-2019.6.16.dist-info/metadata.json deleted file mode 100644 index afe3503..0000000 --- a/venv/lib/python3.6/site-packages/certifi-2019.6.16.dist-info/metadata.json +++ /dev/null @@ -1 +0,0 @@ -{"classifiers": ["Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)", "Natural Language :: English", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7"], "extensions": {"python.details": {"contacts": [{"email": "me@kennethreitz.com", "name": "Kenneth Reitz", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst", "license": "LICENSE.txt"}, "project_urls": {"Home": "https://certifi.io/"}}}, "generator": "bdist_wheel (0.30.0.a0)", "license": "MPL-2.0", "metadata_version": "2.0", "name": "certifi", "summary": "Python package for providing Mozilla's CA Bundle.", "version": "2019.6.16"} \ No newline at end of file diff --git a/venv/lib/python3.6/site-packages/certifi-2019.6.16.dist-info/top_level.txt b/venv/lib/python3.6/site-packages/certifi-2019.6.16.dist-info/top_level.txt deleted file mode 100644 index 963eac5..0000000 --- 
a/venv/lib/python3.6/site-packages/certifi-2019.6.16.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -certifi diff --git a/venv/lib/python3.6/site-packages/certifi/__init__.py b/venv/lib/python3.6/site-packages/certifi/__init__.py deleted file mode 100644 index 8ccb14e..0000000 --- a/venv/lib/python3.6/site-packages/certifi/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from .core import where - -__version__ = "2019.06.16" diff --git a/venv/lib/python3.6/site-packages/certifi/__main__.py b/venv/lib/python3.6/site-packages/certifi/__main__.py deleted file mode 100644 index 5f1da0d..0000000 --- a/venv/lib/python3.6/site-packages/certifi/__main__.py +++ /dev/null @@ -1,2 +0,0 @@ -from certifi import where -print(where()) diff --git a/venv/lib/python3.6/site-packages/certifi/cacert.pem b/venv/lib/python3.6/site-packages/certifi/cacert.pem deleted file mode 100644 index 9ca290f..0000000 --- a/venv/lib/python3.6/site-packages/certifi/cacert.pem +++ /dev/null @@ -1,4618 +0,0 @@ - -# Issuer: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA -# Subject: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA -# Label: "GlobalSign Root CA" -# Serial: 4835703278459707669005204 -# MD5 Fingerprint: 3e:45:52:15:09:51:92:e1:b7:5d:37:9f:b1:87:29:8a -# SHA1 Fingerprint: b1:bc:96:8b:d4:f4:9d:62:2a:a8:9a:81:f2:15:01:52:a4:1d:82:9c -# SHA256 Fingerprint: eb:d4:10:40:e4:bb:3e:c7:42:c9:e3:81:d3:1e:f2:a4:1a:48:b6:68:5c:96:e7:ce:f3:c1:df:6c:d4:33:1c:99 ------BEGIN CERTIFICATE----- -MIIDdTCCAl2gAwIBAgILBAAAAAABFUtaw5QwDQYJKoZIhvcNAQEFBQAwVzELMAkG -A1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2ExEDAOBgNVBAsTB1Jv -b3QgQ0ExGzAZBgNVBAMTEkdsb2JhbFNpZ24gUm9vdCBDQTAeFw05ODA5MDExMjAw -MDBaFw0yODAxMjgxMjAwMDBaMFcxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9i -YWxTaWduIG52LXNhMRAwDgYDVQQLEwdSb290IENBMRswGQYDVQQDExJHbG9iYWxT -aWduIFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDaDuaZ -jc6j40+Kfvvxi4Mla+pIH/EqsLmVEQS98GPR4mdmzxzdzxtIK+6NiY6arymAZavp -xy0Sy6scTHAHoT0KMM0VjU/43dSMUBUc71DuxC73/OlS8pF94G3VNTCOXkNz8kHp -1Wrjsok6Vjk4bwY8iGlbKk3Fp1S4bInMm/k8yuX9ifUSPJJ4ltbcdG6TRGHRjcdG -snUOhugZitVtbNV4FpWi6cgKOOvyJBNPc1STE4U6G7weNLWLBYy5d4ux2x8gkasJ -U26Qzns3dLlwR5EiUWMWea6xrkEmCMgZK9FGqkjWZCrXgzT/LCrBbBlDSgeF59N8 -9iFo7+ryUp9/k5DPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8E -BTADAQH/MB0GA1UdDgQWBBRge2YaRQ2XyolQL30EzTSo//z9SzANBgkqhkiG9w0B -AQUFAAOCAQEA1nPnfE920I2/7LqivjTFKDK1fPxsnCwrvQmeU79rXqoRSLblCKOz -yj1hTdNGCbM+w6DjY1Ub8rrvrTnhQ7k4o+YviiY776BQVvnGCv04zcQLcFGUl5gE -38NflNUVyRRBnMRddWQVDf9VMOyGj/8N7yy5Y0b2qvzfvGn9LhJIZJrglfCm7ymP -AbEVtQwdpf5pLGkkeB6zpxxxYu7KyJesF12KwvhHhm4qxFYxldBniYUr+WymXUad -DKqC5JlR3XC321Y9YeRq4VzW9v493kHMB65jUr9TU/Qr6cf9tveCX4XSQRjbgbME -HMUfpIBvFSDJ3gyICh3WZlXi/EjJKSZp4A== ------END CERTIFICATE----- - -# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R2 -# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R2 -# Label: "GlobalSign Root CA - R2" -# Serial: 4835703278459682885658125 -# MD5 Fingerprint: 94:14:77:7e:3e:5e:fd:8f:30:bd:41:b0:cf:e7:d0:30 -# SHA1 Fingerprint: 75:e0:ab:b6:13:85:12:27:1c:04:f8:5f:dd:de:38:e4:b7:24:2e:fe -# SHA256 Fingerprint: ca:42:dd:41:74:5f:d0:b8:1e:b9:02:36:2c:f9:d8:bf:71:9d:a1:bd:1b:1e:fc:94:6f:5b:4c:99:f4:2c:1b:9e ------BEGIN CERTIFICATE----- -MIIDujCCAqKgAwIBAgILBAAAAAABD4Ym5g0wDQYJKoZIhvcNAQEFBQAwTDEgMB4G -A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjIxEzARBgNVBAoTCkdsb2JhbFNp -Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDYxMjE1MDgwMDAwWhcNMjExMjE1 -MDgwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMjETMBEG 
-A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI -hvcNAQEBBQADggEPADCCAQoCggEBAKbPJA6+Lm8omUVCxKs+IVSbC9N/hHD6ErPL -v4dfxn+G07IwXNb9rfF73OX4YJYJkhD10FPe+3t+c4isUoh7SqbKSaZeqKeMWhG8 -eoLrvozps6yWJQeXSpkqBy+0Hne/ig+1AnwblrjFuTosvNYSuetZfeLQBoZfXklq -tTleiDTsvHgMCJiEbKjNS7SgfQx5TfC4LcshytVsW33hoCmEofnTlEnLJGKRILzd -C9XZzPnqJworc5HGnRusyMvo4KD0L5CLTfuwNhv2GXqF4G3yYROIXJ/gkwpRl4pa -zq+r1feqCapgvdzZX99yqWATXgAByUr6P6TqBwMhAo6CygPCm48CAwEAAaOBnDCB -mTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUm+IH -V2ccHsBqBt5ZtJot39wZhi4wNgYDVR0fBC8wLTAroCmgJ4YlaHR0cDovL2NybC5n -bG9iYWxzaWduLm5ldC9yb290LXIyLmNybDAfBgNVHSMEGDAWgBSb4gdXZxwewGoG -3lm0mi3f3BmGLjANBgkqhkiG9w0BAQUFAAOCAQEAmYFThxxol4aR7OBKuEQLq4Gs -J0/WwbgcQ3izDJr86iw8bmEbTUsp9Z8FHSbBuOmDAGJFtqkIk7mpM0sYmsL4h4hO -291xNBrBVNpGP+DTKqttVCL1OmLNIG+6KYnX3ZHu01yiPqFbQfXf5WRDLenVOavS -ot+3i9DAgBkcRcAtjOj4LaR0VknFBbVPFd5uRHg5h6h+u/N5GJG79G+dwfCMNYxd -AfvDbbnvRG15RjF+Cv6pgsH/76tuIMRQyV+dTZsXjAzlAcmgQWpzU/qlULRuJQ/7 -TBj0/VLZjmmx6BEP3ojY+x1J96relc8geMJgEtslQIxq/H5COEBkEveegeGTLg== ------END CERTIFICATE----- - -# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only -# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only -# Label: "Verisign Class 3 Public Primary Certification Authority - G3" -# Serial: 206684696279472310254277870180966723415 -# MD5 Fingerprint: cd:68:b6:a7:c7:c4:ce:75:e0:1d:4f:57:44:61:92:09 -# SHA1 Fingerprint: 13:2d:0d:45:53:4b:69:97:cd:b2:d5:c3:39:e2:55:76:60:9b:5c:c6 -# SHA256 Fingerprint: eb:04:cf:5e:b1:f3:9a:fa:76:2f:2b:b1:20:f2:96:cb:a5:20:c1:b9:7d:b1:58:95:65:b8:1c:b9:a1:7b:72:44 ------BEGIN CERTIFICATE----- -MIIEGjCCAwICEQCbfgZJoz5iudXukEhxKe9XMA0GCSqGSIb3DQEBBQUAMIHKMQsw -CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZl -cmlTaWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWdu -LCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlT -aWduIENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3Jp -dHkgLSBHMzAeFw05OTEwMDEwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMIHKMQswCQYD -VQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlT -aWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJ -bmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlTaWdu -IENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg -LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMu6nFL8eB8aHm8b -N3O9+MlrlBIwT/A2R/XQkQr1F8ilYcEWQE37imGQ5XYgwREGfassbqb1EUGO+i2t -KmFZpGcmTNDovFJbcCAEWNF6yaRpvIMXZK0Fi7zQWM6NjPXr8EJJC52XJ2cybuGu -kxUccLwgTS8Y3pKI6GyFVxEa6X7jJhFUokWWVYPKMIno3Nij7SqAP395ZVc+FSBm -CC+Vk7+qRy+oRpfwEuL+wgorUeZ25rdGt+INpsyow0xZVYnm6FNcHOqd8GIWC6fJ -Xwzw3sJ2zq/3avL6QaaiMxTJ5Xpj055iN9WFZZ4O5lMkdBteHRJTW8cs54NJOxWu -imi5V5cCAwEAATANBgkqhkiG9w0BAQUFAAOCAQEAERSWwauSCPc/L8my/uRan2Te -2yFPhpk0djZX3dAVL8WtfxUfN2JzPtTnX84XA9s1+ivbrmAJXx5fj267Cz3qWhMe -DGBvtcC1IyIuBwvLqXTLR7sdwdela8wv0kL9Sd2nic9TutoAWii/gt/4uhMdUIaC -/Y4wjylGsB49Ndo4YhYYSq3mtlFs3q9i6wHQHiT+eo8SGhJouPtmmRQURVyu565p -F4ErWjfJXir0xuKhXFSbplQAz/DxwceYMBo7Nhbbo27q/a2ywtrvAkcTisDxszGt -TxzhT5yvDwyd93gN2PQ1VoDat20Xj50egWTh/sVFuq1ruQp6Tk9LhO5L8X3dEQ== ------END CERTIFICATE----- - -# Issuer: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. 
(limits liab.)/(c) 1999 Entrust.net Limited -# Subject: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited -# Label: "Entrust.net Premium 2048 Secure Server CA" -# Serial: 946069240 -# MD5 Fingerprint: ee:29:31:bc:32:7e:9a:e6:e8:b5:f7:51:b4:34:71:90 -# SHA1 Fingerprint: 50:30:06:09:1d:97:d4:f5:ae:39:f7:cb:e7:92:7d:7d:65:2d:34:31 -# SHA256 Fingerprint: 6d:c4:71:72:e0:1c:bc:b0:bf:62:58:0d:89:5f:e2:b8:ac:9a:d4:f8:73:80:1e:0c:10:b9:c8:37:d2:1e:b1:77 ------BEGIN CERTIFICATE----- -MIIEKjCCAxKgAwIBAgIEOGPe+DANBgkqhkiG9w0BAQUFADCBtDEUMBIGA1UEChML -RW50cnVzdC5uZXQxQDA+BgNVBAsUN3d3dy5lbnRydXN0Lm5ldC9DUFNfMjA0OCBp -bmNvcnAuIGJ5IHJlZi4gKGxpbWl0cyBsaWFiLikxJTAjBgNVBAsTHChjKSAxOTk5 -IEVudHJ1c3QubmV0IExpbWl0ZWQxMzAxBgNVBAMTKkVudHJ1c3QubmV0IENlcnRp -ZmljYXRpb24gQXV0aG9yaXR5ICgyMDQ4KTAeFw05OTEyMjQxNzUwNTFaFw0yOTA3 -MjQxNDE1MTJaMIG0MRQwEgYDVQQKEwtFbnRydXN0Lm5ldDFAMD4GA1UECxQ3d3d3 -LmVudHJ1c3QubmV0L0NQU18yMDQ4IGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxp -YWIuKTElMCMGA1UECxMcKGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDEzMDEG -A1UEAxMqRW50cnVzdC5uZXQgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgKDIwNDgp -MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArU1LqRKGsuqjIAcVFmQq -K0vRvwtKTY7tgHalZ7d4QMBzQshowNtTK91euHaYNZOLGp18EzoOH1u3Hs/lJBQe -sYGpjX24zGtLA/ECDNyrpUAkAH90lKGdCCmziAv1h3edVc3kw37XamSrhRSGlVuX -MlBvPci6Zgzj/L24ScF2iUkZ/cCovYmjZy/Gn7xxGWC4LeksyZB2ZnuU4q941mVT -XTzWnLLPKQP5L6RQstRIzgUyVYr9smRMDuSYB3Xbf9+5CFVghTAp+XtIpGmG4zU/ -HoZdenoVve8AjhUiVBcAkCaTvA5JaJG/+EfTnZVCwQ5N328mz8MYIWJmQ3DW1cAH -4QIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV -HQ4EFgQUVeSB0RGAvtiJuQijMfmhJAkWuXAwDQYJKoZIhvcNAQEFBQADggEBADub -j1abMOdTmXx6eadNl9cZlZD7Bh/KM3xGY4+WZiT6QBshJ8rmcnPyT/4xmf3IDExo -U8aAghOY+rat2l098c5u9hURlIIM7j+VrxGrD9cv3h8Dj1csHsm7mhpElesYT6Yf -zX1XEC+bBAlahLVu2B064dae0Wx5XnkcFMXj0EyTO2U87d89vqbllRrDtRnDvV5b -u/8j72gZyxKTJ1wDLW8w0B62GqzeWvfRqqgnpv55gcR5mTNXuhKwqeBCbJPKVt7+ -bYQLCIt+jerXmCHG8+c8eS9enNFMFY3h7CI3zJpDC5fcgJCNs2ebb0gIFVbPv/Er -fF6adulZkMV8gzURZVE= ------END CERTIFICATE----- - -# Issuer: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust -# Subject: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust -# Label: "Baltimore CyberTrust Root" -# Serial: 33554617 -# MD5 Fingerprint: ac:b6:94:a5:9c:17:e0:d7:91:52:9b:b1:97:06:a6:e4 -# SHA1 Fingerprint: d4:de:20:d0:5e:66:fc:53:fe:1a:50:88:2c:78:db:28:52:ca:e4:74 -# SHA256 Fingerprint: 16:af:57:a9:f6:76:b0:ab:12:60:95:aa:5e:ba:de:f2:2a:b3:11:19:d6:44:ac:95:cd:4b:93:db:f3:f2:6a:eb ------BEGIN CERTIFICATE----- -MIIDdzCCAl+gAwIBAgIEAgAAuTANBgkqhkiG9w0BAQUFADBaMQswCQYDVQQGEwJJ -RTESMBAGA1UEChMJQmFsdGltb3JlMRMwEQYDVQQLEwpDeWJlclRydXN0MSIwIAYD -VQQDExlCYWx0aW1vcmUgQ3liZXJUcnVzdCBSb290MB4XDTAwMDUxMjE4NDYwMFoX -DTI1MDUxMjIzNTkwMFowWjELMAkGA1UEBhMCSUUxEjAQBgNVBAoTCUJhbHRpbW9y -ZTETMBEGA1UECxMKQ3liZXJUcnVzdDEiMCAGA1UEAxMZQmFsdGltb3JlIEN5YmVy -VHJ1c3QgUm9vdDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKMEuyKr -mD1X6CZymrV51Cni4eiVgLGw41uOKymaZN+hXe2wCQVt2yguzmKiYv60iNoS6zjr -IZ3AQSsBUnuId9Mcj8e6uYi1agnnc+gRQKfRzMpijS3ljwumUNKoUMMo6vWrJYeK -mpYcqWe4PwzV9/lSEy/CG9VwcPCPwBLKBsua4dnKM3p31vjsufFoREJIE9LAwqSu -XmD+tqYF/LTdB1kC1FkYmGP1pWPgkAx9XbIGevOF6uvUA65ehD5f/xXtabz5OTZy -dc93Uk3zyZAsuT3lySNTPx8kmCFcB5kpvcY67Oduhjprl3RjM71oGDHweI12v/ye -jl0qhqdNkNwnGjkCAwEAAaNFMEMwHQYDVR0OBBYEFOWdWTCCR1jMrPoIVDaGezq1 -BE3wMBIGA1UdEwEB/wQIMAYBAf8CAQMwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3 -DQEBBQUAA4IBAQCFDF2O5G9RaEIFoN27TyclhAO992T9Ldcw46QQF+vaKSm2eT92 
-9hkTI7gQCvlYpNRhcL0EYWoSihfVCr3FvDB81ukMJY2GQE/szKN+OMY3EU/t3Wgx -jkzSswF07r51XgdIGn9w/xZchMB5hbgF/X++ZRGjD8ACtPhSNzkE1akxehi/oCr0 -Epn3o0WC4zxe9Z2etciefC7IpJ5OCBRLbf1wbWsaY71k5h+3zvDyny67G7fyUIhz -ksLi4xaNmjICq44Y3ekQEe5+NauQrz4wlHrQMz2nZQ/1/I6eYs9HRCwBXbsdtTLS -R9I4LtD+gdwyah617jzV/OeBHRnDJELqYzmp ------END CERTIFICATE----- - -# Issuer: CN=AddTrust External CA Root O=AddTrust AB OU=AddTrust External TTP Network -# Subject: CN=AddTrust External CA Root O=AddTrust AB OU=AddTrust External TTP Network -# Label: "AddTrust External Root" -# Serial: 1 -# MD5 Fingerprint: 1d:35:54:04:85:78:b0:3f:42:42:4d:bf:20:73:0a:3f -# SHA1 Fingerprint: 02:fa:f3:e2:91:43:54:68:60:78:57:69:4d:f5:e4:5b:68:85:18:68 -# SHA256 Fingerprint: 68:7f:a4:51:38:22:78:ff:f0:c8:b1:1f:8d:43:d5:76:67:1c:6e:b2:bc:ea:b4:13:fb:83:d9:65:d0:6d:2f:f2 ------BEGIN CERTIFICATE----- -MIIENjCCAx6gAwIBAgIBATANBgkqhkiG9w0BAQUFADBvMQswCQYDVQQGEwJTRTEU -MBIGA1UEChMLQWRkVHJ1c3QgQUIxJjAkBgNVBAsTHUFkZFRydXN0IEV4dGVybmFs -IFRUUCBOZXR3b3JrMSIwIAYDVQQDExlBZGRUcnVzdCBFeHRlcm5hbCBDQSBSb290 -MB4XDTAwMDUzMDEwNDgzOFoXDTIwMDUzMDEwNDgzOFowbzELMAkGA1UEBhMCU0Ux -FDASBgNVBAoTC0FkZFRydXN0IEFCMSYwJAYDVQQLEx1BZGRUcnVzdCBFeHRlcm5h -bCBUVFAgTmV0d29yazEiMCAGA1UEAxMZQWRkVHJ1c3QgRXh0ZXJuYWwgQ0EgUm9v -dDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALf3GjPm8gAELTngTlvt -H7xsD821+iO2zt6bETOXpClMfZOfvUq8k+0DGuOPz+VtUFrWlymUWoCwSXrbLpX9 -uMq/NzgtHj6RQa1wVsfwTz/oMp50ysiQVOnGXw94nZpAPA6sYapeFI+eh6FqUNzX -mk6vBbOmcZSccbNQYArHE504B4YCqOmoaSYYkKtMsE8jqzpPhNjfzp/haW+710LX -a0Tkx63ubUFfclpxCDezeWWkWaCUN/cALw3CknLa0Dhy2xSoRcRdKn23tNbE7qzN -E0S3ySvdQwAl+mG5aWpYIxG3pzOPVnVZ9c0p10a3CitlttNCbxWyuHv77+ldU9U0 -WicCAwEAAaOB3DCB2TAdBgNVHQ4EFgQUrb2YejS0Jvf6xCZU7wO94CTLVBowCwYD -VR0PBAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wgZkGA1UdIwSBkTCBjoAUrb2YejS0 -Jvf6xCZU7wO94CTLVBqhc6RxMG8xCzAJBgNVBAYTAlNFMRQwEgYDVQQKEwtBZGRU -cnVzdCBBQjEmMCQGA1UECxMdQWRkVHJ1c3QgRXh0ZXJuYWwgVFRQIE5ldHdvcmsx -IjAgBgNVBAMTGUFkZFRydXN0IEV4dGVybmFsIENBIFJvb3SCAQEwDQYJKoZIhvcN -AQEFBQADggEBALCb4IUlwtYj4g+WBpKdQZic2YR5gdkeWxQHIzZlj7DYd7usQWxH -YINRsPkyPef89iYTx4AWpb9a/IfPeHmJIZriTAcKhjW88t5RxNKWt9x+Tu5w/Rw5 -6wwCURQtjr0W4MHfRnXnJK3s9EK0hZNwEGe6nQY1ShjTK3rMUUKhemPR5ruhxSvC -Nr4TDea9Y355e6cJDUCrat2PisP29owaQgVR1EX1n6diIWgVIEM8med8vSTYqZEX -c4g/VhsxOBi0cQ+azcgOno4uG+GMmIPLHzHxREzGBHNJdmAPx/i9F4BrLunMTA5a -mnkPIAou1Z5jJh5VkpTYghdae9C8x49OhgQ= ------END CERTIFICATE----- - -# Issuer: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. -# Subject: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. 
-# Label: "Entrust Root Certification Authority" -# Serial: 1164660820 -# MD5 Fingerprint: d6:a5:c3:ed:5d:dd:3e:00:c1:3d:87:92:1f:1d:3f:e4 -# SHA1 Fingerprint: b3:1e:b1:b7:40:e3:6c:84:02:da:dc:37:d4:4d:f5:d4:67:49:52:f9 -# SHA256 Fingerprint: 73:c1:76:43:4f:1b:c6:d5:ad:f4:5b:0e:76:e7:27:28:7c:8d:e5:76:16:c1:e6:e6:14:1a:2b:2c:bc:7d:8e:4c ------BEGIN CERTIFICATE----- -MIIEkTCCA3mgAwIBAgIERWtQVDANBgkqhkiG9w0BAQUFADCBsDELMAkGA1UEBhMC -VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xOTA3BgNVBAsTMHd3dy5lbnRydXN0 -Lm5ldC9DUFMgaXMgaW5jb3Jwb3JhdGVkIGJ5IHJlZmVyZW5jZTEfMB0GA1UECxMW -KGMpIDIwMDYgRW50cnVzdCwgSW5jLjEtMCsGA1UEAxMkRW50cnVzdCBSb290IENl -cnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA2MTEyNzIwMjM0MloXDTI2MTEyNzIw -NTM0MlowgbAxCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMTkw -NwYDVQQLEzB3d3cuZW50cnVzdC5uZXQvQ1BTIGlzIGluY29ycG9yYXRlZCBieSBy -ZWZlcmVuY2UxHzAdBgNVBAsTFihjKSAyMDA2IEVudHJ1c3QsIEluYy4xLTArBgNV -BAMTJEVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASIwDQYJ -KoZIhvcNAQEBBQADggEPADCCAQoCggEBALaVtkNC+sZtKm9I35RMOVcF7sN5EUFo -Nu3s/poBj6E4KPz3EEZmLk0eGrEaTsbRwJWIsMn/MYszA9u3g3s+IIRe7bJWKKf4 -4LlAcTfFy0cOlypowCKVYhXbR9n10Cv/gkvJrT7eTNuQgFA/CYqEAOwwCj0Yzfv9 -KlmaI5UXLEWeH25DeW0MXJj+SKfFI0dcXv1u5x609mhF0YaDW6KKjbHjKYD+JXGI -rb68j6xSlkuqUY3kEzEZ6E5Nn9uss2rVvDlUccp6en+Q3X0dgNmBu1kmwhH+5pPi -94DkZfs0Nw4pgHBNrziGLp5/V6+eF67rHMsoIV+2HNjnogQi+dPa2MsCAwEAAaOB -sDCBrTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zArBgNVHRAEJDAi -gA8yMDA2MTEyNzIwMjM0MlqBDzIwMjYxMTI3MjA1MzQyWjAfBgNVHSMEGDAWgBRo -kORnpKZTgMeGZqTx90tD+4S9bTAdBgNVHQ4EFgQUaJDkZ6SmU4DHhmak8fdLQ/uE -vW0wHQYJKoZIhvZ9B0EABBAwDhsIVjcuMTo0LjADAgSQMA0GCSqGSIb3DQEBBQUA -A4IBAQCT1DCw1wMgKtD5Y+iRDAUgqV8ZyntyTtSx29CW+1RaGSwMCPeyvIWonX9t -O1KzKtvn1ISMY/YPyyYBkVBs9F8U4pN0wBOeMDpQ47RgxRzwIkSNcUesyBrJ6Zua -AGAT/3B+XxFNSRuzFVJ7yVTav52Vr2ua2J7p8eRDjeIRRDq/r72DQnNSi6q7pynP -9WQcCk3RvKqsnyrQ/39/2n3qse0wJcGE2jTSW3iDVuycNsMm4hH2Z0kdkquM++v/ -eu6FSqdQgPCnXEqULl8FmTxSQeDNtGPPAUO6nIPcj2A781q0tHuu2guQOHXvgR1m -0vdXcDazv/wor3ElhVsT/h5/WrQ8 ------END CERTIFICATE----- - -# Issuer: CN=GeoTrust Global CA O=GeoTrust Inc. -# Subject: CN=GeoTrust Global CA O=GeoTrust Inc. 
-# Label: "GeoTrust Global CA" -# Serial: 144470 -# MD5 Fingerprint: f7:75:ab:29:fb:51:4e:b7:77:5e:ff:05:3c:99:8e:f5 -# SHA1 Fingerprint: de:28:f4:a4:ff:e5:b9:2f:a3:c5:03:d1:a3:49:a7:f9:96:2a:82:12 -# SHA256 Fingerprint: ff:85:6a:2d:25:1d:cd:88:d3:66:56:f4:50:12:67:98:cf:ab:aa:de:40:79:9c:72:2d:e4:d2:b5:db:36:a7:3a ------BEGIN CERTIFICATE----- -MIIDVDCCAjygAwIBAgIDAjRWMA0GCSqGSIb3DQEBBQUAMEIxCzAJBgNVBAYTAlVT -MRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMRswGQYDVQQDExJHZW9UcnVzdCBHbG9i -YWwgQ0EwHhcNMDIwNTIxMDQwMDAwWhcNMjIwNTIxMDQwMDAwWjBCMQswCQYDVQQG -EwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEbMBkGA1UEAxMSR2VvVHJ1c3Qg -R2xvYmFsIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2swYYzD9 -9BcjGlZ+W988bDjkcbd4kdS8odhM+KhDtgPpTSEHCIjaWC9mOSm9BXiLnTjoBbdq -fnGk5sRgprDvgOSJKA+eJdbtg/OtppHHmMlCGDUUna2YRpIuT8rxh0PBFpVXLVDv -iS2Aelet8u5fa9IAjbkU+BQVNdnARqN7csiRv8lVK83Qlz6cJmTM386DGXHKTubU -1XupGc1V3sjs0l44U+VcT4wt/lAjNvxm5suOpDkZALeVAjmRCw7+OC7RHQWa9k0+ -bw8HHa8sHo9gOeL6NlMTOdReJivbPagUvTLrGAMoUgRx5aszPeE4uwc2hGKceeoW -MPRfwCvocWvk+QIDAQABo1MwUTAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTA -ephojYn7qwVkDBF9qn1luMrMTjAfBgNVHSMEGDAWgBTAephojYn7qwVkDBF9qn1l -uMrMTjANBgkqhkiG9w0BAQUFAAOCAQEANeMpauUvXVSOKVCUn5kaFOSPeCpilKIn -Z57QzxpeR+nBsqTP3UEaBU6bS+5Kb1VSsyShNwrrZHYqLizz/Tt1kL/6cdjHPTfS -tQWVYrmm3ok9Nns4d0iXrKYgjy6myQzCsplFAMfOEVEiIuCl6rYVSAlk6l5PdPcF -PseKUgzbFbS9bZvlxrFUaKnjaZC2mqUPuLk/IH2uSrW4nOQdtqvmlKXBx4Ot2/Un -hw4EbNX/3aBd7YdStysVAq45pmp06drE57xNNB6pXE0zX5IJL4hmXXeXxx12E6nV -5fEWCRE11azbJHFwLJhWC9kXtNHjUStedejV0NxPNO3CBWaAocvmMw== ------END CERTIFICATE----- - -# Issuer: CN=GeoTrust Universal CA O=GeoTrust Inc. -# Subject: CN=GeoTrust Universal CA O=GeoTrust Inc. -# Label: "GeoTrust Universal CA" -# Serial: 1 -# MD5 Fingerprint: 92:65:58:8b:a2:1a:31:72:73:68:5c:b4:a5:7a:07:48 -# SHA1 Fingerprint: e6:21:f3:35:43:79:05:9a:4b:68:30:9d:8a:2f:74:22:15:87:ec:79 -# SHA256 Fingerprint: a0:45:9b:9f:63:b2:25:59:f5:fa:5d:4c:6d:b3:f9:f7:2f:f1:93:42:03:35:78:f0:73:bf:1d:1b:46:cb:b9:12 ------BEGIN CERTIFICATE----- -MIIFaDCCA1CgAwIBAgIBATANBgkqhkiG9w0BAQUFADBFMQswCQYDVQQGEwJVUzEW -MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEeMBwGA1UEAxMVR2VvVHJ1c3QgVW5pdmVy -c2FsIENBMB4XDTA0MDMwNDA1MDAwMFoXDTI5MDMwNDA1MDAwMFowRTELMAkGA1UE -BhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xHjAcBgNVBAMTFUdlb1RydXN0 -IFVuaXZlcnNhbCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAKYV -VaCjxuAfjJ0hUNfBvitbtaSeodlyWL0AG0y/YckUHUWCq8YdgNY96xCcOq9tJPi8 -cQGeBvV8Xx7BDlXKg5pZMK4ZyzBIle0iN430SppyZj6tlcDgFgDgEB8rMQ7XlFTT -QjOgNB0eRXbdT8oYN+yFFXoZCPzVx5zw8qkuEKmS5j1YPakWaDwvdSEYfyh3peFh -F7em6fgemdtzbvQKoiFs7tqqhZJmr/Z6a4LauiIINQ/PQvE1+mrufislzDoR5G2v -c7J2Ha3QsnhnGqQ5HFELZ1aD/ThdDc7d8Lsrlh/eezJS/R27tQahsiFepdaVaH/w -mZ7cRQg+59IJDTWU3YBOU5fXtQlEIGQWFwMCTFMNaN7VqnJNk22CDtucvc+081xd -VHppCZbW2xHBjXWotM85yM48vCR85mLK4b19p71XZQvk/iXttmkQ3CgaRr0BHdCX -teGYO8A3ZNY9lO4L4fUorgtWv3GLIylBjobFS1J72HGrH4oVpjuDWtdYAVHGTEHZ -f9hBZ3KiKN9gg6meyHv8U3NyWfWTehd2Ds735VzZC1U0oqpbtWpU5xPKV+yXbfRe -Bi9Fi1jUIxaS5BZuKGNZMN9QAZxjiRqf2xeUgnA3wySemkfWWspOqGmJch+RbNt+ -nhutxx9z3SxPGWX9f5NAEC7S8O08ni4oPmkmM8V7AgMBAAGjYzBhMA8GA1UdEwEB -/wQFMAMBAf8wHQYDVR0OBBYEFNq7LqqwDLiIJlF0XG0D08DYj3rWMB8GA1UdIwQY -MBaAFNq7LqqwDLiIJlF0XG0D08DYj3rWMA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG -9w0BAQUFAAOCAgEAMXjmx7XfuJRAyXHEqDXsRh3ChfMoWIawC/yOsjmPRFWrZIRc -aanQmjg8+uUfNeVE44B5lGiku8SfPeE0zTBGi1QrlaXv9z+ZhP015s8xxtxqv6fX -IwjhmF7DWgh2qaavdy+3YL1ERmrvl/9zlcGO6JP7/TG37FcREUWbMPEaiDnBTzyn -ANXH/KttgCJwpQzgXQQpAvvLoJHRfNbDflDVnVi+QTjruXU8FdmbyUqDWcDaU/0z -uzYYm4UPFd3uLax2k7nZAY1IEKj79TiG8dsKxr2EoyNB3tZ3b4XUhRxQ4K5RirqN 
-Pnbiucon8l+f725ZDQbYKxek0nxru18UGkiPGkzns0ccjkxFKyDuSN/n3QmOGKja -QI2SJhFTYXNd673nxE0pN2HrrDktZy4W1vUAg4WhzH92xH3kt0tm7wNFYGm2DFKW -koRepqO1pD4r2czYG0eq8kTaT/kD6PAUyz/zg97QwVTjt+gKN02LIFkDMBmhLMi9 -ER/frslKxfMnZmaGrGiR/9nmUxwPi1xpZQomyB40w11Re9epnAahNt3ViZS82eQt -DF4JbAiXfKM9fJP/P6EUp8+1Xevb2xzEdt+Iub1FBZUbrvxGakyvSOPOrg/Sfuvm -bJxPgWp6ZKy7PtXny3YuxadIwVyQD8vIP/rmMuGNG2+k5o7Y+SlIis5z/iw= ------END CERTIFICATE----- - -# Issuer: CN=GeoTrust Universal CA 2 O=GeoTrust Inc. -# Subject: CN=GeoTrust Universal CA 2 O=GeoTrust Inc. -# Label: "GeoTrust Universal CA 2" -# Serial: 1 -# MD5 Fingerprint: 34:fc:b8:d0:36:db:9e:14:b3:c2:f2:db:8f:e4:94:c7 -# SHA1 Fingerprint: 37:9a:19:7b:41:85:45:35:0c:a6:03:69:f3:3c:2e:af:47:4f:20:79 -# SHA256 Fingerprint: a0:23:4f:3b:c8:52:7c:a5:62:8e:ec:81:ad:5d:69:89:5d:a5:68:0d:c9:1d:1c:b8:47:7f:33:f8:78:b9:5b:0b ------BEGIN CERTIFICATE----- -MIIFbDCCA1SgAwIBAgIBATANBgkqhkiG9w0BAQUFADBHMQswCQYDVQQGEwJVUzEW -MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEgMB4GA1UEAxMXR2VvVHJ1c3QgVW5pdmVy -c2FsIENBIDIwHhcNMDQwMzA0MDUwMDAwWhcNMjkwMzA0MDUwMDAwWjBHMQswCQYD -VQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEgMB4GA1UEAxMXR2VvVHJ1 -c3QgVW5pdmVyc2FsIENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoIC -AQCzVFLByT7y2dyxUxpZKeexw0Uo5dfR7cXFS6GqdHtXr0om/Nj1XqduGdt0DE81 -WzILAePb63p3NeqqWuDW6KFXlPCQo3RWlEQwAx5cTiuFJnSCegx2oG9NzkEtoBUG -FF+3Qs17j1hhNNwqCPkuwwGmIkQcTAeC5lvO0Ep8BNMZcyfwqph/Lq9O64ceJHdq -XbboW0W63MOhBW9Wjo8QJqVJwy7XQYci4E+GymC16qFjwAGXEHm9ADwSbSsVsaxL -se4YuU6W3Nx2/zu+z18DwPw76L5GG//aQMJS9/7jOvdqdzXQ2o3rXhhqMcceujwb -KNZrVMaqW9eiLBsZzKIC9ptZvTdrhrVtgrrY6slWvKk2WP0+GfPtDCapkzj4T8Fd -IgbQl+rhrcZV4IErKIM6+vR7IVEAvlI4zs1meaj0gVbi0IMJR1FbUGrP20gaXT73 -y/Zl92zxlfgCOzJWgjl6W70viRu/obTo/3+NjN8D8WBOWBFM66M/ECuDmgFz2ZRt -hAAnZqzwcEAJQpKtT5MNYQlRJNiS1QuUYbKHsu3/mjX/hVTK7URDrBs8FmtISgoc -QIgfksILAAX/8sgCSqSqqcyZlpwvWOB94b67B9xfBHJcMTTD7F8t4D1kkCLm0ey4 -Lt1ZrtmhN79UNdxzMk+MBB4zsslG8dhcyFVQyWi9qLo2CQIDAQABo2MwYTAPBgNV -HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR281Xh+qQ2+/CfXGJx7Tz0RzgQKzAfBgNV -HSMEGDAWgBR281Xh+qQ2+/CfXGJx7Tz0RzgQKzAOBgNVHQ8BAf8EBAMCAYYwDQYJ -KoZIhvcNAQEFBQADggIBAGbBxiPz2eAubl/oz66wsCVNK/g7WJtAJDday6sWSf+z -dXkzoS9tcBc0kf5nfo/sm+VegqlVHy/c1FEHEv6sFj4sNcZj/NwQ6w2jqtB8zNHQ -L1EuxBRa3ugZ4T7GzKQp5y6EqgYweHZUcyiYWTjgAA1i00J9IZ+uPTqM1fp3DRgr -Fg5fNuH8KrUwJM/gYwx7WBr+mbpCErGR9Hxo4sjoryzqyX6uuyo9DRXcNJW2GHSo -ag/HtPQTxORb7QrSpJdMKu0vbBKJPfEncKpqA1Ihn0CoZ1Dy81of398j9tx4TuaY -T1U6U+Pv8vSfx3zYWK8pIpe44L2RLrB27FcRz+8pRPPphXpgY+RdM4kX2TGq2tbz -GDVyz4crL2MjhF2EjD9XoIj8mZEoJmmZ1I+XRL6O1UixpCgp8RW04eWe3fiPpm8m -1wk8OhwRDqZsN/etRIcsKMfYdIKz0G9KV7s1KSegi+ghp4dkNl3M2Basx7InQJJV -OCiNUW7dFGdTbHFcJoRNdVq2fmBWqU2t+5sel/MN2dKXVHfaPRK34B7vCAas+YWH -6aLcr34YEoP9VhdBLtUpgn2Z9DH2canPLAEnpQW5qrJITirvn5NSUZU8UnOOVkwX -QMAJKOSLakhT2+zNVVXxxvjpoixMptEmX36vWkzaH6byHCx+rgIW0lbQL1dTR+iS ------END CERTIFICATE----- - -# Issuer: CN=AAA Certificate Services O=Comodo CA Limited -# Subject: CN=AAA Certificate Services O=Comodo CA Limited -# Label: "Comodo AAA Services root" -# Serial: 1 -# MD5 Fingerprint: 49:79:04:b0:eb:87:19:ac:47:b0:bc:11:51:9b:74:d0 -# SHA1 Fingerprint: d1:eb:23:a4:6d:17:d6:8f:d9:25:64:c2:f1:f1:60:17:64:d8:e3:49 -# SHA256 Fingerprint: d7:a7:a0:fb:5d:7e:27:31:d7:71:e9:48:4e:bc:de:f7:1d:5f:0c:3e:0a:29:48:78:2b:c8:3e:e0:ea:69:9e:f4 ------BEGIN CERTIFICATE----- -MIIEMjCCAxqgAwIBAgIBATANBgkqhkiG9w0BAQUFADB7MQswCQYDVQQGEwJHQjEb -MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow -GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDEhMB8GA1UEAwwYQUFBIENlcnRpZmlj -YXRlIFNlcnZpY2VzMB4XDTA0MDEwMTAwMDAwMFoXDTI4MTIzMTIzNTk1OVowezEL 
-MAkGA1UEBhMCR0IxGzAZBgNVBAgMEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE -BwwHU2FsZm9yZDEaMBgGA1UECgwRQ29tb2RvIENBIExpbWl0ZWQxITAfBgNVBAMM -GEFBQSBDZXJ0aWZpY2F0ZSBTZXJ2aWNlczCCASIwDQYJKoZIhvcNAQEBBQADggEP -ADCCAQoCggEBAL5AnfRu4ep2hxxNRUSOvkbIgwadwSr+GB+O5AL686tdUIoWMQua -BtDFcCLNSS1UY8y2bmhGC1Pqy0wkwLxyTurxFa70VJoSCsN6sjNg4tqJVfMiWPPe -3M/vg4aijJRPn2jymJBGhCfHdr/jzDUsi14HZGWCwEiwqJH5YZ92IFCokcdmtet4 -YgNW8IoaE+oxox6gmf049vYnMlhvB/VruPsUK6+3qszWY19zjNoFmag4qMsXeDZR -rOme9Hg6jc8P2ULimAyrL58OAd7vn5lJ8S3frHRNG5i1R8XlKdH5kBjHYpy+g8cm -ez6KJcfA3Z3mNWgQIJ2P2N7Sw4ScDV7oL8kCAwEAAaOBwDCBvTAdBgNVHQ4EFgQU -oBEKIz6W8Qfs4q8p74Klf9AwpLQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQF -MAMBAf8wewYDVR0fBHQwcjA4oDagNIYyaHR0cDovL2NybC5jb21vZG9jYS5jb20v -QUFBQ2VydGlmaWNhdGVTZXJ2aWNlcy5jcmwwNqA0oDKGMGh0dHA6Ly9jcmwuY29t -b2RvLm5ldC9BQUFDZXJ0aWZpY2F0ZVNlcnZpY2VzLmNybDANBgkqhkiG9w0BAQUF -AAOCAQEACFb8AvCb6P+k+tZ7xkSAzk/ExfYAWMymtrwUSWgEdujm7l3sAg9g1o1Q -GE8mTgHj5rCl7r+8dFRBv/38ErjHT1r0iWAFf2C3BUrz9vHCv8S5dIa2LX1rzNLz -Rt0vxuBqw8M0Ayx9lt1awg6nCpnBBYurDC/zXDrPbDdVCYfeU0BsWO/8tqtlbgT2 -G9w84FoVxp7Z8VlIMCFlA2zs6SFz7JsDoeA3raAVGI/6ugLOpyypEBMs1OUIJqsi -l2D4kF501KKaU73yqWjgom7C12yxow+ev+to51byrvLjKzg6CYG1a4XXvi3tPxq3 -smPi9WIsgtRqAEFQ8TmDn5XpNpaYbg== ------END CERTIFICATE----- - -# Issuer: CN=QuoVadis Root Certification Authority O=QuoVadis Limited OU=Root Certification Authority -# Subject: CN=QuoVadis Root Certification Authority O=QuoVadis Limited OU=Root Certification Authority -# Label: "QuoVadis Root CA" -# Serial: 985026699 -# MD5 Fingerprint: 27:de:36:fe:72:b7:00:03:00:9d:f4:f0:1e:6c:04:24 -# SHA1 Fingerprint: de:3f:40:bd:50:93:d3:9b:6c:60:f6:da:bc:07:62:01:00:89:76:c9 -# SHA256 Fingerprint: a4:5e:de:3b:bb:f0:9c:8a:e1:5c:72:ef:c0:72:68:d6:93:a2:1c:99:6f:d5:1e:67:ca:07:94:60:fd:6d:88:73 ------BEGIN CERTIFICATE----- -MIIF0DCCBLigAwIBAgIEOrZQizANBgkqhkiG9w0BAQUFADB/MQswCQYDVQQGEwJC -TTEZMBcGA1UEChMQUXVvVmFkaXMgTGltaXRlZDElMCMGA1UECxMcUm9vdCBDZXJ0 -aWZpY2F0aW9uIEF1dGhvcml0eTEuMCwGA1UEAxMlUXVvVmFkaXMgUm9vdCBDZXJ0 -aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wMTAzMTkxODMzMzNaFw0yMTAzMTcxODMz -MzNaMH8xCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMSUw -IwYDVQQLExxSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MS4wLAYDVQQDEyVR -dW9WYWRpcyBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG -9w0BAQEFAAOCAQ8AMIIBCgKCAQEAv2G1lVO6V/z68mcLOhrfEYBklbTRvM16z/Yp -li4kVEAkOPcahdxYTMukJ0KX0J+DisPkBgNbAKVRHnAEdOLB1Dqr1607BxgFjv2D -rOpm2RgbaIr1VxqYuvXtdj182d6UajtLF8HVj71lODqV0D1VNk7feVcxKh7YWWVJ -WCCYfqtffp/p1k3sg3Spx2zY7ilKhSoGFPlU5tPaZQeLYzcS19Dsw3sgQUSj7cug -F+FxZc4dZjH3dgEZyH0DWLaVSR2mEiboxgx24ONmy+pdpibu5cxfvWenAScOospU -xbF6lR1xHkopigPcakXBpBlebzbNw6Kwt/5cOOJSvPhEQ+aQuwIDAQABo4ICUjCC -Ak4wPQYIKwYBBQUHAQEEMTAvMC0GCCsGAQUFBzABhiFodHRwczovL29jc3AucXVv -dmFkaXNvZmZzaG9yZS5jb20wDwYDVR0TAQH/BAUwAwEB/zCCARoGA1UdIASCAREw -ggENMIIBCQYJKwYBBAG+WAABMIH7MIHUBggrBgEFBQcCAjCBxxqBxFJlbGlhbmNl -IG9uIHRoZSBRdW9WYWRpcyBSb290IENlcnRpZmljYXRlIGJ5IGFueSBwYXJ0eSBh -c3N1bWVzIGFjY2VwdGFuY2Ugb2YgdGhlIHRoZW4gYXBwbGljYWJsZSBzdGFuZGFy -ZCB0ZXJtcyBhbmQgY29uZGl0aW9ucyBvZiB1c2UsIGNlcnRpZmljYXRpb24gcHJh -Y3RpY2VzLCBhbmQgdGhlIFF1b1ZhZGlzIENlcnRpZmljYXRlIFBvbGljeS4wIgYI -KwYBBQUHAgEWFmh0dHA6Ly93d3cucXVvdmFkaXMuYm0wHQYDVR0OBBYEFItLbe3T -KbkGGew5Oanwl4Rqy+/fMIGuBgNVHSMEgaYwgaOAFItLbe3TKbkGGew5Oanwl4Rq -y+/foYGEpIGBMH8xCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1p -dGVkMSUwIwYDVQQLExxSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MS4wLAYD -VQQDEyVRdW9WYWRpcyBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggQ6tlCL -MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQUFAAOCAQEAitQUtf70mpKnGdSk 
-fnIYj9lofFIk3WdvOXrEql494liwTXCYhGHoG+NpGA7O+0dQoE7/8CQfvbLO9Sf8 -7C9TqnN7Az10buYWnuulLsS/VidQK2K6vkscPFVcQR0kvoIgR13VRH56FmjffU1R -cHhXHTMe/QKZnAzNCgVPx7uOpHX6Sm2xgI4JVrmcGmD+XcHXetwReNDWXcG31a0y -mQM6isxUJTkxgXsTIlG6Rmyhu576BGxJJnSP0nPrzDCi5upZIof4l/UO/erMkqQW -xFIY6iHOsfHmhIHluqmGKPJDWl0Snawe2ajlCmqnf6CHKc/yiU3U7MXi5nrQNiOK -SnQ2+Q== ------END CERTIFICATE----- - -# Issuer: CN=QuoVadis Root CA 2 O=QuoVadis Limited -# Subject: CN=QuoVadis Root CA 2 O=QuoVadis Limited -# Label: "QuoVadis Root CA 2" -# Serial: 1289 -# MD5 Fingerprint: 5e:39:7b:dd:f8:ba:ec:82:e9:ac:62:ba:0c:54:00:2b -# SHA1 Fingerprint: ca:3a:fb:cf:12:40:36:4b:44:b2:16:20:88:80:48:39:19:93:7c:f7 -# SHA256 Fingerprint: 85:a0:dd:7d:d7:20:ad:b7:ff:05:f8:3d:54:2b:20:9d:c7:ff:45:28:f7:d6:77:b1:83:89:fe:a5:e5:c4:9e:86 ------BEGIN CERTIFICATE----- -MIIFtzCCA5+gAwIBAgICBQkwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x -GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv -b3QgQ0EgMjAeFw0wNjExMjQxODI3MDBaFw0zMTExMjQxODIzMzNaMEUxCzAJBgNV -BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W -YWRpcyBSb290IENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCa -GMpLlA0ALa8DKYrwD4HIrkwZhR0In6spRIXzL4GtMh6QRr+jhiYaHv5+HBg6XJxg -Fyo6dIMzMH1hVBHL7avg5tKifvVrbxi3Cgst/ek+7wrGsxDp3MJGF/hd/aTa/55J -WpzmM+Yklvc/ulsrHHo1wtZn/qtmUIttKGAr79dgw8eTvI02kfN/+NsRE8Scd3bB -rrcCaoF6qUWD4gXmuVbBlDePSHFjIuwXZQeVikvfj8ZaCuWw419eaxGrDPmF60Tp -+ARz8un+XJiM9XOva7R+zdRcAitMOeGylZUtQofX1bOQQ7dsE/He3fbE+Ik/0XX1 -ksOR1YqI0JDs3G3eicJlcZaLDQP9nL9bFqyS2+r+eXyt66/3FsvbzSUr5R/7mp/i -Ucw6UwxI5g69ybR2BlLmEROFcmMDBOAENisgGQLodKcftslWZvB1JdxnwQ5hYIiz -PtGo/KPaHbDRsSNU30R2be1B2MGyIrZTHN81Hdyhdyox5C315eXbyOD/5YDXC2Og -/zOhD7osFRXql7PSorW+8oyWHhqPHWykYTe5hnMz15eWniN9gqRMgeKh0bpnX5UH -oycR7hYQe7xFSkyyBNKr79X9DFHOUGoIMfmR2gyPZFwDwzqLID9ujWc9Otb+fVuI -yV77zGHcizN300QyNQliBJIWENieJ0f7OyHj+OsdWwIDAQABo4GwMIGtMA8GA1Ud -EwEB/wQFMAMBAf8wCwYDVR0PBAQDAgEGMB0GA1UdDgQWBBQahGK8SEwzJQTU7tD2 -A8QZRtGUazBuBgNVHSMEZzBlgBQahGK8SEwzJQTU7tD2A8QZRtGUa6FJpEcwRTEL -MAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMT -ElF1b1ZhZGlzIFJvb3QgQ0EgMoICBQkwDQYJKoZIhvcNAQEFBQADggIBAD4KFk2f -BluornFdLwUvZ+YTRYPENvbzwCYMDbVHZF34tHLJRqUDGCdViXh9duqWNIAXINzn -g/iN/Ae42l9NLmeyhP3ZRPx3UIHmfLTJDQtyU/h2BwdBR5YM++CCJpNVjP4iH2Bl -fF/nJrP3MpCYUNQ3cVX2kiF495V5+vgtJodmVjB3pjd4M1IQWK4/YY7yarHvGH5K -WWPKjaJW1acvvFYfzznB4vsKqBUsfU16Y8Zsl0Q80m/DShcK+JDSV6IZUaUtl0Ha -B0+pUNqQjZRG4T7wlP0QADj1O+hA4bRuVhogzG9Yje0uRY/W6ZM/57Es3zrWIozc -hLsib9D45MY56QSIPMO661V6bYCZJPVsAfv4l7CUW+v90m/xd2gNNWQjrLhVoQPR -TUIZ3Ph1WVaj+ahJefivDrkRoHy3au000LYmYjgahwz46P0u05B/B5EqHdZ+XIWD -mbA4CD/pXvk1B+TJYm5Xf6dQlfe6yJvmjqIBxdZmv3lh8zwc4bmCXF2gw+nYSL0Z -ohEUGW6yhhtoPkg3Goi3XZZenMfvJ2II4pEZXNLxId26F0KCl3GBUzGpn/Z9Yr9y -4aOTHcyKJloJONDO1w2AFrR4pTqHTI2KpdVGl/IsELm8VCLAAVBpQ570su9t+Oza -8eOx79+Rj1QqCyXBJhnEUhAFZdWCEOrCMc0u ------END CERTIFICATE----- - -# Issuer: CN=QuoVadis Root CA 3 O=QuoVadis Limited -# Subject: CN=QuoVadis Root CA 3 O=QuoVadis Limited -# Label: "QuoVadis Root CA 3" -# Serial: 1478 -# MD5 Fingerprint: 31:85:3c:62:94:97:63:b9:aa:fd:89:4e:af:6f:e0:cf -# SHA1 Fingerprint: 1f:49:14:f7:d8:74:95:1d:dd:ae:02:c0:be:fd:3a:2d:82:75:51:85 -# SHA256 Fingerprint: 18:f1:fc:7f:20:5d:f8:ad:dd:eb:7f:e0:07:dd:57:e3:af:37:5a:9c:4d:8d:73:54:6b:f4:f1:fe:d1:e1:8d:35 ------BEGIN CERTIFICATE----- -MIIGnTCCBIWgAwIBAgICBcYwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x -GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv -b3QgQ0EgMzAeFw0wNjExMjQxOTExMjNaFw0zMTExMjQxOTA2NDRaMEUxCzAJBgNV 
-BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W -YWRpcyBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDM -V0IWVJzmmNPTTe7+7cefQzlKZbPoFog02w1ZkXTPkrgEQK0CSzGrvI2RaNggDhoB -4hp7Thdd4oq3P5kazethq8Jlph+3t723j/z9cI8LoGe+AaJZz3HmDyl2/7FWeUUr -H556VOijKTVopAFPD6QuN+8bv+OPEKhyq1hX51SGyMnzW9os2l2ObjyjPtr7guXd -8lyyBTNvijbO0BNO/79KDDRMpsMhvVAEVeuxu537RR5kFd5VAYwCdrXLoT9Cabwv -vWhDFlaJKjdhkf2mrk7AyxRllDdLkgbvBNDInIjbC3uBr7E9KsRlOni27tyAsdLT -mZw67mtaa7ONt9XOnMK+pUsvFrGeaDsGb659n/je7Mwpp5ijJUMv7/FfJuGITfhe -btfZFG4ZM2mnO4SJk8RTVROhUXhA+LjJou57ulJCg54U7QVSWllWp5f8nT8KKdjc -T5EOE7zelaTfi5m+rJsziO+1ga8bxiJTyPbH7pcUsMV8eFLI8M5ud2CEpukqdiDt -WAEXMJPpGovgc2PZapKUSU60rUqFxKMiMPwJ7Wgic6aIDFUhWMXhOp8q3crhkODZ -c6tsgLjoC2SToJyMGf+z0gzskSaHirOi4XCPLArlzW1oUevaPwV/izLmE1xr/l9A -4iLItLRkT9a6fUg+qGkM17uGcclzuD87nSVL2v9A6wIDAQABo4IBlTCCAZEwDwYD -VR0TAQH/BAUwAwEB/zCB4QYDVR0gBIHZMIHWMIHTBgkrBgEEAb5YAAMwgcUwgZMG -CCsGAQUFBwICMIGGGoGDQW55IHVzZSBvZiB0aGlzIENlcnRpZmljYXRlIGNvbnN0 -aXR1dGVzIGFjY2VwdGFuY2Ugb2YgdGhlIFF1b1ZhZGlzIFJvb3QgQ0EgMyBDZXJ0 -aWZpY2F0ZSBQb2xpY3kgLyBDZXJ0aWZpY2F0aW9uIFByYWN0aWNlIFN0YXRlbWVu -dC4wLQYIKwYBBQUHAgEWIWh0dHA6Ly93d3cucXVvdmFkaXNnbG9iYWwuY29tL2Nw -czALBgNVHQ8EBAMCAQYwHQYDVR0OBBYEFPLAE+CCQz777i9nMpY1XNu4ywLQMG4G -A1UdIwRnMGWAFPLAE+CCQz777i9nMpY1XNu4ywLQoUmkRzBFMQswCQYDVQQGEwJC -TTEZMBcGA1UEChMQUXVvVmFkaXMgTGltaXRlZDEbMBkGA1UEAxMSUXVvVmFkaXMg -Um9vdCBDQSAzggIFxjANBgkqhkiG9w0BAQUFAAOCAgEAT62gLEz6wPJv92ZVqyM0 -7ucp2sNbtrCD2dDQ4iH782CnO11gUyeim/YIIirnv6By5ZwkajGxkHon24QRiSem -d1o417+shvzuXYO8BsbRd2sPbSQvS3pspweWyuOEn62Iix2rFo1bZhfZFvSLgNLd -+LJ2w/w4E6oM3kJpK27zPOuAJ9v1pkQNn1pVWQvVDVJIxa6f8i+AxeoyUDUSly7B -4f/xI4hROJ/yZlZ25w9Rl6VSDE1JUZU2Pb+iSwwQHYaZTKrzchGT5Or2m9qoXadN -t54CrnMAyNojA+j56hl0YgCUyyIgvpSnWbWCar6ZeXqp8kokUvd0/bpO5qgdAm6x -DYBEwa7TIzdfu4V8K5Iu6H6li92Z4b8nby1dqnuH/grdS/yO9SbkbnBCbjPsMZ57 -k8HkyWkaPcBrTiJt7qtYTcbQQcEr6k8Sh17rRdhs9ZgC06DYVYoGmRmioHfRMJ6s -zHXug/WwYjnPbFfiTNKRCw51KBuav/0aQ/HKd/s7j2G4aSgWQgRecCocIdiP4b0j -Wy10QJLZYxkNc91pvGJHvOB0K7Lrfb5BG7XARsWhIstfTsEokt4YutUqKLsRixeT -mJlglFwjz1onl14LBQaTNx47aTbrqZ5hHY8y2o4M1nQ+ewkk2gF3R8Q7zTSMmfXK -4SVhM7JZG+Ju1zdXtg2pEto= ------END CERTIFICATE----- - -# Issuer: O=SECOM Trust.net OU=Security Communication RootCA1 -# Subject: O=SECOM Trust.net OU=Security Communication RootCA1 -# Label: "Security Communication Root CA" -# Serial: 0 -# MD5 Fingerprint: f1:bc:63:6a:54:e0:b5:27:f5:cd:e7:1a:e3:4d:6e:4a -# SHA1 Fingerprint: 36:b1:2b:49:f9:81:9e:d7:4c:9e:bc:38:0f:c6:56:8f:5d:ac:b2:f7 -# SHA256 Fingerprint: e7:5e:72:ed:9f:56:0e:ec:6e:b4:80:00:73:a4:3f:c3:ad:19:19:5a:39:22:82:01:78:95:97:4a:99:02:6b:6c ------BEGIN CERTIFICATE----- -MIIDWjCCAkKgAwIBAgIBADANBgkqhkiG9w0BAQUFADBQMQswCQYDVQQGEwJKUDEY -MBYGA1UEChMPU0VDT00gVHJ1c3QubmV0MScwJQYDVQQLEx5TZWN1cml0eSBDb21t -dW5pY2F0aW9uIFJvb3RDQTEwHhcNMDMwOTMwMDQyMDQ5WhcNMjMwOTMwMDQyMDQ5 -WjBQMQswCQYDVQQGEwJKUDEYMBYGA1UEChMPU0VDT00gVHJ1c3QubmV0MScwJQYD -VQQLEx5TZWN1cml0eSBDb21tdW5pY2F0aW9uIFJvb3RDQTEwggEiMA0GCSqGSIb3 -DQEBAQUAA4IBDwAwggEKAoIBAQCzs/5/022x7xZ8V6UMbXaKL0u/ZPtM7orw8yl8 -9f/uKuDp6bpbZCKamm8sOiZpUQWZJtzVHGpxxpp9Hp3dfGzGjGdnSj74cbAZJ6kJ -DKaVv0uMDPpVmDvY6CKhS3E4eayXkmmziX7qIWgGmBSWh9JhNrxtJ1aeV+7AwFb9 -Ms+k2Y7CI9eNqPPYJayX5HA49LY6tJ07lyZDo6G8SVlyTCMwhwFY9k6+HGhWZq/N -QV3Is00qVUarH9oe4kA92819uZKAnDfdDJZkndwi92SL32HeFZRSFaB9UslLqCHJ -xrHty8OVYNEP8Ktw+N/LTX7s1vqr2b1/VPKl6Xn62dZ2JChzAgMBAAGjPzA9MB0G -A1UdDgQWBBSgc0mZaNyFW2XjmygvV5+9M7wHSDALBgNVHQ8EBAMCAQYwDwYDVR0T -AQH/BAUwAwEB/zANBgkqhkiG9w0BAQUFAAOCAQEAaECpqLvkT115swW1F7NgE+vG 
-kl3g0dNq/vu+m22/xwVtWSDEHPC32oRYAmP6SBbvT6UL90qY8j+eG61Ha2POCEfr -Uj94nK9NrvjVT8+amCoQQTlSxN3Zmw7vkwGusi7KaEIkQmywszo+zenaSMQVy+n5 -Bw+SUEmK3TGXX8npN6o7WWWXlDLJs58+OmJYxUmtYg5xpTKqL8aJdkNAExNnPaJU -JRDL8Try2frbSVa7pv6nQTXD4IhhyYjH3zYQIphZ6rBK+1YWc26sTfcioU+tHXot -RSflMMFe8toTyyVCUZVHA4xsIcx0Qu1T/zOLjw9XARYvz6buyXAiFL39vmwLAw== ------END CERTIFICATE----- - -# Issuer: CN=Sonera Class2 CA O=Sonera -# Subject: CN=Sonera Class2 CA O=Sonera -# Label: "Sonera Class 2 Root CA" -# Serial: 29 -# MD5 Fingerprint: a3:ec:75:0f:2e:88:df:fa:48:01:4e:0b:5c:48:6f:fb -# SHA1 Fingerprint: 37:f7:6d:e6:07:7c:90:c5:b1:3e:93:1a:b7:41:10:b4:f2:e4:9a:27 -# SHA256 Fingerprint: 79:08:b4:03:14:c1:38:10:0b:51:8d:07:35:80:7f:fb:fc:f8:51:8a:00:95:33:71:05:ba:38:6b:15:3d:d9:27 ------BEGIN CERTIFICATE----- -MIIDIDCCAgigAwIBAgIBHTANBgkqhkiG9w0BAQUFADA5MQswCQYDVQQGEwJGSTEP -MA0GA1UEChMGU29uZXJhMRkwFwYDVQQDExBTb25lcmEgQ2xhc3MyIENBMB4XDTAx -MDQwNjA3Mjk0MFoXDTIxMDQwNjA3Mjk0MFowOTELMAkGA1UEBhMCRkkxDzANBgNV -BAoTBlNvbmVyYTEZMBcGA1UEAxMQU29uZXJhIENsYXNzMiBDQTCCASIwDQYJKoZI -hvcNAQEBBQADggEPADCCAQoCggEBAJAXSjWdyvANlsdE+hY3/Ei9vX+ALTU74W+o -Z6m/AxxNjG8yR9VBaKQTBME1DJqEQ/xcHf+Js+gXGM2RX/uJ4+q/Tl18GybTdXnt -5oTjV+WtKcT0OijnpXuENmmz/V52vaMtmdOQTiMofRhj8VQ7Jp12W5dCsv+u8E7s -3TmVToMGf+dJQMjFAbJUWmYdPfz56TwKnoG4cPABi+QjVHzIrviQHgCWctRUz2Ej -vOr7nQKV0ba5cTppCD8PtOFCx4j1P5iop7oc4HFx71hXgVB6XGt0Rg6DA5jDjqhu -8nYybieDwnPz3BjotJPqdURrBGAgcVeHnfO+oJAjPYok4doh28MCAwEAAaMzMDEw -DwYDVR0TAQH/BAUwAwEB/zARBgNVHQ4ECgQISqCqWITTXjwwCwYDVR0PBAQDAgEG -MA0GCSqGSIb3DQEBBQUAA4IBAQBazof5FnIVV0sd2ZvnoiYw7JNn39Yt0jSv9zil -zqsWuasvfDXLrNAPtEwr/IDva4yRXzZ299uzGxnq9LIR/WFxRL8oszodv7ND6J+/ -3DEIcbCdjdY0RzKQxmUk96BKfARzjzlvF4xytb1LyHr4e4PDKE6cCepnP7JnBBvD -FNr450kkkdAdavphOe9r5yF1BgfYErQhIHBCcYHaPJo2vqZbDWpsmh+Re/n570K6 -Tk6ezAyNlNzZRZxe7EJQY670XcSxEtzKO6gunRRaBXW37Ndj4ro1tgQIkejanZz2 -ZrUYrAqmVCY0M9IbwdR/GjqOC6oybtv8TyWf2TLHllpwrN9M ------END CERTIFICATE----- - -# Issuer: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com -# Subject: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com -# Label: "XRamp Global CA Root" -# Serial: 107108908803651509692980124233745014957 -# MD5 Fingerprint: a1:0b:44:b3:ca:10:d8:00:6e:9d:0f:d8:0f:92:0a:d1 -# SHA1 Fingerprint: b8:01:86:d1:eb:9c:86:a5:41:04:cf:30:54:f3:4c:52:b7:e5:58:c6 -# SHA256 Fingerprint: ce:cd:dc:90:50:99:d8:da:df:c5:b1:d2:09:b7:37:cb:e2:c1:8c:fb:2c:10:c0:ff:0b:cf:0d:32:86:fc:1a:a2 ------BEGIN CERTIFICATE----- -MIIEMDCCAxigAwIBAgIQUJRs7Bjq1ZxN1ZfvdY+grTANBgkqhkiG9w0BAQUFADCB -gjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3dy54cmFtcHNlY3VyaXR5LmNvbTEk -MCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2VydmljZXMgSW5jMS0wKwYDVQQDEyRY -UmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQxMTAxMTcx -NDA0WhcNMzUwMTAxMDUzNzE5WjCBgjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3 -dy54cmFtcHNlY3VyaXR5LmNvbTEkMCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2Vy -dmljZXMgSW5jMS0wKwYDVQQDEyRYUmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBB -dXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCYJB69FbS6 -38eMpSe2OAtp87ZOqCwuIR1cRN8hXX4jdP5efrRKt6atH67gBhbim1vZZ3RrXYCP -KZ2GG9mcDZhtdhAoWORlsH9KmHmf4MMxfoArtYzAQDsRhtDLooY2YKTVMIJt2W7Q -DxIEM5dfT2Fa8OT5kavnHTu86M/0ay00fOJIYRyO82FEzG+gSqmUsE3a56k0enI4 -qEHMPJQRfevIpoy3hsvKMzvZPTeL+3o+hiznc9cKV6xkmxnr9A8ECIqsAxcZZPRa -JSKNNCyy9mgdEm3Tih4U2sSPpuIjhdV6Db1q4Ons7Be7QhtnqiXtRYMh/MHJfNVi -PvryxS3T/dRlAgMBAAGjgZ8wgZwwEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0P -BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMZPoj0GY4QJnM5i5ASs 
-jVy16bYbMDYGA1UdHwQvMC0wK6ApoCeGJWh0dHA6Ly9jcmwueHJhbXBzZWN1cml0 -eS5jb20vWEdDQS5jcmwwEAYJKwYBBAGCNxUBBAMCAQEwDQYJKoZIhvcNAQEFBQAD -ggEBAJEVOQMBG2f7Shz5CmBbodpNl2L5JFMn14JkTpAuw0kbK5rc/Kh4ZzXxHfAR -vbdI4xD2Dd8/0sm2qlWkSLoC295ZLhVbO50WfUfXN+pfTXYSNrsf16GBBEYgoyxt -qZ4Bfj8pzgCT3/3JknOJiWSe5yvkHJEs0rnOfc5vMZnT5r7SHpDwCRR5XCOrTdLa -IR9NmXmd4c8nnxCbHIgNsIpkQTG4DmyQJKSbXHGPurt+HBvbaoAPIbzp26a3QPSy -i6mx5O+aGtA9aZnuqCij4Tyz8LIRnM98QObd50N9otg6tamN8jSZxNQQ4Qb9CYQQ -O+7ETPTsJ3xCwnR8gooJybQDJbw= ------END CERTIFICATE----- - -# Issuer: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority -# Subject: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority -# Label: "Go Daddy Class 2 CA" -# Serial: 0 -# MD5 Fingerprint: 91:de:06:25:ab:da:fd:32:17:0c:bb:25:17:2a:84:67 -# SHA1 Fingerprint: 27:96:ba:e6:3f:18:01:e2:77:26:1b:a0:d7:77:70:02:8f:20:ee:e4 -# SHA256 Fingerprint: c3:84:6b:f2:4b:9e:93:ca:64:27:4c:0e:c6:7c:1e:cc:5e:02:4f:fc:ac:d2:d7:40:19:35:0e:81:fe:54:6a:e4 ------BEGIN CERTIFICATE----- -MIIEADCCAuigAwIBAgIBADANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEh -MB8GA1UEChMYVGhlIEdvIERhZGR5IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBE -YWRkeSBDbGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA0MDYyOTE3 -MDYyMFoXDTM0MDYyOTE3MDYyMFowYzELMAkGA1UEBhMCVVMxITAfBgNVBAoTGFRo -ZSBHbyBEYWRkeSBHcm91cCwgSW5jLjExMC8GA1UECxMoR28gRGFkZHkgQ2xhc3Mg -MiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASAwDQYJKoZIhvcNAQEBBQADggEN -ADCCAQgCggEBAN6d1+pXGEmhW+vXX0iG6r7d/+TvZxz0ZWizV3GgXne77ZtJ6XCA -PVYYYwhv2vLM0D9/AlQiVBDYsoHUwHU9S3/Hd8M+eKsaA7Ugay9qK7HFiH7Eux6w -wdhFJ2+qN1j3hybX2C32qRe3H3I2TqYXP2WYktsqbl2i/ojgC95/5Y0V4evLOtXi -EqITLdiOr18SPaAIBQi2XKVlOARFmR6jYGB0xUGlcmIbYsUfb18aQr4CUWWoriMY -avx4A6lNf4DD+qta/KFApMoZFv6yyO9ecw3ud72a9nmYvLEHZ6IVDd2gWMZEewo+ -YihfukEHU1jPEX44dMX4/7VpkI+EdOqXG68CAQOjgcAwgb0wHQYDVR0OBBYEFNLE -sNKR1EwRcbNhyz2h/t2oatTjMIGNBgNVHSMEgYUwgYKAFNLEsNKR1EwRcbNhyz2h -/t2oatTjoWekZTBjMQswCQYDVQQGEwJVUzEhMB8GA1UEChMYVGhlIEdvIERhZGR5 -IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBEYWRkeSBDbGFzcyAyIENlcnRpZmlj -YXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQAD -ggEBADJL87LKPpH8EsahB4yOd6AzBhRckB4Y9wimPQoZ+YeAEW5p5JYXMP80kWNy -OO7MHAGjHZQopDH2esRU1/blMVgDoszOYtuURXO1v0XJJLXVggKtI3lpjbi2Tc7P -TMozI+gciKqdi0FuFskg5YmezTvacPd+mSYgFFQlq25zheabIZ0KbIIOqPjCDPoQ -HmyW74cNxA9hi63ugyuV+I6ShHI56yDqg+2DzZduCLzrTia2cyvk0/ZM/iZx4mER -dEr/VxqHD3VILs9RaRegAhJhldXRQLIQTO7ErBBDpqWeCtWVYpoNz4iCxTIM5Cuf -ReYNnyicsbkqWletNw+vHX/bvZ8= ------END CERTIFICATE----- - -# Issuer: O=Starfield Technologies, Inc. OU=Starfield Class 2 Certification Authority -# Subject: O=Starfield Technologies, Inc. 
OU=Starfield Class 2 Certification Authority -# Label: "Starfield Class 2 CA" -# Serial: 0 -# MD5 Fingerprint: 32:4a:4b:bb:c8:63:69:9b:be:74:9a:c6:dd:1d:46:24 -# SHA1 Fingerprint: ad:7e:1c:28:b0:64:ef:8f:60:03:40:20:14:c3:d0:e3:37:0e:b5:8a -# SHA256 Fingerprint: 14:65:fa:20:53:97:b8:76:fa:a6:f0:a9:95:8e:55:90:e4:0f:cc:7f:aa:4f:b7:c2:c8:67:75:21:fb:5f:b6:58 ------BEGIN CERTIFICATE----- -MIIEDzCCAvegAwIBAgIBADANBgkqhkiG9w0BAQUFADBoMQswCQYDVQQGEwJVUzEl -MCMGA1UEChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMp -U3RhcmZpZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQw -NjI5MTczOTE2WhcNMzQwNjI5MTczOTE2WjBoMQswCQYDVQQGEwJVUzElMCMGA1UE -ChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMpU3RhcmZp -ZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggEgMA0GCSqGSIb3 -DQEBAQUAA4IBDQAwggEIAoIBAQC3Msj+6XGmBIWtDBFk385N78gDGIc/oav7PKaf -8MOh2tTYbitTkPskpD6E8J7oX+zlJ0T1KKY/e97gKvDIr1MvnsoFAZMej2YcOadN -+lq2cwQlZut3f+dZxkqZJRRU6ybH838Z1TBwj6+wRir/resp7defqgSHo9T5iaU0 -X9tDkYI22WY8sbi5gv2cOj4QyDvvBmVmepsZGD3/cVE8MC5fvj13c7JdBmzDI1aa -K4UmkhynArPkPw2vCHmCuDY96pzTNbO8acr1zJ3o/WSNF4Azbl5KXZnJHoe0nRrA -1W4TNSNe35tfPe/W93bC6j67eA0cQmdrBNj41tpvi/JEoAGrAgEDo4HFMIHCMB0G -A1UdDgQWBBS/X7fRzt0fhvRbVazc1xDCDqmI5zCBkgYDVR0jBIGKMIGHgBS/X7fR -zt0fhvRbVazc1xDCDqmI56FspGowaDELMAkGA1UEBhMCVVMxJTAjBgNVBAoTHFN0 -YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAsTKVN0YXJmaWVsZCBD -bGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8w -DQYJKoZIhvcNAQEFBQADggEBAAWdP4id0ckaVaGsafPzWdqbAYcaT1epoXkJKtv3 -L7IezMdeatiDh6GX70k1PncGQVhiv45YuApnP+yz3SFmH8lU+nLMPUxA2IGvd56D -eruix/U0F47ZEUD0/CwqTRV/p2JdLiXTAAsgGh1o+Re49L2L7ShZ3U0WixeDyLJl -xy16paq8U4Zt3VekyvggQQto8PT7dL5WXXp59fkdheMtlb71cZBDzI0fmgAKhynp -VSJYACPq4xJDKVtHCN2MQWplBqjlIapBtJUhlbl90TSrE9atvNziPTnNvT51cKEY -WQPJIrSPnNVeKtelttQKbfi3QBFGmh95DmK/D5fs4C8fF5Q= ------END CERTIFICATE----- - -# Issuer: O=Government Root Certification Authority -# Subject: O=Government Root Certification Authority -# Label: "Taiwan GRCA" -# Serial: 42023070807708724159991140556527066870 -# MD5 Fingerprint: 37:85:44:53:32:45:1f:20:f0:f3:95:e1:25:c4:43:4e -# SHA1 Fingerprint: f4:8b:11:bf:de:ab:be:94:54:20:71:e6:41:de:6b:be:88:2b:40:b9 -# SHA256 Fingerprint: 76:00:29:5e:ef:e8:5b:9e:1f:d6:24:db:76:06:2a:aa:ae:59:81:8a:54:d2:77:4c:d4:c0:b2:c0:11:31:e1:b3 ------BEGIN CERTIFICATE----- -MIIFcjCCA1qgAwIBAgIQH51ZWtcvwgZEpYAIaeNe9jANBgkqhkiG9w0BAQUFADA/ -MQswCQYDVQQGEwJUVzEwMC4GA1UECgwnR292ZXJubWVudCBSb290IENlcnRpZmlj -YXRpb24gQXV0aG9yaXR5MB4XDTAyMTIwNTEzMjMzM1oXDTMyMTIwNTEzMjMzM1ow -PzELMAkGA1UEBhMCVFcxMDAuBgNVBAoMJ0dvdmVybm1lbnQgUm9vdCBDZXJ0aWZp -Y2F0aW9uIEF1dGhvcml0eTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIB -AJoluOzMonWoe/fOW1mKydGGEghU7Jzy50b2iPN86aXfTEc2pBsBHH8eV4qNw8XR -IePaJD9IK/ufLqGU5ywck9G/GwGHU5nOp/UKIXZ3/6m3xnOUT0b3EEk3+qhZSV1q -gQdW8or5BtD3cCJNtLdBuTK4sfCxw5w/cP1T3YGq2GN49thTbqGsaoQkclSGxtKy -yhwOeYHWtXBiCAEuTk8O1RGvqa/lmr/czIdtJuTJV6L7lvnM4T9TjGxMfptTCAts -F/tnyMKtsc2AtJfcdgEWFelq16TheEfOhtX7MfP6Mb40qij7cEwdScevLJ1tZqa2 -jWR+tSBqnTuBto9AAGdLiYa4zGX+FVPpBMHWXx1E1wovJ5pGfaENda1UhhXcSTvx -ls4Pm6Dso3pdvtUqdULle96ltqqvKKyskKw4t9VoNSZ63Pc78/1Fm9G7Q3hub/FC -VGqY8A2tl+lSXunVanLeavcbYBT0peS2cWeqH+riTcFCQP5nRhc4L0c/cZyu5SHK -YS1tB6iEfC3uUSXxY5Ce/eFXiGvviiNtsea9P63RPZYLhY3Naye7twWb7LuRqQoH -EgKXTiCQ8P8NHuJBO9NAOueNXdpm5AKwB1KYXA6OM5zCppX7VRluTI6uSw+9wThN -Xo+EHWbNxWCWtFJaBYmOlXqYwZE8lSOyDvR5tMl8wUohAgMBAAGjajBoMB0GA1Ud -DgQWBBTMzO/MKWCkO7GStjz6MmKPrCUVOzAMBgNVHRMEBTADAQH/MDkGBGcqBwAE -MTAvMC0CAQAwCQYFKw4DAhoFADAHBgVnKgMAAAQUA5vwIhP/lSg209yewDL7MTqK 
-UWUwDQYJKoZIhvcNAQEFBQADggIBAECASvomyc5eMN1PhnR2WPWus4MzeKR6dBcZ -TulStbngCnRiqmjKeKBMmo4sIy7VahIkv9Ro04rQ2JyftB8M3jh+Vzj8jeJPXgyf -qzvS/3WXy6TjZwj/5cAWtUgBfen5Cv8b5Wppv3ghqMKnI6mGq3ZW6A4M9hPdKmaK -ZEk9GhiHkASfQlK3T8v+R0F2Ne//AHY2RTKbxkaFXeIksB7jSJaYV0eUVXoPQbFE -JPPB/hprv4j9wabak2BegUqZIJxIZhm1AHlUD7gsL0u8qV1bYH+Mh6XgUmMqvtg7 -hUAV/h62ZT/FS9p+tXo1KaMuephgIqP0fSdOLeq0dDzpD6QzDxARvBMB1uUO07+1 -EqLhRSPAzAhuYbeJq4PjJB7mXQfnHyA+z2fI56wwbSdLaG5LKlwCCDTb+HbkZ6Mm -nD+iMsJKxYEYMRBWqoTvLQr/uB930r+lWKBi5NdLkXWNiYCYfm3LU05er/ayl4WX -udpVBrkk7tfGOB5jGxI7leFYrPLfhNVfmS8NVVvmONsuP3LpSIXLuykTjx44Vbnz -ssQwmSNOXfJIoRIM3BKQCZBUkQM8R+XVyWXgt0t97EfTsws+rZ7QdAAO671RrcDe -LMDDav7v3Aun+kbfYNucpllQdSNpc5Oy+fwC00fmcc4QAu4njIT/rEUNE1yDMuAl -pYYsfPQS ------END CERTIFICATE----- - -# Issuer: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert Assured ID Root CA" -# Serial: 17154717934120587862167794914071425081 -# MD5 Fingerprint: 87:ce:0b:7b:2a:0e:49:00:e1:58:71:9b:37:a8:93:72 -# SHA1 Fingerprint: 05:63:b8:63:0d:62:d7:5a:bb:c8:ab:1e:4b:df:b5:a8:99:b2:4d:43 -# SHA256 Fingerprint: 3e:90:99:b5:01:5e:8f:48:6c:00:bc:ea:9d:11:1e:e7:21:fa:ba:35:5a:89:bc:f1:df:69:56:1e:3d:c6:32:5c ------BEGIN CERTIFICATE----- -MIIDtzCCAp+gAwIBAgIQDOfg5RfYRv6P5WD8G/AwOTANBgkqhkiG9w0BAQUFADBl -MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 -d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv -b3QgQ0EwHhcNMDYxMTEwMDAwMDAwWhcNMzExMTEwMDAwMDAwWjBlMQswCQYDVQQG -EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl -cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgQ0EwggEi -MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCtDhXO5EOAXLGH87dg+XESpa7c -JpSIqvTO9SA5KFhgDPiA2qkVlTJhPLWxKISKityfCgyDF3qPkKyK53lTXDGEKvYP -mDI2dsze3Tyoou9q+yHyUmHfnyDXH+Kx2f4YZNISW1/5WBg1vEfNoTb5a3/UsDg+ -wRvDjDPZ2C8Y/igPs6eD1sNuRMBhNZYW/lmci3Zt1/GiSw0r/wty2p5g0I6QNcZ4 -VYcgoc/lbQrISXwxmDNsIumH0DJaoroTghHtORedmTpyoeb6pNnVFzF1roV9Iq4/ -AUaG9ih5yLHa5FcXxH4cDrC0kqZWs72yl+2qp/C3xag/lRbQ/6GW6whfGHdPAgMB -AAGjYzBhMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW -BBRF66Kv9JLLgjEtUYunpyGd823IDzAfBgNVHSMEGDAWgBRF66Kv9JLLgjEtUYun -pyGd823IDzANBgkqhkiG9w0BAQUFAAOCAQEAog683+Lt8ONyc3pklL/3cmbYMuRC -dWKuh+vy1dneVrOfzM4UKLkNl2BcEkxY5NM9g0lFWJc1aRqoR+pWxnmrEthngYTf -fwk8lOa4JiwgvT2zKIn3X/8i4peEH+ll74fg38FnSbNd67IJKusm7Xi+fT8r87cm -NW1fiQG2SVufAQWbqz0lwcy2f8Lxb4bG+mRo64EtlOtCt/qMHt1i8b5QZ7dsvfPx -H2sMNgcWfzd8qVttevESRmCD1ycEvkvOl77DZypoEd+A5wwzZr8TDRRu838fYxAe -+o0bJW1sj6W3YQGx0qMmoRBxna3iw/nDmVG3KwcIzi7mULKn+gpFL6Lw8g== ------END CERTIFICATE----- - -# Issuer: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert Global Root CA" -# Serial: 10944719598952040374951832963794454346 -# MD5 Fingerprint: 79:e4:a9:84:0d:7d:3a:96:d7:c0:4f:e2:43:4c:89:2e -# SHA1 Fingerprint: a8:98:5d:3a:65:e5:e5:c4:b2:d7:d6:6d:40:c6:dd:2f:b1:9c:54:36 -# SHA256 Fingerprint: 43:48:a0:e9:44:4c:78:cb:26:5e:05:8d:5e:89:44:b4:d8:4f:96:62:bd:26:db:25:7f:89:34:a4:43:c7:01:61 ------BEGIN CERTIFICATE----- -MIIDrzCCApegAwIBAgIQCDvgVpBCRrGhdWrJWZHHSjANBgkqhkiG9w0BAQUFADBh -MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 -d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBD -QTAeFw0wNjExMTAwMDAwMDBaFw0zMTExMTAwMDAwMDBaMGExCzAJBgNVBAYTAlVT -MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j 
-b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IENBMIIBIjANBgkqhkiG -9w0BAQEFAAOCAQ8AMIIBCgKCAQEA4jvhEXLeqKTTo1eqUKKPC3eQyaKl7hLOllsB -CSDMAZOnTjC3U/dDxGkAV53ijSLdhwZAAIEJzs4bg7/fzTtxRuLWZscFs3YnFo97 -nh6Vfe63SKMI2tavegw5BmV/Sl0fvBf4q77uKNd0f3p4mVmFaG5cIzJLv07A6Fpt -43C/dxC//AH2hdmoRBBYMql1GNXRor5H4idq9Joz+EkIYIvUX7Q6hL+hqkpMfT7P -T19sdl6gSzeRntwi5m3OFBqOasv+zbMUZBfHWymeMr/y7vrTC0LUq7dBMtoM1O/4 -gdW7jVg/tRvoSSiicNoxBN33shbyTApOB6jtSj1etX+jkMOvJwIDAQABo2MwYTAO -BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUA95QNVbR -TLtm8KPiGxvDl7I90VUwHwYDVR0jBBgwFoAUA95QNVbRTLtm8KPiGxvDl7I90VUw -DQYJKoZIhvcNAQEFBQADggEBAMucN6pIExIK+t1EnE9SsPTfrgT1eXkIoyQY/Esr -hMAtudXH/vTBH1jLuG2cenTnmCmrEbXjcKChzUyImZOMkXDiqw8cvpOp/2PV5Adg -06O/nVsJ8dWO41P0jmP6P6fbtGbfYmbW0W5BjfIttep3Sp+dWOIrWcBAI+0tKIJF -PnlUkiaY4IBIqDfv8NZ5YBberOgOzW6sRBc4L0na4UU+Krk2U886UAb3LujEV0ls -YSEY1QSteDwsOoBrp+uvFRTp2InBuThs4pFsiv9kuXclVzDAGySj4dzp30d8tbQk -CAUw7C29C79Fv1C5qfPrmAESrciIxpg0X40KPMbp1ZWVbd4= ------END CERTIFICATE----- - -# Issuer: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert High Assurance EV Root CA" -# Serial: 3553400076410547919724730734378100087 -# MD5 Fingerprint: d4:74:de:57:5c:39:b2:d3:9c:85:83:c5:c0:65:49:8a -# SHA1 Fingerprint: 5f:b7:ee:06:33:e2:59:db:ad:0c:4c:9a:e6:d3:8f:1a:61:c7:dc:25 -# SHA256 Fingerprint: 74:31:e5:f4:c3:c1:ce:46:90:77:4f:0b:61:e0:54:40:88:3b:a9:a0:1e:d0:0b:a6:ab:d7:80:6e:d3:b1:18:cf ------BEGIN CERTIFICATE----- -MIIDxTCCAq2gAwIBAgIQAqxcJmoLQJuPC3nyrkYldzANBgkqhkiG9w0BAQUFADBs -MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 -d3cuZGlnaWNlcnQuY29tMSswKQYDVQQDEyJEaWdpQ2VydCBIaWdoIEFzc3VyYW5j -ZSBFViBSb290IENBMB4XDTA2MTExMDAwMDAwMFoXDTMxMTExMDAwMDAwMFowbDEL -MAkGA1UEBhMCVVMxFTATBgNVBAoTDERpZ2lDZXJ0IEluYzEZMBcGA1UECxMQd3d3 -LmRpZ2ljZXJ0LmNvbTErMCkGA1UEAxMiRGlnaUNlcnQgSGlnaCBBc3N1cmFuY2Ug -RVYgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMbM5XPm -+9S75S0tMqbf5YE/yc0lSbZxKsPVlDRnogocsF9ppkCxxLeyj9CYpKlBWTrT3JTW -PNt0OKRKzE0lgvdKpVMSOO7zSW1xkX5jtqumX8OkhPhPYlG++MXs2ziS4wblCJEM -xChBVfvLWokVfnHoNb9Ncgk9vjo4UFt3MRuNs8ckRZqnrG0AFFoEt7oT61EKmEFB -Ik5lYYeBQVCmeVyJ3hlKV9Uu5l0cUyx+mM0aBhakaHPQNAQTXKFx01p8VdteZOE3 -hzBWBOURtCmAEvF5OYiiAhF8J2a3iLd48soKqDirCmTCv2ZdlYTBoSUeh10aUAsg -EsxBu24LUTi4S8sCAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQF -MAMBAf8wHQYDVR0OBBYEFLE+w2kD+L9HAdSYJhoIAu9jZCvDMB8GA1UdIwQYMBaA -FLE+w2kD+L9HAdSYJhoIAu9jZCvDMA0GCSqGSIb3DQEBBQUAA4IBAQAcGgaX3Nec -nzyIZgYIVyHbIUf4KmeqvxgydkAQV8GK83rZEWWONfqe/EW1ntlMMUu4kehDLI6z -eM7b41N5cdblIZQB2lWHmiRk9opmzN6cN82oNLFpmyPInngiK3BD41VHMWEZ71jF -hS9OMPagMRYjyOfiZRYzy78aG6A9+MpeizGLYAiJLQwGXFK3xPkKmNEVX58Svnw2 -Yzi9RKR/5CYrCsSXaQ3pjOLAEFe4yHYSkVXySGnYvCoCWw9E1CAx2/S6cCZdkGCe -vEsXCS+0yx5DaMkHJ8HSXPfqIbloEpw8nL+e/IBcm2PN7EeqJSdnoDfzAIJ9VNep -+OkuE6N36B9K ------END CERTIFICATE----- - -# Issuer: CN=Class 2 Primary CA O=Certplus -# Subject: CN=Class 2 Primary CA O=Certplus -# Label: "Certplus Class 2 Primary CA" -# Serial: 177770208045934040241468760488327595043 -# MD5 Fingerprint: 88:2c:8c:52:b8:a2:3c:f3:f7:bb:03:ea:ae:ac:42:0b -# SHA1 Fingerprint: 74:20:74:41:72:9c:dd:92:ec:79:31:d8:23:10:8d:c2:81:92:e2:bb -# SHA256 Fingerprint: 0f:99:3c:8a:ef:97:ba:af:56:87:14:0e:d5:9a:d1:82:1b:b4:af:ac:f0:aa:9a:58:b5:d5:7a:33:8a:3a:fb:cb ------BEGIN CERTIFICATE----- -MIIDkjCCAnqgAwIBAgIRAIW9S/PY2uNp9pTXX8OlRCMwDQYJKoZIhvcNAQEFBQAw -PTELMAkGA1UEBhMCRlIxETAPBgNVBAoTCENlcnRwbHVzMRswGQYDVQQDExJDbGFz 
-cyAyIFByaW1hcnkgQ0EwHhcNOTkwNzA3MTcwNTAwWhcNMTkwNzA2MjM1OTU5WjA9 -MQswCQYDVQQGEwJGUjERMA8GA1UEChMIQ2VydHBsdXMxGzAZBgNVBAMTEkNsYXNz -IDIgUHJpbWFyeSBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANxQ -ltAS+DXSCHh6tlJw/W/uz7kRy1134ezpfgSN1sxvc0NXYKwzCkTsA18cgCSR5aiR -VhKC9+Ar9NuuYS6JEI1rbLqzAr3VNsVINyPi8Fo3UjMXEuLRYE2+L0ER4/YXJQyL -kcAbmXuZVg2v7tK8R1fjeUl7NIknJITesezpWE7+Tt9avkGtrAjFGA7v0lPubNCd -EgETjdyAYveVqUSISnFOYFWe2yMZeVYHDD9jC1yw4r5+FfyUM1hBOHTE4Y+L3yas -H7WLO7dDWWuwJKZtkIvEcupdM5i3y95ee++U8Rs+yskhwcWYAqqi9lt3m/V+llU0 -HGdpwPFC40es/CgcZlUCAwEAAaOBjDCBiTAPBgNVHRMECDAGAQH/AgEKMAsGA1Ud -DwQEAwIBBjAdBgNVHQ4EFgQU43Mt38sOKAze3bOkynm4jrvoMIkwEQYJYIZIAYb4 -QgEBBAQDAgEGMDcGA1UdHwQwMC4wLKAqoCiGJmh0dHA6Ly93d3cuY2VydHBsdXMu -Y29tL0NSTC9jbGFzczIuY3JsMA0GCSqGSIb3DQEBBQUAA4IBAQCnVM+IRBnL39R/ -AN9WM2K191EBkOvDP9GIROkkXe/nFL0gt5o8AP5tn9uQ3Nf0YtaLcF3n5QRIqWh8 -yfFC82x/xXp8HVGIutIKPidd3i1RTtMTZGnkLuPT55sJmabglZvOGtd/vjzOUrMR -FcEPF80Du5wlFbqidon8BvEY0JNLDnyCt6X09l/+7UCmnYR0ObncHoUW2ikbhiMA -ybuJfm6AiB4vFLQDJKgybwOaRywwvlbGp0ICcBvqQNi6BQNwB6SW//1IMwrh3KWB -kJtN3X3n57LNXMhqlfil9o3EXXgIvnsG1knPGTZQIy4I5p4FTUcY1Rbpsda2ENW7 -l7+ijrRU ------END CERTIFICATE----- - -# Issuer: CN=DST Root CA X3 O=Digital Signature Trust Co. -# Subject: CN=DST Root CA X3 O=Digital Signature Trust Co. -# Label: "DST Root CA X3" -# Serial: 91299735575339953335919266965803778155 -# MD5 Fingerprint: 41:03:52:dc:0f:f7:50:1b:16:f0:02:8e:ba:6f:45:c5 -# SHA1 Fingerprint: da:c9:02:4f:54:d8:f6:df:94:93:5f:b1:73:26:38:ca:6a:d7:7c:13 -# SHA256 Fingerprint: 06:87:26:03:31:a7:24:03:d9:09:f1:05:e6:9b:cf:0d:32:e1:bd:24:93:ff:c6:d9:20:6d:11:bc:d6:77:07:39 ------BEGIN CERTIFICATE----- -MIIDSjCCAjKgAwIBAgIQRK+wgNajJ7qJMDmGLvhAazANBgkqhkiG9w0BAQUFADA/ -MSQwIgYDVQQKExtEaWdpdGFsIFNpZ25hdHVyZSBUcnVzdCBDby4xFzAVBgNVBAMT -DkRTVCBSb290IENBIFgzMB4XDTAwMDkzMDIxMTIxOVoXDTIxMDkzMDE0MDExNVow -PzEkMCIGA1UEChMbRGlnaXRhbCBTaWduYXR1cmUgVHJ1c3QgQ28uMRcwFQYDVQQD -Ew5EU1QgUm9vdCBDQSBYMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEB -AN+v6ZdQCINXtMxiZfaQguzH0yxrMMpb7NnDfcdAwRgUi+DoM3ZJKuM/IUmTrE4O -rz5Iy2Xu/NMhD2XSKtkyj4zl93ewEnu1lcCJo6m67XMuegwGMoOifooUMM0RoOEq -OLl5CjH9UL2AZd+3UWODyOKIYepLYYHsUmu5ouJLGiifSKOeDNoJjj4XLh7dIN9b -xiqKqy69cK3FCxolkHRyxXtqqzTWMIn/5WgTe1QLyNau7Fqckh49ZLOMxt+/yUFw -7BZy1SbsOFU5Q9D8/RhcQPGX69Wam40dutolucbY38EVAjqr2m7xPi71XAicPNaD -aeQQmxkqtilX4+U9m5/wAl0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNV -HQ8BAf8EBAMCAQYwHQYDVR0OBBYEFMSnsaR7LHH62+FLkHX/xBVghYkQMA0GCSqG -SIb3DQEBBQUAA4IBAQCjGiybFwBcqR7uKGY3Or+Dxz9LwwmglSBd49lZRNI+DT69 -ikugdB/OEIKcdBodfpga3csTS7MgROSR6cz8faXbauX+5v3gTt23ADq1cEmv8uXr -AvHRAosZy5Q6XkjEGB5YGV8eAlrwDPGxrancWYaLbumR9YbK+rlmM6pZW87ipxZz -R8srzJmwN0jP41ZL9c8PDHIyh8bwRLtTcm1D9SZImlJnt1ir/md2cXjbDaJWFBM5 -JDGFoqgCWjBH4d1QB7wCCZAA62RjYJsWvIjJEubSfZGL+T0yjWW06XyxV3bqxbYo -Ob8VZRzI9neWagqNdwvYkQsEjgfbKbYK7p2CNTUQ ------END CERTIFICATE----- - -# Issuer: CN=SwissSign Gold CA - G2 O=SwissSign AG -# Subject: CN=SwissSign Gold CA - G2 O=SwissSign AG -# Label: "SwissSign Gold CA - G2" -# Serial: 13492815561806991280 -# MD5 Fingerprint: 24:77:d9:a8:91:d1:3b:fa:88:2d:c2:ff:f8:cd:33:93 -# SHA1 Fingerprint: d8:c5:38:8a:b7:30:1b:1b:6e:d4:7a:e6:45:25:3a:6f:9f:1a:27:61 -# SHA256 Fingerprint: 62:dd:0b:e9:b9:f5:0a:16:3e:a0:f8:e7:5c:05:3b:1e:ca:57:ea:55:c8:68:8f:64:7c:68:81:f2:c8:35:7b:95 ------BEGIN CERTIFICATE----- -MIIFujCCA6KgAwIBAgIJALtAHEP1Xk+wMA0GCSqGSIb3DQEBBQUAMEUxCzAJBgNV -BAYTAkNIMRUwEwYDVQQKEwxTd2lzc1NpZ24gQUcxHzAdBgNVBAMTFlN3aXNzU2ln -biBHb2xkIENBIC0gRzIwHhcNMDYxMDI1MDgzMDM1WhcNMzYxMDI1MDgzMDM1WjBF 
-MQswCQYDVQQGEwJDSDEVMBMGA1UEChMMU3dpc3NTaWduIEFHMR8wHQYDVQQDExZT -d2lzc1NpZ24gR29sZCBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC -CgKCAgEAr+TufoskDhJuqVAtFkQ7kpJcyrhdhJJCEyq8ZVeCQD5XJM1QiyUqt2/8 -76LQwB8CJEoTlo8jE+YoWACjR8cGp4QjK7u9lit/VcyLwVcfDmJlD909Vopz2q5+ -bbqBHH5CjCA12UNNhPqE21Is8w4ndwtrvxEvcnifLtg+5hg3Wipy+dpikJKVyh+c -6bM8K8vzARO/Ws/BtQpgvd21mWRTuKCWs2/iJneRjOBiEAKfNA+k1ZIzUd6+jbqE -emA8atufK+ze3gE/bk3lUIbLtK/tREDFylqM2tIrfKjuvqblCqoOpd8FUrdVxyJd -MmqXl2MT28nbeTZ7hTpKxVKJ+STnnXepgv9VHKVxaSvRAiTysybUa9oEVeXBCsdt -MDeQKuSeFDNeFhdVxVu1yzSJkvGdJo+hB9TGsnhQ2wwMC3wLjEHXuendjIj3o02y -MszYF9rNt85mndT9Xv+9lz4pded+p2JYryU0pUHHPbwNUMoDAw8IWh+Vc3hiv69y -FGkOpeUDDniOJihC8AcLYiAQZzlG+qkDzAQ4embvIIO1jEpWjpEA/I5cgt6IoMPi -aG59je883WX0XaxR7ySArqpWl2/5rX3aYT+YdzylkbYcjCbaZaIJbcHiVOO5ykxM -gI93e2CaHt+28kgeDrpOVG2Y4OGiGqJ3UM/EY5LsRxmd6+ZrzsECAwEAAaOBrDCB -qTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUWyV7 -lqRlUX64OfPAeGZe6Drn8O4wHwYDVR0jBBgwFoAUWyV7lqRlUX64OfPAeGZe6Drn -8O4wRgYDVR0gBD8wPTA7BglghXQBWQECAQEwLjAsBggrBgEFBQcCARYgaHR0cDov -L3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIBACe6 -45R88a7A3hfm5djV9VSwg/S7zV4Fe0+fdWavPOhWfvxyeDgD2StiGwC5+OlgzczO -UYrHUDFu4Up+GC9pWbY9ZIEr44OE5iKHjn3g7gKZYbge9LgriBIWhMIxkziWMaa5 -O1M/wySTVltpkuzFwbs4AOPsF6m43Md8AYOfMke6UiI0HTJ6CVanfCU2qT1L2sCC -bwq7EsiHSycR+R4tx5M/nttfJmtS2S6K8RTGRI0Vqbe/vd6mGu6uLftIdxf+u+yv -GPUqUfA5hJeVbG4bwyvEdGB5JbAKJ9/fXtI5z0V9QkvfsywexcZdylU6oJxpmo/a -77KwPJ+HbBIrZXAVUjEaJM9vMSNQH4xPjyPDdEFjHFWoFN0+4FFQz/EbMFYOkrCC -hdiDyyJkvC24JdVUorgG6q2SpCSgwYa1ShNqR88uC1aVVMvOmttqtKay20EIhid3 -92qgQmwLOM7XdVAyksLfKzAiSNDVQTglXaTpXZ/GlHXQRf0wl0OPkKsKx4ZzYEpp -Ld6leNcG2mqeSz53OiATIgHQv2ieY2BrNU0LbbqhPcCT4H8js1WtciVORvnSFu+w -ZMEBnunKoGqYDs/YYPIvSbjkQuE4NRb0yG5P94FW6LqjviOvrv1vA+ACOzB2+htt -Qc8Bsem4yWb02ybzOqR08kkkW8mw0FfB+j564ZfJ ------END CERTIFICATE----- - -# Issuer: CN=SwissSign Silver CA - G2 O=SwissSign AG -# Subject: CN=SwissSign Silver CA - G2 O=SwissSign AG -# Label: "SwissSign Silver CA - G2" -# Serial: 5700383053117599563 -# MD5 Fingerprint: e0:06:a1:c9:7d:cf:c9:fc:0d:c0:56:75:96:d8:62:13 -# SHA1 Fingerprint: 9b:aa:e5:9f:56:ee:21:cb:43:5a:be:25:93:df:a7:f0:40:d1:1d:cb -# SHA256 Fingerprint: be:6c:4d:a2:bb:b9:ba:59:b6:f3:93:97:68:37:42:46:c3:c0:05:99:3f:a9:8f:02:0d:1d:ed:be:d4:8a:81:d5 ------BEGIN CERTIFICATE----- -MIIFvTCCA6WgAwIBAgIITxvUL1S7L0swDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UE -BhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMYU3dpc3NTaWdu -IFNpbHZlciBDQSAtIEcyMB4XDTA2MTAyNTA4MzI0NloXDTM2MTAyNTA4MzI0Nlow -RzELMAkGA1UEBhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMY -U3dpc3NTaWduIFNpbHZlciBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8A -MIICCgKCAgEAxPGHf9N4Mfc4yfjDmUO8x/e8N+dOcbpLj6VzHVxumK4DV644N0Mv -Fz0fyM5oEMF4rhkDKxD6LHmD9ui5aLlV8gREpzn5/ASLHvGiTSf5YXu6t+WiE7br -YT7QbNHm+/pe7R20nqA1W6GSy/BJkv6FCgU+5tkL4k+73JU3/JHpMjUi0R86TieF -nbAVlDLaYQ1HTWBCrpJH6INaUFjpiou5XaHc3ZlKHzZnu0jkg7Y360g6rw9njxcH -6ATK72oxh9TAtvmUcXtnZLi2kUpCe2UuMGoM9ZDulebyzYLs2aFK7PayS+VFheZt -eJMELpyCbTapxDFkH4aDCyr0NQp4yVXPQbBH6TCfmb5hqAaEuSh6XzjZG6k4sIN/ -c8HDO0gqgg8hm7jMqDXDhBuDsz6+pJVpATqJAHgE2cn0mRmrVn5bi4Y5FZGkECwJ -MoBgs5PAKrYYC51+jUnyEEp/+dVGLxmSo5mnJqy7jDzmDrxHB9xzUfFwZC8I+bRH -HTBsROopN4WSaGa8gzj+ezku01DwH/teYLappvonQfGbGHLy9YR0SslnxFSuSGTf -jNFusB3hB48IHpmccelM2KX3RxIfdNFRnobzwqIjQAtz20um53MGjMGg6cFZrEb6 -5i/4z3GcRm25xBWNOHkDRUjvxF3XCO6HOSKGsg0PWEP3calILv3q1h8CAwEAAaOB -rDCBqTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU -F6DNweRBtjpbO8tFnb0cwpj6hlgwHwYDVR0jBBgwFoAUF6DNweRBtjpbO8tFnb0c 
-wpj6hlgwRgYDVR0gBD8wPTA7BglghXQBWQEDAQEwLjAsBggrBgEFBQcCARYgaHR0 -cDovL3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIB -AHPGgeAn0i0P4JUw4ppBf1AsX19iYamGamkYDHRJ1l2E6kFSGG9YrVBWIGrGvShp -WJHckRE1qTodvBqlYJ7YH39FkWnZfrt4csEGDyrOj4VwYaygzQu4OSlWhDJOhrs9 -xCrZ1x9y7v5RoSJBsXECYxqCsGKrXlcSH9/L3XWgwF15kIwb4FDm3jH+mHtwX6WQ -2K34ArZv02DdQEsixT2tOnqfGhpHkXkzuoLcMmkDlm4fS/Bx/uNncqCxv1yL5PqZ -IseEuRuNI5c/7SXgz2W79WEE790eslpBIlqhn10s6FvJbakMDHiqYMZWjwFaDGi8 -aRl5xB9+lwW/xekkUV7U1UtT7dkjWjYDZaPBA61BMPNGG4WQr2W11bHkFlt4dR2X -em1ZqSqPe97Dh4kQmUlzeMg9vVE1dCrV8X5pGyq7O70luJpaPXJhkGaH7gzWTdQR -dAtq/gsD/KNVV4n+SsuuWxcFyPKNIzFTONItaj+CuY0IavdeQXRuwxF+B6wpYJE/ -OMpXEA29MC/HpeZBoNquBYeaoKRlbEwJDIm6uNO5wJOKMPqN5ZprFQFOZ6raYlY+ -hAhm0sQ2fac+EPyI4NSA5QC9qvNOBqN6avlicuMJT+ubDgEj8Z+7fNzcbBGXJbLy -tGMU0gYqZ4yD9c7qB9iaah7s5Aq7KkzrCWA5zspi2C5u ------END CERTIFICATE----- - -# Issuer: CN=GeoTrust Primary Certification Authority O=GeoTrust Inc. -# Subject: CN=GeoTrust Primary Certification Authority O=GeoTrust Inc. -# Label: "GeoTrust Primary Certification Authority" -# Serial: 32798226551256963324313806436981982369 -# MD5 Fingerprint: 02:26:c3:01:5e:08:30:37:43:a9:d0:7d:cf:37:e6:bf -# SHA1 Fingerprint: 32:3c:11:8e:1b:f7:b8:b6:52:54:e2:e2:10:0d:d6:02:90:37:f0:96 -# SHA256 Fingerprint: 37:d5:10:06:c5:12:ea:ab:62:64:21:f1:ec:8c:92:01:3f:c5:f8:2a:e9:8e:e5:33:eb:46:19:b8:de:b4:d0:6c ------BEGIN CERTIFICATE----- -MIIDfDCCAmSgAwIBAgIQGKy1av1pthU6Y2yv2vrEoTANBgkqhkiG9w0BAQUFADBY -MQswCQYDVQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjExMC8GA1UEAxMo -R2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEx -MjcwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMFgxCzAJBgNVBAYTAlVTMRYwFAYDVQQK -Ew1HZW9UcnVzdCBJbmMuMTEwLwYDVQQDEyhHZW9UcnVzdCBQcmltYXJ5IENlcnRp -ZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC -AQEAvrgVe//UfH1nrYNke8hCUy3f9oQIIGHWAVlqnEQRr+92/ZV+zmEwu3qDXwK9 -AWbK7hWNb6EwnL2hhZ6UOvNWiAAxz9juapYC2e0DjPt1befquFUWBRaa9OBesYjA -ZIVcFU2Ix7e64HXprQU9nceJSOC7KMgD4TCTZF5SwFlwIjVXiIrxlQqD17wxcwE0 -7e9GceBrAqg1cmuXm2bgyxx5X9gaBGgeRwLmnWDiNpcB3841kt++Z8dtd1k7j53W -kBWUvEI0EME5+bEnPn7WinXFsq+W06Lem+SYvn3h6YGttm/81w7a4DSwDRp35+MI -mO9Y+pyEtzavwt+s0vQQBnBxNQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4G -A1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQULNVQQZcVi/CPNmFbSvtr2ZnJM5IwDQYJ -KoZIhvcNAQEFBQADggEBAFpwfyzdtzRP9YZRqSa+S7iq8XEN3GHHoOo0Hnp3DwQ1 -6CePbJC/kRYkRj5KTs4rFtULUh38H2eiAkUxT87z+gOneZ1TatnaYzr4gNfTmeGl -4b7UVXGYNTq+k+qurUKykG/g/CFNNWMziUnWm07Kx+dOCQD32sfvmWKZd7aVIl6K -oKv0uHiYyjgZmclynnjNS6yvGaBzEi38wkG6gZHaFloxt/m0cYASSJlyc1pZU8Fj -UjPtp8nSOQJw+uCxQmYpqptR7TBUIhRf2asdweSU8Pj1K/fqynhG1riR/aYNKxoU -AT6A8EKglQdebc3MS6RFjasS6LPeWuWgfOgPIh1a6Vk= ------END CERTIFICATE----- - -# Issuer: CN=thawte Primary Root CA O=thawte, Inc. OU=Certification Services Division/(c) 2006 thawte, Inc. - For authorized use only -# Subject: CN=thawte Primary Root CA O=thawte, Inc. OU=Certification Services Division/(c) 2006 thawte, Inc. 
- For authorized use only -# Label: "thawte Primary Root CA" -# Serial: 69529181992039203566298953787712940909 -# MD5 Fingerprint: 8c:ca:dc:0b:22:ce:f5:be:72:ac:41:1a:11:a8:d8:12 -# SHA1 Fingerprint: 91:c6:d6:ee:3e:8a:c8:63:84:e5:48:c2:99:29:5c:75:6c:81:7b:81 -# SHA256 Fingerprint: 8d:72:2f:81:a9:c1:13:c0:79:1d:f1:36:a2:96:6d:b2:6c:95:0a:97:1d:b4:6b:41:99:f4:ea:54:b7:8b:fb:9f ------BEGIN CERTIFICATE----- -MIIEIDCCAwigAwIBAgIQNE7VVyDV7exJ9C/ON9srbTANBgkqhkiG9w0BAQUFADCB -qTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMf -Q2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIw -MDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxHzAdBgNV -BAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwHhcNMDYxMTE3MDAwMDAwWhcNMzYw -NzE2MjM1OTU5WjCBqTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5j -LjEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYG -A1UECxMvKGMpIDIwMDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNl -IG9ubHkxHzAdBgNVBAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwggEiMA0GCSqG -SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCsoPD7gFnUnMekz52hWXMJEEUMDSxuaPFs -W0hoSVk3/AszGcJ3f8wQLZU0HObrTQmnHNK4yZc2AreJ1CRfBsDMRJSUjQJib+ta -3RGNKJpchJAQeg29dGYvajig4tVUROsdB58Hum/u6f1OCyn1PoSgAfGcq/gcfomk -6KHYcWUNo1F77rzSImANuVud37r8UVsLr5iy6S7pBOhih94ryNdOwUxkHt3Ph1i6 -Sk/KaAcdHJ1KxtUvkcx8cXIcxcBn6zL9yZJclNqFwJu/U30rCfSMnZEfl2pSy94J -NqR32HuHUETVPm4pafs5SSYeCaWAe0At6+gnhcn+Yf1+5nyXHdWdAgMBAAGjQjBA -MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBR7W0XP -r87Lev0xkhpqtvNG61dIUDANBgkqhkiG9w0BAQUFAAOCAQEAeRHAS7ORtvzw6WfU -DW5FvlXok9LOAz/t2iWwHVfLHjp2oEzsUHboZHIMpKnxuIvW1oeEuzLlQRHAd9mz -YJ3rG9XRbkREqaYB7FViHXe4XI5ISXycO1cRrK1zN44veFyQaEfZYGDm/Ac9IiAX -xPcW6cTYcvnIc3zfFi8VqT79aie2oetaupgf1eNNZAqdE8hhuvU5HIe6uL17In/2 -/qxAeeWsEG89jxt5dovEN7MhGITlNgDrYyCZuen+MwS7QcjBAvlEYyCegc5C09Y/ -LHbTY5xZ3Y+m4Q6gLkH3LpVHz7z9M/P2C2F+fpErgUfCJzDupxBdN49cOSvkBPB7 -jVaMaA== ------END CERTIFICATE----- - -# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G5 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2006 VeriSign, Inc. - For authorized use only -# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G5 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2006 VeriSign, Inc. 
- For authorized use only -# Label: "VeriSign Class 3 Public Primary Certification Authority - G5" -# Serial: 33037644167568058970164719475676101450 -# MD5 Fingerprint: cb:17:e4:31:67:3e:e2:09:fe:45:57:93:f3:0a:fa:1c -# SHA1 Fingerprint: 4e:b6:d5:78:49:9b:1c:cf:5f:58:1e:ad:56:be:3d:9b:67:44:a5:e5 -# SHA256 Fingerprint: 9a:cf:ab:7e:43:c8:d8:80:d0:6b:26:2a:94:de:ee:e4:b4:65:99:89:c3:d0:ca:f1:9b:af:64:05:e4:1a:b7:df ------BEGIN CERTIFICATE----- -MIIE0zCCA7ugAwIBAgIQGNrRniZ96LtKIVjNzGs7SjANBgkqhkiG9w0BAQUFADCB -yjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQL -ExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJp -U2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxW -ZXJpU2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0 -aG9yaXR5IC0gRzUwHhcNMDYxMTA4MDAwMDAwWhcNMzYwNzE2MjM1OTU5WjCByjEL -MAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZW -ZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJpU2ln -biwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJp -U2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9y -aXR5IC0gRzUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvJAgIKXo1 -nmAMqudLO07cfLw8RRy7K+D+KQL5VwijZIUVJ/XxrcgxiV0i6CqqpkKzj/i5Vbex -t0uz/o9+B1fs70PbZmIVYc9gDaTY3vjgw2IIPVQT60nKWVSFJuUrjxuf6/WhkcIz -SdhDY2pSS9KP6HBRTdGJaXvHcPaz3BJ023tdS1bTlr8Vd6Gw9KIl8q8ckmcY5fQG -BO+QueQA5N06tRn/Arr0PO7gi+s3i+z016zy9vA9r911kTMZHRxAy3QkGSGT2RT+ -rCpSx4/VBEnkjWNHiDxpg8v+R70rfk/Fla4OndTRQ8Bnc+MUCH7lP59zuDMKz10/ -NIeWiu5T6CUVAgMBAAGjgbIwga8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8E -BAMCAQYwbQYIKwYBBQUHAQwEYTBfoV2gWzBZMFcwVRYJaW1hZ2UvZ2lmMCEwHzAH -BgUrDgMCGgQUj+XTGoasjY5rw8+AatRIGCx7GS4wJRYjaHR0cDovL2xvZ28udmVy -aXNpZ24uY29tL3ZzbG9nby5naWYwHQYDVR0OBBYEFH/TZafC3ey78DAJ80M5+gKv -MzEzMA0GCSqGSIb3DQEBBQUAA4IBAQCTJEowX2LP2BqYLz3q3JktvXf2pXkiOOzE -p6B4Eq1iDkVwZMXnl2YtmAl+X6/WzChl8gGqCBpH3vn5fJJaCGkgDdk+bW48DW7Y -5gaRQBi5+MHt39tBquCWIMnNZBU4gcmU7qKEKQsTb47bDN0lAtukixlE0kF6BWlK -WE9gyn6CagsCqiUXObXbf+eEZSqVir2G3l6BFoMtEMze/aiCKm0oHw0LxOXnGiYZ -4fQRbxC1lfznQgUy286dUV4otp6F01vvpX1FQHKOtw5rDgb7MzVIcbidJ4vEZV8N -hnacRHr2lVz2XTIIM6RUthg/aFzyQkqFOFSDX9HoLPKsEdao7WNq ------END CERTIFICATE----- - -# Issuer: CN=SecureTrust CA O=SecureTrust Corporation -# Subject: CN=SecureTrust CA O=SecureTrust Corporation -# Label: "SecureTrust CA" -# Serial: 17199774589125277788362757014266862032 -# MD5 Fingerprint: dc:32:c3:a7:6d:25:57:c7:68:09:9d:ea:2d:a9:a2:d1 -# SHA1 Fingerprint: 87:82:c6:c3:04:35:3b:cf:d2:96:92:d2:59:3e:7d:44:d9:34:ff:11 -# SHA256 Fingerprint: f1:c1:b5:0a:e5:a2:0d:d8:03:0e:c9:f6:bc:24:82:3d:d3:67:b5:25:57:59:b4:e7:1b:61:fc:e9:f7:37:5d:73 ------BEGIN CERTIFICATE----- -MIIDuDCCAqCgAwIBAgIQDPCOXAgWpa1Cf/DrJxhZ0DANBgkqhkiG9w0BAQUFADBI -MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x -FzAVBgNVBAMTDlNlY3VyZVRydXN0IENBMB4XDTA2MTEwNzE5MzExOFoXDTI5MTIz -MTE5NDA1NVowSDELMAkGA1UEBhMCVVMxIDAeBgNVBAoTF1NlY3VyZVRydXN0IENv -cnBvcmF0aW9uMRcwFQYDVQQDEw5TZWN1cmVUcnVzdCBDQTCCASIwDQYJKoZIhvcN -AQEBBQADggEPADCCAQoCggEBAKukgeWVzfX2FI7CT8rU4niVWJxB4Q2ZQCQXOZEz -Zum+4YOvYlyJ0fwkW2Gz4BERQRwdbvC4u/jep4G6pkjGnx29vo6pQT64lO0pGtSO -0gMdA+9tDWccV9cGrcrI9f4Or2YlSASWC12juhbDCE/RRvgUXPLIXgGZbf2IzIao -wW8xQmxSPmjL8xk037uHGFaAJsTQ3MBv396gwpEWoGQRS0S8Hvbn+mPeZqx2pHGj -7DaUaHp3pLHnDi+BeuK1cobvomuL8A/b01k/unK8RCSc43Oz969XL0Imnal0ugBS -8kvNU3xHCzaFDmapCJcWNFfBZveA4+1wVMeT4C4oFVmHursCAwEAAaOBnTCBmjAT -BgkrBgEEAYI3FAIEBh4EAEMAQTALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB -/zAdBgNVHQ4EFgQUQjK2FvoE/f5dS3rD/fdMQB1aQ68wNAYDVR0fBC0wKzApoCeg 
-JYYjaHR0cDovL2NybC5zZWN1cmV0cnVzdC5jb20vU1RDQS5jcmwwEAYJKwYBBAGC -NxUBBAMCAQAwDQYJKoZIhvcNAQEFBQADggEBADDtT0rhWDpSclu1pqNlGKa7UTt3 -6Z3q059c4EVlew3KW+JwULKUBRSuSceNQQcSc5R+DCMh/bwQf2AQWnL1mA6s7Ll/ -3XpvXdMc9P+IBWlCqQVxyLesJugutIxq/3HcuLHfmbx8IVQr5Fiiu1cprp6poxkm -D5kuCLDv/WnPmRoJjeOnnyvJNjR7JLN4TJUXpAYmHrZkUjZfYGfZnMUFdAvnZyPS -CPyI6a6Lf+Ew9Dd+/cYy2i2eRDAwbO4H3tI0/NL/QPZL9GZGBlSm8jIKYyYwa5vR -3ItHuuG51WLQoqD0ZwV4KWMabwTW+MZMo5qxN7SN5ShLHZ4swrhovO0C7jE= ------END CERTIFICATE----- - -# Issuer: CN=Secure Global CA O=SecureTrust Corporation -# Subject: CN=Secure Global CA O=SecureTrust Corporation -# Label: "Secure Global CA" -# Serial: 9751836167731051554232119481456978597 -# MD5 Fingerprint: cf:f4:27:0d:d4:ed:dc:65:16:49:6d:3d:da:bf:6e:de -# SHA1 Fingerprint: 3a:44:73:5a:e5:81:90:1f:24:86:61:46:1e:3b:9c:c4:5f:f5:3a:1b -# SHA256 Fingerprint: 42:00:f5:04:3a:c8:59:0e:bb:52:7d:20:9e:d1:50:30:29:fb:cb:d4:1c:a1:b5:06:ec:27:f1:5a:de:7d:ac:69 ------BEGIN CERTIFICATE----- -MIIDvDCCAqSgAwIBAgIQB1YipOjUiolN9BPI8PjqpTANBgkqhkiG9w0BAQUFADBK -MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x -GTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwHhcNMDYxMTA3MTk0MjI4WhcNMjkx -MjMxMTk1MjA2WjBKMQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3Qg -Q29ycG9yYXRpb24xGTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwggEiMA0GCSqG -SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvNS7YrGxVaQZx5RNoJLNP2MwhR/jxYDiJ -iQPpvepeRlMJ3Fz1Wuj3RSoC6zFh1ykzTM7HfAo3fg+6MpjhHZevj8fcyTiW89sa -/FHtaMbQbqR8JNGuQsiWUGMu4P51/pinX0kuleM5M2SOHqRfkNJnPLLZ/kG5VacJ -jnIFHovdRIWCQtBJwB1g8NEXLJXr9qXBkqPFwqcIYA1gBBCWeZ4WNOaptvolRTnI -HmX5k/Wq8VLcmZg9pYYaDDUz+kulBAYVHDGA76oYa8J719rO+TMg1fW9ajMtgQT7 -sFzUnKPiXB3jqUJ1XnvUd+85VLrJChgbEplJL4hL/VBi0XPnj3pDAgMBAAGjgZ0w -gZowEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0PBAQDAgGGMA8GA1UdEwEB/wQF -MAMBAf8wHQYDVR0OBBYEFK9EBMJBfkiD2045AuzshHrmzsmkMDQGA1UdHwQtMCsw -KaAnoCWGI2h0dHA6Ly9jcmwuc2VjdXJldHJ1c3QuY29tL1NHQ0EuY3JsMBAGCSsG -AQQBgjcVAQQDAgEAMA0GCSqGSIb3DQEBBQUAA4IBAQBjGghAfaReUw132HquHw0L -URYD7xh8yOOvaliTFGCRsoTciE6+OYo68+aCiV0BN7OrJKQVDpI1WkpEXk5X+nXO -H0jOZvQ8QCaSmGwb7iRGDBezUqXbpZGRzzfTb+cnCDpOGR86p1hcF895P4vkp9Mm -I50mD1hp/Ed+stCNi5O/KU9DaXR2Z0vPB4zmAve14bRDtUstFJ/53CYNv6ZHdAbY -iNE6KTCEztI5gGIbqMdXSbxqVVFnFUq+NQfk1XWYN3kwFNspnWzFacxHVaIw98xc -f8LDmBxrThaA63p4ZUWiABqvDA1VZDRIuJK58bRQKfJPIx/abKwfROHdI3hRW8cW ------END CERTIFICATE----- - -# Issuer: CN=COMODO Certification Authority O=COMODO CA Limited -# Subject: CN=COMODO Certification Authority O=COMODO CA Limited -# Label: "COMODO Certification Authority" -# Serial: 104350513648249232941998508985834464573 -# MD5 Fingerprint: 5c:48:dc:f7:42:72:ec:56:94:6d:1c:cc:71:35:80:75 -# SHA1 Fingerprint: 66:31:bf:9e:f7:4f:9e:b6:c9:d5:a6:0c:ba:6a:be:d1:f7:bd:ef:7b -# SHA256 Fingerprint: 0c:2c:d6:3d:f7:80:6f:a3:99:ed:e8:09:11:6b:57:5b:f8:79:89:f0:65:18:f9:80:8c:86:05:03:17:8b:af:66 ------BEGIN CERTIFICATE----- -MIIEHTCCAwWgAwIBAgIQToEtioJl4AsC7j41AkblPTANBgkqhkiG9w0BAQUFADCB -gTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G -A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxJzAlBgNV -BAMTHkNPTU9ETyBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEyMDEwMDAw -MDBaFw0yOTEyMzEyMzU5NTlaMIGBMQswCQYDVQQGEwJHQjEbMBkGA1UECBMSR3Jl -YXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHEwdTYWxmb3JkMRowGAYDVQQKExFDT01P -RE8gQ0EgTGltaXRlZDEnMCUGA1UEAxMeQ09NT0RPIENlcnRpZmljYXRpb24gQXV0 -aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0ECLi3LjkRv3 -UcEbVASY06m/weaKXTuH+7uIzg3jLz8GlvCiKVCZrts7oVewdFFxze1CkU1B/qnI -2GqGd0S7WWaXUF601CxwRM/aN5VCaTwwxHGzUvAhTaHYujl8HJ6jJJ3ygxaYqhZ8 
-Q5sVW7euNJH+1GImGEaaP+vB+fGQV+useg2L23IwambV4EajcNxo2f8ESIl33rXp -+2dtQem8Ob0y2WIC8bGoPW43nOIv4tOiJovGuFVDiOEjPqXSJDlqR6sA1KGzqSX+ -DT+nHbrTUcELpNqsOO9VUCQFZUaTNE8tja3G1CEZ0o7KBWFxB3NH5YoZEr0ETc5O -nKVIrLsm9wIDAQABo4GOMIGLMB0GA1UdDgQWBBQLWOWLxkwVN6RAqTCpIb5HNlpW -/zAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zBJBgNVHR8EQjBAMD6g -PKA6hjhodHRwOi8vY3JsLmNvbW9kb2NhLmNvbS9DT01PRE9DZXJ0aWZpY2F0aW9u -QXV0aG9yaXR5LmNybDANBgkqhkiG9w0BAQUFAAOCAQEAPpiem/Yb6dc5t3iuHXIY -SdOH5EOC6z/JqvWote9VfCFSZfnVDeFs9D6Mk3ORLgLETgdxb8CPOGEIqB6BCsAv -IC9Bi5HcSEW88cbeunZrM8gALTFGTO3nnc+IlP8zwFboJIYmuNg4ON8qa90SzMc/ -RxdMosIGlgnW2/4/PEZB31jiVg88O8EckzXZOFKs7sjsLjBOlDW0JB9LeGna8gI4 -zJVSk/BwJVmcIGfE7vmLV2H0knZ9P4SNVbfo5azV8fUZVqZa+5Acr5Pr5RzUZ5dd -BA6+C4OmF4O5MBKgxTMVBbkN+8cFduPYSo38NBejxiEovjBFMR7HeL5YYTisO+IB -ZQ== ------END CERTIFICATE----- - -# Issuer: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C. -# Subject: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C. -# Label: "Network Solutions Certificate Authority" -# Serial: 116697915152937497490437556386812487904 -# MD5 Fingerprint: d3:f3:a6:16:c0:fa:6b:1d:59:b1:2d:96:4d:0e:11:2e -# SHA1 Fingerprint: 74:f8:a3:c3:ef:e7:b3:90:06:4b:83:90:3c:21:64:60:20:e5:df:ce -# SHA256 Fingerprint: 15:f0:ba:00:a3:ac:7a:f3:ac:88:4c:07:2b:10:11:a0:77:bd:77:c0:97:f4:01:64:b2:f8:59:8a:bd:83:86:0c ------BEGIN CERTIFICATE----- -MIID5jCCAs6gAwIBAgIQV8szb8JcFuZHFhfjkDFo4DANBgkqhkiG9w0BAQUFADBi -MQswCQYDVQQGEwJVUzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMu -MTAwLgYDVQQDEydOZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3Jp -dHkwHhcNMDYxMjAxMDAwMDAwWhcNMjkxMjMxMjM1OTU5WjBiMQswCQYDVQQGEwJV -UzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMuMTAwLgYDVQQDEydO -ZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkwggEiMA0GCSqG -SIb3DQEBAQUAA4IBDwAwggEKAoIBAQDkvH6SMG3G2I4rC7xGzuAnlt7e+foS0zwz -c7MEL7xxjOWftiJgPl9dzgn/ggwbmlFQGiaJ3dVhXRncEg8tCqJDXRfQNJIg6nPP -OCwGJgl6cvf6UDL4wpPTaaIjzkGxzOTVHzbRijr4jGPiFFlp7Q3Tf2vouAPlT2rl -mGNpSAW+Lv8ztumXWWn4Zxmuk2GWRBXTcrA/vGp97Eh/jcOrqnErU2lBUzS1sLnF -BgrEsEX1QV1uiUV7PTsmjHTC5dLRfbIR1PtYMiKagMnc/Qzpf14Dl847ABSHJ3A4 -qY5usyd2mFHgBeMhqxrVhSI8KbWaFsWAqPS7azCPL0YCorEMIuDTAgMBAAGjgZcw -gZQwHQYDVR0OBBYEFCEwyfsA106Y2oeqKtCnLrFAMadMMA4GA1UdDwEB/wQEAwIB -BjAPBgNVHRMBAf8EBTADAQH/MFIGA1UdHwRLMEkwR6BFoEOGQWh0dHA6Ly9jcmwu -bmV0c29sc3NsLmNvbS9OZXR3b3JrU29sdXRpb25zQ2VydGlmaWNhdGVBdXRob3Jp -dHkuY3JsMA0GCSqGSIb3DQEBBQUAA4IBAQC7rkvnt1frf6ott3NHhWrB5KUd5Oc8 -6fRZZXe1eltajSU24HqXLjjAV2CDmAaDn7l2em5Q4LqILPxFzBiwmZVRDuwduIj/ -h1AcgsLj4DKAv6ALR8jDMe+ZZzKATxcheQxpXN5eNK4CtSbqUN9/GGUsyfJj4akH -/nxxH2szJGoeBfcFaMBqEssuXmHLrijTfsK0ZpEmXzwuJF/LWA/rKOyvEZbz3Htv -wKeI8lN3s2Berq4o2jUsbzRF0ybh3uxbTydrFny9RAQYgrOJeRcQcT16ohZO9QHN -pGxlaKFJdlxDydi8NmdspZS11My5vWo1ViHe2MPr+8ukYEywVaCge1ey ------END CERTIFICATE----- - -# Issuer: CN=COMODO ECC Certification Authority O=COMODO CA Limited -# Subject: CN=COMODO ECC Certification Authority O=COMODO CA Limited -# Label: "COMODO ECC Certification Authority" -# Serial: 41578283867086692638256921589707938090 -# MD5 Fingerprint: 7c:62:ff:74:9d:31:53:5e:68:4a:d5:78:aa:1e:bf:23 -# SHA1 Fingerprint: 9f:74:4e:9f:2b:4d:ba:ec:0f:31:2c:50:b6:56:3b:8e:2d:93:c3:11 -# SHA256 Fingerprint: 17:93:92:7a:06:14:54:97:89:ad:ce:2f:8f:34:f7:f0:b6:6d:0f:3a:e3:a3:b8:4d:21:ec:15:db:ba:4f:ad:c7 ------BEGIN CERTIFICATE----- -MIICiTCCAg+gAwIBAgIQH0evqmIAcFBUTAGem2OZKjAKBggqhkjOPQQDAzCBhTEL -MAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE -BxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMT 
-IkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwMzA2MDAw -MDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdy -ZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09N -T0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlv -biBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQDR3svdcmCFYX7deSR -FtSrYpn1PlILBs5BAH+X4QokPB0BBO490o0JlwzgdeT6+3eKKvUDYEs2ixYjFq0J -cfRK9ChQtP6IHG4/bC8vCVlbpVsLM5niwz2J+Wos77LTBumjQjBAMB0GA1UdDgQW -BBR1cacZSBm8nZ3qQUfflMRId5nTeTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/ -BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjEA7wNbeqy3eApyt4jf/7VGFAkK+qDm -fQjGGoe9GKhzvSbKYAydzpmfz1wPMOG+FDHqAjAU9JM8SaczepBGR7NjfRObTrdv -GDeAU/7dIOA1mjbRxwG55tzd8/8dLDoWV9mSOdY= ------END CERTIFICATE----- - -# Issuer: CN=OISTE WISeKey Global Root GA CA O=WISeKey OU=Copyright (c) 2005/OISTE Foundation Endorsed -# Subject: CN=OISTE WISeKey Global Root GA CA O=WISeKey OU=Copyright (c) 2005/OISTE Foundation Endorsed -# Label: "OISTE WISeKey Global Root GA CA" -# Serial: 86718877871133159090080555911823548314 -# MD5 Fingerprint: bc:6c:51:33:a7:e9:d3:66:63:54:15:72:1b:21:92:93 -# SHA1 Fingerprint: 59:22:a1:e1:5a:ea:16:35:21:f8:98:39:6a:46:46:b0:44:1b:0f:a9 -# SHA256 Fingerprint: 41:c9:23:86:6a:b4:ca:d6:b7:ad:57:80:81:58:2e:02:07:97:a6:cb:df:4f:ff:78:ce:83:96:b3:89:37:d7:f5 ------BEGIN CERTIFICATE----- -MIID8TCCAtmgAwIBAgIQQT1yx/RrH4FDffHSKFTfmjANBgkqhkiG9w0BAQUFADCB -ijELMAkGA1UEBhMCQ0gxEDAOBgNVBAoTB1dJU2VLZXkxGzAZBgNVBAsTEkNvcHly -aWdodCAoYykgMjAwNTEiMCAGA1UECxMZT0lTVEUgRm91bmRhdGlvbiBFbmRvcnNl -ZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9iYWwgUm9vdCBHQSBDQTAeFw0w -NTEyMTExNjAzNDRaFw0zNzEyMTExNjA5NTFaMIGKMQswCQYDVQQGEwJDSDEQMA4G -A1UEChMHV0lTZUtleTEbMBkGA1UECxMSQ29weXJpZ2h0IChjKSAyMDA1MSIwIAYD -VQQLExlPSVNURSBGb3VuZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBX -SVNlS2V5IEdsb2JhbCBSb290IEdBIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A -MIIBCgKCAQEAy0+zAJs9Nt350UlqaxBJH+zYK7LG+DKBKUOVTJoZIyEVRd7jyBxR -VVuuk+g3/ytr6dTqvirdqFEr12bDYVxgAsj1znJ7O7jyTmUIms2kahnBAbtzptf2 -w93NvKSLtZlhuAGio9RN1AU9ka34tAhxZK9w8RxrfvbDd50kc3vkDIzh2TbhmYsF -mQvtRTEJysIA2/dyoJaqlYfQjse2YXMNdmaM3Bu0Y6Kff5MTMPGhJ9vZ/yxViJGg -4E8HsChWjBgbl0SOid3gF27nKu+POQoxhILYQBRJLnpB5Kf+42TMwVlxSywhp1t9 -4B3RLoGbw9ho972WG6xwsRYUC9tguSYBBQIDAQABo1EwTzALBgNVHQ8EBAMCAYYw -DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUswN+rja8sHnR3JQmthG+IbJphpQw -EAYJKwYBBAGCNxUBBAMCAQAwDQYJKoZIhvcNAQEFBQADggEBAEuh/wuHbrP5wUOx -SPMowB0uyQlB+pQAHKSkq0lPjz0e701vvbyk9vImMMkQyh2I+3QZH4VFvbBsUfk2 -ftv1TDI6QU9bR8/oCy22xBmddMVHxjtqD6wU2zz0c5ypBd8A3HR4+vg1YFkCExh8 -vPtNsCBtQ7tgMHpnM1zFmdH4LTlSc/uMqpclXHLZCB6rTjzjgTGfA6b7wP4piFXa -hNVQA7bihKOmNqoROgHhGEvWRGizPflTdISzRpFGlgC3gCy24eMQ4tui5yiPAZZi -Fj4A4xylNoEYokxSdsARo27mHbrjWr42U8U+dY+GaSlYU7Wcu2+fXMUY7N0v4ZjJ -/L7fCg0= ------END CERTIFICATE----- - -# Issuer: CN=Certigna O=Dhimyotis -# Subject: CN=Certigna O=Dhimyotis -# Label: "Certigna" -# Serial: 18364802974209362175 -# MD5 Fingerprint: ab:57:a6:5b:7d:42:82:19:b5:d8:58:26:28:5e:fd:ff -# SHA1 Fingerprint: b1:2e:13:63:45:86:a4:6f:1a:b2:60:68:37:58:2d:c4:ac:fd:94:97 -# SHA256 Fingerprint: e3:b6:a2:db:2e:d7:ce:48:84:2f:7a:c5:32:41:c7:b7:1d:54:14:4b:fb:40:c1:1f:3f:1d:0b:42:f5:ee:a1:2d ------BEGIN CERTIFICATE----- -MIIDqDCCApCgAwIBAgIJAP7c4wEPyUj/MA0GCSqGSIb3DQEBBQUAMDQxCzAJBgNV -BAYTAkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hMB4X -DTA3MDYyOTE1MTMwNVoXDTI3MDYyOTE1MTMwNVowNDELMAkGA1UEBhMCRlIxEjAQ -BgNVBAoMCURoaW15b3RpczERMA8GA1UEAwwIQ2VydGlnbmEwggEiMA0GCSqGSIb3 -DQEBAQUAA4IBDwAwggEKAoIBAQDIaPHJ1tazNHUmgh7stL7qXOEm7RFHYeGifBZ4 
-QCHkYJ5ayGPhxLGWkv8YbWkj4Sti993iNi+RB7lIzw7sebYs5zRLcAglozyHGxny -gQcPOJAZ0xH+hrTy0V4eHpbNgGzOOzGTtvKg0KmVEn2lmsxryIRWijOp5yIVUxbw -zBfsV1/pogqYCd7jX5xv3EjjhQsVWqa6n6xI4wmy9/Qy3l40vhx4XUJbzg4ij02Q -130yGLMLLGq/jj8UEYkgDncUtT2UCIf3JR7VsmAA7G8qKCVuKj4YYxclPz5EIBb2 -JsglrgVKtOdjLPOMFlN+XPsRGgjBRmKfIrjxwo1p3Po6WAbfAgMBAAGjgbwwgbkw -DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUGu3+QTmQtCRZvgHyUtVF9lo53BEw -ZAYDVR0jBF0wW4AUGu3+QTmQtCRZvgHyUtVF9lo53BGhOKQ2MDQxCzAJBgNVBAYT -AkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hggkA/tzj -AQ/JSP8wDgYDVR0PAQH/BAQDAgEGMBEGCWCGSAGG+EIBAQQEAwIABzANBgkqhkiG -9w0BAQUFAAOCAQEAhQMeknH2Qq/ho2Ge6/PAD/Kl1NqV5ta+aDY9fm4fTIrv0Q8h -bV6lUmPOEvjvKtpv6zf+EwLHyzs+ImvaYS5/1HI93TDhHkxAGYwP15zRgzB7mFnc -fca5DClMoTOi62c6ZYTTluLtdkVwj7Ur3vkj1kluPBS1xp81HlDQwY9qcEQCYsuu -HWhBp6pX6FOqB9IG9tUUBguRA3UsbHK1YZWaDYu5Def131TN3ubY1gkIl2PlwS6w -t0QmwCbAr1UwnjvVNioZBPRcHv/PLLf/0P2HQBHVESO7SMAhqaQoLf0V+LBOK/Qw -WyH8EZE0vkHve52Xdf+XlcCWWC/qu0bXu+TZLg== ------END CERTIFICATE----- - -# Issuer: CN=Deutsche Telekom Root CA 2 O=Deutsche Telekom AG OU=T-TeleSec Trust Center -# Subject: CN=Deutsche Telekom Root CA 2 O=Deutsche Telekom AG OU=T-TeleSec Trust Center -# Label: "Deutsche Telekom Root CA 2" -# Serial: 38 -# MD5 Fingerprint: 74:01:4a:91:b1:08:c4:58:ce:47:cd:f0:dd:11:53:08 -# SHA1 Fingerprint: 85:a4:08:c0:9c:19:3e:5d:51:58:7d:cd:d6:13:30:fd:8c:de:37:bf -# SHA256 Fingerprint: b6:19:1a:50:d0:c3:97:7f:7d:a9:9b:cd:aa:c8:6a:22:7d:ae:b9:67:9e:c7:0b:a3:b0:c9:d9:22:71:c1:70:d3 ------BEGIN CERTIFICATE----- -MIIDnzCCAoegAwIBAgIBJjANBgkqhkiG9w0BAQUFADBxMQswCQYDVQQGEwJERTEc -MBoGA1UEChMTRGV1dHNjaGUgVGVsZWtvbSBBRzEfMB0GA1UECxMWVC1UZWxlU2Vj -IFRydXN0IENlbnRlcjEjMCEGA1UEAxMaRGV1dHNjaGUgVGVsZWtvbSBSb290IENB -IDIwHhcNOTkwNzA5MTIxMTAwWhcNMTkwNzA5MjM1OTAwWjBxMQswCQYDVQQGEwJE -RTEcMBoGA1UEChMTRGV1dHNjaGUgVGVsZWtvbSBBRzEfMB0GA1UECxMWVC1UZWxl -U2VjIFRydXN0IENlbnRlcjEjMCEGA1UEAxMaRGV1dHNjaGUgVGVsZWtvbSBSb290 -IENBIDIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCrC6M14IspFLEU -ha88EOQ5bzVdSq7d6mGNlUn0b2SjGmBmpKlAIoTZ1KXleJMOaAGtuU1cOs7TuKhC -QN/Po7qCWWqSG6wcmtoIKyUn+WkjR/Hg6yx6m/UTAtB+NHzCnjwAWav12gz1Mjwr -rFDa1sPeg5TKqAyZMg4ISFZbavva4VhYAUlfckE8FQYBjl2tqriTtM2e66foai1S -NNs671x1Udrb8zH57nGYMsRUFUQM+ZtV7a3fGAigo4aKSe5TBY8ZTNXeWHmb0moc -QqvF1afPaA+W5OFhmHZhyJF81j4A4pFQh+GdCuatl9Idxjp9y7zaAzTVjlsB9WoH -txa2bkp/AgMBAAGjQjBAMB0GA1UdDgQWBBQxw3kbuvVT1xfgiXotF2wKsyudMzAP -BgNVHRMECDAGAQH/AgEFMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQUFAAOC -AQEAlGRZrTlk5ynrE/5aw4sTV8gEJPB0d8Bg42f76Ymmg7+Wgnxu1MM9756Abrsp -tJh6sTtU6zkXR34ajgv8HzFZMQSyzhfzLMdiNlXiItiJVbSYSKpk+tYcNthEeFpa -IzpXl/V6ME+un2pMSyuOoAPjPuCp1NJ70rOo4nI8rZ7/gFnkm0W09juwzTkZmDLl -6iFhkOQxIY40sfcvNUqFENrnijchvllj4PKFiDFT1FQUhXB59C4Gdyd1Lx+4ivn+ -xbrYNuSD7Odlt79jWvNGr4GUN9RBjNYj1h7P9WgbRGOiWrqnNVmh5XAFmw4jV5mU -Cm26OWMohpLzGITY+9HPBVZkVw== ------END CERTIFICATE----- - -# Issuer: CN=Cybertrust Global Root O=Cybertrust, Inc -# Subject: CN=Cybertrust Global Root O=Cybertrust, Inc -# Label: "Cybertrust Global Root" -# Serial: 4835703278459682877484360 -# MD5 Fingerprint: 72:e4:4a:87:e3:69:40:80:77:ea:bc:e3:f4:ff:f0:e1 -# SHA1 Fingerprint: 5f:43:e5:b1:bf:f8:78:8c:ac:1c:c7:ca:4a:9a:c6:22:2b:cc:34:c6 -# SHA256 Fingerprint: 96:0a:df:00:63:e9:63:56:75:0c:29:65:dd:0a:08:67:da:0b:9c:bd:6e:77:71:4a:ea:fb:23:49:ab:39:3d:a3 ------BEGIN CERTIFICATE----- -MIIDoTCCAomgAwIBAgILBAAAAAABD4WqLUgwDQYJKoZIhvcNAQEFBQAwOzEYMBYG -A1UEChMPQ3liZXJ0cnVzdCwgSW5jMR8wHQYDVQQDExZDeWJlcnRydXN0IEdsb2Jh -bCBSb290MB4XDTA2MTIxNTA4MDAwMFoXDTIxMTIxNTA4MDAwMFowOzEYMBYGA1UE 
-ChMPQ3liZXJ0cnVzdCwgSW5jMR8wHQYDVQQDExZDeWJlcnRydXN0IEdsb2JhbCBS -b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA+Mi8vRRQZhP/8NN5 -7CPytxrHjoXxEnOmGaoQ25yiZXRadz5RfVb23CO21O1fWLE3TdVJDm71aofW0ozS -J8bi/zafmGWgE07GKmSb1ZASzxQG9Dvj1Ci+6A74q05IlG2OlTEQXO2iLb3VOm2y -HLtgwEZLAfVJrn5GitB0jaEMAs7u/OePuGtm839EAL9mJRQr3RAwHQeWP032a7iP -t3sMpTjr3kfb1V05/Iin89cqdPHoWqI7n1C6poxFNcJQZZXcY4Lv3b93TZxiyWNz -FtApD0mpSPCzqrdsxacwOUBdrsTiXSZT8M4cIwhhqJQZugRiQOwfOHB3EgZxpzAY -XSUnpQIDAQABo4GlMIGiMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/ -MB0GA1UdDgQWBBS2CHsNesysIEyGVjJez6tuhS1wVzA/BgNVHR8EODA2MDSgMqAw -hi5odHRwOi8vd3d3Mi5wdWJsaWMtdHJ1c3QuY29tL2NybC9jdC9jdHJvb3QuY3Js -MB8GA1UdIwQYMBaAFLYIew16zKwgTIZWMl7Pq26FLXBXMA0GCSqGSIb3DQEBBQUA -A4IBAQBW7wojoFROlZfJ+InaRcHUowAl9B8Tq7ejhVhpwjCt2BWKLePJzYFa+HMj -Wqd8BfP9IjsO0QbE2zZMcwSO5bAi5MXzLqXZI+O4Tkogp24CJJ8iYGd7ix1yCcUx -XOl5n4BHPa2hCwcUPUf/A2kaDAtE52Mlp3+yybh2hO0j9n0Hq0V+09+zv+mKts2o -omcrUtW3ZfA5TGOgkXmTUg9U3YO7n9GPp1Nzw8v/MOx8BLjYRB+TX3EJIrduPuoc -A06dGiBh+4E37F78CkWr1+cXVdCg6mCbpvbjjFspwgZgFJ0tl0ypkxWdYcQBX0jW -WL1WMRJOEcgh4LMRkWXbtKaIOM5V ------END CERTIFICATE----- - -# Issuer: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority -# Subject: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority -# Label: "ePKI Root Certification Authority" -# Serial: 28956088682735189655030529057352760477 -# MD5 Fingerprint: 1b:2e:00:ca:26:06:90:3d:ad:fe:6f:15:68:d3:6b:b3 -# SHA1 Fingerprint: 67:65:0d:f1:7e:8e:7e:5b:82:40:a4:f4:56:4b:cf:e2:3d:69:c6:f0 -# SHA256 Fingerprint: c0:a6:f4:dc:63:a2:4b:fd:cf:54:ef:2a:6a:08:2a:0a:72:de:35:80:3e:2f:f5:ff:52:7a:e5:d8:72:06:df:d5 ------BEGIN CERTIFICATE----- -MIIFsDCCA5igAwIBAgIQFci9ZUdcr7iXAF7kBtK8nTANBgkqhkiG9w0BAQUFADBe -MQswCQYDVQQGEwJUVzEjMCEGA1UECgwaQ2h1bmdod2EgVGVsZWNvbSBDby4sIEx0 -ZC4xKjAoBgNVBAsMIWVQS0kgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAe -Fw0wNDEyMjAwMjMxMjdaFw0zNDEyMjAwMjMxMjdaMF4xCzAJBgNVBAYTAlRXMSMw -IQYDVQQKDBpDaHVuZ2h3YSBUZWxlY29tIENvLiwgTHRkLjEqMCgGA1UECwwhZVBL -SSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIICIjANBgkqhkiG9w0BAQEF -AAOCAg8AMIICCgKCAgEA4SUP7o3biDN1Z82tH306Tm2d0y8U82N0ywEhajfqhFAH -SyZbCUNsIZ5qyNUD9WBpj8zwIuQf5/dqIjG3LBXy4P4AakP/h2XGtRrBp0xtInAh -ijHyl3SJCRImHJ7K2RKilTza6We/CKBk49ZCt0Xvl/T29de1ShUCWH2YWEtgvM3X -DZoTM1PRYfl61dd4s5oz9wCGzh1NlDivqOx4UXCKXBCDUSH3ET00hl7lSM2XgYI1 -TBnsZfZrxQWh7kcT1rMhJ5QQCtkkO7q+RBNGMD+XPNjX12ruOzjjK9SXDrkb5wdJ -fzcq+Xd4z1TtW0ado4AOkUPB1ltfFLqfpo0kR0BZv3I4sjZsN/+Z0V0OWQqraffA -sgRFelQArr5T9rXn4fg8ozHSqf4hUmTFpmfwdQcGlBSBVcYn5AGPF8Fqcde+S/uU -WH1+ETOxQvdibBjWzwloPn9s9h6PYq2lY9sJpx8iQkEeb5mKPtf5P0B6ebClAZLS -nT0IFaUQAS2zMnaolQ2zepr7BxB4EW/hj8e6DyUadCrlHJhBmd8hh+iVBmoKs2pH -dmX2Os+PYhcZewoozRrSgx4hxyy/vv9haLdnG7t4TY3OZ+XkwY63I2binZB1NJip -NiuKmpS5nezMirH4JYlcWrYvjB9teSSnUmjDhDXiZo1jDiVN1Rmy5nk3pyKdVDEC -AwEAAaNqMGgwHQYDVR0OBBYEFB4M97Zn8uGSJglFwFU5Lnc/QkqiMAwGA1UdEwQF -MAMBAf8wOQYEZyoHAAQxMC8wLQIBADAJBgUrDgMCGgUAMAcGBWcqAwAABBRFsMLH -ClZ87lt4DJX5GFPBphzYEDANBgkqhkiG9w0BAQUFAAOCAgEACbODU1kBPpVJufGB -uvl2ICO1J2B01GqZNF5sAFPZn/KmsSQHRGoqxqWOeBLoR9lYGxMqXnmbnwoqZ6Yl -PwZpVnPDimZI+ymBV3QGypzqKOg4ZyYr8dW1P2WT+DZdjo2NQCCHGervJ8A9tDkP -JXtoUHRVnAxZfVo9QZQlUgjgRywVMRnVvwdVxrsStZf0X4OFunHB2WyBEXYKCrC/ -gpf36j36+uwtqSiUO1bd0lEursC9CBWMd1I0ltabrNMdjmEPNXubrjlpC2JgQCA2 -j6/7Nu4tCEoduL+bXPjqpRugc6bY+G7gMwRfaKonh+3ZwZCc7b3jajWvY9+rGNm6 -5ulK6lCKD2GTHuItGeIwlDWSXQ62B68ZgI9HkFFLLk3dheLSClIKF5r8GrBQAuUB -o2M3IUxExJtRmREOc5wGj1QupyheRDmHVi03vYVElOEMSyycw5KFNGHLD7ibSkNS -/jQ6fbjpKdx2qcgw+BRxgMYeNkh0IkFch4LoGHGLQYlE535YW6i4jRPpp2zDR+2z 
-Gp1iro2C6pSe3VkQw63d4k3jMdXH7OjysP6SHhYKGvzZ8/gntsm+HbRsZJB/9OTE -W9c3rkIO3aQab3yIVMUWbuF6aC74Or8NpDyJO3inTmODBCEIZ43ygknQW/2xzQ+D -hNQ+IIX3Sj0rnP0qCglN6oH4EZw= ------END CERTIFICATE----- - -# Issuer: O=certSIGN OU=certSIGN ROOT CA -# Subject: O=certSIGN OU=certSIGN ROOT CA -# Label: "certSIGN ROOT CA" -# Serial: 35210227249154 -# MD5 Fingerprint: 18:98:c0:d6:e9:3a:fc:f9:b0:f5:0c:f7:4b:01:44:17 -# SHA1 Fingerprint: fa:b7:ee:36:97:26:62:fb:2d:b0:2a:f6:bf:03:fd:e8:7c:4b:2f:9b -# SHA256 Fingerprint: ea:a9:62:c4:fa:4a:6b:af:eb:e4:15:19:6d:35:1c:cd:88:8d:4f:53:f3:fa:8a:e6:d7:c4:66:a9:4e:60:42:bb ------BEGIN CERTIFICATE----- -MIIDODCCAiCgAwIBAgIGIAYFFnACMA0GCSqGSIb3DQEBBQUAMDsxCzAJBgNVBAYT -AlJPMREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBD -QTAeFw0wNjA3MDQxNzIwMDRaFw0zMTA3MDQxNzIwMDRaMDsxCzAJBgNVBAYTAlJP -MREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBDQTCC -ASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALczuX7IJUqOtdu0KBuqV5Do -0SLTZLrTk+jUrIZhQGpgV2hUhE28alQCBf/fm5oqrl0Hj0rDKH/v+yv6efHHrfAQ -UySQi2bJqIirr1qjAOm+ukbuW3N7LBeCgV5iLKECZbO9xSsAfsT8AzNXDe3i+s5d -RdY4zTW2ssHQnIFKquSyAVwdj1+ZxLGt24gh65AIgoDzMKND5pCCrlUoSe1b16kQ -OA7+j0xbm0bqQfWwCHTD0IgztnzXdN/chNFDDnU5oSVAKOp4yw4sLjmdjItuFhwv -JoIQ4uNllAoEwF73XVv4EOLQunpL+943AAAaWyjj0pxzPjKHmKHJUS/X3qwzs08C -AwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAcYwHQYDVR0O -BBYEFOCMm9slSbPxfIbWskKHC9BroNnkMA0GCSqGSIb3DQEBBQUAA4IBAQA+0hyJ -LjX8+HXd5n9liPRyTMks1zJO890ZeUe9jjtbkw9QSSQTaxQGcu8J06Gh40CEyecY -MnQ8SG4Pn0vU9x7Tk4ZkVJdjclDVVc/6IJMCopvDI5NOFlV2oHB5bc0hH88vLbwZ -44gx+FkagQnIl6Z0x2DEW8xXjrJ1/RsCCdtZb3KTafcxQdaIOL+Hsr0Wefmq5L6I -Jd1hJyMctTEHBDa0GpC9oHRxUIltvBTjD4au8as+x6AJzKNI0eDbZOeStc+vckNw -i/nDhDwTqn6Sm1dTk/pwwpEOMfmbZ13pljheX7NzTogVZ96edhBiIL5VaZVDADlN -9u6wWk5JRFRYX0KD ------END CERTIFICATE----- - -# Issuer: CN=GeoTrust Primary Certification Authority - G3 O=GeoTrust Inc. OU=(c) 2008 GeoTrust Inc. - For authorized use only -# Subject: CN=GeoTrust Primary Certification Authority - G3 O=GeoTrust Inc. OU=(c) 2008 GeoTrust Inc. 
- For authorized use only -# Label: "GeoTrust Primary Certification Authority - G3" -# Serial: 28809105769928564313984085209975885599 -# MD5 Fingerprint: b5:e8:34:36:c9:10:44:58:48:70:6d:2e:83:d4:b8:05 -# SHA1 Fingerprint: 03:9e:ed:b8:0b:e7:a0:3c:69:53:89:3b:20:d2:d9:32:3a:4c:2a:fd -# SHA256 Fingerprint: b4:78:b8:12:25:0d:f8:78:63:5c:2a:a7:ec:7d:15:5e:aa:62:5e:e8:29:16:e2:cd:29:43:61:88:6c:d1:fb:d4 ------BEGIN CERTIFICATE----- -MIID/jCCAuagAwIBAgIQFaxulBmyeUtB9iepwxgPHzANBgkqhkiG9w0BAQsFADCB -mDELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xOTA3BgNVBAsT -MChjKSAyMDA4IEdlb1RydXN0IEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25s -eTE2MDQGA1UEAxMtR2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhv -cml0eSAtIEczMB4XDTA4MDQwMjAwMDAwMFoXDTM3MTIwMTIzNTk1OVowgZgxCzAJ -BgNVBAYTAlVTMRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMTkwNwYDVQQLEzAoYykg -MjAwOCBHZW9UcnVzdCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxNjA0 -BgNVBAMTLUdlb1RydXN0IFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg -LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANziXmJYHTNXOTIz -+uvLh4yn1ErdBojqZI4xmKU4kB6Yzy5jK/BGvESyiaHAKAxJcCGVn2TAppMSAmUm -hsalifD614SgcK9PGpc/BkTVyetyEH3kMSj7HGHmKAdEc5IiaacDiGydY8hS2pgn -5whMcD60yRLBxWeDXTPzAxHsatBT4tG6NmCUgLthY2xbF37fQJQeqw3CIShwiP/W -JmxsYAQlTlV+fe+/lEjetx3dcI0FX4ilm/LC7urRQEFtYjgdVgbFA0dRIBn8exAL -DmKudlW/X3e+PkkBUz2YJQN2JFodtNuJ6nnltrM7P7pMKEF/BqxqjsHQ9gUdfeZC -huOl1UcCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYw -HQYDVR0OBBYEFMR5yo6hTgMdHNxr2zFblD4/MH8tMA0GCSqGSIb3DQEBCwUAA4IB -AQAtxRPPVoB7eni9n64smefv2t+UXglpp+duaIy9cr5HqQ6XErhK8WTTOd8lNNTB -zU6B8A8ExCSzNJbGpqow32hhc9f5joWJ7w5elShKKiePEI4ufIbEAp7aDHdlDkQN -kv39sxY2+hENHYwOB4lqKVb3cvTdFZx3NWZXqxNT2I7BQMXXExZacse3aQHEerGD -AWh9jUGhlBjBJVz88P6DAod8DQ3PLghcSkANPuyBYeYk28rgDi0Hsj5W3I31QYUH -SJsMC8tJP33st/3LjWeJGqvtux6jAAgIFyqCXDFdRootD4abdNlF+9RAsXqqaC2G -spki4cErx5z481+oghLrGREt ------END CERTIFICATE----- - -# Issuer: CN=thawte Primary Root CA - G2 O=thawte, Inc. OU=(c) 2007 thawte, Inc. - For authorized use only -# Subject: CN=thawte Primary Root CA - G2 O=thawte, Inc. OU=(c) 2007 thawte, Inc. - For authorized use only -# Label: "thawte Primary Root CA - G2" -# Serial: 71758320672825410020661621085256472406 -# MD5 Fingerprint: 74:9d:ea:60:24:c4:fd:22:53:3e:cc:3a:72:d9:29:4f -# SHA1 Fingerprint: aa:db:bc:22:23:8f:c4:01:a1:27:bb:38:dd:f4:1d:db:08:9e:f0:12 -# SHA256 Fingerprint: a4:31:0d:50:af:18:a6:44:71:90:37:2a:86:af:af:8b:95:1f:fb:43:1d:83:7f:1e:56:88:b4:59:71:ed:15:57 ------BEGIN CERTIFICATE----- -MIICiDCCAg2gAwIBAgIQNfwmXNmET8k9Jj1Xm67XVjAKBggqhkjOPQQDAzCBhDEL -MAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjE4MDYGA1UECxMvKGMp -IDIwMDcgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxJDAi -BgNVBAMTG3RoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EgLSBHMjAeFw0wNzExMDUwMDAw -MDBaFw0zODAxMTgyMzU5NTlaMIGEMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMdGhh -d3RlLCBJbmMuMTgwNgYDVQQLEy8oYykgMjAwNyB0aGF3dGUsIEluYy4gLSBGb3Ig -YXV0aG9yaXplZCB1c2Ugb25seTEkMCIGA1UEAxMbdGhhd3RlIFByaW1hcnkgUm9v -dCBDQSAtIEcyMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEotWcgnuVnfFSeIf+iha/ -BebfowJPDQfGAFG6DAJSLSKkQjnE/o/qycG+1E3/n3qe4rF8mq2nhglzh9HnmuN6 -papu+7qzcMBniKI11KOasf2twu8x+qi58/sIxpHR+ymVo0IwQDAPBgNVHRMBAf8E -BTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUmtgAMADna3+FGO6Lts6K -DPgR4bswCgYIKoZIzj0EAwMDaQAwZgIxAN344FdHW6fmCsO99YCKlzUNG4k8VIZ3 -KMqh9HneteY4sPBlcIx/AlTCv//YoT7ZzwIxAMSNlPzcU9LcnXgWHxUzI1NS41ox -XZ3Krr0TKUQNJ1uo52icEvdYPy5yAlejj6EULg== ------END CERTIFICATE----- - -# Issuer: CN=thawte Primary Root CA - G3 O=thawte, Inc. OU=Certification Services Division/(c) 2008 thawte, Inc. 
- For authorized use only -# Subject: CN=thawte Primary Root CA - G3 O=thawte, Inc. OU=Certification Services Division/(c) 2008 thawte, Inc. - For authorized use only -# Label: "thawte Primary Root CA - G3" -# Serial: 127614157056681299805556476275995414779 -# MD5 Fingerprint: fb:1b:5d:43:8a:94:cd:44:c6:76:f2:43:4b:47:e7:31 -# SHA1 Fingerprint: f1:8b:53:8d:1b:e9:03:b6:a6:f0:56:43:5b:17:15:89:ca:f3:6b:f2 -# SHA256 Fingerprint: 4b:03:f4:58:07:ad:70:f2:1b:fc:2c:ae:71:c9:fd:e4:60:4c:06:4c:f5:ff:b6:86:ba:e5:db:aa:d7:fd:d3:4c ------BEGIN CERTIFICATE----- -MIIEKjCCAxKgAwIBAgIQYAGXt0an6rS0mtZLL/eQ+zANBgkqhkiG9w0BAQsFADCB -rjELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMf -Q2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIw -MDggdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxJDAiBgNV -BAMTG3RoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EgLSBHMzAeFw0wODA0MDIwMDAwMDBa -Fw0zNzEyMDEyMzU5NTlaMIGuMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMdGhhd3Rl -LCBJbmMuMSgwJgYDVQQLEx9DZXJ0aWZpY2F0aW9uIFNlcnZpY2VzIERpdmlzaW9u -MTgwNgYDVQQLEy8oYykgMjAwOCB0aGF3dGUsIEluYy4gLSBGb3IgYXV0aG9yaXpl -ZCB1c2Ugb25seTEkMCIGA1UEAxMbdGhhd3RlIFByaW1hcnkgUm9vdCBDQSAtIEcz -MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsr8nLPvb2FvdeHsbnndm -gcs+vHyu86YnmjSjaDFxODNi5PNxZnmxqWWjpYvVj2AtP0LMqmsywCPLLEHd5N/8 -YZzic7IilRFDGF/Eth9XbAoFWCLINkw6fKXRz4aviKdEAhN0cXMKQlkC+BsUa0Lf -b1+6a4KinVvnSr0eAXLbS3ToO39/fR8EtCab4LRarEc9VbjXsCZSKAExQGbY2SS9 -9irY7CFJXJv2eul/VTV+lmuNk5Mny5K76qxAwJ/C+IDPXfRa3M50hqY+bAtTyr2S -zhkGcuYMXDhpxwTWvGzOW/b3aJzcJRVIiKHpqfiYnODz1TEoYRFsZ5aNOZnLwkUk -OQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNV -HQ4EFgQUrWyqlGCc7eT/+j4KdCtjA/e2Wb8wDQYJKoZIhvcNAQELBQADggEBABpA -2JVlrAmSicY59BDlqQ5mU1143vokkbvnRFHfxhY0Cu9qRFHqKweKA3rD6z8KLFIW -oCtDuSWQP3CpMyVtRRooOyfPqsMpQhvfO0zAMzRbQYi/aytlryjvsvXDqmbOe1bu -t8jLZ8HJnBoYuMTDSQPxYA5QzUbF83d597YV4Djbxy8ooAw/dyZ02SUS2jHaGh7c -KUGRIjxpp7sC8rZcJwOJ9Abqm+RyguOhCcHpABnTPtRwa7pxpqpYrvS76Wy274fM -m7v/OeZWYdMKp8RcTGB7BXcmer/YB1IsYvdwY9k5vG8cwnncdimvzsUsZAReiDZu -MdRAGmI0Nj81Aa6sY6A= ------END CERTIFICATE----- - -# Issuer: CN=GeoTrust Primary Certification Authority - G2 O=GeoTrust Inc. OU=(c) 2007 GeoTrust Inc. - For authorized use only -# Subject: CN=GeoTrust Primary Certification Authority - G2 O=GeoTrust Inc. OU=(c) 2007 GeoTrust Inc. 
- For authorized use only -# Label: "GeoTrust Primary Certification Authority - G2" -# Serial: 80682863203381065782177908751794619243 -# MD5 Fingerprint: 01:5e:d8:6b:bd:6f:3d:8e:a1:31:f8:12:e0:98:73:6a -# SHA1 Fingerprint: 8d:17:84:d5:37:f3:03:7d:ec:70:fe:57:8b:51:9a:99:e6:10:d7:b0 -# SHA256 Fingerprint: 5e:db:7a:c4:3b:82:a0:6a:87:61:e8:d7:be:49:79:eb:f2:61:1f:7d:d7:9b:f9:1c:1c:6b:56:6a:21:9e:d7:66 ------BEGIN CERTIFICATE----- -MIICrjCCAjWgAwIBAgIQPLL0SAoA4v7rJDteYD7DazAKBggqhkjOPQQDAzCBmDEL -MAkGA1UEBhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xOTA3BgNVBAsTMChj -KSAyMDA3IEdlb1RydXN0IEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25seTE2 -MDQGA1UEAxMtR2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0 -eSAtIEcyMB4XDTA3MTEwNTAwMDAwMFoXDTM4MDExODIzNTk1OVowgZgxCzAJBgNV -BAYTAlVTMRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMTkwNwYDVQQLEzAoYykgMjAw -NyBHZW9UcnVzdCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxNjA0BgNV -BAMTLUdlb1RydXN0IFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgLSBH -MjB2MBAGByqGSM49AgEGBSuBBAAiA2IABBWx6P0DFUPlrOuHNxFi79KDNlJ9RVcL -So17VDs6bl8VAsBQps8lL33KSLjHUGMcKiEIfJo22Av+0SbFWDEwKCXzXV2juLal -tJLtbCyf691DiaI8S0iRHVDsJt/WYC69IaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO -BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFBVfNVdRVfslsq0DafwBo/q+EVXVMAoG -CCqGSM49BAMDA2cAMGQCMGSWWaboCd6LuvpaiIjwH5HTRqjySkwCY/tsXzjbLkGT -qQ7mndwxHLKgpxgceeHHNgIwOlavmnRs9vuD4DPTCF+hnMJbn0bWtsuRBmOiBucz -rD6ogRLQy7rQkgu2npaqBA+K ------END CERTIFICATE----- - -# Issuer: CN=VeriSign Universal Root Certification Authority O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2008 VeriSign, Inc. - For authorized use only -# Subject: CN=VeriSign Universal Root Certification Authority O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2008 VeriSign, Inc. - For authorized use only -# Label: "VeriSign Universal Root Certification Authority" -# Serial: 85209574734084581917763752644031726877 -# MD5 Fingerprint: 8e:ad:b5:01:aa:4d:81:e4:8c:1d:d1:e1:14:00:95:19 -# SHA1 Fingerprint: 36:79:ca:35:66:87:72:30:4d:30:a5:fb:87:3b:0f:a7:7b:b7:0d:54 -# SHA256 Fingerprint: 23:99:56:11:27:a5:71:25:de:8c:ef:ea:61:0d:df:2f:a0:78:b5:c8:06:7f:4e:82:82:90:bf:b8:60:e8:4b:3c ------BEGIN CERTIFICATE----- -MIIEuTCCA6GgAwIBAgIQQBrEZCGzEyEDDrvkEhrFHTANBgkqhkiG9w0BAQsFADCB -vTELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQL -ExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwOCBWZXJp -U2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MTgwNgYDVQQDEy9W -ZXJpU2lnbiBVbml2ZXJzYWwgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAe -Fw0wODA0MDIwMDAwMDBaFw0zNzEyMDEyMzU5NTlaMIG9MQswCQYDVQQGEwJVUzEX -MBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlTaWduIFRydXN0 -IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAyMDA4IFZlcmlTaWduLCBJbmMuIC0gRm9y -IGF1dGhvcml6ZWQgdXNlIG9ubHkxODA2BgNVBAMTL1ZlcmlTaWduIFVuaXZlcnNh -bCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEF -AAOCAQ8AMIIBCgKCAQEAx2E3XrEBNNti1xWb/1hajCMj1mCOkdeQmIN65lgZOIzF -9uVkhbSicfvtvbnazU0AtMgtc6XHaXGVHzk8skQHnOgO+k1KxCHfKWGPMiJhgsWH -H26MfF8WIFFE0XBPV+rjHOPMee5Y2A7Cs0WTwCznmhcrewA3ekEzeOEz4vMQGn+H -LL729fdC4uW/h2KJXwBL38Xd5HVEMkE6HnFuacsLdUYI0crSK5XQz/u5QGtkjFdN -/BMReYTtXlT2NJ8IAfMQJQYXStrxHXpma5hgZqTZ79IugvHw7wnqRMkVauIDbjPT -rJ9VAMf2CGqUuV/c4DPxhGD5WycRtPwW8rtWaoAljQIDAQABo4GyMIGvMA8GA1Ud -EwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMG0GCCsGAQUFBwEMBGEwX6FdoFsw -WTBXMFUWCWltYWdlL2dpZjAhMB8wBwYFKw4DAhoEFI/l0xqGrI2Oa8PPgGrUSBgs -exkuMCUWI2h0dHA6Ly9sb2dvLnZlcmlzaWduLmNvbS92c2xvZ28uZ2lmMB0GA1Ud -DgQWBBS2d/ppSEefUxLVwuoHMnYH0ZcHGTANBgkqhkiG9w0BAQsFAAOCAQEASvj4 -sAPmLGd75JR3Y8xuTPl9Dg3cyLk1uXBPY/ok+myDjEedO2Pzmvl2MpWRsXe8rJq+ 
-seQxIcaBlVZaDrHC1LGmWazxY8u4TB1ZkErvkBYoH1quEPuBUDgMbMzxPcP1Y+Oz -4yHJJDnp/RVmRvQbEdBNc6N9Rvk97ahfYtTxP/jgdFcrGJ2BtMQo2pSXpXDrrB2+ -BxHw1dvd5Yzw1TKwg+ZX4o+/vqGqvz0dtdQ46tewXDpPaj+PwGZsY6rp2aQW9IHR -lRQOfc2VNNnSj3BzgXucfr2YYdhFh5iQxeuGMMY1v/D/w1WIg0vvBZIGcfK4mJO3 -7M2CYfE45k+XmCpajQ== ------END CERTIFICATE----- - -# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G4 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2007 VeriSign, Inc. - For authorized use only -# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G4 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2007 VeriSign, Inc. - For authorized use only -# Label: "VeriSign Class 3 Public Primary Certification Authority - G4" -# Serial: 63143484348153506665311985501458640051 -# MD5 Fingerprint: 3a:52:e1:e7:fd:6f:3a:e3:6f:f3:6f:99:1b:f9:22:41 -# SHA1 Fingerprint: 22:d5:d8:df:8f:02:31:d1:8d:f7:9d:b7:cf:8a:2d:64:c9:3f:6c:3a -# SHA256 Fingerprint: 69:dd:d7:ea:90:bb:57:c9:3e:13:5d:c8:5e:a6:fc:d5:48:0b:60:32:39:bd:c4:54:fc:75:8b:2a:26:cf:7f:79 ------BEGIN CERTIFICATE----- -MIIDhDCCAwqgAwIBAgIQL4D+I4wOIg9IZxIokYesszAKBggqhkjOPQQDAzCByjEL -MAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZW -ZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNyBWZXJpU2ln -biwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJp -U2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9y -aXR5IC0gRzQwHhcNMDcxMTA1MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCByjELMAkG -A1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZWZXJp -U2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNyBWZXJpU2lnbiwg -SW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJpU2ln -biBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5 -IC0gRzQwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAASnVnp8Utpkmw4tXNherJI9/gHm -GUo9FANL+mAnINmDiWn6VMaaGF5VKmTeBvaNSjutEDxlPZCIBIngMGGzrl0Bp3ve -fLK+ymVhAIau2o970ImtTR1ZmkGxvEeA3J5iw/mjgbIwga8wDwYDVR0TAQH/BAUw -AwEB/zAOBgNVHQ8BAf8EBAMCAQYwbQYIKwYBBQUHAQwEYTBfoV2gWzBZMFcwVRYJ -aW1hZ2UvZ2lmMCEwHzAHBgUrDgMCGgQUj+XTGoasjY5rw8+AatRIGCx7GS4wJRYj -aHR0cDovL2xvZ28udmVyaXNpZ24uY29tL3ZzbG9nby5naWYwHQYDVR0OBBYEFLMW -kf3upm7ktS5Jj4d4gYDs5bG1MAoGCCqGSM49BAMDA2gAMGUCMGYhDBgmYFo4e1ZC -4Kf8NoRRkSAsdk1DPcQdhCPQrNZ8NQbOzWm9kA3bbEhCHQ6qQgIxAJw9SDkjOVga -FRJZap7v1VmyHVIsmXHNxynfGyphe3HR3vPA5Q06Sqotp9iGKt0uEA== ------END CERTIFICATE----- - -# Issuer: CN=NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny O=NetLock Kft. OU=Tan\xfas\xedtv\xe1nykiad\xf3k (Certification Services) -# Subject: CN=NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny O=NetLock Kft. 
OU=Tan\xfas\xedtv\xe1nykiad\xf3k (Certification Services) -# Label: "NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny" -# Serial: 80544274841616 -# MD5 Fingerprint: c5:a1:b7:ff:73:dd:d6:d7:34:32:18:df:fc:3c:ad:88 -# SHA1 Fingerprint: 06:08:3f:59:3f:15:a1:04:a0:69:a4:6b:a9:03:d0:06:b7:97:09:91 -# SHA256 Fingerprint: 6c:61:da:c3:a2:de:f0:31:50:6b:e0:36:d2:a6:fe:40:19:94:fb:d1:3d:f9:c8:d4:66:59:92:74:c4:46:ec:98 ------BEGIN CERTIFICATE----- -MIIEFTCCAv2gAwIBAgIGSUEs5AAQMA0GCSqGSIb3DQEBCwUAMIGnMQswCQYDVQQG -EwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFTATBgNVBAoMDE5ldExvY2sgS2Z0LjE3 -MDUGA1UECwwuVGFuw7pzw610dsOhbnlraWFkw7NrIChDZXJ0aWZpY2F0aW9uIFNl -cnZpY2VzKTE1MDMGA1UEAwwsTmV0TG9jayBBcmFueSAoQ2xhc3MgR29sZCkgRsWR -dGFuw7pzw610dsOhbnkwHhcNMDgxMjExMTUwODIxWhcNMjgxMjA2MTUwODIxWjCB -pzELMAkGA1UEBhMCSFUxETAPBgNVBAcMCEJ1ZGFwZXN0MRUwEwYDVQQKDAxOZXRM -b2NrIEtmdC4xNzA1BgNVBAsMLlRhbsO6c8OtdHbDoW55a2lhZMOzayAoQ2VydGlm -aWNhdGlvbiBTZXJ2aWNlcykxNTAzBgNVBAMMLE5ldExvY2sgQXJhbnkgKENsYXNz -IEdvbGQpIEbFkXRhbsO6c8OtdHbDoW55MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A -MIIBCgKCAQEAxCRec75LbRTDofTjl5Bu0jBFHjzuZ9lk4BqKf8owyoPjIMHj9DrT -lF8afFttvzBPhCf2nx9JvMaZCpDyD/V/Q4Q3Y1GLeqVw/HpYzY6b7cNGbIRwXdrz -AZAj/E4wqX7hJ2Pn7WQ8oLjJM2P+FpD/sLj916jAwJRDC7bVWaaeVtAkH3B5r9s5 -VA1lddkVQZQBr17s9o3x/61k/iCa11zr/qYfCGSji3ZVrR47KGAuhyXoqq8fxmRG -ILdwfzzeSNuWU7c5d+Qa4scWhHaXWy+7GRWF+GmF9ZmnqfI0p6m2pgP8b4Y9VHx2 -BJtr+UBdADTHLpl1neWIA6pN+APSQnbAGwIDAKiLo0UwQzASBgNVHRMBAf8ECDAG -AQH/AgEEMA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUzPpnk/C2uNClwB7zU/2M -U9+D15YwDQYJKoZIhvcNAQELBQADggEBAKt/7hwWqZw8UQCgwBEIBaeZ5m8BiFRh -bvG5GK1Krf6BQCOUL/t1fC8oS2IkgYIL9WHxHG64YTjrgfpioTtaYtOUZcTh5m2C -+C8lcLIhJsFyUR+MLMOEkMNaj7rP9KdlpeuY0fsFskZ1FSNqb4VjMIDw1Z4fKRzC -bLBQWV2QWzuoDTDPv31/zvGdg73JRm4gpvlhUbohL3u+pRVjodSVh/GeufOJ8z2F -uLjbvrW5KfnaNwUASZQDhETnv0Mxz3WLJdH0pmT1kvarBes96aULNmLazAZfNou2 -XjG4Kvte9nHfRCaexOYNkbQudZWAUWpLMKawYqGT8ZvYzsRjdT9ZR7E= ------END CERTIFICATE----- - -# Issuer: CN=Staat der Nederlanden Root CA - G2 O=Staat der Nederlanden -# Subject: CN=Staat der Nederlanden Root CA - G2 O=Staat der Nederlanden -# Label: "Staat der Nederlanden Root CA - G2" -# Serial: 10000012 -# MD5 Fingerprint: 7c:a5:0f:f8:5b:9a:7d:6d:30:ae:54:5a:e3:42:a2:8a -# SHA1 Fingerprint: 59:af:82:79:91:86:c7:b4:75:07:cb:cf:03:57:46:eb:04:dd:b7:16 -# SHA256 Fingerprint: 66:8c:83:94:7d:a6:3b:72:4b:ec:e1:74:3c:31:a0:e6:ae:d0:db:8e:c5:b3:1b:e3:77:bb:78:4f:91:b6:71:6f ------BEGIN CERTIFICATE----- -MIIFyjCCA7KgAwIBAgIEAJiWjDANBgkqhkiG9w0BAQsFADBaMQswCQYDVQQGEwJO -TDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSswKQYDVQQDDCJTdGFh -dCBkZXIgTmVkZXJsYW5kZW4gUm9vdCBDQSAtIEcyMB4XDTA4MDMyNjExMTgxN1oX -DTIwMDMyNTExMDMxMFowWjELMAkGA1UEBhMCTkwxHjAcBgNVBAoMFVN0YWF0IGRl -ciBOZWRlcmxhbmRlbjErMCkGA1UEAwwiU3RhYXQgZGVyIE5lZGVybGFuZGVuIFJv -b3QgQ0EgLSBHMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMVZ5291 -qj5LnLW4rJ4L5PnZyqtdj7U5EILXr1HgO+EASGrP2uEGQxGZqhQlEq0i6ABtQ8Sp -uOUfiUtnvWFI7/3S4GCI5bkYYCjDdyutsDeqN95kWSpGV+RLufg3fNU254DBtvPU -Z5uW6M7XxgpT0GtJlvOjCwV3SPcl5XCsMBQgJeN/dVrlSPhOewMHBPqCYYdu8DvE -pMfQ9XQ+pV0aCPKbJdL2rAQmPlU6Yiile7Iwr/g3wtG61jj99O9JMDeZJiFIhQGp -5Rbn3JBV3w/oOM2ZNyFPXfUib2rFEhZgF1XyZWampzCROME4HYYEhLoaJXhena/M -UGDWE4dS7WMfbWV9whUYdMrhfmQpjHLYFhN9C0lK8SgbIHRrxT3dsKpICT0ugpTN -GmXZK4iambwYfp/ufWZ8Pr2UuIHOzZgweMFvZ9C+X+Bo7d7iscksWXiSqt8rYGPy -5V6548r6f1CGPqI0GAwJaCgRHOThuVw+R7oyPxjMW4T182t0xHJ04eOLoEq9jWYv -6q012iDTiIJh8BIitrzQ1aTsr1SIJSQ8p22xcik/Plemf1WvbibG/ufMQFxRRIEK -eN5KzlW/HdXZt1bv8Hb/C3m1r737qWmRRpdogBQ2HbN/uymYNqUg+oJgYjOk7Na6 -B6duxc8UpufWkjTYgfX8HV2qXB72o007uPc5AgMBAAGjgZcwgZQwDwYDVR0TAQH/ 
-BAUwAwEB/zBSBgNVHSAESzBJMEcGBFUdIAAwPzA9BggrBgEFBQcCARYxaHR0cDov -L3d3dy5wa2lvdmVyaGVpZC5ubC9wb2xpY2llcy9yb290LXBvbGljeS1HMjAOBgNV -HQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJFoMocVHYnitfGsNig0jQt8YojrMA0GCSqG -SIb3DQEBCwUAA4ICAQCoQUpnKpKBglBu4dfYszk78wIVCVBR7y29JHuIhjv5tLyS -CZa59sCrI2AGeYwRTlHSeYAz+51IvuxBQ4EffkdAHOV6CMqqi3WtFMTC6GY8ggen -5ieCWxjmD27ZUD6KQhgpxrRW/FYQoAUXvQwjf/ST7ZwaUb7dRUG/kSS0H4zpX897 -IZmflZ85OkYcbPnNe5yQzSipx6lVu6xiNGI1E0sUOlWDuYaNkqbG9AclVMwWVxJK -gnjIFNkXgiYtXSAfea7+1HAWFpWD2DU5/1JddRwWxRNVz0fMdWVSSt7wsKfkCpYL -+63C4iWEst3kvX5ZbJvw8NjnyvLplzh+ib7M+zkXYT9y2zqR2GUBGR2tUKRXCnxL -vJxxcypFURmFzI79R6d0lR2o0a9OF7FpJsKqeFdbxU2n5Z4FF5TKsl+gSRiNNOkm -bEgeqmiSBeGCc1qb3AdbCG19ndeNIdn8FCCqwkXfP+cAslHkwvgFuXkajDTznlvk -N1trSt8sV4pAWja63XVECDdCcAz+3F4hoKOKwJCcaNpQ5kUQR3i2TtJlycM33+FC -Y7BXN0Ute4qcvwXqZVUz9zkQxSgqIXobisQk+T8VyJoVIPVVYpbtbZNQvOSqeK3Z -ywplh6ZmwcSBo3c6WB4L7oOLnR7SUqTMHW+wmG2UMbX4cQrcufx9MmDm66+KAQ== ------END CERTIFICATE----- - -# Issuer: CN=Hongkong Post Root CA 1 O=Hongkong Post -# Subject: CN=Hongkong Post Root CA 1 O=Hongkong Post -# Label: "Hongkong Post Root CA 1" -# Serial: 1000 -# MD5 Fingerprint: a8:0d:6f:39:78:b9:43:6d:77:42:6d:98:5a:cc:23:ca -# SHA1 Fingerprint: d6:da:a8:20:8d:09:d2:15:4d:24:b5:2f:cb:34:6e:b2:58:b2:8a:58 -# SHA256 Fingerprint: f9:e6:7d:33:6c:51:00:2a:c0:54:c6:32:02:2d:66:dd:a2:e7:e3:ff:f1:0a:d0:61:ed:31:d8:bb:b4:10:cf:b2 ------BEGIN CERTIFICATE----- -MIIDMDCCAhigAwIBAgICA+gwDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UEBhMCSEsx -FjAUBgNVBAoTDUhvbmdrb25nIFBvc3QxIDAeBgNVBAMTF0hvbmdrb25nIFBvc3Qg -Um9vdCBDQSAxMB4XDTAzMDUxNTA1MTMxNFoXDTIzMDUxNTA0NTIyOVowRzELMAkG -A1UEBhMCSEsxFjAUBgNVBAoTDUhvbmdrb25nIFBvc3QxIDAeBgNVBAMTF0hvbmdr -b25nIFBvc3QgUm9vdCBDQSAxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC -AQEArP84tulmAknjorThkPlAj3n54r15/gK97iSSHSL22oVyaf7XPwnU3ZG1ApzQ -jVrhVcNQhrkpJsLj2aDxaQMoIIBFIi1WpztUlVYiWR8o3x8gPW2iNr4joLFutbEn -PzlTCeqrauh0ssJlXI6/fMN4hM2eFvz1Lk8gKgifd/PFHsSaUmYeSF7jEAaPIpjh -ZY4bXSNmO7ilMlHIhqqhqZ5/dpTCpmy3QfDVyAY45tQM4vM7TG1QjMSDJ8EThFk9 -nnV0ttgCXjqQesBCNnLsak3c78QA3xMYV18meMjWCnl3v/evt3a5pQuEF10Q6m/h -q5URX208o1xNg1vysxmKgIsLhwIDAQABoyYwJDASBgNVHRMBAf8ECDAGAQH/AgED -MA4GA1UdDwEB/wQEAwIBxjANBgkqhkiG9w0BAQUFAAOCAQEADkbVPK7ih9legYsC -mEEIjEy82tvuJxuC52pF7BaLT4Wg87JwvVqWuspube5Gi27nKi6Wsxkz67SfqLI3 -7piol7Yutmcn1KZJ/RyTZXaeQi/cImyaT/JaFTmxcdcrUehtHJjA2Sr0oYJ71clB -oiMBdDhViw+5LmeiIAQ32pwL0xch4I+XeTRvhEgCIDMb5jREn5Fw9IBehEPCKdJs -EhTkYY2sEJCehFC78JZvRZ+K88psT/oROhUVRsPNH4NbLUES7VBnQRM9IauUiqpO -fMGx+6fWtScvl6tu4B3i0RwsH0Ti/L6RoZz71ilTc4afU9hDDl3WY4JxHYB0yvbi -AmvZWg== ------END CERTIFICATE----- - -# Issuer: CN=SecureSign RootCA11 O=Japan Certification Services, Inc. -# Subject: CN=SecureSign RootCA11 O=Japan Certification Services, Inc. 
-# Label: "SecureSign RootCA11" -# Serial: 1 -# MD5 Fingerprint: b7:52:74:e2:92:b4:80:93:f2:75:e4:cc:d7:f2:ea:26 -# SHA1 Fingerprint: 3b:c4:9f:48:f8:f3:73:a0:9c:1e:bd:f8:5b:b1:c3:65:c7:d8:11:b3 -# SHA256 Fingerprint: bf:0f:ee:fb:9e:3a:58:1a:d5:f9:e9:db:75:89:98:57:43:d2:61:08:5c:4d:31:4f:6f:5d:72:59:aa:42:16:12 ------BEGIN CERTIFICATE----- -MIIDbTCCAlWgAwIBAgIBATANBgkqhkiG9w0BAQUFADBYMQswCQYDVQQGEwJKUDEr -MCkGA1UEChMiSmFwYW4gQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcywgSW5jLjEcMBoG -A1UEAxMTU2VjdXJlU2lnbiBSb290Q0ExMTAeFw0wOTA0MDgwNDU2NDdaFw0yOTA0 -MDgwNDU2NDdaMFgxCzAJBgNVBAYTAkpQMSswKQYDVQQKEyJKYXBhbiBDZXJ0aWZp -Y2F0aW9uIFNlcnZpY2VzLCBJbmMuMRwwGgYDVQQDExNTZWN1cmVTaWduIFJvb3RD -QTExMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA/XeqpRyQBTvLTJsz -i1oURaTnkBbR31fSIRCkF/3frNYfp+TbfPfs37gD2pRY/V1yfIw/XwFndBWW4wI8 -h9uuywGOwvNmxoVF9ALGOrVisq/6nL+k5tSAMJjzDbaTj6nU2DbysPyKyiyhFTOV -MdrAG/LuYpmGYz+/3ZMqg6h2uRMft85OQoWPIucuGvKVCbIFtUROd6EgvanyTgp9 -UK31BQ1FT0Zx/Sg+U/sE2C3XZR1KG/rPO7AxmjVuyIsG0wCR8pQIZUyxNAYAeoni -8McDWc/V1uinMrPmmECGxc0nEovMe863ETxiYAcjPitAbpSACW22s293bzUIUPsC -h8U+iQIDAQABo0IwQDAdBgNVHQ4EFgQUW/hNT7KlhtQ60vFjmqC+CfZXt94wDgYD -VR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEB -AKChOBZmLqdWHyGcBvod7bkixTgm2E5P7KN/ed5GIaGHd48HCJqypMWvDzKYC3xm -KbabfSVSSUOrTC4rbnpwrxYO4wJs+0LmGJ1F2FXI6Dvd5+H0LgscNFxsWEr7jIhQ -X5Ucv+2rIrVls4W6ng+4reV6G4pQOh29Dbx7VFALuUKvVaAYga1lme++5Jy/xIWr -QbJUb9wlze144o4MjQlJ3WN7WmmWAiGovVJZ6X01y8hSyn+B/tlr0/cR7SXf+Of5 -pPpyl4RTDaXQMhhRdlkUbA/r7F+AjHVDg8OFmP9Mni0N5HeDk061lgeLKBObjBmN -QSdJQO7e5iNEOdyhIta6A/I= ------END CERTIFICATE----- - -# Issuer: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd. -# Subject: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd. -# Label: "Microsec e-Szigno Root CA 2009" -# Serial: 14014712776195784473 -# MD5 Fingerprint: f8:49:f4:03:bc:44:2d:83:be:48:69:7d:29:64:fc:b1 -# SHA1 Fingerprint: 89:df:74:fe:5c:f4:0f:4a:80:f9:e3:37:7d:54:da:91:e1:01:31:8e -# SHA256 Fingerprint: 3c:5f:81:fe:a5:fa:b8:2c:64:bf:a2:ea:ec:af:cd:e8:e0:77:fc:86:20:a7:ca:e5:37:16:3d:f3:6e:db:f3:78 ------BEGIN CERTIFICATE----- -MIIECjCCAvKgAwIBAgIJAMJ+QwRORz8ZMA0GCSqGSIb3DQEBCwUAMIGCMQswCQYD -VQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFjAUBgNVBAoMDU1pY3Jvc2VjIEx0 -ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3ppZ25vIFJvb3QgQ0EgMjAwOTEfMB0G -CSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5odTAeFw0wOTA2MTYxMTMwMThaFw0y -OTEyMzAxMTMwMThaMIGCMQswCQYDVQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3Qx -FjAUBgNVBAoMDU1pY3Jvc2VjIEx0ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3pp -Z25vIFJvb3QgQ0EgMjAwOTEfMB0GCSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5o -dTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAOn4j/NjrdqG2KfgQvvP -kd6mJviZpWNwrZuuyjNAfW2WbqEORO7hE52UQlKavXWFdCyoDh2Tthi3jCyoz/tc -cbna7P7ofo/kLx2yqHWH2Leh5TvPmUpG0IMZfcChEhyVbUr02MelTTMuhTlAdX4U -fIASmFDHQWe4oIBhVKZsTh/gnQ4H6cm6M+f+wFUoLAKApxn1ntxVUwOXewdI/5n7 -N4okxFnMUBBjjqqpGrCEGob5X7uxUG6k0QrM1XF+H6cbfPVTbiJfyyvm1HxdrtbC -xkzlBQHZ7Vf8wSN5/PrIJIOV87VqUQHQd9bpEqH5GoP7ghu5sJf0dgYzQ0mg/wu1 -+rUCAwEAAaOBgDB+MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0G -A1UdDgQWBBTLD8bfQkPMPcu1SCOhGnqmKrs0aDAfBgNVHSMEGDAWgBTLD8bfQkPM -Pcu1SCOhGnqmKrs0aDAbBgNVHREEFDASgRBpbmZvQGUtc3ppZ25vLmh1MA0GCSqG -SIb3DQEBCwUAA4IBAQDJ0Q5eLtXMs3w+y/w9/w0olZMEyL/azXm4Q5DwpL7v8u8h -mLzU1F0G9u5C7DBsoKqpyvGvivo/C3NqPuouQH4frlRheesuCDfXI/OMn74dseGk -ddug4lQUsbocKaQY9hK6ohQU4zE1yED/t+AFdlfBHFny+L/k7SViXITwfn4fs775 -tyERzAMBVnCnEJIeGzSBHq2cGsMEPO0CYdYeBvNfOofyK/FFh+U9rNHHV4S9a67c -2Pm2G2JwCz02yULyMtd6YebS2z3PyKnJm9zbWETXbzivf3jTo60adbocwTZ8jx5t -HMN1Rq41Bab2XD0h7lbwyYIiLXpUq3DDfSJlgnCW ------END CERTIFICATE----- - -# Issuer: 
CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3 -# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3 -# Label: "GlobalSign Root CA - R3" -# Serial: 4835703278459759426209954 -# MD5 Fingerprint: c5:df:b8:49:ca:05:13:55:ee:2d:ba:1a:c3:3e:b0:28 -# SHA1 Fingerprint: d6:9b:56:11:48:f0:1c:77:c5:45:78:c1:09:26:df:5b:85:69:76:ad -# SHA256 Fingerprint: cb:b5:22:d7:b7:f1:27:ad:6a:01:13:86:5b:df:1c:d4:10:2e:7d:07:59:af:63:5a:7c:f4:72:0d:c9:63:c5:3b ------BEGIN CERTIFICATE----- -MIIDXzCCAkegAwIBAgILBAAAAAABIVhTCKIwDQYJKoZIhvcNAQELBQAwTDEgMB4G -A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjMxEzARBgNVBAoTCkdsb2JhbFNp -Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDkwMzE4MTAwMDAwWhcNMjkwMzE4 -MTAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMzETMBEG -A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI -hvcNAQEBBQADggEPADCCAQoCggEBAMwldpB5BngiFvXAg7aEyiie/QV2EcWtiHL8 -RgJDx7KKnQRfJMsuS+FggkbhUqsMgUdwbN1k0ev1LKMPgj0MK66X17YUhhB5uzsT -gHeMCOFJ0mpiLx9e+pZo34knlTifBtc+ycsmWQ1z3rDI6SYOgxXG71uL0gRgykmm -KPZpO/bLyCiR5Z2KYVc3rHQU3HTgOu5yLy6c+9C7v/U9AOEGM+iCK65TpjoWc4zd -QQ4gOsC0p6Hpsk+QLjJg6VfLuQSSaGjlOCZgdbKfd/+RFO+uIEn8rUAVSNECMWEZ -XriX7613t2Saer9fwRPvm2L7DWzgVGkWqQPabumDk3F2xmmFghcCAwEAAaNCMEAw -DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI/wS3+o -LkUkrk1Q+mOai97i3Ru8MA0GCSqGSIb3DQEBCwUAA4IBAQBLQNvAUKr+yAzv95ZU -RUm7lgAJQayzE4aGKAczymvmdLm6AC2upArT9fHxD4q/c2dKg8dEe3jgr25sbwMp -jjM5RcOO5LlXbKr8EpbsU8Yt5CRsuZRj+9xTaGdWPoO4zzUhw8lo/s7awlOqzJCK -6fBdRoyV3XpYKBovHd7NADdBj+1EbddTKJd+82cEHhXXipa0095MJ6RMG3NzdvQX -mcIfeg7jLQitChws/zyrVQ4PkX4268NXSb7hLi18YIvDQVETI53O9zJrlAGomecs -Mx86OyXShkDOOyyGeMlhLxS67ttVb9+E7gUJTb0o2HLO02JQZR7rkpeDMdmztcpH -WD9f ------END CERTIFICATE----- - -# Issuer: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068 -# Subject: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068 -# Label: "Autoridad de Certificacion Firmaprofesional CIF A62634068" -# Serial: 6047274297262753887 -# MD5 Fingerprint: 73:3a:74:7a:ec:bb:a3:96:a6:c2:e4:e2:c8:9b:c0:c3 -# SHA1 Fingerprint: ae:c5:fb:3f:c8:e1:bf:c4:e5:4f:03:07:5a:9a:e8:00:b7:f7:b6:fa -# SHA256 Fingerprint: 04:04:80:28:bf:1f:28:64:d4:8f:9a:d4:d8:32:94:36:6a:82:88:56:55:3f:3b:14:30:3f:90:14:7f:5d:40:ef ------BEGIN CERTIFICATE----- -MIIGFDCCA/ygAwIBAgIIU+w77vuySF8wDQYJKoZIhvcNAQEFBQAwUTELMAkGA1UE -BhMCRVMxQjBABgNVBAMMOUF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uIEZpcm1h -cHJvZmVzaW9uYWwgQ0lGIEE2MjYzNDA2ODAeFw0wOTA1MjAwODM4MTVaFw0zMDEy -MzEwODM4MTVaMFExCzAJBgNVBAYTAkVTMUIwQAYDVQQDDDlBdXRvcmlkYWQgZGUg -Q2VydGlmaWNhY2lvbiBGaXJtYXByb2Zlc2lvbmFsIENJRiBBNjI2MzQwNjgwggIi -MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKlmuO6vj78aI14H9M2uDDUtd9 -thDIAl6zQyrET2qyyhxdKJp4ERppWVevtSBC5IsP5t9bpgOSL/UR5GLXMnE42QQM -cas9UX4PB99jBVzpv5RvwSmCwLTaUbDBPLutN0pcyvFLNg4kq7/DhHf9qFD0sefG -L9ItWY16Ck6WaVICqjaY7Pz6FIMMNx/Jkjd/14Et5cS54D40/mf0PmbR0/RAz15i -NA9wBj4gGFrO93IbJWyTdBSTo3OxDqqHECNZXyAFGUftaI6SEspd/NYrspI8IM/h -X68gvqB2f3bl7BqGYTM+53u0P6APjqK5am+5hyZvQWyIplD9amML9ZMWGxmPsu2b -m8mQ9QEM3xk9Dz44I8kvjwzRAv4bVdZO0I08r0+k8/6vKtMFnXkIoctXMbScyJCy -Z/QYFpM6/EfY0XiWMR+6KwxfXZmtY4laJCB22N/9q06mIqqdXuYnin1oKaPnirja -EbsXLZmdEyRG98Xi2J+Of8ePdG1asuhy9azuJBCtLxTa/y2aRnFHvkLfuwHb9H/T -KI8xWVvTyQKmtFLKbpf7Q8UIJm+K9Lv9nyiqDdVF8xM6HdjAeI9BZzwelGSuewvF -6NkBiDkal4ZkQdU7hwxu+g/GvUgUvzlN1J5Bto+WHWOWk9mVBngxaJ43BjuAiUVh -OSPHG0SjFeUc+JIwuwIDAQABo4HvMIHsMBIGA1UdEwEB/wQIMAYBAf8CAQEwDgYD -VR0PAQH/BAQDAgEGMB0GA1UdDgQWBBRlzeurNR4APn7VdMActHNHDhpkLzCBpgYD -VR0gBIGeMIGbMIGYBgRVHSAAMIGPMC8GCCsGAQUFBwIBFiNodHRwOi8vd3d3LmZp 
-cm1hcHJvZmVzaW9uYWwuY29tL2NwczBcBggrBgEFBQcCAjBQHk4AUABhAHMAZQBv -ACAAZABlACAAbABhACAAQgBvAG4AYQBuAG8AdgBhACAANAA3ACAAQgBhAHIAYwBl -AGwAbwBuAGEAIAAwADgAMAAxADcwDQYJKoZIhvcNAQEFBQADggIBABd9oPm03cXF -661LJLWhAqvdpYhKsg9VSytXjDvlMd3+xDLx51tkljYyGOylMnfX40S2wBEqgLk9 -am58m9Ot/MPWo+ZkKXzR4Tgegiv/J2Wv+xYVxC5xhOW1//qkR71kMrv2JYSiJ0L1 -ILDCExARzRAVukKQKtJE4ZYm6zFIEv0q2skGz3QeqUvVhyj5eTSSPi5E6PaPT481 -PyWzOdxjKpBrIF/EUhJOlywqrJ2X3kjyo2bbwtKDlaZmp54lD+kLM5FlClrD2VQS -3a/DTg4fJl4N3LON7NWBcN7STyQF82xO9UxJZo3R/9ILJUFI/lGExkKvgATP0H5k -SeTy36LssUzAKh3ntLFlosS88Zj0qnAHY7S42jtM+kAiMFsRpvAFDsYCA0irhpuF -3dvd6qJ2gHN99ZwExEWN57kci57q13XRcrHedUTnQn3iV2t93Jm8PYMo6oCTjcVM -ZcFwgbg4/EMxsvYDNEeyrPsiBsse3RdHHF9mudMaotoRsaS8I8nkvof/uZS2+F0g -StRf571oe2XyFR7SOqkt6dhrJKyXWERHrVkY8SFlcN7ONGCoQPHzPKTDKCOM/icz -Q0CgFzzr6juwcqajuUpLXhZI9LK8yIySxZ2frHI2vDSANGupi5LAuBft7HZT9SQB -jLMi6Et8Vcad+qMUu2WFbm5PEn4KPJ2V ------END CERTIFICATE----- - -# Issuer: CN=Izenpe.com O=IZENPE S.A. -# Subject: CN=Izenpe.com O=IZENPE S.A. -# Label: "Izenpe.com" -# Serial: 917563065490389241595536686991402621 -# MD5 Fingerprint: a6:b0:cd:85:80:da:5c:50:34:a3:39:90:2f:55:67:73 -# SHA1 Fingerprint: 2f:78:3d:25:52:18:a7:4a:65:39:71:b5:2c:a2:9c:45:15:6f:e9:19 -# SHA256 Fingerprint: 25:30:cc:8e:98:32:15:02:ba:d9:6f:9b:1f:ba:1b:09:9e:2d:29:9e:0f:45:48:bb:91:4f:36:3b:c0:d4:53:1f ------BEGIN CERTIFICATE----- -MIIF8TCCA9mgAwIBAgIQALC3WhZIX7/hy/WL1xnmfTANBgkqhkiG9w0BAQsFADA4 -MQswCQYDVQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6 -ZW5wZS5jb20wHhcNMDcxMjEzMTMwODI4WhcNMzcxMjEzMDgyNzI1WjA4MQswCQYD -VQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6ZW5wZS5j -b20wggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDJ03rKDx6sp4boFmVq -scIbRTJxldn+EFvMr+eleQGPicPK8lVx93e+d5TzcqQsRNiekpsUOqHnJJAKClaO -xdgmlOHZSOEtPtoKct2jmRXagaKH9HtuJneJWK3W6wyyQXpzbm3benhB6QiIEn6H -LmYRY2xU+zydcsC8Lv/Ct90NduM61/e0aL6i9eOBbsFGb12N4E3GVFWJGjMxCrFX -uaOKmMPsOzTFlUFpfnXCPCDFYbpRR6AgkJOhkEvzTnyFRVSa0QUmQbC1TR0zvsQD -yCV8wXDbO/QJLVQnSKwv4cSsPsjLkkxTOTcj7NMB+eAJRE1NZMDhDVqHIrytG6P+ -JrUV86f8hBnp7KGItERphIPzidF0BqnMC9bC3ieFUCbKF7jJeodWLBoBHmy+E60Q -rLUk9TiRodZL2vG70t5HtfG8gfZZa88ZU+mNFctKy6lvROUbQc/hhqfK0GqfvEyN -BjNaooXlkDWgYlwWTvDjovoDGrQscbNYLN57C9saD+veIR8GdwYDsMnvmfzAuU8L -hij+0rnq49qlw0dpEuDb8PYZi+17cNcC1u2HGCgsBCRMd+RIihrGO5rUD8r6ddIB -QFqNeb+Lz0vPqhbBleStTIo+F5HUsWLlguWABKQDfo2/2n+iD5dPDNMN+9fR5XJ+ -HMh3/1uaD7euBUbl8agW7EekFwIDAQABo4H2MIHzMIGwBgNVHREEgagwgaWBD2lu -Zm9AaXplbnBlLmNvbaSBkTCBjjFHMEUGA1UECgw+SVpFTlBFIFMuQS4gLSBDSUYg -QTAxMzM3MjYwLVJNZXJjLlZpdG9yaWEtR2FzdGVpeiBUMTA1NSBGNjIgUzgxQzBB -BgNVBAkMOkF2ZGEgZGVsIE1lZGl0ZXJyYW5lbyBFdG9yYmlkZWEgMTQgLSAwMTAx -MCBWaXRvcmlhLUdhc3RlaXowDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC -AQYwHQYDVR0OBBYEFB0cZQ6o8iV7tJHP5LGx5r1VdGwFMA0GCSqGSIb3DQEBCwUA -A4ICAQB4pgwWSp9MiDrAyw6lFn2fuUhfGI8NYjb2zRlrrKvV9pF9rnHzP7MOeIWb -laQnIUdCSnxIOvVFfLMMjlF4rJUT3sb9fbgakEyrkgPH7UIBzg/YsfqikuFgba56 -awmqxinuaElnMIAkejEWOVt+8Rwu3WwJrfIxwYJOubv5vr8qhT/AQKM6WfxZSzwo -JNu0FXWuDYi6LnPAvViH5ULy617uHjAimcs30cQhbIHsvm0m5hzkQiCeR7Csg1lw -LDXWrzY0tM07+DKo7+N4ifuNRSzanLh+QBxh5z6ikixL8s36mLYp//Pye6kfLqCT -VyvehQP5aTfLnnhqBbTFMXiJ7HqnheG5ezzevh55hM6fcA5ZwjUukCox2eRFekGk -LhObNA5me0mrZJfQRsN5nXJQY6aYWwa9SG3YOYNw6DXwBdGqvOPbyALqfP2C2sJb -UjWumDqtujWTI6cfSN01RpiyEGjkpTHCClguGYEQyVB1/OpaFs4R1+7vUIgtYf8/ -QnMFlEPVjjxOAToZpR9GTnfQXeWBIiGH/pR9hNiTrdZoQ0iy2+tzJOeRf1SktoA+ -naM8THLCV8Sg1Mw4J87VBp6iSNnpn86CcDaTmjvfliHjWbcM2pE38P1ZWrOZyGls -QyYBNWNgVYkDOnXYukrZVP/u3oDYLdE41V4tC5h9Pmzb/CaIxw== ------END CERTIFICATE----- - -# Issuer: CN=Chambers of Commerce Root - 2008 O=AC 
Camerfirma S.A. -# Subject: CN=Chambers of Commerce Root - 2008 O=AC Camerfirma S.A. -# Label: "Chambers of Commerce Root - 2008" -# Serial: 11806822484801597146 -# MD5 Fingerprint: 5e:80:9e:84:5a:0e:65:0b:17:02:f3:55:18:2a:3e:d7 -# SHA1 Fingerprint: 78:6a:74:ac:76:ab:14:7f:9c:6a:30:50:ba:9e:a8:7e:fe:9a:ce:3c -# SHA256 Fingerprint: 06:3e:4a:fa:c4:91:df:d3:32:f3:08:9b:85:42:e9:46:17:d8:93:d7:fe:94:4e:10:a7:93:7e:e2:9d:96:93:c0 ------BEGIN CERTIFICATE----- -MIIHTzCCBTegAwIBAgIJAKPaQn6ksa7aMA0GCSqGSIb3DQEBBQUAMIGuMQswCQYD -VQQGEwJFVTFDMEEGA1UEBxM6TWFkcmlkIChzZWUgY3VycmVudCBhZGRyZXNzIGF0 -IHd3dy5jYW1lcmZpcm1hLmNvbS9hZGRyZXNzKTESMBAGA1UEBRMJQTgyNzQzMjg3 -MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMuQS4xKTAnBgNVBAMTIENoYW1iZXJz -IG9mIENvbW1lcmNlIFJvb3QgLSAyMDA4MB4XDTA4MDgwMTEyMjk1MFoXDTM4MDcz -MTEyMjk1MFowga4xCzAJBgNVBAYTAkVVMUMwQQYDVQQHEzpNYWRyaWQgKHNlZSBj -dXJyZW50IGFkZHJlc3MgYXQgd3d3LmNhbWVyZmlybWEuY29tL2FkZHJlc3MpMRIw -EAYDVQQFEwlBODI3NDMyODcxGzAZBgNVBAoTEkFDIENhbWVyZmlybWEgUy5BLjEp -MCcGA1UEAxMgQ2hhbWJlcnMgb2YgQ29tbWVyY2UgUm9vdCAtIDIwMDgwggIiMA0G -CSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCvAMtwNyuAWko6bHiUfaN/Gh/2NdW9 -28sNRHI+JrKQUrpjOyhYb6WzbZSm891kDFX29ufyIiKAXuFixrYp4YFs8r/lfTJq -VKAyGVn+H4vXPWCGhSRv4xGzdz4gljUha7MI2XAuZPeEklPWDrCQiorjh40G072Q -DuKZoRuGDtqaCrsLYVAGUvGef3bsyw/QHg3PmTA9HMRFEFis1tPo1+XqxQEHd9ZR -5gN/ikilTWh1uem8nk4ZcfUyS5xtYBkL+8ydddy/Js2Pk3g5eXNeJQ7KXOt3EgfL -ZEFHcpOrUMPrCXZkNNI5t3YRCQ12RcSprj1qr7V9ZS+UWBDsXHyvfuK2GNnQm05a -Sd+pZgvMPMZ4fKecHePOjlO+Bd5gD2vlGts/4+EhySnB8esHnFIbAURRPHsl18Tl -UlRdJQfKFiC4reRB7noI/plvg6aRArBsNlVq5331lubKgdaX8ZSD6e2wsWsSaR6s -+12pxZjptFtYer49okQ6Y1nUCyXeG0+95QGezdIp1Z8XGQpvvwyQ0wlf2eOKNcx5 -Wk0ZN5K3xMGtr/R5JJqyAQuxr1yW84Ay+1w9mPGgP0revq+ULtlVmhduYJ1jbLhj -ya6BXBg14JC7vjxPNyK5fuvPnnchpj04gftI2jE9K+OJ9dC1vX7gUMQSibMjmhAx -hduub+84Mxh2EQIDAQABo4IBbDCCAWgwEgYDVR0TAQH/BAgwBgEB/wIBDDAdBgNV -HQ4EFgQU+SSsD7K1+HnA+mCIG8TZTQKeFxkwgeMGA1UdIwSB2zCB2IAU+SSsD7K1 -+HnA+mCIG8TZTQKeFxmhgbSkgbEwga4xCzAJBgNVBAYTAkVVMUMwQQYDVQQHEzpN -YWRyaWQgKHNlZSBjdXJyZW50IGFkZHJlc3MgYXQgd3d3LmNhbWVyZmlybWEuY29t -L2FkZHJlc3MpMRIwEAYDVQQFEwlBODI3NDMyODcxGzAZBgNVBAoTEkFDIENhbWVy -ZmlybWEgUy5BLjEpMCcGA1UEAxMgQ2hhbWJlcnMgb2YgQ29tbWVyY2UgUm9vdCAt -IDIwMDiCCQCj2kJ+pLGu2jAOBgNVHQ8BAf8EBAMCAQYwPQYDVR0gBDYwNDAyBgRV -HSAAMCowKAYIKwYBBQUHAgEWHGh0dHA6Ly9wb2xpY3kuY2FtZXJmaXJtYS5jb20w -DQYJKoZIhvcNAQEFBQADggIBAJASryI1wqM58C7e6bXpeHxIvj99RZJe6dqxGfwW -PJ+0W2aeaufDuV2I6A+tzyMP3iU6XsxPpcG1Lawk0lgH3qLPaYRgM+gQDROpI9CF -5Y57pp49chNyM/WqfcZjHwj0/gF/JM8rLFQJ3uIrbZLGOU8W6jx+ekbURWpGqOt1 -glanq6B8aBMz9p0w8G8nOSQjKpD9kCk18pPfNKXG9/jvjA9iSnyu0/VU+I22mlaH -FoI6M6taIgj3grrqLuBHmrS1RaMFO9ncLkVAO+rcf+g769HsJtg1pDDFOqxXnrN2 -pSB7+R5KBWIBpih1YJeSDW4+TTdDDZIVnBgizVGZoCkaPF+KMjNbMMeJL0eYD6MD -xvbxrN8y8NmBGuScvfaAFPDRLLmF9dijscilIeUcE5fuDr3fKanvNFNb0+RqE4QG -tjICxFKuItLcsiFCGtpA8CnJ7AoMXOLQusxI0zcKzBIKinmwPQN/aUv0NCB9szTq -jktk9T79syNnFQ0EuPAtwQlRPLJsFfClI9eDdOTlLsn+mCdCxqvGnrDQWzilm1De -fhiYtUU79nm06PcaewaD+9CL2rvHvRirCG88gGtAPxkZumWK5r7VXNM21+9AUiRg -OGcEMeyP84LG3rlV8zsxkVrctQgVrXYlCg17LofiDKYGvCYQbTed7N14jHyAxfDZ -d0jQ ------END CERTIFICATE----- - -# Issuer: CN=Global Chambersign Root - 2008 O=AC Camerfirma S.A. -# Subject: CN=Global Chambersign Root - 2008 O=AC Camerfirma S.A. 
-# Label: "Global Chambersign Root - 2008" -# Serial: 14541511773111788494 -# MD5 Fingerprint: 9e:80:ff:78:01:0c:2e:c1:36:bd:fe:96:90:6e:08:f3 -# SHA1 Fingerprint: 4a:bd:ee:ec:95:0d:35:9c:89:ae:c7:52:a1:2c:5b:29:f6:d6:aa:0c -# SHA256 Fingerprint: 13:63:35:43:93:34:a7:69:80:16:a0:d3:24:de:72:28:4e:07:9d:7b:52:20:bb:8f:bd:74:78:16:ee:be:ba:ca ------BEGIN CERTIFICATE----- -MIIHSTCCBTGgAwIBAgIJAMnN0+nVfSPOMA0GCSqGSIb3DQEBBQUAMIGsMQswCQYD -VQQGEwJFVTFDMEEGA1UEBxM6TWFkcmlkIChzZWUgY3VycmVudCBhZGRyZXNzIGF0 -IHd3dy5jYW1lcmZpcm1hLmNvbS9hZGRyZXNzKTESMBAGA1UEBRMJQTgyNzQzMjg3 -MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMuQS4xJzAlBgNVBAMTHkdsb2JhbCBD -aGFtYmVyc2lnbiBSb290IC0gMjAwODAeFw0wODA4MDExMjMxNDBaFw0zODA3MzEx -MjMxNDBaMIGsMQswCQYDVQQGEwJFVTFDMEEGA1UEBxM6TWFkcmlkIChzZWUgY3Vy -cmVudCBhZGRyZXNzIGF0IHd3dy5jYW1lcmZpcm1hLmNvbS9hZGRyZXNzKTESMBAG -A1UEBRMJQTgyNzQzMjg3MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMuQS4xJzAl -BgNVBAMTHkdsb2JhbCBDaGFtYmVyc2lnbiBSb290IC0gMjAwODCCAiIwDQYJKoZI -hvcNAQEBBQADggIPADCCAgoCggIBAMDfVtPkOpt2RbQT2//BthmLN0EYlVJH6xed -KYiONWwGMi5HYvNJBL99RDaxccy9Wglz1dmFRP+RVyXfXjaOcNFccUMd2drvXNL7 -G706tcuto8xEpw2uIRU/uXpbknXYpBI4iRmKt4DS4jJvVpyR1ogQC7N0ZJJ0YPP2 -zxhPYLIj0Mc7zmFLmY/CDNBAspjcDahOo7kKrmCgrUVSY7pmvWjg+b4aqIG7HkF4 -ddPB/gBVsIdU6CeQNR1MM62X/JcumIS/LMmjv9GYERTtY/jKmIhYF5ntRQOXfjyG -HoiMvvKRhI9lNNgATH23MRdaKXoKGCQwoze1eqkBfSbW+Q6OWfH9GzO1KTsXO0G2 -Id3UwD2ln58fQ1DJu7xsepeY7s2MH/ucUa6LcL0nn3HAa6x9kGbo1106DbDVwo3V -yJ2dwW3Q0L9R5OP4wzg2rtandeavhENdk5IMagfeOx2YItaswTXbo6Al/3K1dh3e -beksZixShNBFks4c5eUzHdwHU1SjqoI7mjcv3N2gZOnm3b2u/GSFHTynyQbehP9r -6GsaPMWis0L7iwk+XwhSx2LE1AVxv8Rk5Pihg+g+EpuoHtQ2TS9x9o0o9oOpE9Jh -wZG7SMA0j0GMS0zbaRL/UJScIINZc+18ofLx/d33SdNDWKBWY8o9PeU1VlnpDsog -zCtLkykPAgMBAAGjggFqMIIBZjASBgNVHRMBAf8ECDAGAQH/AgEMMB0GA1UdDgQW -BBS5CcqcHtvTbDprru1U8VuTBjUuXjCB4QYDVR0jBIHZMIHWgBS5CcqcHtvTbDpr -ru1U8VuTBjUuXqGBsqSBrzCBrDELMAkGA1UEBhMCRVUxQzBBBgNVBAcTOk1hZHJp -ZCAoc2VlIGN1cnJlbnQgYWRkcmVzcyBhdCB3d3cuY2FtZXJmaXJtYS5jb20vYWRk -cmVzcykxEjAQBgNVBAUTCUE4Mjc0MzI4NzEbMBkGA1UEChMSQUMgQ2FtZXJmaXJt -YSBTLkEuMScwJQYDVQQDEx5HbG9iYWwgQ2hhbWJlcnNpZ24gUm9vdCAtIDIwMDiC -CQDJzdPp1X0jzjAOBgNVHQ8BAf8EBAMCAQYwPQYDVR0gBDYwNDAyBgRVHSAAMCow -KAYIKwYBBQUHAgEWHGh0dHA6Ly9wb2xpY3kuY2FtZXJmaXJtYS5jb20wDQYJKoZI -hvcNAQEFBQADggIBAICIf3DekijZBZRG/5BXqfEv3xoNa/p8DhxJJHkn2EaqbylZ -UohwEurdPfWbU1Rv4WCiqAm57OtZfMY18dwY6fFn5a+6ReAJ3spED8IXDneRRXoz -X1+WLGiLwUePmJs9wOzL9dWCkoQ10b42OFZyMVtHLaoXpGNR6woBrX/sdZ7LoR/x -fxKxueRkf2fWIyr0uDldmOghp+G9PUIadJpwr2hsUF1Jz//7Dl3mLEfXgTpZALVz -a2Mg9jFFCDkO9HB+QHBaP9BrQql0PSgvAm11cpUJjUhjxsYjV5KTXjXBjfkK9yyd -Yhz2rXzdpjEetrHHfoUm+qRqtdpjMNHvkzeyZi99Bffnt0uYlDXA2TopwZ2yUDMd -SqlapskD7+3056huirRXhOukP9DuqqqHW2Pok+JrqNS4cnhrG+055F3Lm6qH1U9O -AP7Zap88MQ8oAgF9mOinsKJknnn4SPIVqczmyETrP3iZ8ntxPjzxmKfFGBI/5rso -M0LpRQp8bfKGeS/Fghl9CYl8slR2iK7ewfPM4W7bMdaTrpmg7yVqc5iJWzouE4ge -v8CSlDQb4ye3ix5vQv/n6TebUB0tovkC7stYWDpxvGjjqsGvHCgfotwjZT+B6q6Z -09gwzxMNTxXJhLynSC34MCN32EZLeW32jO06f2ARePTpm67VVMB0gNELQp/B ------END CERTIFICATE----- - -# Issuer: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc. -# Subject: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc. 
-# Label: "Go Daddy Root Certificate Authority - G2" -# Serial: 0 -# MD5 Fingerprint: 80:3a:bc:22:c1:e6:fb:8d:9b:3b:27:4a:32:1b:9a:01 -# SHA1 Fingerprint: 47:be:ab:c9:22:ea:e8:0e:78:78:34:62:a7:9f:45:c2:54:fd:e6:8b -# SHA256 Fingerprint: 45:14:0b:32:47:eb:9c:c8:c5:b4:f0:d7:b5:30:91:f7:32:92:08:9e:6e:5a:63:e2:74:9d:d3:ac:a9:19:8e:da ------BEGIN CERTIFICATE----- -MIIDxTCCAq2gAwIBAgIBADANBgkqhkiG9w0BAQsFADCBgzELMAkGA1UEBhMCVVMx -EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxGjAYBgNVBAoT -EUdvRGFkZHkuY29tLCBJbmMuMTEwLwYDVQQDEyhHbyBEYWRkeSBSb290IENlcnRp -ZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAwMFoXDTM3MTIzMTIz -NTk1OVowgYMxCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6b25hMRMwEQYDVQQH -EwpTY290dHNkYWxlMRowGAYDVQQKExFHb0RhZGR5LmNvbSwgSW5jLjExMC8GA1UE -AxMoR28gRGFkZHkgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIw -DQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAL9xYgjx+lk09xvJGKP3gElY6SKD -E6bFIEMBO4Tx5oVJnyfq9oQbTqC023CYxzIBsQU+B07u9PpPL1kwIuerGVZr4oAH -/PMWdYA5UXvl+TW2dE6pjYIT5LY/qQOD+qK+ihVqf94Lw7YZFAXK6sOoBJQ7Rnwy -DfMAZiLIjWltNowRGLfTshxgtDj6AozO091GB94KPutdfMh8+7ArU6SSYmlRJQVh -GkSBjCypQ5Yj36w6gZoOKcUcqeldHraenjAKOc7xiID7S13MMuyFYkMlNAJWJwGR -tDtwKj9useiciAF9n9T521NtYJ2/LOdYq7hfRvzOxBsDPAnrSTFcaUaz4EcCAwEA -AaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE -FDqahQcQZyi27/a9BUFuIMGU2g/eMA0GCSqGSIb3DQEBCwUAA4IBAQCZ21151fmX -WWcDYfF+OwYxdS2hII5PZYe096acvNjpL9DbWu7PdIxztDhC2gV7+AJ1uP2lsdeu -9tfeE8tTEH6KRtGX+rcuKxGrkLAngPnon1rpN5+r5N9ss4UXnT3ZJE95kTXWXwTr -gIOrmgIttRD02JDHBHNA7XIloKmf7J6raBKZV8aPEjoJpL1E/QYVN8Gb5DKj7Tjo -2GTzLH4U/ALqn83/B2gX2yKQOC16jdFU8WnjXzPKej17CuPKf1855eJ1usV2GDPO -LPAvTK33sefOT6jEm0pUBsV/fdUID+Ic/n4XuKxe9tQWskMJDE32p2u0mYRlynqI -4uJEvlz36hz1 ------END CERTIFICATE----- - -# Issuer: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc. -# Subject: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc. 
-# Label: "Starfield Root Certificate Authority - G2" -# Serial: 0 -# MD5 Fingerprint: d6:39:81:c6:52:7e:96:69:fc:fc:ca:66:ed:05:f2:96 -# SHA1 Fingerprint: b5:1c:06:7c:ee:2b:0c:3d:f8:55:ab:2d:92:f4:fe:39:d4:e7:0f:0e -# SHA256 Fingerprint: 2c:e1:cb:0b:f9:d2:f9:e1:02:99:3f:be:21:51:52:c3:b2:dd:0c:ab:de:1c:68:e5:31:9b:83:91:54:db:b7:f5 ------BEGIN CERTIFICATE----- -MIID3TCCAsWgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBjzELMAkGA1UEBhMCVVMx -EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT -HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAMTKVN0YXJmaWVs -ZCBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAw -MFoXDTM3MTIzMTIzNTk1OVowgY8xCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6 -b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFyZmllbGQgVGVj -aG5vbG9naWVzLCBJbmMuMTIwMAYDVQQDEylTdGFyZmllbGQgUm9vdCBDZXJ0aWZp -Y2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC -ggEBAL3twQP89o/8ArFvW59I2Z154qK3A2FWGMNHttfKPTUuiUP3oWmb3ooa/RMg -nLRJdzIpVv257IzdIvpy3Cdhl+72WoTsbhm5iSzchFvVdPtrX8WJpRBSiUZV9Lh1 -HOZ/5FSuS/hVclcCGfgXcVnrHigHdMWdSL5stPSksPNkN3mSwOxGXn/hbVNMYq/N -Hwtjuzqd+/x5AJhhdM8mgkBj87JyahkNmcrUDnXMN/uLicFZ8WJ/X7NfZTD4p7dN -dloedl40wOiWVpmKs/B/pM293DIxfJHP4F8R+GuqSVzRmZTRouNjWwl2tVZi4Ut0 -HZbUJtQIBFnQmA4O5t78w+wfkPECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO -BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFHwMMh+n2TB/xH1oo2Kooc6rB1snMA0G -CSqGSIb3DQEBCwUAA4IBAQARWfolTwNvlJk7mh+ChTnUdgWUXuEok21iXQnCoKjU -sHU48TRqneSfioYmUeYs0cYtbpUgSpIB7LiKZ3sx4mcujJUDJi5DnUox9g61DLu3 -4jd/IroAow57UvtruzvE03lRTs2Q9GcHGcg8RnoNAX3FWOdt5oUwF5okxBDgBPfg -8n/Uqgr/Qh037ZTlZFkSIHc40zI+OIF1lnP6aI+xy84fxez6nH7PfrHxBy22/L/K -pL/QlwVKvOoYKAKQvVR4CSFx09F9HdkWsKlhPdAKACL8x3vLCWRFCztAgfd9fDL1 -mMpYjn0q7pBZc2T5NnReJaH1ZgUufzkVqSr7UIuOhWn0 ------END CERTIFICATE----- - -# Issuer: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc. -# Subject: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc. 
-# Label: "Starfield Services Root Certificate Authority - G2" -# Serial: 0 -# MD5 Fingerprint: 17:35:74:af:7b:61:1c:eb:f4:f9:3c:e2:ee:40:f9:a2 -# SHA1 Fingerprint: 92:5a:8f:8d:2c:6d:04:e0:66:5f:59:6a:ff:22:d8:63:e8:25:6f:3f -# SHA256 Fingerprint: 56:8d:69:05:a2:c8:87:08:a4:b3:02:51:90:ed:cf:ed:b1:97:4a:60:6a:13:c6:e5:29:0f:cb:2a:e6:3e:da:b5 ------BEGIN CERTIFICATE----- -MIID7zCCAtegAwIBAgIBADANBgkqhkiG9w0BAQsFADCBmDELMAkGA1UEBhMCVVMx -EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT -HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xOzA5BgNVBAMTMlN0YXJmaWVs -ZCBTZXJ2aWNlcyBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5 -MDkwMTAwMDAwMFoXDTM3MTIzMTIzNTk1OVowgZgxCzAJBgNVBAYTAlVTMRAwDgYD -VQQIEwdBcml6b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFy -ZmllbGQgVGVjaG5vbG9naWVzLCBJbmMuMTswOQYDVQQDEzJTdGFyZmllbGQgU2Vy -dmljZXMgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZI -hvcNAQEBBQADggEPADCCAQoCggEBANUMOsQq+U7i9b4Zl1+OiFOxHz/Lz58gE20p -OsgPfTz3a3Y4Y9k2YKibXlwAgLIvWX/2h/klQ4bnaRtSmpDhcePYLQ1Ob/bISdm2 -8xpWriu2dBTrz/sm4xq6HZYuajtYlIlHVv8loJNwU4PahHQUw2eeBGg6345AWh1K -Ts9DkTvnVtYAcMtS7nt9rjrnvDH5RfbCYM8TWQIrgMw0R9+53pBlbQLPLJGmpufe -hRhJfGZOozptqbXuNC66DQO4M99H67FrjSXZm86B0UVGMpZwh94CDklDhbZsc7tk -6mFBrMnUVN+HL8cisibMn1lUaJ/8viovxFUcdUBgF4UCVTmLfwUCAwEAAaNCMEAw -DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJxfAN+q -AdcwKziIorhtSpzyEZGDMA0GCSqGSIb3DQEBCwUAA4IBAQBLNqaEd2ndOxmfZyMI -bw5hyf2E3F/YNoHN2BtBLZ9g3ccaaNnRbobhiCPPE95Dz+I0swSdHynVv/heyNXB -ve6SbzJ08pGCL72CQnqtKrcgfU28elUSwhXqvfdqlS5sdJ/PHLTyxQGjhdByPq1z -qwubdQxtRbeOlKyWN7Wg0I8VRw7j6IPdj/3vQQF3zCepYoUz8jcI73HPdwbeyBkd -iEDPfUYd/x7H4c7/I9vG+o1VTqkC50cRRj70/b17KSa7qWFiNyi2LSr2EIZkyXCn -0q23KXB56jzaYyWf/Wi3MOxw+3WKt21gZ7IeyLnp2KhvAotnDU0mV3HaIPzBSlCN -sSi6 ------END CERTIFICATE----- - -# Issuer: CN=AffirmTrust Commercial O=AffirmTrust -# Subject: CN=AffirmTrust Commercial O=AffirmTrust -# Label: "AffirmTrust Commercial" -# Serial: 8608355977964138876 -# MD5 Fingerprint: 82:92:ba:5b:ef:cd:8a:6f:a6:3d:55:f9:84:f6:d6:b7 -# SHA1 Fingerprint: f9:b5:b6:32:45:5f:9c:be:ec:57:5f:80:dc:e9:6e:2c:c7:b2:78:b7 -# SHA256 Fingerprint: 03:76:ab:1d:54:c5:f9:80:3c:e4:b2:e2:01:a0:ee:7e:ef:7b:57:b6:36:e8:a9:3c:9b:8d:48:60:c9:6f:5f:a7 ------BEGIN CERTIFICATE----- -MIIDTDCCAjSgAwIBAgIId3cGJyapsXwwDQYJKoZIhvcNAQELBQAwRDELMAkGA1UE -BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz -dCBDb21tZXJjaWFsMB4XDTEwMDEyOTE0MDYwNloXDTMwMTIzMTE0MDYwNlowRDEL -MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp -cm1UcnVzdCBDb21tZXJjaWFsMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC -AQEA9htPZwcroRX1BiLLHwGy43NFBkRJLLtJJRTWzsO3qyxPxkEylFf6EqdbDuKP -Hx6GGaeqtS25Xw2Kwq+FNXkyLbscYjfysVtKPcrNcV/pQr6U6Mje+SJIZMblq8Yr -ba0F8PrVC8+a5fBQpIs7R6UjW3p6+DM/uO+Zl+MgwdYoic+U+7lF7eNAFxHUdPAL -MeIrJmqbTFeurCA+ukV6BfO9m2kVrn1OIGPENXY6BwLJN/3HR+7o8XYdcxXyl6S1 -yHp52UKqK39c/s4mT6NmgTWvRLpUHhwwMmWd5jyTXlBOeuM61G7MGvv50jeuJCqr -VwMiKA1JdX+3KNp1v47j3A55MQIDAQABo0IwQDAdBgNVHQ4EFgQUnZPGU4teyq8/ -nx4P5ZmVvCT2lI8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ -KoZIhvcNAQELBQADggEBAFis9AQOzcAN/wr91LoWXym9e2iZWEnStB03TX8nfUYG -XUPGhi4+c7ImfU+TqbbEKpqrIZcUsd6M06uJFdhrJNTxFq7YpFzUf1GO7RgBsZNj -vbz4YYCanrHOQnDiqX0GJX0nof5v7LMeJNrjS1UaADs1tDvZ110w/YETifLCBivt -Z8SOyUOyXGsViQK8YvxO8rUzqrJv0wqiUOP2O+guRMLbZjipM1ZI8W0bM40NjD9g -N53Tym1+NH4Nn3J2ixufcv1SNUFFApYvHLKac0khsUlHRUe072o0EclNmsxZt9YC -nlpOZbWUrhvfKbAW8b8Angc6F2S1BLUjIZkKlTuXfO8= ------END CERTIFICATE----- - -# Issuer: CN=AffirmTrust Networking O=AffirmTrust -# Subject: CN=AffirmTrust Networking 
O=AffirmTrust -# Label: "AffirmTrust Networking" -# Serial: 8957382827206547757 -# MD5 Fingerprint: 42:65:ca:be:01:9a:9a:4c:a9:8c:41:49:cd:c0:d5:7f -# SHA1 Fingerprint: 29:36:21:02:8b:20:ed:02:f5:66:c5:32:d1:d6:ed:90:9f:45:00:2f -# SHA256 Fingerprint: 0a:81:ec:5a:92:97:77:f1:45:90:4a:f3:8d:5d:50:9f:66:b5:e2:c5:8f:cd:b5:31:05:8b:0e:17:f3:f0:b4:1b ------BEGIN CERTIFICATE----- -MIIDTDCCAjSgAwIBAgIIfE8EORzUmS0wDQYJKoZIhvcNAQEFBQAwRDELMAkGA1UE -BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz -dCBOZXR3b3JraW5nMB4XDTEwMDEyOTE0MDgyNFoXDTMwMTIzMTE0MDgyNFowRDEL -MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp -cm1UcnVzdCBOZXR3b3JraW5nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC -AQEAtITMMxcua5Rsa2FSoOujz3mUTOWUgJnLVWREZY9nZOIG41w3SfYvm4SEHi3y -YJ0wTsyEheIszx6e/jarM3c1RNg1lho9Nuh6DtjVR6FqaYvZ/Ls6rnla1fTWcbua -kCNrmreIdIcMHl+5ni36q1Mr3Lt2PpNMCAiMHqIjHNRqrSK6mQEubWXLviRmVSRL -QESxG9fhwoXA3hA/Pe24/PHxI1Pcv2WXb9n5QHGNfb2V1M6+oF4nI979ptAmDgAp -6zxG8D1gvz9Q0twmQVGeFDdCBKNwV6gbh+0t+nvujArjqWaJGctB+d1ENmHP4ndG -yH329JKBNv3bNPFyfvMMFr20FQIDAQABo0IwQDAdBgNVHQ4EFgQUBx/S55zawm6i -QLSwelAQUHTEyL0wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ -KoZIhvcNAQEFBQADggEBAIlXshZ6qML91tmbmzTCnLQyFE2npN/svqe++EPbkTfO -tDIuUFUaNU52Q3Eg75N3ThVwLofDwR1t3Mu1J9QsVtFSUzpE0nPIxBsFZVpikpzu -QY0x2+c06lkh1QF612S4ZDnNye2v7UsDSKegmQGA3GWjNq5lWUhPgkvIZfFXHeVZ -Lgo/bNjR9eUJtGxUAArgFU2HdW23WJZa3W3SAKD0m0i+wzekujbgfIeFlxoVot4u -olu9rxj5kFDNcFn4J2dHy8egBzp90SxdbBk6ZrV9/ZFvgrG+CJPbFEfxojfHRZ48 -x3evZKiT3/Zpg4Jg8klCNO1aAFSFHBY2kgxc+qatv9s= ------END CERTIFICATE----- - -# Issuer: CN=AffirmTrust Premium O=AffirmTrust -# Subject: CN=AffirmTrust Premium O=AffirmTrust -# Label: "AffirmTrust Premium" -# Serial: 7893706540734352110 -# MD5 Fingerprint: c4:5d:0e:48:b6:ac:28:30:4e:0a:bc:f9:38:16:87:57 -# SHA1 Fingerprint: d8:a6:33:2c:e0:03:6f:b1:85:f6:63:4f:7d:6a:06:65:26:32:28:27 -# SHA256 Fingerprint: 70:a7:3f:7f:37:6b:60:07:42:48:90:45:34:b1:14:82:d5:bf:0e:69:8e:cc:49:8d:f5:25:77:eb:f2:e9:3b:9a ------BEGIN CERTIFICATE----- -MIIFRjCCAy6gAwIBAgIIbYwURrGmCu4wDQYJKoZIhvcNAQEMBQAwQTELMAkGA1UE -BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1UcnVz -dCBQcmVtaXVtMB4XDTEwMDEyOTE0MTAzNloXDTQwMTIzMTE0MTAzNlowQTELMAkG -A1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1U -cnVzdCBQcmVtaXVtMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxBLf -qV/+Qd3d9Z+K4/as4Tx4mrzY8H96oDMq3I0gW64tb+eT2TZwamjPjlGjhVtnBKAQ -JG9dKILBl1fYSCkTtuG+kU3fhQxTGJoeJKJPj/CihQvL9Cl/0qRY7iZNyaqoe5rZ -+jjeRFcV5fiMyNlI4g0WJx0eyIOFJbe6qlVBzAMiSy2RjYvmia9mx+n/K+k8rNrS -s8PhaJyJ+HoAVt70VZVs+7pk3WKL3wt3MutizCaam7uqYoNMtAZ6MMgpv+0GTZe5 -HMQxK9VfvFMSF5yZVylmd2EhMQcuJUmdGPLu8ytxjLW6OQdJd/zvLpKQBY0tL3d7 -70O/Nbua2Plzpyzy0FfuKE4mX4+QaAkvuPjcBukumj5Rp9EixAqnOEhss/n/fauG -V+O61oV4d7pD6kh/9ti+I20ev9E2bFhc8e6kGVQa9QPSdubhjL08s9NIS+LI+H+S -qHZGnEJlPqQewQcDWkYtuJfzt9WyVSHvutxMAJf7FJUnM7/oQ0dG0giZFmA7mn7S -5u046uwBHjxIVkkJx0w3AJ6IDsBz4W9m6XJHMD4Q5QsDyZpCAGzFlH5hxIrff4Ia -C1nEWTJ3s7xgaVY5/bQGeyzWZDbZvUjthB9+pSKPKrhC9IK31FOQeE4tGv2Bb0TX -OwF0lkLgAOIua+rF7nKsu7/+6qqo+Nz2snmKtmcCAwEAAaNCMEAwHQYDVR0OBBYE -FJ3AZ6YMItkm9UWrpmVSESfYRaxjMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/ -BAQDAgEGMA0GCSqGSIb3DQEBDAUAA4ICAQCzV00QYk465KzquByvMiPIs0laUZx2 -KI15qldGF9X1Uva3ROgIRL8YhNILgM3FEv0AVQVhh0HctSSePMTYyPtwni94loMg -Nt58D2kTiKV1NpgIpsbfrM7jWNa3Pt668+s0QNiigfV4Py/VpfzZotReBA4Xrf5B -8OWycvpEgjNC6C1Y91aMYj+6QrCcDFx+LmUmXFNPALJ4fqENmS2NuB2OosSw/WDQ -MKSOyARiqcTtNd56l+0OOF6SL5Nwpamcb6d9Ex1+xghIsV5n61EIJenmJWtSKZGc -0jlzCFfemQa0W50QBuHCAKi4HEoCChTQwUHK+4w1IX2COPKpVJEZNZOUbWo6xbLQ 
-u4mGk+ibyQ86p3q4ofB4Rvr8Ny/lioTz3/4E2aFooC8k4gmVBtWVyuEklut89pMF -u+1z6S3RdTnX5yTb2E5fQ4+e0BQ5v1VwSJlXMbSc7kqYA5YwH2AG7hsj/oFgIxpH -YoWlzBk0gG+zrBrjn/B7SK3VAdlntqlyk+otZrWyuOQ9PLLvTIzq6we/qzWaVYa8 -GKa1qF60g2xraUDTn9zxw2lrueFtCfTxqlB2Cnp9ehehVZZCmTEJ3WARjQUwfuaO -RtGdFNrHF+QFlozEJLUbzxQHskD4o55BhrwE0GuWyCqANP2/7waj3VjFhT0+j/6e -KeC2uAloGRwYQw== ------END CERTIFICATE----- - -# Issuer: CN=AffirmTrust Premium ECC O=AffirmTrust -# Subject: CN=AffirmTrust Premium ECC O=AffirmTrust -# Label: "AffirmTrust Premium ECC" -# Serial: 8401224907861490260 -# MD5 Fingerprint: 64:b0:09:55:cf:b1:d5:99:e2:be:13:ab:a6:5d:ea:4d -# SHA1 Fingerprint: b8:23:6b:00:2f:1d:16:86:53:01:55:6c:11:a4:37:ca:eb:ff:c3:bb -# SHA256 Fingerprint: bd:71:fd:f6:da:97:e4:cf:62:d1:64:7a:dd:25:81:b0:7d:79:ad:f8:39:7e:b4:ec:ba:9c:5e:84:88:82:14:23 ------BEGIN CERTIFICATE----- -MIIB/jCCAYWgAwIBAgIIdJclisc/elQwCgYIKoZIzj0EAwMwRTELMAkGA1UEBhMC -VVMxFDASBgNVBAoMC0FmZmlybVRydXN0MSAwHgYDVQQDDBdBZmZpcm1UcnVzdCBQ -cmVtaXVtIEVDQzAeFw0xMDAxMjkxNDIwMjRaFw00MDEyMzExNDIwMjRaMEUxCzAJ -BgNVBAYTAlVTMRQwEgYDVQQKDAtBZmZpcm1UcnVzdDEgMB4GA1UEAwwXQWZmaXJt -VHJ1c3QgUHJlbWl1bSBFQ0MwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQNMF4bFZ0D -0KF5Nbc6PJJ6yhUczWLznCZcBz3lVPqj1swS6vQUX+iOGasvLkjmrBhDeKzQN8O9 -ss0s5kfiGuZjuD0uL3jET9v0D6RoTFVya5UdThhClXjMNzyR4ptlKymjQjBAMB0G -A1UdDgQWBBSaryl6wBE1NSZRMADDav5A1a7WPDAPBgNVHRMBAf8EBTADAQH/MA4G -A1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNnADBkAjAXCfOHiFBar8jAQr9HX/Vs -aobgxCd05DhT1wV/GzTjxi+zygk8N53X57hG8f2h4nECMEJZh0PUUd+60wkyWs6I -flc9nF9Ca/UHLbXwgpP5WW+uZPpY5Yse42O+tYHNbwKMeQ== ------END CERTIFICATE----- - -# Issuer: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority -# Subject: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority -# Label: "Certum Trusted Network CA" -# Serial: 279744 -# MD5 Fingerprint: d5:e9:81:40:c5:18:69:fc:46:2c:89:75:62:0f:aa:78 -# SHA1 Fingerprint: 07:e0:32:e0:20:b7:2c:3f:19:2f:06:28:a2:59:3a:19:a7:0f:06:9e -# SHA256 Fingerprint: 5c:58:46:8d:55:f5:8e:49:7e:74:39:82:d2:b5:00:10:b6:d1:65:37:4a:cf:83:a7:d4:a3:2d:b7:68:c4:40:8e ------BEGIN CERTIFICATE----- -MIIDuzCCAqOgAwIBAgIDBETAMA0GCSqGSIb3DQEBBQUAMH4xCzAJBgNVBAYTAlBM -MSIwIAYDVQQKExlVbml6ZXRvIFRlY2hub2xvZ2llcyBTLkEuMScwJQYDVQQLEx5D -ZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxIjAgBgNVBAMTGUNlcnR1bSBU -cnVzdGVkIE5ldHdvcmsgQ0EwHhcNMDgxMDIyMTIwNzM3WhcNMjkxMjMxMTIwNzM3 -WjB+MQswCQYDVQQGEwJQTDEiMCAGA1UEChMZVW5pemV0byBUZWNobm9sb2dpZXMg -Uy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MSIw -IAYDVQQDExlDZXJ0dW0gVHJ1c3RlZCBOZXR3b3JrIENBMIIBIjANBgkqhkiG9w0B -AQEFAAOCAQ8AMIIBCgKCAQEA4/t9o3K6wvDJFIf1awFO4W5AB7ptJ11/91sts1rH -UV+rpDKmYYe2bg+G0jACl/jXaVehGDldamR5xgFZrDwxSjh80gTSSyjoIF87B6LM -TXPb865Px1bVWqeWifrzq2jUI4ZZJ88JJ7ysbnKDHDBy3+Ci6dLhdHUZvSqeexVU -BBvXQzmtVSjF4hq79MDkrjhJM8x2hZ85RdKknvISjFH4fOQtf/WsX+sWn7Et0brM -kUJ3TCXJkDhv2/DM+44el1k+1WBO5gUo7Ul5E0u6SNsv+XLTOcr+H9g0cvW0QM8x -AcPs3hEtF10fuFDRXhmnad4HMyjKUJX5p1TLVIZQRan5SQIDAQABo0IwQDAPBgNV -HRMBAf8EBTADAQH/MB0GA1UdDgQWBBQIds3LB/8k9sXN7buQvOKEN0Z19zAOBgNV -HQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQEFBQADggEBAKaorSLOAT2mo/9i0Eidi15y -sHhE49wcrwn9I0j6vSrEuVUEtRCjjSfeC4Jj0O7eDDd5QVsisrCaQVymcODU0HfL -I9MA4GxWL+FpDQ3Zqr8hgVDZBqWo/5U30Kr+4rP1mS1FhIrlQgnXdAIv94nYmem8 -J9RHjboNRhx3zxSkHLmkMcScKHQDNP8zGSal6Q10tz6XxnboJ5ajZt3hrvJBW8qY -VoNzcOSGGtIxQbovvi0TWnZvTuhOgQ4/WwMioBK+ZlgRSssDxLQqKi2WF+A5VLxI -03YnnZotBqbJ7DnSq9ufmgsnAjUpsUCV5/nonFWIGUbWtzT1fs45mtk48VH3Tyw= ------END CERTIFICATE----- - -# Issuer: CN=TWCA Root Certification Authority O=TAIWAN-CA 
OU=Root CA -# Subject: CN=TWCA Root Certification Authority O=TAIWAN-CA OU=Root CA -# Label: "TWCA Root Certification Authority" -# Serial: 1 -# MD5 Fingerprint: aa:08:8f:f6:f9:7b:b7:f2:b1:a7:1e:9b:ea:ea:bd:79 -# SHA1 Fingerprint: cf:9e:87:6d:d3:eb:fc:42:26:97:a3:b5:a3:7a:a0:76:a9:06:23:48 -# SHA256 Fingerprint: bf:d8:8f:e1:10:1c:41:ae:3e:80:1b:f8:be:56:35:0e:e9:ba:d1:a6:b9:bd:51:5e:dc:5c:6d:5b:87:11:ac:44 ------BEGIN CERTIFICATE----- -MIIDezCCAmOgAwIBAgIBATANBgkqhkiG9w0BAQUFADBfMQswCQYDVQQGEwJUVzES -MBAGA1UECgwJVEFJV0FOLUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFU -V0NBIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwODI4MDcyNDMz -WhcNMzAxMjMxMTU1OTU5WjBfMQswCQYDVQQGEwJUVzESMBAGA1UECgwJVEFJV0FO -LUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFUV0NBIFJvb3QgQ2VydGlm -aWNhdGlvbiBBdXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB -AQCwfnK4pAOU5qfeCTiRShFAh6d8WWQUe7UREN3+v9XAu1bihSX0NXIP+FPQQeFE -AcK0HMMxQhZHhTMidrIKbw/lJVBPhYa+v5guEGcevhEFhgWQxFnQfHgQsIBct+HH -K3XLfJ+utdGdIzdjp9xCoi2SBBtQwXu4PhvJVgSLL1KbralW6cH/ralYhzC2gfeX -RfwZVzsrb+RH9JlF/h3x+JejiB03HFyP4HYlmlD4oFT/RJB2I9IyxsOrBr/8+7/z -rX2SYgJbKdM1o5OaQ2RgXbL6Mv87BK9NQGr5x+PvI/1ry+UPizgN7gr8/g+YnzAx -3WxSZfmLgb4i4RxYA7qRG4kHAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV -HRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqOFsmjd6LWvJPelSDGRjjCDWmujANBgkq -hkiG9w0BAQUFAAOCAQEAPNV3PdrfibqHDAhUaiBQkr6wQT25JmSDCi/oQMCXKCeC -MErJk/9q56YAf4lCmtYR5VPOL8zy2gXE/uJQxDqGfczafhAJO5I1KlOy/usrBdls -XebQ79NqZp4VKIV66IIArB6nCWlWQtNoURi+VJq/REG6Sb4gumlc7rh3zc5sH62D -lhh9DrUUOYTxKOkto557HnpyWoOzeW/vtPzQCqVYT0bf+215WfKEIlKuD8z7fDvn -aspHYcN6+NOSBB+4IIThNlQWx0DeO4pz3N/GCUzf7Nr/1FNCocnyYh0igzyXxfkZ -YiesZSLX0zzG5Y6yU8xJzrww/nsOM5D77dIUkR8Hrw== ------END CERTIFICATE----- - -# Issuer: O=SECOM Trust Systems CO.,LTD. OU=Security Communication RootCA2 -# Subject: O=SECOM Trust Systems CO.,LTD. 
OU=Security Communication RootCA2 -# Label: "Security Communication RootCA2" -# Serial: 0 -# MD5 Fingerprint: 6c:39:7d:a4:0e:55:59:b2:3f:d6:41:b1:12:50:de:43 -# SHA1 Fingerprint: 5f:3b:8c:f2:f8:10:b3:7d:78:b4:ce:ec:19:19:c3:73:34:b9:c7:74 -# SHA256 Fingerprint: 51:3b:2c:ec:b8:10:d4:cd:e5:dd:85:39:1a:df:c6:c2:dd:60:d8:7b:b7:36:d2:b5:21:48:4a:a4:7a:0e:be:f6 ------BEGIN CERTIFICATE----- -MIIDdzCCAl+gAwIBAgIBADANBgkqhkiG9w0BAQsFADBdMQswCQYDVQQGEwJKUDEl -MCMGA1UEChMcU0VDT00gVHJ1c3QgU3lzdGVtcyBDTy4sTFRELjEnMCUGA1UECxMe -U2VjdXJpdHkgQ29tbXVuaWNhdGlvbiBSb290Q0EyMB4XDTA5MDUyOTA1MDAzOVoX -DTI5MDUyOTA1MDAzOVowXTELMAkGA1UEBhMCSlAxJTAjBgNVBAoTHFNFQ09NIFRy -dXN0IFN5c3RlbXMgQ08uLExURC4xJzAlBgNVBAsTHlNlY3VyaXR5IENvbW11bmlj -YXRpb24gUm9vdENBMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANAV -OVKxUrO6xVmCxF1SrjpDZYBLx/KWvNs2l9amZIyoXvDjChz335c9S672XewhtUGr -zbl+dp+++T42NKA7wfYxEUV0kz1XgMX5iZnK5atq1LXaQZAQwdbWQonCv/Q4EpVM -VAX3NuRFg3sUZdbcDE3R3n4MqzvEFb46VqZab3ZpUql6ucjrappdUtAtCms1FgkQ -hNBqyjoGADdH5H5XTz+L62e4iKrFvlNVspHEfbmwhRkGeC7bYRr6hfVKkaHnFtWO -ojnflLhwHyg/i/xAXmODPIMqGplrz95Zajv8bxbXH/1KEOtOghY6rCcMU/Gt1SSw -awNQwS08Ft1ENCcadfsCAwEAAaNCMEAwHQYDVR0OBBYEFAqFqXdlBZh8QIH4D5cs -OPEK7DzPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3 -DQEBCwUAA4IBAQBMOqNErLlFsceTfsgLCkLfZOoc7llsCLqJX2rKSpWeeo8HxdpF -coJxDjrSzG+ntKEju/Ykn8sX/oymzsLS28yN/HH8AynBbF0zX2S2ZTuJbxh2ePXc -okgfGT+Ok+vx+hfuzU7jBBJV1uXk3fs+BXziHV7Gp7yXT2g69ekuCkO2r1dcYmh8 -t/2jioSgrGK+KwmHNPBqAbubKVY8/gA3zyNs8U6qtnRGEmyR7jTV7JqR50S+kDFy -1UkC9gLl9B/rfNmWVan/7Ir5mUf/NVoCqgTLiluHcSmRvaS0eg29mvVXIwAHIRc/ -SjnRBUkLp7Y3gaVdjKozXoEofKd9J+sAro03 ------END CERTIFICATE----- - -# Issuer: CN=Hellenic Academic and Research Institutions RootCA 2011 O=Hellenic Academic and Research Institutions Cert. Authority -# Subject: CN=Hellenic Academic and Research Institutions RootCA 2011 O=Hellenic Academic and Research Institutions Cert. 
Authority -# Label: "Hellenic Academic and Research Institutions RootCA 2011" -# Serial: 0 -# MD5 Fingerprint: 73:9f:4c:4b:73:5b:79:e9:fa:ba:1c:ef:6e:cb:d5:c9 -# SHA1 Fingerprint: fe:45:65:9b:79:03:5b:98:a1:61:b5:51:2e:ac:da:58:09:48:22:4d -# SHA256 Fingerprint: bc:10:4f:15:a4:8b:e7:09:dc:a5:42:a7:e1:d4:b9:df:6f:05:45:27:e8:02:ea:a9:2d:59:54:44:25:8a:fe:71 ------BEGIN CERTIFICATE----- -MIIEMTCCAxmgAwIBAgIBADANBgkqhkiG9w0BAQUFADCBlTELMAkGA1UEBhMCR1Ix -RDBCBgNVBAoTO0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1 -dGlvbnMgQ2VydC4gQXV0aG9yaXR5MUAwPgYDVQQDEzdIZWxsZW5pYyBBY2FkZW1p -YyBhbmQgUmVzZWFyY2ggSW5zdGl0dXRpb25zIFJvb3RDQSAyMDExMB4XDTExMTIw -NjEzNDk1MloXDTMxMTIwMTEzNDk1MlowgZUxCzAJBgNVBAYTAkdSMUQwQgYDVQQK -EztIZWxsZW5pYyBBY2FkZW1pYyBhbmQgUmVzZWFyY2ggSW5zdGl0dXRpb25zIENl -cnQuIEF1dGhvcml0eTFAMD4GA1UEAxM3SGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl -c2VhcmNoIEluc3RpdHV0aW9ucyBSb290Q0EgMjAxMTCCASIwDQYJKoZIhvcNAQEB -BQADggEPADCCAQoCggEBAKlTAOMupvaO+mDYLZU++CwqVE7NuYRhlFhPjz2L5EPz -dYmNUeTDN9KKiE15HrcS3UN4SoqS5tdI1Q+kOilENbgH9mgdVc04UfCMJDGFr4PJ -fel3r+0ae50X+bOdOFAPplp5kYCvN66m0zH7tSYJnTxa71HFK9+WXesyHgLacEns -bgzImjeN9/E2YEsmLIKe0HjzDQ9jpFEw4fkrJxIH2Oq9GGKYsFk3fb7u8yBRQlqD -75O6aRXxYp2fmTmCobd0LovUxQt7L/DICto9eQqakxylKHJzkUOap9FNhYS5qXSP -FEDH3N6sQWRstBmbAmNtJGSPRLIl6s5ddAxjMlyNh+UCAwEAAaOBiTCBhjAPBgNV -HRMBAf8EBTADAQH/MAsGA1UdDwQEAwIBBjAdBgNVHQ4EFgQUppFC/RNhSiOeCKQp -5dgTBCPuQSUwRwYDVR0eBEAwPqA8MAWCAy5ncjAFggMuZXUwBoIELmVkdTAGggQu -b3JnMAWBAy5ncjAFgQMuZXUwBoEELmVkdTAGgQQub3JnMA0GCSqGSIb3DQEBBQUA -A4IBAQAf73lB4XtuP7KMhjdCSk4cNx6NZrokgclPEg8hwAOXhiVtXdMiKahsog2p -6z0GW5k6x8zDmjR/qw7IThzh+uTczQ2+vyT+bOdrwg3IBp5OjWEopmr95fZi6hg8 -TqBTnbI6nOulnJEWtk2C4AwFSKls9cz4y51JtPACpf1wA+2KIaWuE4ZJwzNzvoc7 -dIsXRSZMFpGD/md9zU1jZ/rzAxKWeAaNsWftjj++n08C9bMJL/NMh98qy5V8Acys -Nnq/onN694/BtZqhFLKPM58N7yLcZnuEvUUXBj08yrl3NI/K6s8/MT7jiOOASSXI -l7WdmplNsDz4SgCbZN2fOUvRJ9e4 ------END CERTIFICATE----- - -# Issuer: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967 -# Subject: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967 -# Label: "Actalis Authentication Root CA" -# Serial: 6271844772424770508 -# MD5 Fingerprint: 69:c1:0d:4f:07:a3:1b:c3:fe:56:3d:04:bc:11:f6:a6 -# SHA1 Fingerprint: f3:73:b3:87:06:5a:28:84:8a:f2:f3:4a:ce:19:2b:dd:c7:8e:9c:ac -# SHA256 Fingerprint: 55:92:60:84:ec:96:3a:64:b9:6e:2a:be:01:ce:0b:a8:6a:64:fb:fe:bc:c7:aa:b5:af:c1:55:b3:7f:d7:60:66 ------BEGIN CERTIFICATE----- -MIIFuzCCA6OgAwIBAgIIVwoRl0LE48wwDQYJKoZIhvcNAQELBQAwazELMAkGA1UE -BhMCSVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8w -MzM1ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290 -IENBMB4XDTExMDkyMjExMjIwMloXDTMwMDkyMjExMjIwMlowazELMAkGA1UEBhMC -SVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8wMzM1 -ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290IENB -MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAp8bEpSmkLO/lGMWwUKNv -UTufClrJwkg4CsIcoBh/kbWHuUA/3R1oHwiD1S0eiKD4j1aPbZkCkpAW1V8IbInX -4ay8IMKx4INRimlNAJZaby/ARH6jDuSRzVju3PvHHkVH3Se5CAGfpiEd9UEtL0z9 -KK3giq0itFZljoZUj5NDKd45RnijMCO6zfB9E1fAXdKDa0hMxKufgFpbOr3JpyI/ -gCczWw63igxdBzcIy2zSekciRDXFzMwujt0q7bd9Zg1fYVEiVRvjRuPjPdA1Yprb -rxTIW6HMiRvhMCb8oJsfgadHHwTrozmSBp+Z07/T6k9QnBn+locePGX2oxgkg4YQ -51Q+qDp2JE+BIcXjDwL4k5RHILv+1A7TaLndxHqEguNTVHnd25zS8gebLra8Pu2F -be8lEfKXGkJh90qX6IuxEAf6ZYGyojnP9zz/GPvG8VqLWeICrHuS0E4UT1lF9gxe -KF+w6D9Fz8+vm2/7hNN3WpVvrJSEnu68wEqPSpP4RCHiMUVhUE4Q2OM1fEwZtN4F -v6MGn8i1zeQf1xcGDXqVdFUNaBr8EBtiZJ1t4JWgw5QHVw0U5r0F+7if5t+L4sbn -fpb2U8WANFAoWPASUHEXMLrmeGO89LKtmyuy/uE5jF66CyCU3nuDuP/jVo23Eek7 
-jPKxwV2dpAtMK9myGPW1n0sCAwEAAaNjMGEwHQYDVR0OBBYEFFLYiDrIn3hm7Ynz -ezhwlMkCAjbQMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUUtiIOsifeGbt -ifN7OHCUyQICNtAwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAL -e3KHwGCmSUyIWOYdiPcUZEim2FgKDk8TNd81HdTtBjHIgT5q1d07GjLukD0R0i70 -jsNjLiNmsGe+b7bAEzlgqqI0JZN1Ut6nna0Oh4lScWoWPBkdg/iaKWW+9D+a2fDz -WochcYBNy+A4mz+7+uAwTc+G02UQGRjRlwKxK3JCaKygvU5a2hi/a5iB0P2avl4V -SM0RFbnAKVy06Ij3Pjaut2L9HmLecHgQHEhb2rykOLpn7VU+Xlff1ANATIGk0k9j -pwlCCRT8AKnCgHNPLsBA2RF7SOp6AsDT6ygBJlh0wcBzIm2Tlf05fbsq4/aC4yyX -X04fkZT6/iyj2HYauE2yOE+b+h1IYHkm4vP9qdCa6HCPSXrW5b0KDtst842/6+Ok -fcvHlXHo2qN8xcL4dJIEG4aspCJTQLas/kx2z/uUMsA1n3Y/buWQbqCmJqK4LL7R -K4X9p2jIugErsWx0Hbhzlefut8cl8ABMALJ+tguLHPPAUJ4lueAI3jZm/zel0btU -ZCzJJ7VLkn5l/9Mt4blOvH+kQSGQQXemOR/qnuOf0GZvBeyqdn6/axag67XH/JJU -LysRJyU3eExRarDzzFhdFPFqSBX/wge2sY0PjlxQRrM9vwGYT7JZVEc+NHt4bVaT -LnPqZih4zR0Uv6CPLy64Lo7yFIrM6bV8+2ydDKXhlg== ------END CERTIFICATE----- - -# Issuer: O=Trustis Limited OU=Trustis FPS Root CA -# Subject: O=Trustis Limited OU=Trustis FPS Root CA -# Label: "Trustis FPS Root CA" -# Serial: 36053640375399034304724988975563710553 -# MD5 Fingerprint: 30:c9:e7:1e:6b:e6:14:eb:65:b2:16:69:20:31:67:4d -# SHA1 Fingerprint: 3b:c0:38:0b:33:c3:f6:a6:0c:86:15:22:93:d9:df:f5:4b:81:c0:04 -# SHA256 Fingerprint: c1:b4:82:99:ab:a5:20:8f:e9:63:0a:ce:55:ca:68:a0:3e:da:5a:51:9c:88:02:a0:d3:a6:73:be:8f:8e:55:7d ------BEGIN CERTIFICATE----- -MIIDZzCCAk+gAwIBAgIQGx+ttiD5JNM2a/fH8YygWTANBgkqhkiG9w0BAQUFADBF -MQswCQYDVQQGEwJHQjEYMBYGA1UEChMPVHJ1c3RpcyBMaW1pdGVkMRwwGgYDVQQL -ExNUcnVzdGlzIEZQUyBSb290IENBMB4XDTAzMTIyMzEyMTQwNloXDTI0MDEyMTEx -MzY1NFowRTELMAkGA1UEBhMCR0IxGDAWBgNVBAoTD1RydXN0aXMgTGltaXRlZDEc -MBoGA1UECxMTVHJ1c3RpcyBGUFMgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQAD -ggEPADCCAQoCggEBAMVQe547NdDfxIzNjpvto8A2mfRC6qc+gIMPpqdZh8mQRUN+ -AOqGeSoDvT03mYlmt+WKVoaTnGhLaASMk5MCPjDSNzoiYYkchU59j9WvezX2fihH -iTHcDnlkH5nSW7r+f2C/revnPDgpai/lkQtV/+xvWNUtyd5MZnGPDNcE2gfmHhjj -vSkCqPoc4Vu5g6hBSLwacY3nYuUtsuvffM/bq1rKMfFMIvMFE/eC+XN5DL7XSxzA -0RU8k0Fk0ea+IxciAIleH2ulrG6nS4zto3Lmr2NNL4XSFDWaLk6M6jKYKIahkQlB -OrTh4/L68MkKokHdqeMDx4gVOxzUGpTXn2RZEm0CAwEAAaNTMFEwDwYDVR0TAQH/ -BAUwAwEB/zAfBgNVHSMEGDAWgBS6+nEleYtXQSUhhgtx67JkDoshZzAdBgNVHQ4E -FgQUuvpxJXmLV0ElIYYLceuyZA6LIWcwDQYJKoZIhvcNAQEFBQADggEBAH5Y//01 -GX2cGE+esCu8jowU/yyg2kdbw++BLa8F6nRIW/M+TgfHbcWzk88iNVy2P3UnXwmW -zaD+vkAMXBJV+JOCyinpXj9WV4s4NvdFGkwozZ5BuO1WTISkQMi4sKUraXAEasP4 -1BIy+Q7DsdwyhEQsb8tGD+pmQQ9P8Vilpg0ND2HepZ5dfWWhPBfnqFVO76DH7cZE -f1T1o+CP8HxVIo8ptoGj4W1OLBuAZ+ytIJ8MYmHVl/9D7S3B2l0pKoU/rGXuhg8F -jZBf3+6f9L/uHfuY5H+QK4R4EA5sSVPvFVtlRkpdr7r7OnIdzfYliB6XzCGcKQEN -ZetX2fNXlrtIzYE= ------END CERTIFICATE----- - -# Issuer: CN=Buypass Class 2 Root CA O=Buypass AS-983163327 -# Subject: CN=Buypass Class 2 Root CA O=Buypass AS-983163327 -# Label: "Buypass Class 2 Root CA" -# Serial: 2 -# MD5 Fingerprint: 46:a7:d2:fe:45:fb:64:5a:a8:59:90:9b:78:44:9b:29 -# SHA1 Fingerprint: 49:0a:75:74:de:87:0a:47:fe:58:ee:f6:c7:6b:eb:c6:0b:12:40:99 -# SHA256 Fingerprint: 9a:11:40:25:19:7c:5b:b9:5d:94:e6:3d:55:cd:43:79:08:47:b6:46:b2:3c:df:11:ad:a4:a0:0e:ff:15:fb:48 ------BEGIN CERTIFICATE----- -MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd -MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg -Q2xhc3MgMiBSb290IENBMB4XDTEwMTAyNjA4MzgwM1oXDTQwMTAyNjA4MzgwM1ow -TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw -HgYDVQQDDBdCdXlwYXNzIENsYXNzIDIgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB -BQADggIPADCCAgoCggIBANfHXvfBB9R3+0Mh9PT1aeTuMgHbo4Yf5FkNuud1g1Lr 
-6hxhFUi7HQfKjK6w3Jad6sNgkoaCKHOcVgb/S2TwDCo3SbXlzwx87vFKu3MwZfPV -L4O2fuPn9Z6rYPnT8Z2SdIrkHJasW4DptfQxh6NR/Md+oW+OU3fUl8FVM5I+GC91 -1K2GScuVr1QGbNgGE41b/+EmGVnAJLqBcXmQRFBoJJRfuLMR8SlBYaNByyM21cHx -MlAQTn/0hpPshNOOvEu/XAFOBz3cFIqUCqTqc/sLUegTBxj6DvEr0VQVfTzh97QZ -QmdiXnfgolXsttlpF9U6r0TtSsWe5HonfOV116rLJeffawrbD02TTqigzXsu8lkB -arcNuAeBfos4GzjmCleZPe4h6KP1DBbdi+w0jpwqHAAVF41og9JwnxgIzRFo1clr -Us3ERo/ctfPYV3Me6ZQ5BL/T3jjetFPsaRyifsSP5BtwrfKi+fv3FmRmaZ9JUaLi -FRhnBkp/1Wy1TbMz4GHrXb7pmA8y1x1LPC5aAVKRCfLf6o3YBkBjqhHk/sM3nhRS -P/TizPJhk9H9Z2vXUq6/aKtAQ6BXNVN48FP4YUIHZMbXb5tMOA1jrGKvNouicwoN -9SG9dKpN6nIDSdvHXx1iY8f93ZHsM+71bbRuMGjeyNYmsHVee7QHIJihdjK4TWxP -AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMmAd+BikoL1Rpzz -uvdMw964o605MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAU18h -9bqwOlI5LJKwbADJ784g7wbylp7ppHR/ehb8t/W2+xUbP6umwHJdELFx7rxP462s -A20ucS6vxOOto70MEae0/0qyexAQH6dXQbLArvQsWdZHEIjzIVEpMMpghq9Gqx3t -OluwlN5E40EIosHsHdb9T7bWR9AUC8rmyrV7d35BH16Dx7aMOZawP5aBQW9gkOLo -+fsicdl9sz1Gv7SEr5AcD48Saq/v7h56rgJKihcrdv6sVIkkLE8/trKnToyokZf7 -KcZ7XC25y2a2t6hbElGFtQl+Ynhw/qlqYLYdDnkM/crqJIByw5c/8nerQyIKx+u2 -DISCLIBrQYoIwOula9+ZEsuK1V6ADJHgJgg2SMX6OBE1/yWDLfJ6v9r9jv6ly0Us -H8SIU653DtmadsWOLB2jutXsMq7Aqqz30XpN69QH4kj3Io6wpJ9qzo6ysmD0oyLQ -I+uUWnpp3Q+/QFesa1lQ2aOZ4W7+jQF5JyMV3pKdewlNWudLSDBaGOYKbeaP4NK7 -5t98biGCwWg5TbSYWGZizEqQXsP6JwSxeRV0mcy+rSDeJmAc61ZRpqPq5KM/p/9h -3PFaTWwyI0PurKju7koSCTxdccK+efrCh2gdC/1cacwG0Jp9VJkqyTkaGa9LKkPz -Y11aWOIv4x3kqdbQCtCev9eBCfHJxyYNrJgWVqA= ------END CERTIFICATE----- - -# Issuer: CN=Buypass Class 3 Root CA O=Buypass AS-983163327 -# Subject: CN=Buypass Class 3 Root CA O=Buypass AS-983163327 -# Label: "Buypass Class 3 Root CA" -# Serial: 2 -# MD5 Fingerprint: 3d:3b:18:9e:2c:64:5a:e8:d5:88:ce:0e:f9:37:c2:ec -# SHA1 Fingerprint: da:fa:f7:fa:66:84:ec:06:8f:14:50:bd:c7:c2:81:a5:bc:a9:64:57 -# SHA256 Fingerprint: ed:f7:eb:bc:a2:7a:2a:38:4d:38:7b:7d:40:10:c6:66:e2:ed:b4:84:3e:4c:29:b4:ae:1d:5b:93:32:e6:b2:4d ------BEGIN CERTIFICATE----- -MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd -MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg -Q2xhc3MgMyBSb290IENBMB4XDTEwMTAyNjA4Mjg1OFoXDTQwMTAyNjA4Mjg1OFow -TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw -HgYDVQQDDBdCdXlwYXNzIENsYXNzIDMgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB -BQADggIPADCCAgoCggIBAKXaCpUWUOOV8l6ddjEGMnqb8RB2uACatVI2zSRHsJ8Y -ZLya9vrVediQYkwiL944PdbgqOkcLNt4EemOaFEVcsfzM4fkoF0LXOBXByow9c3E -N3coTRiR5r/VUv1xLXA+58bEiuPwKAv0dpihi4dVsjoT/Lc+JzeOIuOoTyrvYLs9 -tznDDgFHmV0ST9tD+leh7fmdvhFHJlsTmKtdFoqwNxxXnUX/iJY2v7vKB3tvh2PX -0DJq1l1sDPGzbjniazEuOQAnFN44wOwZZoYS6J1yFhNkUsepNxz9gjDthBgd9K5c -/3ATAOux9TN6S9ZV+AWNS2mw9bMoNlwUxFFzTWsL8TQH2xc519woe2v1n/MuwU8X -KhDzzMro6/1rqy6any2CbgTUUgGTLT2G/H783+9CHaZr77kgxve9oKeV/afmiSTY -zIw0bOIjL9kSGiG5VZFvC5F5GQytQIgLcOJ60g7YaEi7ghM5EFjp2CoHxhLbWNvS -O1UQRwUVZ2J+GGOmRj8JDlQyXr8NYnon74Do29lLBlo3WiXQCBJ31G8JUJc9yB3D -34xFMFbG02SrZvPAXpacw8Tvw3xrizp5f7NJzz3iiZ+gMEuFuZyUJHmPfWupRWgP -K9Dx2hzLabjKSWJtyNBjYt1gD1iqj6G8BaVmos8bdrKEZLFMOVLAMLrwjEsCsLa3 -AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFEe4zf/lb+74suwv -Tg75JbCOPGvDMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAACAj -QTUEkMJAYmDv4jVM1z+s4jSQuKFvdvoWFqRINyzpkMLyPPgKn9iB5btb2iUspKdV -cSQy9sgL8rxq+JOssgfCX5/bzMiKqr5qb+FJEMwx14C7u8jYog5kV+qi9cKpMRXS -IGrs/CIBKM+GuIAeqcwRpTzyFrNHnfzSgCHEy9BHcEGhyoMZCCxt8l13nIoUE9Q2 -HJLw5QY33KbmkJs4j1xrG0aGQ0JfPgEHU1RdZX33inOhmlRaHylDFCfChQ+1iHsa -O5S3HWCntZznKWlXWpuTekMwGwPXYshApqr8ZORK15FTAaggiG6cX0S5y2CBNOxv 
-033aSF/rtJC8LakcC6wc1aJoIIAE1vyxjy+7SjENSoYc6+I2KSb12tjE8nVhz36u -dmNKekBlk4f4HoCMhuWG1o8O/FMsYOgWYRqiPkN7zTlgVGr18okmAWiDSKIz6MkE -kbIRNBE+6tBDGR8Dk5AM/1E9V/RBbuHLoL7ryWPNbczk+DaqaJ3tvV2XcEQNtg41 -3OEMXbugUZTLfhbrES+jkkXITHHZvMmZUldGL1DPvTVp9D0VzgalLA8+9oG6lLvD -u79leNKGef9JOxqDDPDeeOzI8k1MGt6CKfjBWtrt7uYnXuhF0J0cUahoq0Tj0Itq -4/g7u9xN12TyUb7mqqta6THuBrxzvxNiCp/HuZc= ------END CERTIFICATE----- - -# Issuer: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center -# Subject: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center -# Label: "T-TeleSec GlobalRoot Class 3" -# Serial: 1 -# MD5 Fingerprint: ca:fb:40:a8:4e:39:92:8a:1d:fe:8e:2f:c4:27:ea:ef -# SHA1 Fingerprint: 55:a6:72:3e:cb:f2:ec:cd:c3:23:74:70:19:9d:2a:be:11:e3:81:d1 -# SHA256 Fingerprint: fd:73:da:d3:1c:64:4f:f1:b4:3b:ef:0c:cd:da:96:71:0b:9c:d9:87:5e:ca:7e:31:70:7a:f3:e9:6d:52:2b:bd ------BEGIN CERTIFICATE----- -MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx -KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd -BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl -YyBHbG9iYWxSb290IENsYXNzIDMwHhcNMDgxMDAxMTAyOTU2WhcNMzMxMDAxMjM1 -OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy -aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50 -ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDMwggEiMA0G -CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC9dZPwYiJvJK7genasfb3ZJNW4t/zN -8ELg63iIVl6bmlQdTQyK9tPPcPRStdiTBONGhnFBSivwKixVA9ZIw+A5OO3yXDw/ -RLyTPWGrTs0NvvAgJ1gORH8EGoel15YUNpDQSXuhdfsaa3Ox+M6pCSzyU9XDFES4 -hqX2iys52qMzVNn6chr3IhUciJFrf2blw2qAsCTz34ZFiP0Zf3WHHx+xGwpzJFu5 -ZeAsVMhg02YXP+HMVDNzkQI6pn97djmiH5a2OK61yJN0HZ65tOVgnS9W0eDrXltM -EnAMbEQgqxHY9Bn20pxSN+f6tsIxO0rUFJmtxxr1XV/6B7h8DR/Wgx6zAgMBAAGj -QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS1 -A/d2O2GCahKqGFPrAyGUv/7OyjANBgkqhkiG9w0BAQsFAAOCAQEAVj3vlNW92nOy -WL6ukK2YJ5f+AbGwUgC4TeQbIXQbfsDuXmkqJa9c1h3a0nnJ85cp4IaH3gRZD/FZ -1GSFS5mvJQQeyUapl96Cshtwn5z2r3Ex3XsFpSzTucpH9sry9uetuUg/vBa3wW30 -6gmv7PO15wWeph6KU1HWk4HMdJP2udqmJQV0eVp+QD6CSyYRMG7hP0HHRwA11fXT -91Q+gT3aSWqas+8QPebrb9HIIkfLzM8BMZLZGOMivgkeGj5asuRrDFR6fUNOuIml -e9eiPZaGzPImNC1qkp2aGtAw4l1OBLBfiyB+d8E9lYLRRpo7PHi4b6HQDWSieB4p -TpPDpFQUWw== ------END CERTIFICATE----- - -# Issuer: CN=EE Certification Centre Root CA O=AS Sertifitseerimiskeskus -# Subject: CN=EE Certification Centre Root CA O=AS Sertifitseerimiskeskus -# Label: "EE Certification Centre Root CA" -# Serial: 112324828676200291871926431888494945866 -# MD5 Fingerprint: 43:5e:88:d4:7d:1a:4a:7e:fd:84:2e:52:eb:01:d4:6f -# SHA1 Fingerprint: c9:a8:b9:e7:55:80:5e:58:e3:53:77:a7:25:eb:af:c3:7b:27:cc:d7 -# SHA256 Fingerprint: 3e:84:ba:43:42:90:85:16:e7:75:73:c0:99:2f:09:79:ca:08:4e:46:85:68:1f:f1:95:cc:ba:8a:22:9b:8a:76 ------BEGIN CERTIFICATE----- -MIIEAzCCAuugAwIBAgIQVID5oHPtPwBMyonY43HmSjANBgkqhkiG9w0BAQUFADB1 -MQswCQYDVQQGEwJFRTEiMCAGA1UECgwZQVMgU2VydGlmaXRzZWVyaW1pc2tlc2t1 -czEoMCYGA1UEAwwfRUUgQ2VydGlmaWNhdGlvbiBDZW50cmUgUm9vdCBDQTEYMBYG -CSqGSIb3DQEJARYJcGtpQHNrLmVlMCIYDzIwMTAxMDMwMTAxMDMwWhgPMjAzMDEy -MTcyMzU5NTlaMHUxCzAJBgNVBAYTAkVFMSIwIAYDVQQKDBlBUyBTZXJ0aWZpdHNl -ZXJpbWlza2Vza3VzMSgwJgYDVQQDDB9FRSBDZXJ0aWZpY2F0aW9uIENlbnRyZSBS -b290IENBMRgwFgYJKoZIhvcNAQkBFglwa2lAc2suZWUwggEiMA0GCSqGSIb3DQEB -AQUAA4IBDwAwggEKAoIBAQDIIMDs4MVLqwd4lfNE7vsLDP90jmG7sWLqI9iroWUy -euuOF0+W2Ap7kaJjbMeMTC55v6kF/GlclY1i+blw7cNRfdCT5mzrMEvhvH2/UpvO -bntl8jixwKIy72KyaOBhU8E2lf/slLo2rpwcpzIP5Xy0xm90/XsY6KxX7QYgSzIw 
-WFv9zajmofxwvI6Sc9uXp3whrj3B9UiHbCe9nyV0gVWw93X2PaRka9ZP585ArQ/d -MtO8ihJTmMmJ+xAdTX7Nfh9WDSFwhfYggx/2uh8Ej+p3iDXE/+pOoYtNP2MbRMNE -1CV2yreN1x5KZmTNXMWcg+HCCIia7E6j8T4cLNlsHaFLAgMBAAGjgYowgYcwDwYD -VR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFBLyWj7qVhy/ -zQas8fElyalL1BSZMEUGA1UdJQQ+MDwGCCsGAQUFBwMCBggrBgEFBQcDAQYIKwYB -BQUHAwMGCCsGAQUFBwMEBggrBgEFBQcDCAYIKwYBBQUHAwkwDQYJKoZIhvcNAQEF -BQADggEBAHv25MANqhlHt01Xo/6tu7Fq1Q+e2+RjxY6hUFaTlrg4wCQiZrxTFGGV -v9DHKpY5P30osxBAIWrEr7BSdxjhlthWXePdNl4dp1BUoMUq5KqMlIpPnTX/dqQG -E5Gion0ARD9V04I8GtVbvFZMIi5GQ4okQC3zErg7cBqklrkar4dBGmoYDQZPxz5u -uSlNDUmJEYcyW+ZLBMjkXOZ0c5RdFpgTlf7727FE5TpwrDdr5rMzcijJs1eg9gIW -iAYLtqZLICjU3j2LrTcFU3T+bsy8QxdxXvnFzBqpYe73dgzzcvRyrc9yAjYHR8/v -GVCJYMzpJJUPwssd8m92kMfMdcGWxZ0= ------END CERTIFICATE----- - -# Issuer: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH -# Subject: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH -# Label: "D-TRUST Root Class 3 CA 2 2009" -# Serial: 623603 -# MD5 Fingerprint: cd:e0:25:69:8d:47:ac:9c:89:35:90:f7:fd:51:3d:2f -# SHA1 Fingerprint: 58:e8:ab:b0:36:15:33:fb:80:f7:9b:1b:6d:29:d3:ff:8d:5f:00:f0 -# SHA256 Fingerprint: 49:e7:a4:42:ac:f0:ea:62:87:05:00:54:b5:25:64:b6:50:e4:f4:9e:42:e3:48:d6:aa:38:e0:39:e9:57:b1:c1 ------BEGIN CERTIFICATE----- -MIIEMzCCAxugAwIBAgIDCYPzMA0GCSqGSIb3DQEBCwUAME0xCzAJBgNVBAYTAkRF -MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMMHkQtVFJVU1QgUm9vdCBD -bGFzcyAzIENBIDIgMjAwOTAeFw0wOTExMDUwODM1NThaFw0yOTExMDUwODM1NTha -ME0xCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMM -HkQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgMjAwOTCCASIwDQYJKoZIhvcNAQEB -BQADggEPADCCAQoCggEBANOySs96R+91myP6Oi/WUEWJNTrGa9v+2wBoqOADER03 -UAifTUpolDWzU9GUY6cgVq/eUXjsKj3zSEhQPgrfRlWLJ23DEE0NkVJD2IfgXU42 -tSHKXzlABF9bfsyjxiupQB7ZNoTWSPOSHjRGICTBpFGOShrvUD9pXRl/RcPHAY9R -ySPocq60vFYJfxLLHLGvKZAKyVXMD9O0Gu1HNVpK7ZxzBCHQqr0ME7UAyiZsxGsM -lFqVlNpQmvH/pStmMaTJOKDfHR+4CS7zp+hnUquVH+BGPtikw8paxTGA6Eian5Rp -/hnd2HN8gcqW3o7tszIFZYQ05ub9VxC1X3a/L7AQDcUCAwEAAaOCARowggEWMA8G -A1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFP3aFMSfMN4hvR5COfyrYyNJ4PGEMA4G -A1UdDwEB/wQEAwIBBjCB0wYDVR0fBIHLMIHIMIGAoH6gfIZ6bGRhcDovL2RpcmVj -dG9yeS5kLXRydXN0Lm5ldC9DTj1ELVRSVVNUJTIwUm9vdCUyMENsYXNzJTIwMyUy -MENBJTIwMiUyMDIwMDksTz1ELVRydXN0JTIwR21iSCxDPURFP2NlcnRpZmljYXRl -cmV2b2NhdGlvbmxpc3QwQ6BBoD+GPWh0dHA6Ly93d3cuZC10cnVzdC5uZXQvY3Js -L2QtdHJ1c3Rfcm9vdF9jbGFzc18zX2NhXzJfMjAwOS5jcmwwDQYJKoZIhvcNAQEL -BQADggEBAH+X2zDI36ScfSF6gHDOFBJpiBSVYEQBrLLpME+bUMJm2H6NMLVwMeni -acfzcNsgFYbQDfC+rAF1hM5+n02/t2A7nPPKHeJeaNijnZflQGDSNiH+0LS4F9p0 -o3/U37CYAqxva2ssJSRyoWXuJVrl5jLn8t+rSfrzkGkj2wTZ51xY/GXUl77M/C4K -zCUqNQT4YJEVdT1B/yMfGchs64JTBKbkTCJNjYy6zltz7GRUUG3RnFX7acM2w4y8 -PIWmawomDeCTmGCufsYkl4phX5GOZpIJhzbNi5stPvZR1FDUWSi9g/LMKHtThm3Y -Johw1+qRzT65ysCQblrGXnRl11z+o+I= ------END CERTIFICATE----- - -# Issuer: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH -# Subject: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH -# Label: "D-TRUST Root Class 3 CA 2 EV 2009" -# Serial: 623604 -# MD5 Fingerprint: aa:c6:43:2c:5e:2d:cd:c4:34:c0:50:4f:11:02:4f:b6 -# SHA1 Fingerprint: 96:c9:1b:0b:95:b4:10:98:42:fa:d0:d8:22:79:fe:60:fa:b9:16:83 -# SHA256 Fingerprint: ee:c5:49:6b:98:8c:e9:86:25:b9:34:09:2e:ec:29:08:be:d0:b0:f3:16:c2:d4:73:0c:84:ea:f1:f3:d3:48:81 ------BEGIN CERTIFICATE----- -MIIEQzCCAyugAwIBAgIDCYP0MA0GCSqGSIb3DQEBCwUAMFAxCzAJBgNVBAYTAkRF -MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNVBAMMIUQtVFJVU1QgUm9vdCBD -bGFzcyAzIENBIDIgRVYgMjAwOTAeFw0wOTExMDUwODUwNDZaFw0yOTExMDUwODUw -NDZaMFAxCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNV 
-BAMMIUQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgRVYgMjAwOTCCASIwDQYJKoZI -hvcNAQEBBQADggEPADCCAQoCggEBAJnxhDRwui+3MKCOvXwEz75ivJn9gpfSegpn -ljgJ9hBOlSJzmY3aFS3nBfwZcyK3jpgAvDw9rKFs+9Z5JUut8Mxk2og+KbgPCdM0 -3TP1YtHhzRnp7hhPTFiu4h7WDFsVWtg6uMQYZB7jM7K1iXdODL/ZlGsTl28So/6Z -qQTMFexgaDbtCHu39b+T7WYxg4zGcTSHThfqr4uRjRxWQa4iN1438h3Z0S0NL2lR -p75mpoo6Kr3HGrHhFPC+Oh25z1uxav60sUYgovseO3Dvk5h9jHOW8sXvhXCtKSb8 -HgQ+HKDYD8tSg2J87otTlZCpV6LqYQXY+U3EJ/pure3511H3a6UCAwEAAaOCASQw -ggEgMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFNOUikxiEyoZLsyvcop9Ntea -HNxnMA4GA1UdDwEB/wQEAwIBBjCB3QYDVR0fBIHVMIHSMIGHoIGEoIGBhn9sZGFw -Oi8vZGlyZWN0b3J5LmQtdHJ1c3QubmV0L0NOPUQtVFJVU1QlMjBSb290JTIwQ2xh -c3MlMjAzJTIwQ0ElMjAyJTIwRVYlMjAyMDA5LE89RC1UcnVzdCUyMEdtYkgsQz1E -RT9jZXJ0aWZpY2F0ZXJldm9jYXRpb25saXN0MEagRKBChkBodHRwOi8vd3d3LmQt -dHJ1c3QubmV0L2NybC9kLXRydXN0X3Jvb3RfY2xhc3NfM19jYV8yX2V2XzIwMDku -Y3JsMA0GCSqGSIb3DQEBCwUAA4IBAQA07XtaPKSUiO8aEXUHL7P+PPoeUSbrh/Yp -3uDx1MYkCenBz1UbtDDZzhr+BlGmFaQt77JLvyAoJUnRpjZ3NOhk31KxEcdzes05 -nsKtjHEh8lprr988TlWvsoRlFIm5d8sqMb7Po23Pb0iUMkZv53GMoKaEGTcH8gNF -CSuGdXzfX2lXANtu2KZyIktQ1HWYVt+3GP9DQ1CuekR78HlR10M9p9OB0/DJT7na -xpeG0ILD5EJt/rDiZE4OJudANCa1CInXCGNjOCd1HjPqbqjdn5lPdE2BiYBL3ZqX -KVwvvoFBuYz/6n1gBp7N1z3TLqMVvKjmJuVvw9y4AyHqnxbxLFS1 ------END CERTIFICATE----- - -# Issuer: CN=CA Disig Root R2 O=Disig a.s. -# Subject: CN=CA Disig Root R2 O=Disig a.s. -# Label: "CA Disig Root R2" -# Serial: 10572350602393338211 -# MD5 Fingerprint: 26:01:fb:d8:27:a7:17:9a:45:54:38:1a:43:01:3b:03 -# SHA1 Fingerprint: b5:61:eb:ea:a4:de:e4:25:4b:69:1a:98:a5:57:47:c2:34:c7:d9:71 -# SHA256 Fingerprint: e2:3d:4a:03:6d:7b:70:e9:f5:95:b1:42:20:79:d2:b9:1e:df:bb:1f:b6:51:a0:63:3e:aa:8a:9d:c5:f8:07:03 ------BEGIN CERTIFICATE----- -MIIFaTCCA1GgAwIBAgIJAJK4iNuwisFjMA0GCSqGSIb3DQEBCwUAMFIxCzAJBgNV -BAYTAlNLMRMwEQYDVQQHEwpCcmF0aXNsYXZhMRMwEQYDVQQKEwpEaXNpZyBhLnMu -MRkwFwYDVQQDExBDQSBEaXNpZyBSb290IFIyMB4XDTEyMDcxOTA5MTUzMFoXDTQy -MDcxOTA5MTUzMFowUjELMAkGA1UEBhMCU0sxEzARBgNVBAcTCkJyYXRpc2xhdmEx -EzARBgNVBAoTCkRpc2lnIGEucy4xGTAXBgNVBAMTEENBIERpc2lnIFJvb3QgUjIw -ggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCio8QACdaFXS1tFPbCw3Oe -NcJxVX6B+6tGUODBfEl45qt5WDza/3wcn9iXAng+a0EE6UG9vgMsRfYvZNSrXaNH -PWSb6WiaxswbP7q+sos0Ai6YVRn8jG+qX9pMzk0DIaPY0jSTVpbLTAwAFjxfGs3I -x2ymrdMxp7zo5eFm1tL7A7RBZckQrg4FY8aAamkw/dLukO8NJ9+flXP04SXabBbe -QTg06ov80egEFGEtQX6sx3dOy1FU+16SGBsEWmjGycT6txOgmLcRK7fWV8x8nhfR -yyX+hk4kLlYMeE2eARKmK6cBZW58Yh2EhN/qwGu1pSqVg8NTEQxzHQuyRpDRQjrO -QG6Vrf/GlK1ul4SOfW+eioANSW1z4nuSHsPzwfPrLgVv2RvPN3YEyLRa5Beny912 -H9AZdugsBbPWnDTYltxhh5EF5EQIM8HauQhl1K6yNg3ruji6DOWbnuuNZt2Zz9aJ -QfYEkoopKW1rOhzndX0CcQ7zwOe9yxndnWCywmZgtrEE7snmhrmaZkCo5xHtgUUD -i/ZnWejBBhG93c+AAk9lQHhcR1DIm+YfgXvkRKhbhZri3lrVx/k6RGZL5DJUfORs -nLMOPReisjQS1n6yqEm70XooQL6iFh/f5DcfEXP7kAplQ6INfPgGAVUzfbANuPT1 -rqVCV3w2EYx7XsQDnYx5nQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1Ud -DwEB/wQEAwIBBjAdBgNVHQ4EFgQUtZn4r7CU9eMg1gqtzk5WpC5uQu0wDQYJKoZI -hvcNAQELBQADggIBACYGXnDnZTPIgm7ZnBc6G3pmsgH2eDtpXi/q/075KMOYKmFM -tCQSin1tERT3nLXK5ryeJ45MGcipvXrA1zYObYVybqjGom32+nNjf7xueQgcnYqf -GopTpti72TVVsRHFqQOzVju5hJMiXn7B9hJSi+osZ7z+Nkz1uM/Rs0mSO9MpDpkb -lvdhuDvEK7Z4bLQjb/D907JedR+Zlais9trhxTF7+9FGs9K8Z7RiVLoJ92Owk6Ka -+elSLotgEqv89WBW7xBci8QaQtyDW2QOy7W81k/BfDxujRNt+3vrMNDcTa/F1bal -TFtxyegxvug4BkihGuLq0t4SOVga/4AOgnXmt8kHbA7v/zjxmHHEt38OFdAlab0i -nSvtBfZGR6ztwPDUO+Ls7pZbkBNOHlY667DvlruWIxG68kOGdGSVyCh13x01utI3 -gzhTODY7z2zp+WsO0PsE6E9312UBeIYMej4hYvF/Y3EMyZ9E26gnonW+boE+18Dr -G5gPcFw0sorMwIUY6256s/daoQe/qUKS82Ail+QUoQebTnbAjn39pCXHR+3/H3Os 
-zMOl6W8KjptlwlCFtaOgUxLMVYdh84GuEEZhvUQhuMI9dM9+JDX6HAcOmz0iyu8x -L4ysEr3vQCj8KWefshNPZiTEUxnpHikV7+ZtsH8tZ/3zbBt1RqPlShfppNcL ------END CERTIFICATE----- - -# Issuer: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV -# Subject: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV -# Label: "ACCVRAIZ1" -# Serial: 6828503384748696800 -# MD5 Fingerprint: d0:a0:5a:ee:05:b6:09:94:21:a1:7d:f1:b2:29:82:02 -# SHA1 Fingerprint: 93:05:7a:88:15:c6:4f:ce:88:2f:fa:91:16:52:28:78:bc:53:64:17 -# SHA256 Fingerprint: 9a:6e:c0:12:e1:a7:da:9d:be:34:19:4d:47:8a:d7:c0:db:18:22:fb:07:1d:f1:29:81:49:6e:d1:04:38:41:13 ------BEGIN CERTIFICATE----- -MIIH0zCCBbugAwIBAgIIXsO3pkN/pOAwDQYJKoZIhvcNAQEFBQAwQjESMBAGA1UE -AwwJQUNDVlJBSVoxMRAwDgYDVQQLDAdQS0lBQ0NWMQ0wCwYDVQQKDARBQ0NWMQsw -CQYDVQQGEwJFUzAeFw0xMTA1MDUwOTM3MzdaFw0zMDEyMzEwOTM3MzdaMEIxEjAQ -BgNVBAMMCUFDQ1ZSQUlaMTEQMA4GA1UECwwHUEtJQUNDVjENMAsGA1UECgwEQUND -VjELMAkGA1UEBhMCRVMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCb -qau/YUqXry+XZpp0X9DZlv3P4uRm7x8fRzPCRKPfmt4ftVTdFXxpNRFvu8gMjmoY -HtiP2Ra8EEg2XPBjs5BaXCQ316PWywlxufEBcoSwfdtNgM3802/J+Nq2DoLSRYWo -G2ioPej0RGy9ocLLA76MPhMAhN9KSMDjIgro6TenGEyxCQ0jVn8ETdkXhBilyNpA -lHPrzg5XPAOBOp0KoVdDaaxXbXmQeOW1tDvYvEyNKKGno6e6Ak4l0Squ7a4DIrhr -IA8wKFSVf+DuzgpmndFALW4ir50awQUZ0m/A8p/4e7MCQvtQqR0tkw8jq8bBD5L/ -0KIV9VMJcRz/RROE5iZe+OCIHAr8Fraocwa48GOEAqDGWuzndN9wrqODJerWx5eH -k6fGioozl2A3ED6XPm4pFdahD9GILBKfb6qkxkLrQaLjlUPTAYVtjrs78yM2x/47 -4KElB0iryYl0/wiPgL/AlmXz7uxLaL2diMMxs0Dx6M/2OLuc5NF/1OVYm3z61PMO -m3WR5LpSLhl+0fXNWhn8ugb2+1KoS5kE3fj5tItQo05iifCHJPqDQsGH+tUtKSpa -cXpkatcnYGMN285J9Y0fkIkyF/hzQ7jSWpOGYdbhdQrqeWZ2iE9x6wQl1gpaepPl -uUsXQA+xtrn13k/c4LOsOxFwYIRKQ26ZIMApcQrAZQIDAQABo4ICyzCCAscwfQYI -KwYBBQUHAQEEcTBvMEwGCCsGAQUFBzAChkBodHRwOi8vd3d3LmFjY3YuZXMvZmls -ZWFkbWluL0FyY2hpdm9zL2NlcnRpZmljYWRvcy9yYWl6YWNjdjEuY3J0MB8GCCsG -AQUFBzABhhNodHRwOi8vb2NzcC5hY2N2LmVzMB0GA1UdDgQWBBTSh7Tj3zcnk1X2 -VuqB5TbMjB4/vTAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFNKHtOPfNyeT -VfZW6oHlNsyMHj+9MIIBcwYDVR0gBIIBajCCAWYwggFiBgRVHSAAMIIBWDCCASIG -CCsGAQUFBwICMIIBFB6CARAAQQB1AHQAbwByAGkAZABhAGQAIABkAGUAIABDAGUA -cgB0AGkAZgBpAGMAYQBjAGkA8wBuACAAUgBhAO0AegAgAGQAZQAgAGwAYQAgAEEA -QwBDAFYAIAAoAEEAZwBlAG4AYwBpAGEAIABkAGUAIABUAGUAYwBuAG8AbABvAGcA -7QBhACAAeQAgAEMAZQByAHQAaQBmAGkAYwBhAGMAaQDzAG4AIABFAGwAZQBjAHQA -cgDzAG4AaQBjAGEALAAgAEMASQBGACAAUQA0ADYAMAAxADEANQA2AEUAKQAuACAA -QwBQAFMAIABlAG4AIABoAHQAdABwADoALwAvAHcAdwB3AC4AYQBjAGMAdgAuAGUA -czAwBggrBgEFBQcCARYkaHR0cDovL3d3dy5hY2N2LmVzL2xlZ2lzbGFjaW9uX2Mu -aHRtMFUGA1UdHwROMEwwSqBIoEaGRGh0dHA6Ly93d3cuYWNjdi5lcy9maWxlYWRt -aW4vQXJjaGl2b3MvY2VydGlmaWNhZG9zL3JhaXphY2N2MV9kZXIuY3JsMA4GA1Ud -DwEB/wQEAwIBBjAXBgNVHREEEDAOgQxhY2N2QGFjY3YuZXMwDQYJKoZIhvcNAQEF -BQADggIBAJcxAp/n/UNnSEQU5CmH7UwoZtCPNdpNYbdKl02125DgBS4OxnnQ8pdp -D70ER9m+27Up2pvZrqmZ1dM8MJP1jaGo/AaNRPTKFpV8M9xii6g3+CfYCS0b78gU -JyCpZET/LtZ1qmxNYEAZSUNUY9rizLpm5U9EelvZaoErQNV/+QEnWCzI7UiRfD+m -AM/EKXMRNt6GGT6d7hmKG9Ww7Y49nCrADdg9ZuM8Db3VlFzi4qc1GwQA9j9ajepD -vV+JHanBsMyZ4k0ACtrJJ1vnE5Bc5PUzolVt3OAJTS+xJlsndQAJxGJ3KQhfnlms -tn6tn1QwIgPBHnFk/vk4CpYY3QIUrCPLBhwepH2NDd4nQeit2hW3sCPdK6jT2iWH -7ehVRE2I9DZ+hJp4rPcOVkkO1jMl1oRQQmwgEh0q1b688nCBpHBgvgW1m54ERL5h -I6zppSSMEYCUWqKiuUnSwdzRp+0xESyeGabu4VXhwOrPDYTkF7eifKXeVSUG7szA -h1xA2syVP1XgNce4hL60Xc16gwFy7ofmXx2utYXGJt/mwZrpHgJHnyqobalbz+xF -d3+YJ5oyXSrjhO7FmGYvliAd3djDJ9ew+f7Zfc3Qn48LFFhRny+Lwzgt3uiP1o2H -pPVWQxaZLPSkVrQ0uGE3ycJYgBugl6H8WY3pEfbRD0tVNEYqi4Y7 ------END CERTIFICATE----- - -# Issuer: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA -# Subject: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA -# Label: "TWCA Global Root CA" -# Serial: 3262 -# MD5 
Fingerprint: f9:03:7e:cf:e6:9e:3c:73:7a:2a:90:07:69:ff:2b:96 -# SHA1 Fingerprint: 9c:bb:48:53:f6:a4:f6:d3:52:a4:e8:32:52:55:60:13:f5:ad:af:65 -# SHA256 Fingerprint: 59:76:90:07:f7:68:5d:0f:cd:50:87:2f:9f:95:d5:75:5a:5b:2b:45:7d:81:f3:69:2b:61:0a:98:67:2f:0e:1b ------BEGIN CERTIFICATE----- -MIIFQTCCAymgAwIBAgICDL4wDQYJKoZIhvcNAQELBQAwUTELMAkGA1UEBhMCVFcx -EjAQBgNVBAoTCVRBSVdBTi1DQTEQMA4GA1UECxMHUm9vdCBDQTEcMBoGA1UEAxMT -VFdDQSBHbG9iYWwgUm9vdCBDQTAeFw0xMjA2MjcwNjI4MzNaFw0zMDEyMzExNTU5 -NTlaMFExCzAJBgNVBAYTAlRXMRIwEAYDVQQKEwlUQUlXQU4tQ0ExEDAOBgNVBAsT -B1Jvb3QgQ0ExHDAaBgNVBAMTE1RXQ0EgR2xvYmFsIFJvb3QgQ0EwggIiMA0GCSqG -SIb3DQEBAQUAA4ICDwAwggIKAoICAQCwBdvI64zEbooh745NnHEKH1Jw7W2CnJfF -10xORUnLQEK1EjRsGcJ0pDFfhQKX7EMzClPSnIyOt7h52yvVavKOZsTuKwEHktSz -0ALfUPZVr2YOy+BHYC8rMjk1Ujoog/h7FsYYuGLWRyWRzvAZEk2tY/XTP3VfKfCh -MBwqoJimFb3u/Rk28OKRQ4/6ytYQJ0lM793B8YVwm8rqqFpD/G2Gb3PpN0Wp8DbH -zIh1HrtsBv+baz4X7GGqcXzGHaL3SekVtTzWoWH1EfcFbx39Eb7QMAfCKbAJTibc -46KokWofwpFFiFzlmLhxpRUZyXx1EcxwdE8tmx2RRP1WKKD+u4ZqyPpcC1jcxkt2 -yKsi2XMPpfRaAok/T54igu6idFMqPVMnaR1sjjIsZAAmY2E2TqNGtz99sy2sbZCi -laLOz9qC5wc0GZbpuCGqKX6mOL6OKUohZnkfs8O1CWfe1tQHRvMq2uYiN2DLgbYP -oA/pyJV/v1WRBXrPPRXAb94JlAGD1zQbzECl8LibZ9WYkTunhHiVJqRaCPgrdLQA -BDzfuBSO6N+pjWxnkjMdwLfS7JLIvgm/LCkFbwJrnu+8vyq8W8BQj0FwcYeyTbcE -qYSjMq+u7msXi7Kx/mzhkIyIqJdIzshNy/MGz19qCkKxHh53L46g5pIOBvwFItIm -4TFRfTLcDwIDAQABoyMwITAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB -/zANBgkqhkiG9w0BAQsFAAOCAgEAXzSBdu+WHdXltdkCY4QWwa6gcFGn90xHNcgL -1yg9iXHZqjNB6hQbbCEAwGxCGX6faVsgQt+i0trEfJdLjbDorMjupWkEmQqSpqsn -LhpNgb+E1HAerUf+/UqdM+DyucRFCCEK2mlpc3INvjT+lIutwx4116KD7+U4x6WF -H6vPNOw/KP4M8VeGTslV9xzU2KV9Bnpv1d8Q34FOIWWxtuEXeZVFBs5fzNxGiWNo -RI2T9GRwoD2dKAXDOXC4Ynsg/eTb6QihuJ49CcdP+yz4k3ZB3lLg4VfSnQO8d57+ -nile98FRYB/e2guyLXW3Q0iT5/Z5xoRdgFlglPx4mI88k1HtQJAH32RjJMtOcQWh -15QaiDLxInQirqWm2BJpTGCjAu4r7NRjkgtevi92a6O2JryPA9gK8kxkRr05YuWW -6zRjESjMlfGt7+/cgFhI6Uu46mWs6fyAtbXIRfmswZ/ZuepiiI7E8UuDEq3mi4TW -nsLrgxifarsbJGAzcMzs9zLzXNl5fe+epP7JI8Mk7hWSsT2RTyaGvWZzJBPqpK5j -wa19hAM8EHiGG3njxPPyBJUgriOCxLM6AGK/5jYk4Ve6xx6QddVfP5VhK8E7zeWz -aGHQRiapIVJpLesux+t3zqY6tQMzT3bR51xUAV3LePTJDL/PEo4XLSNolOer/qmy -KwbQBM0= ------END CERTIFICATE----- - -# Issuer: CN=TeliaSonera Root CA v1 O=TeliaSonera -# Subject: CN=TeliaSonera Root CA v1 O=TeliaSonera -# Label: "TeliaSonera Root CA v1" -# Serial: 199041966741090107964904287217786801558 -# MD5 Fingerprint: 37:41:49:1b:18:56:9a:26:f5:ad:c2:66:fb:40:a5:4c -# SHA1 Fingerprint: 43:13:bb:96:f1:d5:86:9b:c1:4e:6a:92:f6:cf:f6:34:69:87:82:37 -# SHA256 Fingerprint: dd:69:36:fe:21:f8:f0:77:c1:23:a1:a5:21:c1:22:24:f7:22:55:b7:3e:03:a7:26:06:93:e8:a2:4b:0f:a3:89 ------BEGIN CERTIFICATE----- -MIIFODCCAyCgAwIBAgIRAJW+FqD3LkbxezmCcvqLzZYwDQYJKoZIhvcNAQEFBQAw -NzEUMBIGA1UECgwLVGVsaWFTb25lcmExHzAdBgNVBAMMFlRlbGlhU29uZXJhIFJv -b3QgQ0EgdjEwHhcNMDcxMDE4MTIwMDUwWhcNMzIxMDE4MTIwMDUwWjA3MRQwEgYD -VQQKDAtUZWxpYVNvbmVyYTEfMB0GA1UEAwwWVGVsaWFTb25lcmEgUm9vdCBDQSB2 -MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMK+6yfwIaPzaSZVfp3F -VRaRXP3vIb9TgHot0pGMYzHw7CTww6XScnwQbfQ3t+XmfHnqjLWCi65ItqwA3GV1 -7CpNX8GH9SBlK4GoRz6JI5UwFpB/6FcHSOcZrr9FZ7E3GwYq/t75rH2D+1665I+X -Z75Ljo1kB1c4VWk0Nj0TSO9P4tNmHqTPGrdeNjPUtAa9GAH9d4RQAEX1jF3oI7x+ -/jXh7VB7qTCNGdMJjmhnXb88lxhTuylixcpecsHHltTbLaC0H2kD7OriUPEMPPCs -81Mt8Bz17Ww5OXOAFshSsCPN4D7c3TxHoLs1iuKYaIu+5b9y7tL6pe0S7fyYGKkm -dtwoSxAgHNN/Fnct7W+A90m7UwW7XWjH1Mh1Fj+JWov3F0fUTPHSiXk+TT2YqGHe -Oh7S+F4D4MHJHIzTjU3TlTazN19jY5szFPAtJmtTfImMMsJu7D0hADnJoWjiUIMu -sDor8zagrC/kb2HCUQk5PotTubtn2txTuXZZNp1D5SDgPTJghSJRt8czu90VL6R4 
-pgd7gUY2BIbdeTXHlSw7sKMXNeVzH7RcWe/a6hBle3rQf5+ztCo3O3CLm1u5K7fs -slESl1MpWtTwEhDcTwK7EpIvYtQ/aUN8Ddb8WHUBiJ1YFkveupD/RwGJBmr2X7KQ -arMCpgKIv7NHfirZ1fpoeDVNAgMBAAGjPzA9MA8GA1UdEwEB/wQFMAMBAf8wCwYD -VR0PBAQDAgEGMB0GA1UdDgQWBBTwj1k4ALP1j5qWDNXr+nuqF+gTEjANBgkqhkiG -9w0BAQUFAAOCAgEAvuRcYk4k9AwI//DTDGjkk0kiP0Qnb7tt3oNmzqjMDfz1mgbl -dxSR651Be5kqhOX//CHBXfDkH1e3damhXwIm/9fH907eT/j3HEbAek9ALCI18Bmx -0GtnLLCo4MBANzX2hFxc469CeP6nyQ1Q6g2EdvZR74NTxnr/DlZJLo961gzmJ1Tj -TQpgcmLNkQfWpb/ImWvtxBnmq0wROMVvMeJuScg/doAmAyYp4Db29iBT4xdwNBed -Y2gea+zDTYa4EzAvXUYNR0PVG6pZDrlcjQZIrXSHX8f8MVRBE+LHIQ6e4B4N4cB7 -Q4WQxYpYxmUKeFfyxiMPAdkgS94P+5KFdSpcc41teyWRyu5FrgZLAMzTsVlQ2jqI -OylDRl6XK1TOU2+NSueW+r9xDkKLfP0ooNBIytrEgUy7onOTJsjrDNYmiLbAJM+7 -vVvrdX3pCI6GMyx5dwlppYn8s3CQh3aP0yK7Qs69cwsgJirQmz1wHiRszYd2qReW -t88NkvuOGKmYSdGe/mBEciG5Ge3C9THxOUiIkCR1VBatzvT4aRRkOfujuLpwQMcn -HL/EVlP6Y2XQ8xwOFvVrhlhNGNTkDY6lnVuR3HYkUD/GKvvZt5y11ubQ2egZixVx -SK236thZiNSQvxaz2emsWWFUyBy6ysHK4bkgTI86k4mloMy/0/Z1pHWWbVY= ------END CERTIFICATE----- - -# Issuer: CN=E-Tugra Certification Authority O=E-Tu\u011fra EBG Bili\u015fim Teknolojileri ve Hizmetleri A.\u015e. OU=E-Tugra Sertifikasyon Merkezi -# Subject: CN=E-Tugra Certification Authority O=E-Tu\u011fra EBG Bili\u015fim Teknolojileri ve Hizmetleri A.\u015e. OU=E-Tugra Sertifikasyon Merkezi -# Label: "E-Tugra Certification Authority" -# Serial: 7667447206703254355 -# MD5 Fingerprint: b8:a1:03:63:b0:bd:21:71:70:8a:6f:13:3a:bb:79:49 -# SHA1 Fingerprint: 51:c6:e7:08:49:06:6e:f3:92:d4:5c:a0:0d:6d:a3:62:8f:c3:52:39 -# SHA256 Fingerprint: b0:bf:d5:2b:b0:d7:d9:bd:92:bf:5d:4d:c1:3d:a2:55:c0:2c:54:2f:37:83:65:ea:89:39:11:f5:5e:55:f2:3c ------BEGIN CERTIFICATE----- -MIIGSzCCBDOgAwIBAgIIamg+nFGby1MwDQYJKoZIhvcNAQELBQAwgbIxCzAJBgNV -BAYTAlRSMQ8wDQYDVQQHDAZBbmthcmExQDA+BgNVBAoMN0UtVHXEn3JhIEVCRyBC -aWxpxZ9pbSBUZWtub2xvamlsZXJpIHZlIEhpem1ldGxlcmkgQS7Fni4xJjAkBgNV -BAsMHUUtVHVncmEgU2VydGlmaWthc3lvbiBNZXJrZXppMSgwJgYDVQQDDB9FLVR1 -Z3JhIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTEzMDMwNTEyMDk0OFoXDTIz -MDMwMzEyMDk0OFowgbIxCzAJBgNVBAYTAlRSMQ8wDQYDVQQHDAZBbmthcmExQDA+ -BgNVBAoMN0UtVHXEn3JhIEVCRyBCaWxpxZ9pbSBUZWtub2xvamlsZXJpIHZlIEhp -em1ldGxlcmkgQS7Fni4xJjAkBgNVBAsMHUUtVHVncmEgU2VydGlmaWthc3lvbiBN -ZXJrZXppMSgwJgYDVQQDDB9FLVR1Z3JhIENlcnRpZmljYXRpb24gQXV0aG9yaXR5 -MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA4vU/kwVRHoViVF56C/UY -B4Oufq9899SKa6VjQzm5S/fDxmSJPZQuVIBSOTkHS0vdhQd2h8y/L5VMzH2nPbxH -D5hw+IyFHnSOkm0bQNGZDbt1bsipa5rAhDGvykPL6ys06I+XawGb1Q5KCKpbknSF -Q9OArqGIW66z6l7LFpp3RMih9lRozt6Plyu6W0ACDGQXwLWTzeHxE2bODHnv0ZEo -q1+gElIwcxmOj+GMB6LDu0rw6h8VqO4lzKRG+Bsi77MOQ7osJLjFLFzUHPhdZL3D -k14opz8n8Y4e0ypQBaNV2cvnOVPAmJ6MVGKLJrD3fY185MaeZkJVgkfnsliNZvcH -fC425lAcP9tDJMW/hkd5s3kc91r0E+xs+D/iWR+V7kI+ua2oMoVJl0b+SzGPWsut -dEcf6ZG33ygEIqDUD13ieU/qbIWGvaimzuT6w+Gzrt48Ue7LE3wBf4QOXVGUnhMM -ti6lTPk5cDZvlsouDERVxcr6XQKj39ZkjFqzAQqptQpHF//vkUAqjqFGOjGY5RH8 -zLtJVor8udBhmm9lbObDyz51Sf6Pp+KJxWfXnUYTTjF2OySznhFlhqt/7x3U+Lzn -rFpct1pHXFXOVbQicVtbC/DP3KBhZOqp12gKY6fgDT+gr9Oq0n7vUaDmUStVkhUX -U8u3Zg5mTPj5dUyQ5xJwx0UCAwEAAaNjMGEwHQYDVR0OBBYEFC7j27JJ0JxUeVz6 -Jyr+zE7S6E5UMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAULuPbsknQnFR5 -XPonKv7MTtLoTlQwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAF -Nzr0TbdF4kV1JI+2d1LoHNgQk2Xz8lkGpD4eKexd0dCrfOAKkEh47U6YA5n+KGCR -HTAduGN8qOY1tfrTYXbm1gdLymmasoR6d5NFFxWfJNCYExL/u6Au/U5Mh/jOXKqY -GwXgAEZKgoClM4so3O0409/lPun++1ndYYRP0lSWE2ETPo+Aab6TR7U1Q9Jauz1c -77NCR807VRMGsAnb/WP2OogKmW9+4c4bU2pEZiNRCHu8W1Ki/QY3OEBhj0qWuJA3 -+GbHeJAAFS6LrVE1Uweoa2iu+U48BybNCAVwzDk/dr2l02cmAYamU9JgO3xDf1WK 
-vJUawSg5TB9D0pH0clmKuVb8P7Sd2nCcdlqMQ1DujjByTd//SffGqWfZbawCEeI6 -FiWnWAjLb1NBnEg4R2gz0dfHj9R0IdTDBZB6/86WiLEVKV0jq9BgoRJP3vQXzTLl -yb/IQ639Lo7xr+L0mPoSHyDYwKcMhcWQ9DstliaxLL5Mq+ux0orJ23gTDx4JnW2P -AJ8C2sH6H3p6CcRK5ogql5+Ji/03X186zjhZhkuvcQu02PJwT58yE+Owp1fl2tpD -y4Q08ijE6m30Ku/Ba3ba+367hTzSU8JNvnHhRdH9I2cNE3X7z2VnIp2usAnRCf8d -NL/+I5c30jn6PQ0GC7TbO6Orb1wdtn7os4I07QZcJA== ------END CERTIFICATE----- - -# Issuer: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center -# Subject: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center -# Label: "T-TeleSec GlobalRoot Class 2" -# Serial: 1 -# MD5 Fingerprint: 2b:9b:9e:e4:7b:6c:1f:00:72:1a:cc:c1:77:79:df:6a -# SHA1 Fingerprint: 59:0d:2d:7d:88:4f:40:2e:61:7e:a5:62:32:17:65:cf:17:d8:94:e9 -# SHA256 Fingerprint: 91:e2:f5:78:8d:58:10:eb:a7:ba:58:73:7d:e1:54:8a:8e:ca:cd:01:45:98:bc:0b:14:3e:04:1b:17:05:25:52 ------BEGIN CERTIFICATE----- -MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx -KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd -BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl -YyBHbG9iYWxSb290IENsYXNzIDIwHhcNMDgxMDAxMTA0MDE0WhcNMzMxMDAxMjM1 -OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy -aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50 -ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDIwggEiMA0G -CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCqX9obX+hzkeXaXPSi5kfl82hVYAUd -AqSzm1nzHoqvNK38DcLZSBnuaY/JIPwhqgcZ7bBcrGXHX+0CfHt8LRvWurmAwhiC -FoT6ZrAIxlQjgeTNuUk/9k9uN0goOA/FvudocP05l03Sx5iRUKrERLMjfTlH6VJi -1hKTXrcxlkIF+3anHqP1wvzpesVsqXFP6st4vGCvx9702cu+fjOlbpSD8DT6Iavq -jnKgP6TeMFvvhk1qlVtDRKgQFRzlAVfFmPHmBiiRqiDFt1MmUUOyCxGVWOHAD3bZ -wI18gfNycJ5v/hqO2V81xrJvNHy+SE/iWjnX2J14np+GPgNeGYtEotXHAgMBAAGj -QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS/ -WSA2AHmgoCJrjNXyYdK4LMuCSjANBgkqhkiG9w0BAQsFAAOCAQEAMQOiYQsfdOhy -NsZt+U2e+iKo4YFWz827n+qrkRk4r6p8FU3ztqONpfSO9kSpp+ghla0+AGIWiPAC -uvxhI+YzmzB6azZie60EI4RYZeLbK4rnJVM3YlNfvNoBYimipidx5joifsFvHZVw -IEoHNN/q/xWA5brXethbdXwFeilHfkCoMRN3zUA7tFFHei4R40cR3p1m0IvVVGb6 -g1XqfMIpiRvpb7PO4gWEyS8+eIVibslfwXhjdFjASBgMmTnrpMwatXlajRWc2BQN -9noHV8cigwUtPJslJj0Ys6lDfMjIq2SPDqO/nBudMNva0Bkuqjzx+zOAduTNrRlP -BSeOE6Fuwg== ------END CERTIFICATE----- - -# Issuer: CN=Atos TrustedRoot 2011 O=Atos -# Subject: CN=Atos TrustedRoot 2011 O=Atos -# Label: "Atos TrustedRoot 2011" -# Serial: 6643877497813316402 -# MD5 Fingerprint: ae:b9:c4:32:4b:ac:7f:5d:66:cc:77:94:bb:2a:77:56 -# SHA1 Fingerprint: 2b:b1:f5:3e:55:0c:1d:c5:f1:d4:e6:b7:6a:46:4b:55:06:02:ac:21 -# SHA256 Fingerprint: f3:56:be:a2:44:b7:a9:1e:b3:5d:53:ca:9a:d7:86:4a:ce:01:8e:2d:35:d5:f8:f9:6d:df:68:a6:f4:1a:a4:74 ------BEGIN CERTIFICATE----- -MIIDdzCCAl+gAwIBAgIIXDPLYixfszIwDQYJKoZIhvcNAQELBQAwPDEeMBwGA1UE -AwwVQXRvcyBUcnVzdGVkUm9vdCAyMDExMQ0wCwYDVQQKDARBdG9zMQswCQYDVQQG -EwJERTAeFw0xMTA3MDcxNDU4MzBaFw0zMDEyMzEyMzU5NTlaMDwxHjAcBgNVBAMM -FUF0b3MgVHJ1c3RlZFJvb3QgMjAxMTENMAsGA1UECgwEQXRvczELMAkGA1UEBhMC -REUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCVhTuXbyo7LjvPpvMp -Nb7PGKw+qtn4TaA+Gke5vJrf8v7MPkfoepbCJI419KkM/IL9bcFyYie96mvr54rM -VD6QUM+A1JX76LWC1BTFtqlVJVfbsVD2sGBkWXppzwO3bw2+yj5vdHLqqjAqc2K+ -SZFhyBH+DgMq92og3AIVDV4VavzjgsG1xZ1kCWyjWZgHJ8cblithdHFsQ/H3NYkQ -4J7sVaE3IqKHBAUsR320HLliKWYoyrfhk/WklAOZuXCFteZI6o1Q/NnezG8HDt0L -cp2AMBYHlT8oDv3FdU9T1nSatCQujgKRz3bFmx5VdJx4IbHwLfELn8LVlhgf8FQi -eowHAgMBAAGjfTB7MB0GA1UdDgQWBBSnpQaxLKYJYO7Rl+lwrrw7GWzbITAPBgNV 
-HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFKelBrEspglg7tGX6XCuvDsZbNshMBgG -A1UdIAQRMA8wDQYLKwYBBAGwLQMEAQEwDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3 -DQEBCwUAA4IBAQAmdzTblEiGKkGdLD4GkGDEjKwLVLgfuXvTBznk+j57sj1O7Z8j -vZfza1zv7v1Apt+hk6EKhqzvINB5Ab149xnYJDE0BAGmuhWawyfc2E8PzBhj/5kP -DpFrdRbhIfzYJsdHt6bPWHJxfrrhTZVHO8mvbaG0weyJ9rQPOLXiZNwlz6bb65pc -maHFCN795trV1lpFDMS3wrUU77QR/w4VtfX128a961qn8FYiqTxlVMYVqL2Gns2D -lmh6cYGJ4Qvh6hEbaAjMaZ7snkGeRDImeuKHCnE96+RapNLbxc3G3mB/ufNPRJLv -KrcYPqcZ2Qt9sTdBQrC6YB3y/gkRsPCHe6ed ------END CERTIFICATE----- - -# Issuer: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited -# Subject: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited -# Label: "QuoVadis Root CA 1 G3" -# Serial: 687049649626669250736271037606554624078720034195 -# MD5 Fingerprint: a4:bc:5b:3f:fe:37:9a:fa:64:f0:e2:fa:05:3d:0b:ab -# SHA1 Fingerprint: 1b:8e:ea:57:96:29:1a:c9:39:ea:b8:0a:81:1a:73:73:c0:93:79:67 -# SHA256 Fingerprint: 8a:86:6f:d1:b2:76:b5:7e:57:8e:92:1c:65:82:8a:2b:ed:58:e9:f2:f2:88:05:41:34:b7:f1:f4:bf:c9:cc:74 ------BEGIN CERTIFICATE----- -MIIFYDCCA0igAwIBAgIUeFhfLq0sGUvjNwc1NBMotZbUZZMwDQYJKoZIhvcNAQEL -BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc -BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMSBHMzAeFw0xMjAxMTIxNzI3NDRaFw00 -MjAxMTIxNzI3NDRaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM -aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDEgRzMwggIiMA0GCSqG -SIb3DQEBAQUAA4ICDwAwggIKAoICAQCgvlAQjunybEC0BJyFuTHK3C3kEakEPBtV -wedYMB0ktMPvhd6MLOHBPd+C5k+tR4ds7FtJwUrVu4/sh6x/gpqG7D0DmVIB0jWe -rNrwU8lmPNSsAgHaJNM7qAJGr6Qc4/hzWHa39g6QDbXwz8z6+cZM5cOGMAqNF341 -68Xfuw6cwI2H44g4hWf6Pser4BOcBRiYz5P1sZK0/CPTz9XEJ0ngnjybCKOLXSoh -4Pw5qlPafX7PGglTvF0FBM+hSo+LdoINofjSxxR3W5A2B4GbPgb6Ul5jxaYA/qXp -UhtStZI5cgMJYr2wYBZupt0lwgNm3fME0UDiTouG9G/lg6AnhF4EwfWQvTA9xO+o -abw4m6SkltFi2mnAAZauy8RRNOoMqv8hjlmPSlzkYZqn0ukqeI1RPToV7qJZjqlc -3sX5kCLliEVx3ZGZbHqfPT2YfF72vhZooF6uCyP8Wg+qInYtyaEQHeTTRCOQiJ/G -KubX9ZqzWB4vMIkIG1SitZgj7Ah3HJVdYdHLiZxfokqRmu8hqkkWCKi9YSgxyXSt -hfbZxbGL0eUQMk1fiyA6PEkfM4VZDdvLCXVDaXP7a3F98N/ETH3Goy7IlXnLc6KO -Tk0k+17kBL5yG6YnLUlamXrXXAkgt3+UuU/xDRxeiEIbEbfnkduebPRq34wGmAOt -zCjvpUfzUwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB -BjAdBgNVHQ4EFgQUo5fW816iEOGrRZ88F2Q87gFwnMwwDQYJKoZIhvcNAQELBQAD -ggIBABj6W3X8PnrHX3fHyt/PX8MSxEBd1DKquGrX1RUVRpgjpeaQWxiZTOOtQqOC -MTaIzen7xASWSIsBx40Bz1szBpZGZnQdT+3Btrm0DWHMY37XLneMlhwqI2hrhVd2 -cDMT/uFPpiN3GPoajOi9ZcnPP/TJF9zrx7zABC4tRi9pZsMbj/7sPtPKlL92CiUN -qXsCHKnQO18LwIE6PWThv6ctTr1NxNgpxiIY0MWscgKCP6o6ojoilzHdCGPDdRS5 -YCgtW2jgFqlmgiNR9etT2DGbe+m3nUvriBbP+V04ikkwj+3x6xn0dxoxGE1nVGwv -b2X52z3sIexe9PSLymBlVNFxZPT5pqOBMzYzcfCkeF9OrYMh3jRJjehZrJ3ydlo2 -8hP0r+AJx2EqbPfgna67hkooby7utHnNkDPDs3b69fBsnQGQ+p6Q9pxyz0fawx/k -NSBT8lTR32GDpgLiJTjehTItXnOQUl1CxM49S+H5GYQd1aJQzEH7QRTDvdbJWqNj -ZgKAvQU6O0ec7AAmTPWIUb+oI38YB7AL7YsmoWTTYUrrXJ/es69nA7Mf3W1daWhp -q1467HxpvMc7hU6eFbm0FU/DlXpY18ls6Wy58yljXrQs8C097Vpl4KlbQMJImYFt -nh8GKjwStIsPm6Ik8KaN1nrgS7ZklmOVhMJKzRwuJIczYOXD ------END CERTIFICATE----- - -# Issuer: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited -# Subject: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited -# Label: "QuoVadis Root CA 2 G3" -# Serial: 390156079458959257446133169266079962026824725800 -# MD5 Fingerprint: af:0c:86:6e:bf:40:2d:7f:0b:3e:12:50:ba:12:3d:06 -# SHA1 Fingerprint: 09:3c:61:f3:8b:8b:dc:7d:55:df:75:38:02:05:00:e1:25:f5:c8:36 -# SHA256 Fingerprint: 8f:e4:fb:0a:f9:3a:4d:0d:67:db:0b:eb:b2:3e:37:c7:1b:f3:25:dc:bc:dd:24:0e:a0:4d:af:58:b4:7e:18:40 ------BEGIN CERTIFICATE----- -MIIFYDCCA0igAwIBAgIURFc0JFuBiZs18s64KztbpybwdSgwDQYJKoZIhvcNAQEL 
-BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc -BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMiBHMzAeFw0xMjAxMTIxODU5MzJaFw00 -MjAxMTIxODU5MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM -aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDIgRzMwggIiMA0GCSqG -SIb3DQEBAQUAA4ICDwAwggIKAoICAQChriWyARjcV4g/Ruv5r+LrI3HimtFhZiFf -qq8nUeVuGxbULX1QsFN3vXg6YOJkApt8hpvWGo6t/x8Vf9WVHhLL5hSEBMHfNrMW -n4rjyduYNM7YMxcoRvynyfDStNVNCXJJ+fKH46nafaF9a7I6JaltUkSs+L5u+9ym -c5GQYaYDFCDy54ejiK2toIz/pgslUiXnFgHVy7g1gQyjO/Dh4fxaXc6AcW34Sas+ -O7q414AB+6XrW7PFXmAqMaCvN+ggOp+oMiwMzAkd056OXbxMmO7FGmh77FOm6RQ1 -o9/NgJ8MSPsc9PG/Srj61YxxSscfrf5BmrODXfKEVu+lV0POKa2Mq1W/xPtbAd0j -IaFYAI7D0GoT7RPjEiuA3GfmlbLNHiJuKvhB1PLKFAeNilUSxmn1uIZoL1NesNKq -IcGY5jDjZ1XHm26sGahVpkUG0CM62+tlXSoREfA7T8pt9DTEceT/AFr2XK4jYIVz -8eQQsSWu1ZK7E8EM4DnatDlXtas1qnIhO4M15zHfeiFuuDIIfR0ykRVKYnLP43eh -vNURG3YBZwjgQQvD6xVu+KQZ2aKrr+InUlYrAoosFCT5v0ICvybIxo/gbjh9Uy3l -7ZizlWNof/k19N+IxWA1ksB8aRxhlRbQ694Lrz4EEEVlWFA4r0jyWbYW8jwNkALG -cC4BrTwV1wIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB -BjAdBgNVHQ4EFgQU7edvdlq/YOxJW8ald7tyFnGbxD0wDQYJKoZIhvcNAQELBQAD -ggIBAJHfgD9DCX5xwvfrs4iP4VGyvD11+ShdyLyZm3tdquXK4Qr36LLTn91nMX66 -AarHakE7kNQIXLJgapDwyM4DYvmL7ftuKtwGTTwpD4kWilhMSA/ohGHqPHKmd+RC -roijQ1h5fq7KpVMNqT1wvSAZYaRsOPxDMuHBR//47PERIjKWnML2W2mWeyAMQ0Ga -W/ZZGYjeVYg3UQt4XAoeo0L9x52ID8DyeAIkVJOviYeIyUqAHerQbj5hLja7NQ4n -lv1mNDthcnPxFlxHBlRJAHpYErAK74X9sbgzdWqTHBLmYF5vHX/JHyPLhGGfHoJE -+V+tYlUkmlKY7VHnoX6XOuYvHxHaU4AshZ6rNRDbIl9qxV6XU/IyAgkwo1jwDQHV -csaxfGl7w/U2Rcxhbl5MlMVerugOXou/983g7aEOGzPuVBj+D77vfoRrQ+NwmNtd -dbINWQeFFSM51vHfqSYP1kjHs6Yi9TM3WpVHn3u6GBVv/9YUZINJ0gpnIdsPNWNg -KCLjsZWDzYWm3S8P52dSbrsvhXz1SnPnxT7AvSESBT/8twNJAlvIJebiVDj1eYeM -HVOyToV7BjjHLPj4sHKNJeV3UvQDHEimUF+IIDBu8oJDqz2XhOdT+yHBTw8imoa4 -WSr2Rz0ZiC3oheGe7IUIarFsNMkd7EgrO3jtZsSOeWmD3n+M ------END CERTIFICATE----- - -# Issuer: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited -# Subject: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited -# Label: "QuoVadis Root CA 3 G3" -# Serial: 268090761170461462463995952157327242137089239581 -# MD5 Fingerprint: df:7d:b9:ad:54:6f:68:a1:df:89:57:03:97:43:b0:d7 -# SHA1 Fingerprint: 48:12:bd:92:3c:a8:c4:39:06:e7:30:6d:27:96:e6:a4:cf:22:2e:7d -# SHA256 Fingerprint: 88:ef:81:de:20:2e:b0:18:45:2e:43:f8:64:72:5c:ea:5f:bd:1f:c2:d9:d2:05:73:07:09:c5:d8:b8:69:0f:46 ------BEGIN CERTIFICATE----- -MIIFYDCCA0igAwIBAgIULvWbAiin23r/1aOp7r0DoM8Sah0wDQYJKoZIhvcNAQEL -BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc -BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMyBHMzAeFw0xMjAxMTIyMDI2MzJaFw00 -MjAxMTIyMDI2MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM -aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDMgRzMwggIiMA0GCSqG -SIb3DQEBAQUAA4ICDwAwggIKAoICAQCzyw4QZ47qFJenMioKVjZ/aEzHs286IxSR -/xl/pcqs7rN2nXrpixurazHb+gtTTK/FpRp5PIpM/6zfJd5O2YIyC0TeytuMrKNu -FoM7pmRLMon7FhY4futD4tN0SsJiCnMK3UmzV9KwCoWdcTzeo8vAMvMBOSBDGzXR -U7Ox7sWTaYI+FrUoRqHe6okJ7UO4BUaKhvVZR74bbwEhELn9qdIoyhA5CcoTNs+c -ra1AdHkrAj80//ogaX3T7mH1urPnMNA3I4ZyYUUpSFlob3emLoG+B01vr87ERROR -FHAGjx+f+IdpsQ7vw4kZ6+ocYfx6bIrc1gMLnia6Et3UVDmrJqMz6nWB2i3ND0/k -A9HvFZcba5DFApCTZgIhsUfei5pKgLlVj7WiL8DWM2fafsSntARE60f75li59wzw -eyuxwHApw0BiLTtIadwjPEjrewl5qW3aqDCYz4ByA4imW0aucnl8CAMhZa634Ryl -sSqiMd5mBPfAdOhx3v89WcyWJhKLhZVXGqtrdQtEPREoPHtht+KPZ0/l7DxMYIBp -VzgeAVuNVejH38DMdyM0SXV89pgR6y3e7UEuFAUCf+D+IOs15xGsIs5XPd7JMG0Q -A4XN8f+MFrXBsj6IbGB/kE+V9/YtrQE5BwT6dYB9v0lQ7e/JxHwc64B+27bQ3RP+ -ydOc17KXqQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB -BjAdBgNVHQ4EFgQUxhfQvKjqAkPyGwaZXSuQILnXnOQwDQYJKoZIhvcNAQELBQAD 
-ggIBADRh2Va1EodVTd2jNTFGu6QHcrxfYWLopfsLN7E8trP6KZ1/AvWkyaiTt3px -KGmPc+FSkNrVvjrlt3ZqVoAh313m6Tqe5T72omnHKgqwGEfcIHB9UqM+WXzBusnI -FUBhynLWcKzSt/Ac5IYp8M7vaGPQtSCKFWGafoaYtMnCdvvMujAWzKNhxnQT5Wvv -oxXqA/4Ti2Tk08HS6IT7SdEQTXlm66r99I0xHnAUrdzeZxNMgRVhvLfZkXdxGYFg -u/BYpbWcC/ePIlUnwEsBbTuZDdQdm2NnL9DuDcpmvJRPpq3t/O5jrFc/ZSXPsoaP -0Aj/uHYUbt7lJ+yreLVTubY/6CD50qi+YUbKh4yE8/nxoGibIh6BJpsQBJFxwAYf -3KDTuVan45gtf4Od34wrnDKOMpTwATwiKp9Dwi7DmDkHOHv8XgBCH/MyJnmDhPbl -8MFREsALHgQjDFSlTC9JxUrRtm5gDWv8a4uFJGS3iQ6rJUdbPM9+Sb3H6QrG2vd+ -DhcI00iX0HGS8A85PjRqHH3Y8iKuu2n0M7SmSFXRDw4m6Oy2Cy2nhTXN/VnIn9HN -PlopNLk9hM6xZdRZkZFWdSHBd575euFgndOtBBj0fOtek49TSiIp+EgrPk2GrFt/ -ywaZWWDYWGWVjUTR939+J399roD1B0y2PpxxVJkES/1Y+Zj0 ------END CERTIFICATE----- - -# Issuer: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert Assured ID Root G2" -# Serial: 15385348160840213938643033620894905419 -# MD5 Fingerprint: 92:38:b9:f8:63:24:82:65:2c:57:33:e6:fe:81:8f:9d -# SHA1 Fingerprint: a1:4b:48:d9:43:ee:0a:0e:40:90:4f:3c:e0:a4:c0:91:93:51:5d:3f -# SHA256 Fingerprint: 7d:05:eb:b6:82:33:9f:8c:94:51:ee:09:4e:eb:fe:fa:79:53:a1:14:ed:b2:f4:49:49:45:2f:ab:7d:2f:c1:85 ------BEGIN CERTIFICATE----- -MIIDljCCAn6gAwIBAgIQC5McOtY5Z+pnI7/Dr5r0SzANBgkqhkiG9w0BAQsFADBl -MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 -d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv -b3QgRzIwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQG -EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl -cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzIwggEi -MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDZ5ygvUj82ckmIkzTz+GoeMVSA -n61UQbVH35ao1K+ALbkKz3X9iaV9JPrjIgwrvJUXCzO/GU1BBpAAvQxNEP4Htecc -biJVMWWXvdMX0h5i89vqbFCMP4QMls+3ywPgym2hFEwbid3tALBSfK+RbLE4E9Hp -EgjAALAcKxHad3A2m67OeYfcgnDmCXRwVWmvo2ifv922ebPynXApVfSr/5Vh88lA -bx3RvpO704gqu52/clpWcTs/1PPRCv4o76Pu2ZmvA9OPYLfykqGxvYmJHzDNw6Yu -YjOuFgJ3RFrngQo8p0Quebg/BLxcoIfhG69Rjs3sLPr4/m3wOnyqi+RnlTGNAgMB -AAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQW -BBTOw0q5mVXyuNtgv6l+vVa1lzan1jANBgkqhkiG9w0BAQsFAAOCAQEAyqVVjOPI -QW5pJ6d1Ee88hjZv0p3GeDgdaZaikmkuOGybfQTUiaWxMTeKySHMq2zNixya1r9I -0jJmwYrA8y8678Dj1JGG0VDjA9tzd29KOVPt3ibHtX2vK0LRdWLjSisCx1BL4Gni -lmwORGYQRI+tBev4eaymG+g3NJ1TyWGqolKvSnAWhsI6yLETcDbYz+70CjTVW0z9 -B5yiutkBclzzTcHdDrEcDcRjvq30FPuJ7KJBDkzMyFdA0G4Dqs0MjomZmWzwPDCv -ON9vvKO+KSAnq3T/EyJ43pdSVR6DtVQgA+6uwE9W3jfMw3+qBCe703e4YtsXfJwo -IhNzbM8m9Yop5w== ------END CERTIFICATE----- - -# Issuer: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert Assured ID Root G3" -# Serial: 15459312981008553731928384953135426796 -# MD5 Fingerprint: 7c:7f:65:31:0c:81:df:8d:ba:3e:99:e2:5c:ad:6e:fb -# SHA1 Fingerprint: f5:17:a2:4f:9a:48:c6:c9:f8:a2:00:26:9f:dc:0f:48:2c:ab:30:89 -# SHA256 Fingerprint: 7e:37:cb:8b:4c:47:09:0c:ab:36:55:1b:a6:f4:5d:b8:40:68:0f:ba:16:6a:95:2d:b1:00:71:7f:43:05:3f:c2 ------BEGIN CERTIFICATE----- -MIICRjCCAc2gAwIBAgIQC6Fa+h3foLVJRK/NJKBs7DAKBggqhkjOPQQDAzBlMQsw -CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu -ZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3Qg -RzMwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQGEwJV -UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu -Y29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzMwdjAQBgcq 
-hkjOPQIBBgUrgQQAIgNiAAQZ57ysRGXtzbg/WPuNsVepRC0FFfLvC/8QdJ+1YlJf -Zn4f5dwbRXkLzMZTCp2NXQLZqVneAlr2lSoOjThKiknGvMYDOAdfVdp+CW7if17Q -RSAPWXYQ1qAk8C3eNvJsKTmjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/ -BAQDAgGGMB0GA1UdDgQWBBTL0L2p4ZgFUaFNN6KDec6NHSrkhDAKBggqhkjOPQQD -AwNnADBkAjAlpIFFAmsSS3V0T8gj43DydXLefInwz5FyYZ5eEJJZVrmDxxDnOOlY -JjZ91eQ0hjkCMHw2U/Aw5WJjOpnitqM7mzT6HtoQknFekROn3aRukswy1vUhZscv -6pZjamVFkpUBtA== ------END CERTIFICATE----- - -# Issuer: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert Global Root G2" -# Serial: 4293743540046975378534879503202253541 -# MD5 Fingerprint: e4:a6:8a:c8:54:ac:52:42:46:0a:fd:72:48:1b:2a:44 -# SHA1 Fingerprint: df:3c:24:f9:bf:d6:66:76:1b:26:80:73:fe:06:d1:cc:8d:4f:82:a4 -# SHA256 Fingerprint: cb:3c:cb:b7:60:31:e5:e0:13:8f:8d:d3:9a:23:f9:de:47:ff:c3:5e:43:c1:14:4c:ea:27:d4:6a:5a:b1:cb:5f ------BEGIN CERTIFICATE----- -MIIDjjCCAnagAwIBAgIQAzrx5qcRqaC7KGSxHQn65TANBgkqhkiG9w0BAQsFADBh -MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 -d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBH -MjAeFw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVT -MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j -b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEcyMIIBIjANBgkqhkiG -9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuzfNNNx7a8myaJCtSnX/RrohCgiN9RlUyfuI -2/Ou8jqJkTx65qsGGmvPrC3oXgkkRLpimn7Wo6h+4FR1IAWsULecYxpsMNzaHxmx -1x7e/dfgy5SDN67sH0NO3Xss0r0upS/kqbitOtSZpLYl6ZtrAGCSYP9PIUkY92eQ -q2EGnI/yuum06ZIya7XzV+hdG82MHauVBJVJ8zUtluNJbd134/tJS7SsVQepj5Wz -tCO7TG1F8PapspUwtP1MVYwnSlcUfIKdzXOS0xZKBgyMUNGPHgm+F6HmIcr9g+UQ -vIOlCsRnKPZzFBQ9RnbDhxSJITRNrw9FDKZJobq7nMWxM4MphQIDAQABo0IwQDAP -BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAdBgNVHQ4EFgQUTiJUIBiV -5uNu5g/6+rkS7QYXjzkwDQYJKoZIhvcNAQELBQADggEBAGBnKJRvDkhj6zHd6mcY -1Yl9PMWLSn/pvtsrF9+wX3N3KjITOYFnQoQj8kVnNeyIv/iPsGEMNKSuIEyExtv4 -NeF22d+mQrvHRAiGfzZ0JFrabA0UWTW98kndth/Jsw1HKj2ZL7tcu7XUIOGZX1NG -Fdtom/DzMNU+MeKNhJ7jitralj41E6Vf8PlwUHBHQRFXGU7Aj64GxJUTFy8bJZ91 -8rGOmaFvE7FBcf6IKshPECBV1/MUReXgRPTqh5Uykw7+U0b6LJ3/iyK5S9kJRaTe -pLiaWN0bfVKfjllDiIGknibVb63dDcY3fe0Dkhvld1927jyNxF1WW6LZZm6zNTfl -MrY= ------END CERTIFICATE----- - -# Issuer: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert Global Root G3" -# Serial: 7089244469030293291760083333884364146 -# MD5 Fingerprint: f5:5d:a4:50:a5:fb:28:7e:1e:0f:0d:cc:96:57:56:ca -# SHA1 Fingerprint: 7e:04:de:89:6a:3e:66:6d:00:e6:87:d3:3f:fa:d9:3b:e8:3d:34:9e -# SHA256 Fingerprint: 31:ad:66:48:f8:10:41:38:c7:38:f3:9e:a4:32:01:33:39:3e:3a:18:cc:02:29:6e:f9:7c:2a:c9:ef:67:31:d0 ------BEGIN CERTIFICATE----- -MIICPzCCAcWgAwIBAgIQBVVWvPJepDU1w6QP1atFcjAKBggqhkjOPQQDAzBhMQsw -CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu -ZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBHMzAe -Fw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVTMRUw -EwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5jb20x -IDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEczMHYwEAYHKoZIzj0CAQYF -K4EEACIDYgAE3afZu4q4C/sLfyHS8L6+c/MzXRq8NOrexpu80JX28MzQC7phW1FG -fp4tn+6OYwwX7Adw9c+ELkCDnOg/QW07rdOkFFk2eJ0DQ+4QE2xy3q6Ip6FrtUPO -Z9wj/wMco+I+o0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAd -BgNVHQ4EFgQUs9tIpPmhxdiuNkHMEWNpYim8S8YwCgYIKoZIzj0EAwMDaAAwZQIx -AK288mw/EkrRLTnDCgmXc/SINoyIJ7vmiI1Qhadj+Z4y3maTD/HMsQmP3Wyr+mt/ 
-oAIwOWZbwmSNuJ5Q3KjVSaLtx9zRSX8XAbjIho9OjIgrqJqpisXRAL34VOKa5Vt8 -sycX ------END CERTIFICATE----- - -# Issuer: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert Trusted Root G4" -# Serial: 7451500558977370777930084869016614236 -# MD5 Fingerprint: 78:f2:fc:aa:60:1f:2f:b4:eb:c9:37:ba:53:2e:75:49 -# SHA1 Fingerprint: dd:fb:16:cd:49:31:c9:73:a2:03:7d:3f:c8:3a:4d:7d:77:5d:05:e4 -# SHA256 Fingerprint: 55:2f:7b:dc:f1:a7:af:9e:6c:e6:72:01:7f:4f:12:ab:f7:72:40:c7:8e:76:1a:c2:03:d1:d9:d2:0a:c8:99:88 ------BEGIN CERTIFICATE----- -MIIFkDCCA3igAwIBAgIQBZsbV56OITLiOQe9p3d1XDANBgkqhkiG9w0BAQwFADBi -MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 -d3cuZGlnaWNlcnQuY29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3Qg -RzQwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBiMQswCQYDVQQGEwJV -UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu -Y29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3QgRzQwggIiMA0GCSqG -SIb3DQEBAQUAA4ICDwAwggIKAoICAQC/5pBzaN675F1KPDAiMGkz7MKnJS7JIT3y -ithZwuEppz1Yq3aaza57G4QNxDAf8xukOBbrVsaXbR2rsnnyyhHS5F/WBTxSD1If -xp4VpX6+n6lXFllVcq9ok3DCsrp1mWpzMpTREEQQLt+C8weE5nQ7bXHiLQwb7iDV -ySAdYyktzuxeTsiT+CFhmzTrBcZe7FsavOvJz82sNEBfsXpm7nfISKhmV1efVFiO -DCu3T6cw2Vbuyntd463JT17lNecxy9qTXtyOj4DatpGYQJB5w3jHtrHEtWoYOAMQ -jdjUN6QuBX2I9YI+EJFwq1WCQTLX2wRzKm6RAXwhTNS8rhsDdV14Ztk6MUSaM0C/ -CNdaSaTC5qmgZ92kJ7yhTzm1EVgX9yRcRo9k98FpiHaYdj1ZXUJ2h4mXaXpI8OCi -EhtmmnTK3kse5w5jrubU75KSOp493ADkRSWJtppEGSt+wJS00mFt6zPZxd9LBADM -fRyVw4/3IbKyEbe7f/LVjHAsQWCqsWMYRJUadmJ+9oCw++hkpjPRiQfhvbfmQ6QY -uKZ3AeEPlAwhHbJUKSWJbOUOUlFHdL4mrLZBdd56rF+NP8m800ERElvlEFDrMcXK -chYiCd98THU/Y+whX8QgUWtvsauGi0/C1kVfnSD8oR7FwI+isX4KJpn15GkvmB0t -9dmpsh3lGwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB -hjAdBgNVHQ4EFgQU7NfjgtJxXWRM3y5nP+e6mK4cD08wDQYJKoZIhvcNAQEMBQAD -ggIBALth2X2pbL4XxJEbw6GiAI3jZGgPVs93rnD5/ZpKmbnJeFwMDF/k5hQpVgs2 -SV1EY+CtnJYYZhsjDT156W1r1lT40jzBQ0CuHVD1UvyQO7uYmWlrx8GnqGikJ9yd -+SeuMIW59mdNOj6PWTkiU0TryF0Dyu1Qen1iIQqAyHNm0aAFYF/opbSnr6j3bTWc -fFqK1qI4mfN4i/RN0iAL3gTujJtHgXINwBQy7zBZLq7gcfJW5GqXb5JQbZaNaHqa -sjYUegbyJLkJEVDXCLG4iXqEI2FCKeWjzaIgQdfRnGTZ6iahixTXTBmyUEFxPT9N -cCOGDErcgdLMMpSEDQgJlxxPwO5rIHQw0uA5NBCFIRUBCOhVMt5xSdkoF1BN5r5N -0XWs0Mr7QbhDparTwwVETyw2m+L64kW4I1NsBm9nVX9GtUw/bihaeSbSpKhil9Ie -4u1Ki7wb/UdKDd9nZn6yW0HQO+T0O/QEY+nvwlQAUaCKKsnOeMzV6ocEGLPOr0mI -r/OSmbaz5mEP0oUA51Aa5BuVnRmhuZyxm7EAHu/QD09CbMkKvO5D+jpxpchNJqU1 -/YldvIViHTLSoCtU7ZpXwdv6EM8Zt4tKG48BtieVU+i2iW1bvGjUI+iLUaJW+fCm -gKDWHrO8Dw9TdSmq6hN35N6MgSGtBxBHEa2HPQfRdbzP82Z+ ------END CERTIFICATE----- - -# Issuer: CN=COMODO RSA Certification Authority O=COMODO CA Limited -# Subject: CN=COMODO RSA Certification Authority O=COMODO CA Limited -# Label: "COMODO RSA Certification Authority" -# Serial: 101909084537582093308941363524873193117 -# MD5 Fingerprint: 1b:31:b0:71:40:36:cc:14:36:91:ad:c4:3e:fd:ec:18 -# SHA1 Fingerprint: af:e5:d2:44:a8:d1:19:42:30:ff:47:9f:e2:f8:97:bb:cd:7a:8c:b4 -# SHA256 Fingerprint: 52:f0:e1:c4:e5:8e:c6:29:29:1b:60:31:7f:07:46:71:b8:5d:7e:a8:0d:5b:07:27:34:63:53:4b:32:b4:02:34 ------BEGIN CERTIFICATE----- -MIIF2DCCA8CgAwIBAgIQTKr5yttjb+Af907YWwOGnTANBgkqhkiG9w0BAQwFADCB -hTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G -A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNV -BAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMTE5 -MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgT 
-EkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMR -Q09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNh -dGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCR -6FSS0gpWsawNJN3Fz0RndJkrN6N9I3AAcbxT38T6KhKPS38QVr2fcHK3YX/JSw8X -pz3jsARh7v8Rl8f0hj4K+j5c+ZPmNHrZFGvnnLOFoIJ6dq9xkNfs/Q36nGz637CC -9BR++b7Epi9Pf5l/tfxnQ3K9DADWietrLNPtj5gcFKt+5eNu/Nio5JIk2kNrYrhV -/erBvGy2i/MOjZrkm2xpmfh4SDBF1a3hDTxFYPwyllEnvGfDyi62a+pGx8cgoLEf -Zd5ICLqkTqnyg0Y3hOvozIFIQ2dOciqbXL1MGyiKXCJ7tKuY2e7gUYPDCUZObT6Z -+pUX2nwzV0E8jVHtC7ZcryxjGt9XyD+86V3Em69FmeKjWiS0uqlWPc9vqv9JWL7w -qP/0uK3pN/u6uPQLOvnoQ0IeidiEyxPx2bvhiWC4jChWrBQdnArncevPDt09qZah -SL0896+1DSJMwBGB7FY79tOi4lu3sgQiUpWAk2nojkxl8ZEDLXB0AuqLZxUpaVIC -u9ffUGpVRr+goyhhf3DQw6KqLCGqR84onAZFdr+CGCe01a60y1Dma/RMhnEw6abf -Fobg2P9A3fvQQoh/ozM6LlweQRGBY84YcWsr7KaKtzFcOmpH4MN5WdYgGq/yapiq -crxXStJLnbsQ/LBMQeXtHT1eKJ2czL+zUdqnR+WEUwIDAQABo0IwQDAdBgNVHQ4E -FgQUu69+Aj36pvE8hI6t7jiY7NkyMtQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB -/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAArx1UaEt65Ru2yyTUEUAJNMnMvl -wFTPoCWOAvn9sKIN9SCYPBMtrFaisNZ+EZLpLrqeLppysb0ZRGxhNaKatBYSaVqM -4dc+pBroLwP0rmEdEBsqpIt6xf4FpuHA1sj+nq6PK7o9mfjYcwlYRm6mnPTXJ9OV -2jeDchzTc+CiR5kDOF3VSXkAKRzH7JsgHAckaVd4sjn8OoSgtZx8jb8uk2Intzna -FxiuvTwJaP+EmzzV1gsD41eeFPfR60/IvYcjt7ZJQ3mFXLrrkguhxuhoqEwWsRqZ -CuhTLJK7oQkYdQxlqHvLI7cawiiFwxv/0Cti76R7CZGYZ4wUAc1oBmpjIXUDgIiK -boHGhfKppC3n9KUkEEeDys30jXlYsQab5xoq2Z0B15R97QNKyvDb6KkBPvVWmcke -jkk9u+UJueBPSZI9FoJAzMxZxuY67RIuaTxslbH9qh17f4a+Hg4yRvv7E491f0yL -S0Zj/gA0QHDBw7mh3aZw4gSzQbzpgJHqZJx64SIDqZxubw5lT2yHh17zbqD5daWb -QOhTsiedSrnAdyGN/4fy3ryM7xfft0kL0fJuMAsaDk527RH89elWsn2/x20Kk4yl -0MC2Hb46TpSi125sC8KKfPog88Tk5c0NqMuRkrF8hey1FGlmDoLnzc7ILaZRfyHB -NVOFBkpdn627G190 ------END CERTIFICATE----- - -# Issuer: CN=USERTrust RSA Certification Authority O=The USERTRUST Network -# Subject: CN=USERTrust RSA Certification Authority O=The USERTRUST Network -# Label: "USERTrust RSA Certification Authority" -# Serial: 2645093764781058787591871645665788717 -# MD5 Fingerprint: 1b:fe:69:d1:91:b7:19:33:a3:72:a8:0f:e1:55:e5:b5 -# SHA1 Fingerprint: 2b:8f:1b:57:33:0d:bb:a2:d0:7a:6c:51:f7:0e:e9:0d:da:b9:ad:8e -# SHA256 Fingerprint: e7:93:c9:b0:2f:d8:aa:13:e2:1c:31:22:8a:cc:b0:81:19:64:3b:74:9c:89:89:64:b1:74:6d:46:c3:d4:cb:d2 ------BEGIN CERTIFICATE----- -MIIF3jCCA8agAwIBAgIQAf1tMPyjylGoG7xkDjUDLTANBgkqhkiG9w0BAQwFADCB -iDELMAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0pl -cnNleSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNV -BAMTJVVTRVJUcnVzdCBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAw -MjAxMDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNV -BAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVU -aGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBSU0EgQ2Vy -dGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK -AoICAQCAEmUXNg7D2wiz0KxXDXbtzSfTTK1Qg2HiqiBNCS1kCdzOiZ/MPans9s/B -3PHTsdZ7NygRK0faOca8Ohm0X6a9fZ2jY0K2dvKpOyuR+OJv0OwWIJAJPuLodMkY -tJHUYmTbf6MG8YgYapAiPLz+E/CHFHv25B+O1ORRxhFnRghRy4YUVD+8M/5+bJz/ -Fp0YvVGONaanZshyZ9shZrHUm3gDwFA66Mzw3LyeTP6vBZY1H1dat//O+T23LLb2 -VN3I5xI6Ta5MirdcmrS3ID3KfyI0rn47aGYBROcBTkZTmzNg95S+UzeQc0PzMsNT -79uq/nROacdrjGCT3sTHDN/hMq7MkztReJVni+49Vv4M0GkPGw/zJSZrM233bkf6 -c0Plfg6lZrEpfDKEY1WJxA3Bk1QwGROs0303p+tdOmw1XNtB1xLaqUkL39iAigmT -Yo61Zs8liM2EuLE/pDkP2QKe6xJMlXzzawWpXhaDzLhn4ugTncxbgtNMs+1b/97l -c6wjOy0AvzVVdAlJ2ElYGn+SNuZRkg7zJn0cTRe8yexDJtC/QV9AqURE9JnnV4ee -UB9XVKg+/XRjL7FQZQnmWEIuQxpMtPAlR1n6BB6T1CZGSlCBst6+eLf8ZxXhyVeE 
-Hg9j1uliutZfVS7qXMYoCAQlObgOK6nyTJccBz8NUvXt7y+CDwIDAQABo0IwQDAd -BgNVHQ4EFgQUU3m/WqorSs9UgOHYm8Cd8rIDZsswDgYDVR0PAQH/BAQDAgEGMA8G -A1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAFzUfA3P9wF9QZllDHPF -Up/L+M+ZBn8b2kMVn54CVVeWFPFSPCeHlCjtHzoBN6J2/FNQwISbxmtOuowhT6KO -VWKR82kV2LyI48SqC/3vqOlLVSoGIG1VeCkZ7l8wXEskEVX/JJpuXior7gtNn3/3 -ATiUFJVDBwn7YKnuHKsSjKCaXqeYalltiz8I+8jRRa8YFWSQEg9zKC7F4iRO/Fjs -8PRF/iKz6y+O0tlFYQXBl2+odnKPi4w2r78NBc5xjeambx9spnFixdjQg3IM8WcR -iQycE0xyNN+81XHfqnHd4blsjDwSXWXavVcStkNr/+XeTWYRUc+ZruwXtuhxkYze -Sf7dNXGiFSeUHM9h4ya7b6NnJSFd5t0dCy5oGzuCr+yDZ4XUmFF0sbmZgIn/f3gZ -XHlKYC6SQK5MNyosycdiyA5d9zZbyuAlJQG03RoHnHcAP9Dc1ew91Pq7P8yF1m9/ -qS3fuQL39ZeatTXaw2ewh0qpKJ4jjv9cJ2vhsE/zB+4ALtRZh8tSQZXq9EfX7mRB -VXyNWQKV3WKdwrnuWih0hKWbt5DHDAff9Yk2dDLWKMGwsAvgnEzDHNb842m1R0aB -L6KCq9NjRHDEjf8tM7qtj3u1cIiuPhnPQCjY/MiQu12ZIvVS5ljFH4gxQ+6IHdfG -jjxDah2nGN59PRbxYvnKkKj9 ------END CERTIFICATE----- - -# Issuer: CN=USERTrust ECC Certification Authority O=The USERTRUST Network -# Subject: CN=USERTrust ECC Certification Authority O=The USERTRUST Network -# Label: "USERTrust ECC Certification Authority" -# Serial: 123013823720199481456569720443997572134 -# MD5 Fingerprint: fa:68:bc:d9:b5:7f:ad:fd:c9:1d:06:83:28:cc:24:c1 -# SHA1 Fingerprint: d1:cb:ca:5d:b2:d5:2a:7f:69:3b:67:4d:e5:f0:5a:1d:0c:95:7d:f0 -# SHA256 Fingerprint: 4f:f4:60:d5:4b:9c:86:da:bf:bc:fc:57:12:e0:40:0d:2b:ed:3f:bc:4d:4f:bd:aa:86:e0:6a:dc:d2:a9:ad:7a ------BEGIN CERTIFICATE----- -MIICjzCCAhWgAwIBAgIQXIuZxVqUxdJxVt7NiYDMJjAKBggqhkjOPQQDAzCBiDEL -MAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNl -eSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMT -JVVTRVJUcnVzdCBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMjAx -MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNVBAgT -Ck5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVUaGUg -VVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBFQ0MgQ2VydGlm -aWNhdGlvbiBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQarFRaqflo -I+d61SRvU8Za2EurxtW20eZzca7dnNYMYf3boIkDuAUU7FfO7l0/4iGzzvfUinng -o4N+LZfQYcTxmdwlkWOrfzCjtHDix6EznPO/LlxTsV+zfTJ/ijTjeXmjQjBAMB0G -A1UdDgQWBBQ64QmG1M8ZwpZ2dEl23OA1xmNjmjAOBgNVHQ8BAf8EBAMCAQYwDwYD -VR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjA2Z6EWCNzklwBBHU6+4WMB -zzuqQhFkoJ2UOQIReVx7Hfpkue4WQrO/isIJxOzksU0CMQDpKmFHjFJKS04YcPbW -RNZu9YO6bVi9JNlWSOrvxKJGgYhqOkbRqZtNyWHa0V1Xahg= ------END CERTIFICATE----- - -# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4 -# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4 -# Label: "GlobalSign ECC Root CA - R4" -# Serial: 14367148294922964480859022125800977897474 -# MD5 Fingerprint: 20:f0:27:68:d1:7e:a0:9d:0e:e6:2a:ca:df:5c:89:8e -# SHA1 Fingerprint: 69:69:56:2e:40:80:f4:24:a1:e7:19:9f:14:ba:f3:ee:58:ab:6a:bb -# SHA256 Fingerprint: be:c9:49:11:c2:95:56:76:db:6c:0a:55:09:86:d7:6e:3b:a0:05:66:7c:44:2c:97:62:b4:fb:b7:73:de:22:8c ------BEGIN CERTIFICATE----- -MIIB4TCCAYegAwIBAgIRKjikHJYKBN5CsiilC+g0mAIwCgYIKoZIzj0EAwIwUDEk -MCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI0MRMwEQYDVQQKEwpH -bG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWduMB4XDTEyMTExMzAwMDAwMFoX -DTM4MDExOTAzMTQwN1owUDEkMCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBD -QSAtIFI0MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWdu -MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEuMZ5049sJQ6fLjkZHAOkrprlOQcJ -FspjsbmG+IpXwVfOQvpzofdlQv8ewQCybnMO/8ch5RikqtlxP6jUuc6MHaNCMEAw -DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFFSwe61F -uOJAf/sKbvu+M8k8o4TVMAoGCCqGSM49BAMCA0gAMEUCIQDckqGgE6bPA7DmxCGX 
-kPoUVy0D7O48027KqGx2vKLeuwIgJ6iFJzWbVsaj8kfSt24bAgAXqmemFZHe+pTs -ewv4n4Q= ------END CERTIFICATE----- - -# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5 -# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5 -# Label: "GlobalSign ECC Root CA - R5" -# Serial: 32785792099990507226680698011560947931244 -# MD5 Fingerprint: 9f:ad:3b:1c:02:1e:8a:ba:17:74:38:81:0c:a2:bc:08 -# SHA1 Fingerprint: 1f:24:c6:30:cd:a4:18:ef:20:69:ff:ad:4f:dd:5f:46:3a:1b:69:aa -# SHA256 Fingerprint: 17:9f:bc:14:8a:3d:d0:0f:d2:4e:a1:34:58:cc:43:bf:a7:f5:9c:81:82:d7:83:a5:13:f6:eb:ec:10:0c:89:24 ------BEGIN CERTIFICATE----- -MIICHjCCAaSgAwIBAgIRYFlJ4CYuu1X5CneKcflK2GwwCgYIKoZIzj0EAwMwUDEk -MCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI1MRMwEQYDVQQKEwpH -bG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWduMB4XDTEyMTExMzAwMDAwMFoX -DTM4MDExOTAzMTQwN1owUDEkMCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBD -QSAtIFI1MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWdu -MHYwEAYHKoZIzj0CAQYFK4EEACIDYgAER0UOlvt9Xb/pOdEh+J8LttV7HpI6SFkc -8GIxLcB6KP4ap1yztsyX50XUWPrRd21DosCHZTQKH3rd6zwzocWdTaRvQZU4f8ke -hOvRnkmSh5SHDDqFSmafnVmTTZdhBoZKo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYD -VR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUPeYpSJvqB8ohREom3m7e0oPQn1kwCgYI -KoZIzj0EAwMDaAAwZQIxAOVpEslu28YxuglB4Zf4+/2a4n0Sye18ZNPLBSWLVtmg -515dTguDnFt2KaAJJiFqYgIwcdK1j1zqO+F4CYWodZI7yFz9SO8NdCKoCOJuxUnO -xwy8p2Fp8fc74SrL+SvzZpA3 ------END CERTIFICATE----- - -# Issuer: CN=Staat der Nederlanden Root CA - G3 O=Staat der Nederlanden -# Subject: CN=Staat der Nederlanden Root CA - G3 O=Staat der Nederlanden -# Label: "Staat der Nederlanden Root CA - G3" -# Serial: 10003001 -# MD5 Fingerprint: 0b:46:67:07:db:10:2f:19:8c:35:50:60:d1:0b:f4:37 -# SHA1 Fingerprint: d8:eb:6b:41:51:92:59:e0:f3:e7:85:00:c0:3d:b6:88:97:c9:ee:fc -# SHA256 Fingerprint: 3c:4f:b0:b9:5a:b8:b3:00:32:f4:32:b8:6f:53:5f:e1:72:c1:85:d0:fd:39:86:58:37:cf:36:18:7f:a6:f4:28 ------BEGIN CERTIFICATE----- -MIIFdDCCA1ygAwIBAgIEAJiiOTANBgkqhkiG9w0BAQsFADBaMQswCQYDVQQGEwJO -TDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSswKQYDVQQDDCJTdGFh -dCBkZXIgTmVkZXJsYW5kZW4gUm9vdCBDQSAtIEczMB4XDTEzMTExNDExMjg0MloX -DTI4MTExMzIzMDAwMFowWjELMAkGA1UEBhMCTkwxHjAcBgNVBAoMFVN0YWF0IGRl -ciBOZWRlcmxhbmRlbjErMCkGA1UEAwwiU3RhYXQgZGVyIE5lZGVybGFuZGVuIFJv -b3QgQ0EgLSBHMzCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAL4yolQP -cPssXFnrbMSkUeiFKrPMSjTysF/zDsccPVMeiAho2G89rcKezIJnByeHaHE6n3WW -IkYFsO2tx1ueKt6c/DrGlaf1F2cY5y9JCAxcz+bMNO14+1Cx3Gsy8KL+tjzk7FqX -xz8ecAgwoNzFs21v0IJyEavSgWhZghe3eJJg+szeP4TrjTgzkApyI/o1zCZxMdFy -KJLZWyNtZrVtB0LrpjPOktvA9mxjeM3KTj215VKb8b475lRgsGYeCasH/lSJEULR -9yS6YHgamPfJEf0WwTUaVHXvQ9Plrk7O53vDxk5hUUurmkVLoR9BvUhTFXFkC4az -5S6+zqQbwSmEorXLCCN2QyIkHxcE1G6cxvx/K2Ya7Irl1s9N9WMJtxU51nus6+N8 -6U78dULI7ViVDAZCopz35HCz33JvWjdAidiFpNfxC95DGdRKWCyMijmev4SH8RY7 -Ngzp07TKbBlBUgmhHbBqv4LvcFEhMtwFdozL92TkA1CvjJFnq8Xy7ljY3r735zHP -bMk7ccHViLVlvMDoFxcHErVc0qsgk7TmgoNwNsXNo42ti+yjwUOH5kPiNL6VizXt -BznaqB16nzaeErAMZRKQFWDZJkBE41ZgpRDUajz9QdwOWke275dhdU/Z/seyHdTt -XUmzqWrLZoQT1Vyg3N9udwbRcXXIV2+vD3dbAgMBAAGjQjBAMA8GA1UdEwEB/wQF -MAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBRUrfrHkleuyjWcLhL75Lpd -INyUVzANBgkqhkiG9w0BAQsFAAOCAgEAMJmdBTLIXg47mAE6iqTnB/d6+Oea31BD -U5cqPco8R5gu4RV78ZLzYdqQJRZlwJ9UXQ4DO1t3ApyEtg2YXzTdO2PCwyiBwpwp -LiniyMMB8jPqKqrMCQj3ZWfGzd/TtiunvczRDnBfuCPRy5FOCvTIeuXZYzbB1N/8 -Ipf3YF3qKS9Ysr1YvY2WTxB1v0h7PVGHoTx0IsL8B3+A3MSs/mrBcDCw6Y5p4ixp -gZQJut3+TcCDjJRYwEYgr5wfAvg1VUkvRtTA8KCWAg8zxXHzniN9lLf9OtMJgwYh -/WA9rjLA0u6NpvDntIJ8CsxwyXmA+P5M9zWEGYox+wrZ13+b8KKaa8MFSu1BYBQw 
-0aoRQm7TIwIEC8Zl3d1Sd9qBa7Ko+gE4uZbqKmxnl4mUnrzhVNXkanjvSr0rmj1A -fsbAddJu+2gw7OyLnflJNZoaLNmzlTnVHpL3prllL+U9bTpITAjc5CgSKL59NVzq -4BZ+Extq1z7XnvwtdbLBFNUjA9tbbws+eC8N3jONFrdI54OagQ97wUNNVQQXOEpR -1VmiiXTTn74eS9fGbbeIJG9gkaSChVtWQbzQRKtqE77RLFi3EjNYsjdj3BP1lB0/ -QFH1T/U67cjF68IeHRaVesd+QnGTbksVtzDfqu1XhUisHWrdOWnk4Xl4vs4Fv6EM -94B7IWcnMFk= ------END CERTIFICATE----- - -# Issuer: CN=Staat der Nederlanden EV Root CA O=Staat der Nederlanden -# Subject: CN=Staat der Nederlanden EV Root CA O=Staat der Nederlanden -# Label: "Staat der Nederlanden EV Root CA" -# Serial: 10000013 -# MD5 Fingerprint: fc:06:af:7b:e8:1a:f1:9a:b4:e8:d2:70:1f:c0:f5:ba -# SHA1 Fingerprint: 76:e2:7e:c1:4f:db:82:c1:c0:a6:75:b5:05:be:3d:29:b4:ed:db:bb -# SHA256 Fingerprint: 4d:24:91:41:4c:fe:95:67:46:ec:4c:ef:a6:cf:6f:72:e2:8a:13:29:43:2f:9d:8a:90:7a:c4:cb:5d:ad:c1:5a ------BEGIN CERTIFICATE----- -MIIFcDCCA1igAwIBAgIEAJiWjTANBgkqhkiG9w0BAQsFADBYMQswCQYDVQQGEwJO -TDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSkwJwYDVQQDDCBTdGFh -dCBkZXIgTmVkZXJsYW5kZW4gRVYgUm9vdCBDQTAeFw0xMDEyMDgxMTE5MjlaFw0y -MjEyMDgxMTEwMjhaMFgxCzAJBgNVBAYTAk5MMR4wHAYDVQQKDBVTdGFhdCBkZXIg -TmVkZXJsYW5kZW4xKTAnBgNVBAMMIFN0YWF0IGRlciBOZWRlcmxhbmRlbiBFViBS -b290IENBMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA48d+ifkkSzrS -M4M1LGns3Amk41GoJSt5uAg94JG6hIXGhaTK5skuU6TJJB79VWZxXSzFYGgEt9nC -UiY4iKTWO0Cmws0/zZiTs1QUWJZV1VD+hq2kY39ch/aO5ieSZxeSAgMs3NZmdO3d -Z//BYY1jTw+bbRcwJu+r0h8QoPnFfxZpgQNH7R5ojXKhTbImxrpsX23Wr9GxE46p -rfNeaXUmGD5BKyF/7otdBwadQ8QpCiv8Kj6GyzyDOvnJDdrFmeK8eEEzduG/L13l -pJhQDBXd4Pqcfzho0LKmeqfRMb1+ilgnQ7O6M5HTp5gVXJrm0w912fxBmJc+qiXb -j5IusHsMX/FjqTf5m3VpTCgmJdrV8hJwRVXj33NeN/UhbJCONVrJ0yPr08C+eKxC -KFhmpUZtcALXEPlLVPxdhkqHz3/KRawRWrUgUY0viEeXOcDPusBCAUCZSCELa6fS -/ZbV0b5GnUngC6agIk440ME8MLxwjyx1zNDFjFE7PZQIZCZhfbnDZY8UnCHQqv0X -cgOPvZuM5l5Tnrmd74K74bzickFbIZTTRTeU0d8JOV3nI6qaHcptqAqGhYqCvkIH -1vI4gnPah1vlPNOePqc7nvQDs/nxfRN0Av+7oeX6AHkcpmZBiFxgV6YuCcS6/ZrP -px9Aw7vMWgpVSzs4dlG4Y4uElBbmVvMCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB -/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFP6rAJCYniT8qcwaivsnuL8wbqg7 -MA0GCSqGSIb3DQEBCwUAA4ICAQDPdyxuVr5Os7aEAJSrR8kN0nbHhp8dB9O2tLsI -eK9p0gtJ3jPFrK3CiAJ9Brc1AsFgyb/E6JTe1NOpEyVa/m6irn0F3H3zbPB+po3u -2dfOWBfoqSmuc0iH55vKbimhZF8ZE/euBhD/UcabTVUlT5OZEAFTdfETzsemQUHS -v4ilf0X8rLiltTMMgsT7B/Zq5SWEXwbKwYY5EdtYzXc7LMJMD16a4/CrPmEbUCTC -wPTxGfARKbalGAKb12NMcIxHowNDXLldRqANb/9Zjr7dn3LDWyvfjFvO5QxGbJKy -CqNMVEIYFRIYvdr8unRu/8G2oGTYqV9Vrp9canaW2HNnh/tNf1zuacpzEPuKqf2e -vTY4SUmH9A4U8OmHuD+nT3pajnnUk+S7aFKErGzp85hwVXIy+TSrK0m1zSBi5Dp6 -Z2Orltxtrpfs/J92VoguZs9btsmksNcFuuEnL5O7Jiqik7Ab846+HUCjuTaPPoIa -Gl6I6lD4WeKDRikL40Rc4ZW2aZCaFG+XroHPaO+Zmr615+F/+PoTRxZMzG0IQOeL -eG9QgkRQP2YGiqtDhFZKDyAthg710tvSeopLzaXoTvFeJiUBWSOgftL2fiFX1ye8 -FVdMpEbB4IMeDExNH08GGeL5qPQ6gqGyeUN51q1veieQA6TqJIc/2b3Z6fJfUEkc -7uzXLg== ------END CERTIFICATE----- - -# Issuer: CN=IdenTrust Commercial Root CA 1 O=IdenTrust -# Subject: CN=IdenTrust Commercial Root CA 1 O=IdenTrust -# Label: "IdenTrust Commercial Root CA 1" -# Serial: 13298821034946342390520003877796839426 -# MD5 Fingerprint: b3:3e:77:73:75:ee:a0:d3:e3:7e:49:63:49:59:bb:c7 -# SHA1 Fingerprint: df:71:7e:aa:4a:d9:4e:c9:55:84:99:60:2d:48:de:5f:bc:f0:3a:25 -# SHA256 Fingerprint: 5d:56:49:9b:e4:d2:e0:8b:cf:ca:d0:8a:3e:38:72:3d:50:50:3b:de:70:69:48:e4:2f:55:60:30:19:e5:28:ae ------BEGIN CERTIFICATE----- -MIIFYDCCA0igAwIBAgIQCgFCgAAAAUUjyES1AAAAAjANBgkqhkiG9w0BAQsFADBK -MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScwJQYDVQQDEx5JZGVu -VHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwHhcNMTQwMTE2MTgxMjIzWhcNMzQw 
-MTE2MTgxMjIzWjBKMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScw -JQYDVQQDEx5JZGVuVHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwggIiMA0GCSqG -SIb3DQEBAQUAA4ICDwAwggIKAoICAQCnUBneP5k91DNG8W9RYYKyqU+PZ4ldhNlT -3Qwo2dfw/66VQ3KZ+bVdfIrBQuExUHTRgQ18zZshq0PirK1ehm7zCYofWjK9ouuU -+ehcCuz/mNKvcbO0U59Oh++SvL3sTzIwiEsXXlfEU8L2ApeN2WIrvyQfYo3fw7gp -S0l4PJNgiCL8mdo2yMKi1CxUAGc1bnO/AljwpN3lsKImesrgNqUZFvX9t++uP0D1 -bVoE/c40yiTcdCMbXTMTEl3EASX2MN0CXZ/g1Ue9tOsbobtJSdifWwLziuQkkORi -T0/Br4sOdBeo0XKIanoBScy0RnnGF7HamB4HWfp1IYVl3ZBWzvurpWCdxJ35UrCL -vYf5jysjCiN2O/cz4ckA82n5S6LgTrx+kzmEB/dEcH7+B1rlsazRGMzyNeVJSQjK -Vsk9+w8YfYs7wRPCTY/JTw436R+hDmrfYi7LNQZReSzIJTj0+kuniVyc0uMNOYZK -dHzVWYfCP04MXFL0PfdSgvHqo6z9STQaKPNBiDoT7uje/5kdX7rL6B7yuVBgwDHT -c+XvvqDtMwt0viAgxGds8AgDelWAf0ZOlqf0Hj7h9tgJ4TNkK2PXMl6f+cB7D3hv -l7yTmvmcEpB4eoCHFddydJxVdHixuuFucAS6T6C6aMN7/zHwcz09lCqxC0EOoP5N -iGVreTO01wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB -/zAdBgNVHQ4EFgQU7UQZwNPwBovupHu+QucmVMiONnYwDQYJKoZIhvcNAQELBQAD -ggIBAA2ukDL2pkt8RHYZYR4nKM1eVO8lvOMIkPkp165oCOGUAFjvLi5+U1KMtlwH -6oi6mYtQlNeCgN9hCQCTrQ0U5s7B8jeUeLBfnLOic7iPBZM4zY0+sLj7wM+x8uwt -LRvM7Kqas6pgghstO8OEPVeKlh6cdbjTMM1gCIOQ045U8U1mwF10A0Cj7oV+wh93 -nAbowacYXVKV7cndJZ5t+qntozo00Fl72u1Q8zW/7esUTTHHYPTa8Yec4kjixsU3 -+wYQ+nVZZjFHKdp2mhzpgq7vmrlR94gjmmmVYjzlVYA211QC//G5Xc7UI2/YRYRK -W2XviQzdFKcgyxilJbQN+QHwotL0AMh0jqEqSI5l2xPE4iUXfeu+h1sXIFRRk0pT -AwvsXcoz7WL9RccvW9xYoIA55vrX/hMUpu09lEpCdNTDd1lzzY9GvlU47/rokTLq -l1gEIt44w8y8bckzOmoKaT+gyOpyj4xjhiO9bTyWnpXgSUyqorkqG5w2gXjtw+hG -4iZZRHUe2XWJUc0QhJ1hYMtd+ZciTY6Y5uN/9lu7rs3KSoFrXgvzUeF0K+l+J6fZ -mUlO+KWA2yUPHGNiiskzZ2s8EIPGrd6ozRaOjfAHN3Gf8qv8QfXBi+wAN10J5U6A -7/qxXDgGpRtK4dw4LTzcqx+QGtVKnO7RcGzM7vRX+Bi6hG6H ------END CERTIFICATE----- - -# Issuer: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust -# Subject: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust -# Label: "IdenTrust Public Sector Root CA 1" -# Serial: 13298821034946342390521976156843933698 -# MD5 Fingerprint: 37:06:a5:b0:fc:89:9d:ba:f4:6b:8c:1a:64:cd:d5:ba -# SHA1 Fingerprint: ba:29:41:60:77:98:3f:f4:f3:ef:f2:31:05:3b:2e:ea:6d:4d:45:fd -# SHA256 Fingerprint: 30:d0:89:5a:9a:44:8a:26:20:91:63:55:22:d1:f5:20:10:b5:86:7a:ca:e1:2c:78:ef:95:8f:d4:f4:38:9f:2f ------BEGIN CERTIFICATE----- -MIIFZjCCA06gAwIBAgIQCgFCgAAAAUUjz0Z8AAAAAjANBgkqhkiG9w0BAQsFADBN -MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MSowKAYDVQQDEyFJZGVu -VHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwHhcNMTQwMTE2MTc1MzMyWhcN -MzQwMTE2MTc1MzMyWjBNMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0 -MSowKAYDVQQDEyFJZGVuVHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwggIi -MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC2IpT8pEiv6EdrCvsnduTyP4o7 -ekosMSqMjbCpwzFrqHd2hCa2rIFCDQjrVVi7evi8ZX3yoG2LqEfpYnYeEe4IFNGy -RBb06tD6Hi9e28tzQa68ALBKK0CyrOE7S8ItneShm+waOh7wCLPQ5CQ1B5+ctMlS -bdsHyo+1W/CD80/HLaXIrcuVIKQxKFdYWuSNG5qrng0M8gozOSI5Cpcu81N3uURF -/YTLNiCBWS2ab21ISGHKTN9T0a9SvESfqy9rg3LvdYDaBjMbXcjaY8ZNzaxmMc3R -3j6HEDbhuaR672BQssvKplbgN6+rNBM5Jeg5ZuSYeqoSmJxZZoY+rfGwyj4GD3vw -EUs3oERte8uojHH01bWRNszwFcYr3lEXsZdMUD2xlVl8BX0tIdUAvwFnol57plzy -9yLxkA2T26pEUWbMfXYD62qoKjgZl3YNa4ph+bz27nb9cCvdKTz4Ch5bQhyLVi9V -GxyhLrXHFub4qjySjmm2AcG1hp2JDws4lFTo6tyePSW8Uybt1as5qsVATFSrsrTZ -2fjXctscvG29ZV/viDUqZi/u9rNl8DONfJhBaUYPQxxp+pu10GFqzcpL2UyQRqsV -WaFHVCkugyhfHMKiq3IXAAaOReyL4jM9f9oZRORicsPfIsbyVtTdX5Vy7W1f90gD -W/3FKqD2cyOEEBsB5wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/ -BAUwAwEB/zAdBgNVHQ4EFgQU43HgntinQtnbcZFrlJPrw6PRFKMwDQYJKoZIhvcN -AQELBQADggIBAEf63QqwEZE4rU1d9+UOl1QZgkiHVIyqZJnYWv6IAcVYpZmxI1Qj 
-t2odIFflAWJBF9MJ23XLblSQdf4an4EKwt3X9wnQW3IV5B4Jaj0z8yGa5hV+rVHV -DRDtfULAj+7AmgjVQdZcDiFpboBhDhXAuM/FSRJSzL46zNQuOAXeNf0fb7iAaJg9 -TaDKQGXSc3z1i9kKlT/YPyNtGtEqJBnZhbMX73huqVjRI9PHE+1yJX9dsXNw0H8G -lwmEKYBhHfpe/3OsoOOJuBxxFcbeMX8S3OFtm6/n6J91eEyrRjuazr8FGF1NFTwW -mhlQBJqymm9li1JfPFgEKCXAZmExfrngdbkaqIHWchezxQMxNRF4eKLg6TCMf4Df -WN88uieW4oA0beOY02QnrEh+KHdcxiVhJfiFDGX6xDIvpZgF5PgLZxYWxoK4Mhn5 -+bl53B/N66+rDt0b20XkeucC4pVd/GnwU2lhlXV5C15V5jgclKlZM57IcXR5f1GJ -tshquDDIajjDbp7hNxbqBWJMWxJH7ae0s1hWx0nzfxJoCTFx8G34Tkf71oXuxVhA -GaQdp/lLQzfcaFpPz+vCZHTetBXZ9FRUGi8c15dxVJCO2SCdUyt/q4/i6jC8UDfv -8Ue1fXwsBOxonbRJRBD0ckscZOf85muQ3Wl9af0AVqW3rLatt8o+Ae+c ------END CERTIFICATE----- - -# Issuer: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. - for authorized use only -# Subject: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. - for authorized use only -# Label: "Entrust Root Certification Authority - G2" -# Serial: 1246989352 -# MD5 Fingerprint: 4b:e2:c9:91:96:65:0c:f4:0e:5a:93:92:a0:0a:fe:b2 -# SHA1 Fingerprint: 8c:f4:27:fd:79:0c:3a:d1:66:06:8d:e8:1e:57:ef:bb:93:22:72:d4 -# SHA256 Fingerprint: 43:df:57:74:b0:3e:7f:ef:5f:e4:0d:93:1a:7b:ed:f1:bb:2e:6b:42:73:8c:4e:6d:38:41:10:3d:3a:a7:f3:39 ------BEGIN CERTIFICATE----- -MIIEPjCCAyagAwIBAgIESlOMKDANBgkqhkiG9w0BAQsFADCBvjELMAkGA1UEBhMC -VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50 -cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3Qs -IEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVz -dCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRzIwHhcNMDkwNzA3MTcy -NTU0WhcNMzAxMjA3MTc1NTU0WjCBvjELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUVu -dHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50cnVzdC5uZXQvbGVnYWwt -dGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3QsIEluYy4gLSBmb3IgYXV0 -aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVzdCBSb290IENlcnRpZmlj -YXRpb24gQXV0aG9yaXR5IC0gRzIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEK -AoIBAQC6hLZy254Ma+KZ6TABp3bqMriVQRrJ2mFOWHLP/vaCeb9zYQYKpSfYs1/T -RU4cctZOMvJyig/3gxnQaoCAAEUesMfnmr8SVycco2gvCoe9amsOXmXzHHfV1IWN -cCG0szLni6LVhjkCsbjSR87kyUnEO6fe+1R9V77w6G7CebI6C1XiUJgWMhNcL3hW -wcKUs/Ja5CeanyTXxuzQmyWC48zCxEXFjJd6BmsqEZ+pCm5IO2/b1BEZQvePB7/1 -U1+cPvQXLOZprE4yTGJ36rfo5bs0vBmLrpxR57d+tVOxMyLlbc9wPBr64ptntoP0 -jaWvYkxN4FisZDQSA/i2jZRjJKRxAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAP -BgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqciZ60B7vfec7aVHUbI2fkBJmqzAN -BgkqhkiG9w0BAQsFAAOCAQEAeZ8dlsa2eT8ijYfThwMEYGprmi5ZiXMRrEPR9RP/ -jTkrwPK9T3CMqS/qF8QLVJ7UG5aYMzyorWKiAHarWWluBh1+xLlEjZivEtRh2woZ -Rkfz6/djwUAFQKXSt/S1mja/qYh2iARVBCuch38aNzx+LaUa2NSJXsq9rD1s2G2v -1fN2D807iDginWyTmsQ9v4IbZT+mD12q/OWyFcq1rca8PdCE6OoGcrBNOTJ4vz4R -nAuknZoh8/CbCzB428Hch0P+vGOaysXCHMnHjf87ElgI5rY97HosTvuDls4MPGmH -VHOkc8KT/1EQrBVUAdj8BbGJoX90g5pJ19xOe4pIb4tF9g== ------END CERTIFICATE----- - -# Issuer: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. - for authorized use only -# Subject: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. 
- for authorized use only -# Label: "Entrust Root Certification Authority - EC1" -# Serial: 51543124481930649114116133369 -# MD5 Fingerprint: b6:7e:1d:f0:58:c5:49:6c:24:3b:3d:ed:98:18:ed:bc -# SHA1 Fingerprint: 20:d8:06:40:df:9b:25:f5:12:25:3a:11:ea:f7:59:8a:eb:14:b5:47 -# SHA256 Fingerprint: 02:ed:0e:b2:8c:14:da:45:16:5c:56:67:91:70:0d:64:51:d7:fb:56:f0:b2:ab:1d:3b:8e:b0:70:e5:6e:df:f5 ------BEGIN CERTIFICATE----- -MIIC+TCCAoCgAwIBAgINAKaLeSkAAAAAUNCR+TAKBggqhkjOPQQDAzCBvzELMAkG -A1UEBhMCVVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3 -d3cuZW50cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDEyIEVu -dHJ1c3QsIEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEzMDEGA1UEAxMq -RW50cnVzdCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRUMxMB4XDTEy -MTIxODE1MjUzNloXDTM3MTIxODE1NTUzNlowgb8xCzAJBgNVBAYTAlVTMRYwFAYD -VQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQLEx9TZWUgd3d3LmVudHJ1c3QubmV0 -L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykgMjAxMiBFbnRydXN0LCBJbmMuIC0g -Zm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMzAxBgNVBAMTKkVudHJ1c3QgUm9vdCBD -ZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEVDMTB2MBAGByqGSM49AgEGBSuBBAAi -A2IABIQTydC6bUF74mzQ61VfZgIaJPRbiWlH47jCffHyAsWfoPZb1YsGGYZPUxBt -ByQnoaD41UcZYUx9ypMn6nQM72+WCf5j7HBdNq1nd67JnXxVRDqiY1Ef9eNi1KlH -Bz7MIKNCMEAwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0O -BBYEFLdj5xrdjekIplWDpOBqUEFlEUJJMAoGCCqGSM49BAMDA2cAMGQCMGF52OVC -R98crlOZF7ZvHH3hvxGU0QOIdeSNiaSKd0bebWHvAvX7td/M/k7//qnmpwIwW5nX -hTcGtXsI/esni0qU+eH6p44mCOh8kmhtc9hvJqwhAriZtyZBWyVgrtBIGu4G ------END CERTIFICATE----- - -# Issuer: CN=CFCA EV ROOT O=China Financial Certification Authority -# Subject: CN=CFCA EV ROOT O=China Financial Certification Authority -# Label: "CFCA EV ROOT" -# Serial: 407555286 -# MD5 Fingerprint: 74:e1:b6:ed:26:7a:7a:44:30:33:94:ab:7b:27:81:30 -# SHA1 Fingerprint: e2:b8:29:4b:55:84:ab:6b:58:c2:90:46:6c:ac:3f:b8:39:8f:84:83 -# SHA256 Fingerprint: 5c:c3:d7:8e:4e:1d:5e:45:54:7a:04:e6:87:3e:64:f9:0c:f9:53:6d:1c:cc:2e:f8:00:f3:55:c4:c5:fd:70:fd ------BEGIN CERTIFICATE----- -MIIFjTCCA3WgAwIBAgIEGErM1jANBgkqhkiG9w0BAQsFADBWMQswCQYDVQQGEwJD -TjEwMC4GA1UECgwnQ2hpbmEgRmluYW5jaWFsIENlcnRpZmljYXRpb24gQXV0aG9y -aXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJPT1QwHhcNMTIwODA4MDMwNzAxWhcNMjkx -MjMxMDMwNzAxWjBWMQswCQYDVQQGEwJDTjEwMC4GA1UECgwnQ2hpbmEgRmluYW5j -aWFsIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJP -T1QwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDXXWvNED8fBVnVBU03 -sQ7smCuOFR36k0sXgiFxEFLXUWRwFsJVaU2OFW2fvwwbwuCjZ9YMrM8irq93VCpL -TIpTUnrD7i7es3ElweldPe6hL6P3KjzJIx1qqx2hp/Hz7KDVRM8Vz3IvHWOX6Jn5 -/ZOkVIBMUtRSqy5J35DNuF++P96hyk0g1CXohClTt7GIH//62pCfCqktQT+x8Rgp -7hZZLDRJGqgG16iI0gNyejLi6mhNbiyWZXvKWfry4t3uMCz7zEasxGPrb382KzRz -EpR/38wmnvFyXVBlWY9ps4deMm/DGIq1lY+wejfeWkU7xzbh72fROdOXW3NiGUgt -hxwG+3SYIElz8AXSG7Ggo7cbcNOIabla1jj0Ytwli3i/+Oh+uFzJlU9fpy25IGvP -a931DfSCt/SyZi4QKPaXWnuWFo8BGS1sbn85WAZkgwGDg8NNkt0yxoekN+kWzqot -aK8KgWU6cMGbrU1tVMoqLUuFG7OA5nBFDWteNfB/O7ic5ARwiRIlk9oKmSJgamNg -TnYGmE69g60dWIolhdLHZR4tjsbftsbhf4oEIRUpdPA+nJCdDC7xij5aqgwJHsfV -PKPtl8MeNPo4+QgO48BdK4PRVmrJtqhUUy54Mmc9gn900PvhtgVguXDbjgv5E1hv -cWAQUhC5wUEJ73IfZzF4/5YFjQIDAQABo2MwYTAfBgNVHSMEGDAWgBTj/i39KNAL -tbq2osS/BqoFjJP7LzAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAd -BgNVHQ4EFgQU4/4t/SjQC7W6tqLEvwaqBYyT+y8wDQYJKoZIhvcNAQELBQADggIB -ACXGumvrh8vegjmWPfBEp2uEcwPenStPuiB/vHiyz5ewG5zz13ku9Ui20vsXiObT -ej/tUxPQ4i9qecsAIyjmHjdXNYmEwnZPNDatZ8POQQaIxffu2Bq41gt/UP+TqhdL -jOztUmCypAbqTuv0axn96/Ua4CUqmtzHQTb3yHQFhDmVOdYLO6Qn+gjYXB74BGBS -ESgoA//vU2YApUo0FmZ8/Qmkrp5nGm9BC2sGE5uPhnEFtC+NiWYzKXZUmhH4J/qy 
-P5Hgzg0b8zAarb8iXRvTvyUFTeGSGn+ZnzxEk8rUQElsgIfXBDrDMlI1Dlb4pd19 -xIsNER9Tyx6yF7Zod1rg1MvIB671Oi6ON7fQAUtDKXeMOZePglr4UeWJoBjnaH9d -Ci77o0cOPaYjesYBx4/IXr9tgFa+iiS6M+qf4TIRnvHST4D2G0CvOJ4RUHlzEhLN -5mydLIhyPDCBBpEi6lmt2hkuIsKNuYyH4Ga8cyNfIWRjgEj1oDwYPZTISEEdQLpe -/v5WOaHIz16eGWRGENoXkbcFgKyLmZJ956LYBws2J+dIeWCKw9cTXPhyQN9Ky8+Z -AAoACxGV2lZFA4gKn2fQ1XmxqI1AbQ3CekD6819kR5LLU7m7Wc5P/dAVUwHY3+vZ -5nbv0CO7O6l5s9UCKc2Jo5YPSjXnTkLAdc0Hz+Ys63su ------END CERTIFICATE----- - -# Issuer: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed -# Subject: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed -# Label: "OISTE WISeKey Global Root GB CA" -# Serial: 157768595616588414422159278966750757568 -# MD5 Fingerprint: a4:eb:b9:61:28:2e:b7:2f:98:b0:35:26:90:99:51:1d -# SHA1 Fingerprint: 0f:f9:40:76:18:d3:d7:6a:4b:98:f0:a8:35:9e:0c:fd:27:ac:cc:ed -# SHA256 Fingerprint: 6b:9c:08:e8:6e:b0:f7:67:cf:ad:65:cd:98:b6:21:49:e5:49:4a:67:f5:84:5e:7b:d1:ed:01:9f:27:b8:6b:d6 ------BEGIN CERTIFICATE----- -MIIDtTCCAp2gAwIBAgIQdrEgUnTwhYdGs/gjGvbCwDANBgkqhkiG9w0BAQsFADBt -MQswCQYDVQQGEwJDSDEQMA4GA1UEChMHV0lTZUtleTEiMCAGA1UECxMZT0lTVEUg -Rm91bmRhdGlvbiBFbmRvcnNlZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9i -YWwgUm9vdCBHQiBDQTAeFw0xNDEyMDExNTAwMzJaFw0zOTEyMDExNTEwMzFaMG0x -CzAJBgNVBAYTAkNIMRAwDgYDVQQKEwdXSVNlS2V5MSIwIAYDVQQLExlPSVNURSBG -b3VuZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBXSVNlS2V5IEdsb2Jh -bCBSb290IEdCIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2Be3 -HEokKtaXscriHvt9OO+Y9bI5mE4nuBFde9IllIiCFSZqGzG7qFshISvYD06fWvGx -WuR51jIjK+FTzJlFXHtPrby/h0oLS5daqPZI7H17Dc0hBt+eFf1Biki3IPShehtX -1F1Q/7pn2COZH8g/497/b1t3sWtuuMlk9+HKQUYOKXHQuSP8yYFfTvdv37+ErXNk -u7dCjmn21HYdfp2nuFeKUWdy19SouJVUQHMD9ur06/4oQnc/nSMbsrY9gBQHTC5P -99UKFg29ZkM3fiNDecNAhvVMKdqOmq0NpQSHiB6F4+lT1ZvIiwNjeOvgGUpuuy9r -M2RYk61pv48b74JIxwIDAQABo1EwTzALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUw -AwEB/zAdBgNVHQ4EFgQUNQ/INmNe4qPs+TtmFc5RUuORmj0wEAYJKwYBBAGCNxUB -BAMCAQAwDQYJKoZIhvcNAQELBQADggEBAEBM+4eymYGQfp3FsLAmzYh7KzKNbrgh -cViXfa43FK8+5/ea4n32cZiZBKpDdHij40lhPnOMTZTg+XHEthYOU3gf1qKHLwI5 -gSk8rxWYITD+KJAAjNHhy/peyP34EEY7onhCkRd0VQreUGdNZtGn//3ZwLWoo4rO -ZvUPQ82nK1d7Y0Zqqi5S2PTt4W2tKZB4SLrhI6qjiey1q5bAtEuiHZeeevJuQHHf -aPFlTc58Bd9TZaml8LGXBHAVRgOY1NK/VLSgWH1Sb9pWJmLU2NuJMW8c8CLC02Ic -Nc1MaRVUGpCY3useX8p3x8uOPUNpnJpY0CQ73xtAln41rYHHTnG6iBM= ------END CERTIFICATE----- - -# Issuer: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A. -# Subject: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A. 
-# Label: "SZAFIR ROOT CA2" -# Serial: 357043034767186914217277344587386743377558296292 -# MD5 Fingerprint: 11:64:c1:89:b0:24:b1:8c:b1:07:7e:89:9e:51:9e:99 -# SHA1 Fingerprint: e2:52:fa:95:3f:ed:db:24:60:bd:6e:28:f3:9c:cc:cf:5e:b3:3f:de -# SHA256 Fingerprint: a1:33:9d:33:28:1a:0b:56:e5:57:d3:d3:2b:1c:e7:f9:36:7e:b0:94:bd:5f:a7:2a:7e:50:04:c8:de:d7:ca:fe ------BEGIN CERTIFICATE----- -MIIDcjCCAlqgAwIBAgIUPopdB+xV0jLVt+O2XwHrLdzk1uQwDQYJKoZIhvcNAQEL -BQAwUTELMAkGA1UEBhMCUEwxKDAmBgNVBAoMH0tyYWpvd2EgSXpiYSBSb3psaWN6 -ZW5pb3dhIFMuQS4xGDAWBgNVBAMMD1NaQUZJUiBST09UIENBMjAeFw0xNTEwMTkw -NzQzMzBaFw0zNTEwMTkwNzQzMzBaMFExCzAJBgNVBAYTAlBMMSgwJgYDVQQKDB9L -cmFqb3dhIEl6YmEgUm96bGljemVuaW93YSBTLkEuMRgwFgYDVQQDDA9TWkFGSVIg -Uk9PVCBDQTIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC3vD5QqEvN -QLXOYeeWyrSh2gwisPq1e3YAd4wLz32ohswmUeQgPYUM1ljj5/QqGJ3a0a4m7utT -3PSQ1hNKDJA8w/Ta0o4NkjrcsbH/ON7Dui1fgLkCvUqdGw+0w8LBZwPd3BucPbOw -3gAeqDRHu5rr/gsUvTaE2g0gv/pby6kWIK05YO4vdbbnl5z5Pv1+TW9NL++IDWr6 -3fE9biCloBK0TXC5ztdyO4mTp4CEHCdJckm1/zuVnsHMyAHs6A6KCpbns6aH5db5 -BSsNl0BwPLqsdVqc1U2dAgrSS5tmS0YHF2Wtn2yIANwiieDhZNRnvDF5YTy7ykHN -XGoAyDw4jlivAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD -AgEGMB0GA1UdDgQWBBQuFqlKGLXLzPVvUPMjX/hd56zwyDANBgkqhkiG9w0BAQsF -AAOCAQEAtXP4A9xZWx126aMqe5Aosk3AM0+qmrHUuOQn/6mWmc5G4G18TKI4pAZw -8PRBEew/R40/cof5O/2kbytTAOD/OblqBw7rHRz2onKQy4I9EYKL0rufKq8h5mOG -nXkZ7/e7DDWQw4rtTw/1zBLZpD67oPwglV9PJi8RI4NOdQcPv5vRtB3pEAT+ymCP -oky4rc/hkA/NrgrHXXu3UNLUYfrVFdvXn4dRVOul4+vJhaAlIDf7js4MNIThPIGy -d05DpYhfhmehPea0XGG2Ptv+tyjFogeutcrKjSoS75ftwjCkySp6+/NNIxuZMzSg -LvWpCz/UXeHPhJ/iGcJfitYgHuNztw== ------END CERTIFICATE----- - -# Issuer: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. OU=Certum Certification Authority -# Subject: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. 
OU=Certum Certification Authority -# Label: "Certum Trusted Network CA 2" -# Serial: 44979900017204383099463764357512596969 -# MD5 Fingerprint: 6d:46:9e:d9:25:6d:08:23:5b:5e:74:7d:1e:27:db:f2 -# SHA1 Fingerprint: d3:dd:48:3e:2b:bf:4c:05:e8:af:10:f5:fa:76:26:cf:d3:dc:30:92 -# SHA256 Fingerprint: b6:76:f2:ed:da:e8:77:5c:d3:6c:b0:f6:3c:d1:d4:60:39:61:f4:9e:62:65:ba:01:3a:2f:03:07:b6:d0:b8:04 ------BEGIN CERTIFICATE----- -MIIF0jCCA7qgAwIBAgIQIdbQSk8lD8kyN/yqXhKN6TANBgkqhkiG9w0BAQ0FADCB -gDELMAkGA1UEBhMCUEwxIjAgBgNVBAoTGVVuaXpldG8gVGVjaG5vbG9naWVzIFMu -QS4xJzAlBgNVBAsTHkNlcnR1bSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTEkMCIG -A1UEAxMbQ2VydHVtIFRydXN0ZWQgTmV0d29yayBDQSAyMCIYDzIwMTExMDA2MDgz -OTU2WhgPMjA0NjEwMDYwODM5NTZaMIGAMQswCQYDVQQGEwJQTDEiMCAGA1UEChMZ -VW5pemV0byBUZWNobm9sb2dpZXMgUy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRp -ZmljYXRpb24gQXV0aG9yaXR5MSQwIgYDVQQDExtDZXJ0dW0gVHJ1c3RlZCBOZXR3 -b3JrIENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC9+Xj45tWA -DGSdhhuWZGc/IjoedQF97/tcZ4zJzFxrqZHmuULlIEub2pt7uZld2ZuAS9eEQCsn -0+i6MLs+CRqnSZXvK0AkwpfHp+6bJe+oCgCXhVqqndwpyeI1B+twTUrWwbNWuKFB -OJvR+zF/j+Bf4bE/D44WSWDXBo0Y+aomEKsq09DRZ40bRr5HMNUuctHFY9rnY3lE -fktjJImGLjQ/KUxSiyqnwOKRKIm5wFv5HdnnJ63/mgKXwcZQkpsCLL2puTRZCr+E -Sv/f/rOf69me4Jgj7KZrdxYq28ytOxykh9xGc14ZYmhFV+SQgkK7QtbwYeDBoz1m -o130GO6IyY0XRSmZMnUCMe4pJshrAua1YkV/NxVaI2iJ1D7eTiew8EAMvE0Xy02i -sx7QBlrd9pPPV3WZ9fqGGmd4s7+W/jTcvedSVuWz5XV710GRBdxdaeOVDUO5/IOW -OZV7bIBaTxNyxtd9KXpEulKkKtVBRgkg/iKgtlswjbyJDNXXcPiHUv3a76xRLgez -Tv7QCdpw75j6VuZt27VXS9zlLCUVyJ4ueE742pyehizKV/Ma5ciSixqClnrDvFAS -adgOWkaLOusm+iPJtrCBvkIApPjW/jAux9JG9uWOdf3yzLnQh1vMBhBgu4M1t15n -3kfsmUjxpKEV/q2MYo45VU85FrmxY53/twIDAQABo0IwQDAPBgNVHRMBAf8EBTAD -AQH/MB0GA1UdDgQWBBS2oVQ5AsOgP46KvPrU+Bym0ToO/TAOBgNVHQ8BAf8EBAMC -AQYwDQYJKoZIhvcNAQENBQADggIBAHGlDs7k6b8/ONWJWsQCYftMxRQXLYtPU2sQ -F/xlhMcQSZDe28cmk4gmb3DWAl45oPePq5a1pRNcgRRtDoGCERuKTsZPpd1iHkTf -CVn0W3cLN+mLIMb4Ck4uWBzrM9DPhmDJ2vuAL55MYIR4PSFk1vtBHxgP58l1cb29 -XN40hz5BsA72udY/CROWFC/emh1auVbONTqwX3BNXuMp8SMoclm2q8KMZiYcdywm -djWLKKdpoPk79SPdhRB0yZADVpHnr7pH1BKXESLjokmUbOe3lEu6LaTaM4tMpkT/ -WjzGHWTYtTHkpjx6qFcL2+1hGsvxznN3Y6SHb0xRONbkX8eftoEq5IVIeVheO/jb -AoJnwTnbw3RLPTYe+SmTiGhbqEQZIfCn6IENLOiTNrQ3ssqwGyZ6miUfmpqAnksq -P/ujmv5zMnHCnsZy4YpoJ/HkD7TETKVhk/iXEAcqMCWpuchxuO9ozC1+9eB+D4Ko -b7a6bINDd82Kkhehnlt4Fj1F4jNy3eFmypnTycUm/Q1oBEauttmbjL4ZvrHG8hnj -XALKLNhvSgfZyTXaQHXyxKcZb55CEJh15pWLYLztxRLXis7VmFxWlgPF7ncGNf/P -5O4/E2Hu29othfDNrp2yGAlFw5Khchf8R7agCyzxxN5DaAhqXzvwdmP7zAYspsbi -DrW5viSP ------END CERTIFICATE----- - -# Issuer: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority -# Subject: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. 
Authority -# Label: "Hellenic Academic and Research Institutions RootCA 2015" -# Serial: 0 -# MD5 Fingerprint: ca:ff:e2:db:03:d9:cb:4b:e9:0f:ad:84:fd:7b:18:ce -# SHA1 Fingerprint: 01:0c:06:95:a6:98:19:14:ff:bf:5f:c6:b0:b6:95:ea:29:e9:12:a6 -# SHA256 Fingerprint: a0:40:92:9a:02:ce:53:b4:ac:f4:f2:ff:c6:98:1c:e4:49:6f:75:5e:6d:45:fe:0b:2a:69:2b:cd:52:52:3f:36 ------BEGIN CERTIFICATE----- -MIIGCzCCA/OgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBpjELMAkGA1UEBhMCR1Ix -DzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5k -IFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxQDA+BgNVBAMT -N0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgUm9v -dENBIDIwMTUwHhcNMTUwNzA3MTAxMTIxWhcNNDAwNjMwMTAxMTIxWjCBpjELMAkG -A1UEBhMCR1IxDzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNh -ZGVtaWMgYW5kIFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkx -QDA+BgNVBAMTN0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1 -dGlvbnMgUm9vdENBIDIwMTUwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoIC -AQDC+Kk/G4n8PDwEXT2QNrCROnk8ZlrvbTkBSRq0t89/TSNTt5AA4xMqKKYx8ZEA -4yjsriFBzh/a/X0SWwGDD7mwX5nh8hKDgE0GPt+sr+ehiGsxr/CL0BgzuNtFajT0 -AoAkKAoCFZVedioNmToUW/bLy1O8E00BiDeUJRtCvCLYjqOWXjrZMts+6PAQZe10 -4S+nfK8nNLspfZu2zwnI5dMK/IhlZXQK3HMcXM1AsRzUtoSMTFDPaI6oWa7CJ06C -ojXdFPQf/7J31Ycvqm59JCfnxssm5uX+Zwdj2EUN3TpZZTlYepKZcj2chF6IIbjV -9Cz82XBST3i4vTwri5WY9bPRaM8gFH5MXF/ni+X1NYEZN9cRCLdmvtNKzoNXADrD -gfgXy5I2XdGj2HUb4Ysn6npIQf1FGQatJ5lOwXBH3bWfgVMS5bGMSF0xQxfjjMZ6 -Y5ZLKTBOhE5iGV48zpeQpX8B653g+IuJ3SWYPZK2fu/Z8VFRfS0myGlZYeCsargq -NhEEelC9MoS+L9xy1dcdFkfkR2YgP/SWxa+OAXqlD3pk9Q0Yh9muiNX6hME6wGko -LfINaFGq46V3xqSQDqE3izEjR8EJCOtu93ib14L8hCCZSRm2Ekax+0VVFqmjZayc -Bw/qa9wfLgZy7IaIEuQt218FL+TwA9MmM+eAws1CoRc0CwIDAQABo0IwQDAPBgNV -HRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUcRVnyMjJvXVd -ctA4GGqd83EkVAswDQYJKoZIhvcNAQELBQADggIBAHW7bVRLqhBYRjTyYtcWNl0I -XtVsyIe9tC5G8jH4fOpCtZMWVdyhDBKg2mF+D1hYc2Ryx+hFjtyp8iY/xnmMsVMI -M4GwVhO+5lFc2JsKT0ucVlMC6U/2DWDqTUJV6HwbISHTGzrMd/K4kPFox/la/vot -9L/J9UUbzjgQKjeKeaO04wlshYaT/4mWJ3iBj2fjRnRUjtkNaeJK9E10A/+yd+2V -Z5fkscWrv2oj6NSU4kQoYsRL4vDY4ilrGnB+JGGTe08DMiUNRSQrlrRGar9KC/ea -j8GsGsVn82800vpzY4zvFrCopEYq+OsS7HK07/grfoxSwIuEVPkvPuNVqNxmsdnh -X9izjFk0WaSrT2y7HxjbdavYy5LNlDhhDgcGH0tGEPEVvo2FXDtKK4F5D7Rpn0lQ -l033DlZdwJVqwjbDG2jJ9SrcR5q+ss7FJej6A7na+RZukYT1HCjI/CbM1xyQVqdf -bzoEvM14iQuODy+jqk+iGxI9FghAD/FGTNeqewjBCvVtJ94Cj8rDtSvK6evIIVM4 -pcw72Hc3MKJP2W/R8kCtQXoXxdZKNYm3QdV8hn9VTYNKpXMgwDqvkPGaJI7ZjnHK -e7iG2rKPmT4dEw0SEe7Uq/DpFXYC5ODfqiAeW2GFZECpkJcNrVPSWh2HagCXZWK0 -vm9qp/UsQu0yrbYhnr68 ------END CERTIFICATE----- - -# Issuer: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority -# Subject: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. 
Authority -# Label: "Hellenic Academic and Research Institutions ECC RootCA 2015" -# Serial: 0 -# MD5 Fingerprint: 81:e5:b4:17:eb:c2:f5:e1:4b:0d:41:7b:49:92:fe:ef -# SHA1 Fingerprint: 9f:f1:71:8d:92:d5:9a:f3:7d:74:97:b4:bc:6f:84:68:0b:ba:b6:66 -# SHA256 Fingerprint: 44:b5:45:aa:8a:25:e6:5a:73:ca:15:dc:27:fc:36:d2:4c:1c:b9:95:3a:06:65:39:b1:15:82:dc:48:7b:48:33 ------BEGIN CERTIFICATE----- -MIICwzCCAkqgAwIBAgIBADAKBggqhkjOPQQDAjCBqjELMAkGA1UEBhMCR1IxDzAN -BgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl -c2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxRDBCBgNVBAMTO0hl -bGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgRUNDIFJv -b3RDQSAyMDE1MB4XDTE1MDcwNzEwMzcxMloXDTQwMDYzMDEwMzcxMlowgaoxCzAJ -BgNVBAYTAkdSMQ8wDQYDVQQHEwZBdGhlbnMxRDBCBgNVBAoTO0hlbGxlbmljIEFj -YWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgQ2VydC4gQXV0aG9yaXR5 -MUQwQgYDVQQDEztIZWxsZW5pYyBBY2FkZW1pYyBhbmQgUmVzZWFyY2ggSW5zdGl0 -dXRpb25zIEVDQyBSb290Q0EgMjAxNTB2MBAGByqGSM49AgEGBSuBBAAiA2IABJKg -QehLgoRc4vgxEZmGZE4JJS+dQS8KrjVPdJWyUWRrjWvmP3CV8AVER6ZyOFB2lQJa -jq4onvktTpnvLEhvTCUp6NFxW98dwXU3tNf6e3pCnGoKVlp8aQuqgAkkbH7BRqNC -MEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFLQi -C4KZJAEOnLvkDv2/+5cgk5kqMAoGCCqGSM49BAMCA2cAMGQCMGfOFmI4oqxiRaep -lSTAGiecMjvAwNW6qef4BENThe5SId6d9SWDPp5YSy/XZxMOIQIwBeF1Ad5o7Sof -TUwJCA3sS61kFyjndc5FZXIhF8siQQ6ME5g4mlRtm8rifOoCWCKR ------END CERTIFICATE----- - -# Issuer: CN=ISRG Root X1 O=Internet Security Research Group -# Subject: CN=ISRG Root X1 O=Internet Security Research Group -# Label: "ISRG Root X1" -# Serial: 172886928669790476064670243504169061120 -# MD5 Fingerprint: 0c:d2:f9:e0:da:17:73:e9:ed:86:4d:a5:e3:70:e7:4e -# SHA1 Fingerprint: ca:bd:2a:79:a1:07:6a:31:f2:1d:25:36:35:cb:03:9d:43:29:a5:e8 -# SHA256 Fingerprint: 96:bc:ec:06:26:49:76:f3:74:60:77:9a:cf:28:c5:a7:cf:e8:a3:c0:aa:e1:1a:8f:fc:ee:05:c0:bd:df:08:c6 ------BEGIN CERTIFICATE----- -MIIFazCCA1OgAwIBAgIRAIIQz7DSQONZRGPgu2OCiwAwDQYJKoZIhvcNAQELBQAw -TzELMAkGA1UEBhMCVVMxKTAnBgNVBAoTIEludGVybmV0IFNlY3VyaXR5IFJlc2Vh -cmNoIEdyb3VwMRUwEwYDVQQDEwxJU1JHIFJvb3QgWDEwHhcNMTUwNjA0MTEwNDM4 -WhcNMzUwNjA0MTEwNDM4WjBPMQswCQYDVQQGEwJVUzEpMCcGA1UEChMgSW50ZXJu -ZXQgU2VjdXJpdHkgUmVzZWFyY2ggR3JvdXAxFTATBgNVBAMTDElTUkcgUm9vdCBY -MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK3oJHP0FDfzm54rVygc -h77ct984kIxuPOZXoHj3dcKi/vVqbvYATyjb3miGbESTtrFj/RQSa78f0uoxmyF+ -0TM8ukj13Xnfs7j/EvEhmkvBioZxaUpmZmyPfjxwv60pIgbz5MDmgK7iS4+3mX6U -A5/TR5d8mUgjU+g4rk8Kb4Mu0UlXjIB0ttov0DiNewNwIRt18jA8+o+u3dpjq+sW -T8KOEUt+zwvo/7V3LvSye0rgTBIlDHCNAymg4VMk7BPZ7hm/ELNKjD+Jo2FR3qyH -B5T0Y3HsLuJvW5iB4YlcNHlsdu87kGJ55tukmi8mxdAQ4Q7e2RCOFvu396j3x+UC -B5iPNgiV5+I3lg02dZ77DnKxHZu8A/lJBdiB3QW0KtZB6awBdpUKD9jf1b0SHzUv -KBds0pjBqAlkd25HN7rOrFleaJ1/ctaJxQZBKT5ZPt0m9STJEadao0xAH0ahmbWn -OlFuhjuefXKnEgV4We0+UXgVCwOPjdAvBbI+e0ocS3MFEvzG6uBQE3xDk3SzynTn -jh8BCNAw1FtxNrQHusEwMFxIt4I7mKZ9YIqioymCzLq9gwQbooMDQaHWBfEbwrbw -qHyGO0aoSCqI3Haadr8faqU9GY/rOPNk3sgrDQoo//fb4hVC1CLQJ13hef4Y53CI -rU7m2Ys6xt0nUW7/vGT1M0NPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV -HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR5tFnme7bl5AFzgAiIyBpY9umbbjANBgkq -hkiG9w0BAQsFAAOCAgEAVR9YqbyyqFDQDLHYGmkgJykIrGF1XIpu+ILlaS/V9lZL -ubhzEFnTIZd+50xx+7LSYK05qAvqFyFWhfFQDlnrzuBZ6brJFe+GnY+EgPbk6ZGQ -3BebYhtF8GaV0nxvwuo77x/Py9auJ/GpsMiu/X1+mvoiBOv/2X/qkSsisRcOj/KK -NFtY2PwByVS5uCbMiogziUwthDyC3+6WVwW6LLv3xLfHTjuCvjHIInNzktHCgKQ5 -ORAzI4JMPJ+GslWYHb4phowim57iaztXOoJwTdwJx4nLCgdNbOhdjsnvzqvHu7Ur -TkXWStAmzOVyyghqpZXjFaH3pO3JLF+l+/+sKAIuvtd7u+Nxe5AW0wdeRlN8NwdC -jNPElpzVmbUq4JUagEiuTDkHzsxHpFKVK7q4+63SM1N95R1NbdWhscdCb+ZAJzVc 
-oyi3B43njTOQ5yOf+1CceWxG1bQVs5ZufpsMljq4Ui0/1lvh+wjChP4kqKOJ2qxq -4RgqsahDYVvTH9w7jXbyLeiNdd8XM2w9U/t7y0Ff/9yi0GE44Za4rF2LN9d11TPA -mRGunUHBcnWEvgJBQl9nJEiU0Zsnvgc/ubhPgXRR4Xq37Z0j4r7g1SgEEzwxA57d -emyPxgcYxn/eR44/KJ4EBs+lVDR3veyJm+kXQ99b21/+jh5Xos1AnX5iItreGCc= ------END CERTIFICATE----- - -# Issuer: O=FNMT-RCM OU=AC RAIZ FNMT-RCM -# Subject: O=FNMT-RCM OU=AC RAIZ FNMT-RCM -# Label: "AC RAIZ FNMT-RCM" -# Serial: 485876308206448804701554682760554759 -# MD5 Fingerprint: e2:09:04:b4:d3:bd:d1:a0:14:fd:1a:d2:47:c4:57:1d -# SHA1 Fingerprint: ec:50:35:07:b2:15:c4:95:62:19:e2:a8:9a:5b:42:99:2c:4c:2c:20 -# SHA256 Fingerprint: eb:c5:57:0c:29:01:8c:4d:67:b1:aa:12:7b:af:12:f7:03:b4:61:1e:bc:17:b7:da:b5:57:38:94:17:9b:93:fa ------BEGIN CERTIFICATE----- -MIIFgzCCA2ugAwIBAgIPXZONMGc2yAYdGsdUhGkHMA0GCSqGSIb3DQEBCwUAMDsx -CzAJBgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJ -WiBGTk1ULVJDTTAeFw0wODEwMjkxNTU5NTZaFw0zMDAxMDEwMDAwMDBaMDsxCzAJ -BgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJWiBG -Tk1ULVJDTTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBALpxgHpMhm5/ -yBNtwMZ9HACXjywMI7sQmkCpGreHiPibVmr75nuOi5KOpyVdWRHbNi63URcfqQgf -BBckWKo3Shjf5TnUV/3XwSyRAZHiItQDwFj8d0fsjz50Q7qsNI1NOHZnjrDIbzAz -WHFctPVrbtQBULgTfmxKo0nRIBnuvMApGGWn3v7v3QqQIecaZ5JCEJhfTzC8PhxF -tBDXaEAUwED653cXeuYLj2VbPNmaUtu1vZ5Gzz3rkQUCwJaydkxNEJY7kvqcfw+Z -374jNUUeAlz+taibmSXaXvMiwzn15Cou08YfxGyqxRxqAQVKL9LFwag0Jl1mpdIC -IfkYtwb1TplvqKtMUejPUBjFd8g5CSxJkjKZqLsXF3mwWsXmo8RZZUc1g16p6DUL -mbvkzSDGm0oGObVo/CK67lWMK07q87Hj/LaZmtVC+nFNCM+HHmpxffnTtOmlcYF7 -wk5HlqX2doWjKI/pgG6BU6VtX7hI+cL5NqYuSf+4lsKMB7ObiFj86xsc3i1w4peS -MKGJ47xVqCfWS+2QrYv6YyVZLag13cqXM7zlzced0ezvXg5KkAYmY6252TUtB7p2 -ZSysV4999AeU14ECll2jB0nVetBX+RvnU0Z1qrB5QstocQjpYL05ac70r8NWQMet -UqIJ5G+GR4of6ygnXYMgrwTJbFaai0b1AgMBAAGjgYMwgYAwDwYDVR0TAQH/BAUw -AwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFPd9xf3E6Jobd2Sn9R2gzL+H -YJptMD4GA1UdIAQ3MDUwMwYEVR0gADArMCkGCCsGAQUFBwIBFh1odHRwOi8vd3d3 -LmNlcnQuZm5tdC5lcy9kcGNzLzANBgkqhkiG9w0BAQsFAAOCAgEAB5BK3/MjTvDD -nFFlm5wioooMhfNzKWtN/gHiqQxjAb8EZ6WdmF/9ARP67Jpi6Yb+tmLSbkyU+8B1 -RXxlDPiyN8+sD8+Nb/kZ94/sHvJwnvDKuO+3/3Y3dlv2bojzr2IyIpMNOmqOFGYM -LVN0V2Ue1bLdI4E7pWYjJ2cJj+F3qkPNZVEI7VFY/uY5+ctHhKQV8Xa7pO6kO8Rf -77IzlhEYt8llvhjho6Tc+hj507wTmzl6NLrTQfv6MooqtyuGC2mDOL7Nii4LcK2N -JpLuHvUBKwrZ1pebbuCoGRw6IYsMHkCtA+fdZn71uSANA+iW+YJF1DngoABd15jm -fZ5nc8OaKveri6E6FO80vFIOiZiaBECEHX5FaZNXzuvO+FB8TxxuBEOb+dY7Ixjp -6o7RTUaN8Tvkasq6+yO3m/qZASlaWFot4/nUbQ4mrcFuNLwy+AwF+mWj2zs3gyLp -1txyM/1d8iC9djwj2ij3+RvrWWTV3F9yfiD8zYm1kGdNYno/Tq0dwzn+evQoFt9B -9kiABdcPUXmsEKvU7ANm5mqwujGSQkBqvjrTcuFqN1W8rB2Vt2lh8kORdOag0wok -RqEIr9baRRmW1FMdW4R58MD3R++Lj8UGrp1MYp3/RgT408m2ECVAdf4WqslKYIYv -uu8wd+RU4riEmViAqhOLUTpPSPaLtrM= ------END CERTIFICATE----- - -# Issuer: CN=Amazon Root CA 1 O=Amazon -# Subject: CN=Amazon Root CA 1 O=Amazon -# Label: "Amazon Root CA 1" -# Serial: 143266978916655856878034712317230054538369994 -# MD5 Fingerprint: 43:c6:bf:ae:ec:fe:ad:2f:18:c6:88:68:30:fc:c8:e6 -# SHA1 Fingerprint: 8d:a7:f9:65:ec:5e:fc:37:91:0f:1c:6e:59:fd:c1:cc:6a:6e:de:16 -# SHA256 Fingerprint: 8e:cd:e6:88:4f:3d:87:b1:12:5b:a3:1a:c3:fc:b1:3d:70:16:de:7f:57:cc:90:4f:e1:cb:97:c6:ae:98:19:6e ------BEGIN CERTIFICATE----- -MIIDQTCCAimgAwIBAgITBmyfz5m/jAo54vB4ikPmljZbyjANBgkqhkiG9w0BAQsF -ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6 -b24gUm9vdCBDQSAxMB4XDTE1MDUyNjAwMDAwMFoXDTM4MDExNzAwMDAwMFowOTEL -MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv -b3QgQ0EgMTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALJ4gHHKeNXj 
-ca9HgFB0fW7Y14h29Jlo91ghYPl0hAEvrAIthtOgQ3pOsqTQNroBvo3bSMgHFzZM -9O6II8c+6zf1tRn4SWiw3te5djgdYZ6k/oI2peVKVuRF4fn9tBb6dNqcmzU5L/qw -IFAGbHrQgLKm+a/sRxmPUDgH3KKHOVj4utWp+UhnMJbulHheb4mjUcAwhmahRWa6 -VOujw5H5SNz/0egwLX0tdHA114gk957EWW67c4cX8jJGKLhD+rcdqsq08p8kDi1L -93FcXmn/6pUCyziKrlA4b9v7LWIbxcceVOF34GfID5yHI9Y/QCB/IIDEgEw+OyQm -jgSubJrIqg0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC -AYYwHQYDVR0OBBYEFIQYzIU07LwMlJQuCFmcx7IQTgoIMA0GCSqGSIb3DQEBCwUA -A4IBAQCY8jdaQZChGsV2USggNiMOruYou6r4lK5IpDB/G/wkjUu0yKGX9rbxenDI -U5PMCCjjmCXPI6T53iHTfIUJrU6adTrCC2qJeHZERxhlbI1Bjjt/msv0tadQ1wUs -N+gDS63pYaACbvXy8MWy7Vu33PqUXHeeE6V/Uq2V8viTO96LXFvKWlJbYK8U90vv -o/ufQJVtMVT8QtPHRh8jrdkPSHCa2XV4cdFyQzR1bldZwgJcJmApzyMZFo6IQ6XU -5MsI+yMRQ+hDKXJioaldXgjUkK642M4UwtBV8ob2xJNDd2ZhwLnoQdeXeGADbkpy -rqXRfboQnoZsG4q5WTP468SQvvG5 ------END CERTIFICATE----- - -# Issuer: CN=Amazon Root CA 2 O=Amazon -# Subject: CN=Amazon Root CA 2 O=Amazon -# Label: "Amazon Root CA 2" -# Serial: 143266982885963551818349160658925006970653239 -# MD5 Fingerprint: c8:e5:8d:ce:a8:42:e2:7a:c0:2a:5c:7c:9e:26:bf:66 -# SHA1 Fingerprint: 5a:8c:ef:45:d7:a6:98:59:76:7a:8c:8b:44:96:b5:78:cf:47:4b:1a -# SHA256 Fingerprint: 1b:a5:b2:aa:8c:65:40:1a:82:96:01:18:f8:0b:ec:4f:62:30:4d:83:ce:c4:71:3a:19:c3:9c:01:1e:a4:6d:b4 ------BEGIN CERTIFICATE----- -MIIFQTCCAymgAwIBAgITBmyf0pY1hp8KD+WGePhbJruKNzANBgkqhkiG9w0BAQwF -ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6 -b24gUm9vdCBDQSAyMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTEL -MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv -b3QgQ0EgMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK2Wny2cSkxK -gXlRmeyKy2tgURO8TW0G/LAIjd0ZEGrHJgw12MBvIITplLGbhQPDW9tK6Mj4kHbZ -W0/jTOgGNk3Mmqw9DJArktQGGWCsN0R5hYGCrVo34A3MnaZMUnbqQ523BNFQ9lXg -1dKmSYXpN+nKfq5clU1Imj+uIFptiJXZNLhSGkOQsL9sBbm2eLfq0OQ6PBJTYv9K -8nu+NQWpEjTj82R0Yiw9AElaKP4yRLuH3WUnAnE72kr3H9rN9yFVkE8P7K6C4Z9r -2UXTu/Bfh+08LDmG2j/e7HJV63mjrdvdfLC6HM783k81ds8P+HgfajZRRidhW+me -z/CiVX18JYpvL7TFz4QuK/0NURBs+18bvBt+xa47mAExkv8LV/SasrlX6avvDXbR -8O70zoan4G7ptGmh32n2M8ZpLpcTnqWHsFcQgTfJU7O7f/aS0ZzQGPSSbtqDT6Zj -mUyl+17vIWR6IF9sZIUVyzfpYgwLKhbcAS4y2j5L9Z469hdAlO+ekQiG+r5jqFoz -7Mt0Q5X5bGlSNscpb/xVA1wf+5+9R+vnSUeVC06JIglJ4PVhHvG/LopyboBZ/1c6 -+XUyo05f7O0oYtlNc/LMgRdg7c3r3NunysV+Ar3yVAhU/bQtCSwXVEqY0VThUWcI -0u1ufm8/0i2BWSlmy5A5lREedCf+3euvAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMB -Af8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSwDPBMMPQFWAJI/TPlUq9LhONm -UjANBgkqhkiG9w0BAQwFAAOCAgEAqqiAjw54o+Ci1M3m9Zh6O+oAA7CXDpO8Wqj2 -LIxyh6mx/H9z/WNxeKWHWc8w4Q0QshNabYL1auaAn6AFC2jkR2vHat+2/XcycuUY -+gn0oJMsXdKMdYV2ZZAMA3m3MSNjrXiDCYZohMr/+c8mmpJ5581LxedhpxfL86kS -k5Nrp+gvU5LEYFiwzAJRGFuFjWJZY7attN6a+yb3ACfAXVU3dJnJUH/jWS5E4ywl -7uxMMne0nxrpS10gxdr9HIcWxkPo1LsmmkVwXqkLN1PiRnsn/eBG8om3zEK2yygm -btmlyTrIQRNg91CMFa6ybRoVGld45pIq2WWQgj9sAq+uEjonljYE1x2igGOpm/Hl -urR8FLBOybEfdF849lHqm/osohHUqS0nGkWxr7JOcQ3AWEbWaQbLU8uz/mtBzUF+ -fUwPfHJ5elnNXkoOrJupmHN5fLT0zLm4BwyydFy4x2+IoZCn9Kr5v2c69BoVYh63 -n749sSmvZ6ES8lgQGVMDMBu4Gon2nL2XA46jCfMdiyHxtN/kHNGfZQIG6lzWE7OE -76KlXIx3KadowGuuQNKotOrN8I1LOJwZmhsoVLiJkO/KdYE+HvJkJMcYr07/R54H -9jVlpNMKVv/1F2Rs76giJUmTtt8AF9pYfl3uxRuw0dFfIRDH+fO6AgonB8Xx1sfT -4PsJYGw= ------END CERTIFICATE----- - -# Issuer: CN=Amazon Root CA 3 O=Amazon -# Subject: CN=Amazon Root CA 3 O=Amazon -# Label: "Amazon Root CA 3" -# Serial: 143266986699090766294700635381230934788665930 -# MD5 Fingerprint: a0:d4:ef:0b:f7:b5:d8:49:95:2a:ec:f5:c4:fc:81:87 -# SHA1 Fingerprint: 0d:44:dd:8c:3c:8c:1a:1a:58:75:64:81:e9:0f:2e:2a:ff:b3:d2:6e -# SHA256 Fingerprint: 
18:ce:6c:fe:7b:f1:4e:60:b2:e3:47:b8:df:e8:68:cb:31:d0:2e:bb:3a:da:27:15:69:f5:03:43:b4:6d:b3:a4 ------BEGIN CERTIFICATE----- -MIIBtjCCAVugAwIBAgITBmyf1XSXNmY/Owua2eiedgPySjAKBggqhkjOPQQDAjA5 -MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g -Um9vdCBDQSAzMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG -A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg -Q0EgMzBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABCmXp8ZBf8ANm+gBG1bG8lKl -ui2yEujSLtf6ycXYqm0fc4E7O5hrOXwzpcVOho6AF2hiRVd9RFgdszflZwjrZt6j -QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSr -ttvXBp43rDCGB5Fwx5zEGbF4wDAKBggqhkjOPQQDAgNJADBGAiEA4IWSoxe3jfkr -BqWTrBqYaGFy+uGh0PsceGCmQ5nFuMQCIQCcAu/xlJyzlvnrxir4tiz+OpAUFteM -YyRIHN8wfdVoOw== ------END CERTIFICATE----- - -# Issuer: CN=Amazon Root CA 4 O=Amazon -# Subject: CN=Amazon Root CA 4 O=Amazon -# Label: "Amazon Root CA 4" -# Serial: 143266989758080763974105200630763877849284878 -# MD5 Fingerprint: 89:bc:27:d5:eb:17:8d:06:6a:69:d5:fd:89:47:b4:cd -# SHA1 Fingerprint: f6:10:84:07:d6:f8:bb:67:98:0c:c2:e2:44:c2:eb:ae:1c:ef:63:be -# SHA256 Fingerprint: e3:5d:28:41:9e:d0:20:25:cf:a6:90:38:cd:62:39:62:45:8d:a5:c6:95:fb:de:a3:c2:2b:0b:fb:25:89:70:92 ------BEGIN CERTIFICATE----- -MIIB8jCCAXigAwIBAgITBmyf18G7EEwpQ+Vxe3ssyBrBDjAKBggqhkjOPQQDAzA5 -MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g -Um9vdCBDQSA0MB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG -A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg -Q0EgNDB2MBAGByqGSM49AgEGBSuBBAAiA2IABNKrijdPo1MN/sGKe0uoe0ZLY7Bi -9i0b2whxIdIA6GO9mif78DluXeo9pcmBqqNbIJhFXRbb/egQbeOc4OO9X4Ri83Bk -M6DLJC9wuoihKqB1+IGuYgbEgds5bimwHvouXKNCMEAwDwYDVR0TAQH/BAUwAwEB -/zAOBgNVHQ8BAf8EBAMCAYYwHQYDVR0OBBYEFNPsxzplbszh2naaVvuc84ZtV+WB -MAoGCCqGSM49BAMDA2gAMGUCMDqLIfG9fhGt0O9Yli/W651+kI0rz2ZVwyzjKKlw -CkcO8DdZEv8tmZQoTipPNU0zWgIxAOp1AE47xDqUEpHJWEadIRNyp4iciuRMStuW -1KyLa2tJElMzrdfkviT8tQp21KW8EA== ------END CERTIFICATE----- - -# Issuer: CN=LuxTrust Global Root 2 O=LuxTrust S.A. -# Subject: CN=LuxTrust Global Root 2 O=LuxTrust S.A. 
-# Label: "LuxTrust Global Root 2" -# Serial: 59914338225734147123941058376788110305822489521 -# MD5 Fingerprint: b2:e1:09:00:61:af:f7:f1:91:6f:c4:ad:8d:5e:3b:7c -# SHA1 Fingerprint: 1e:0e:56:19:0a:d1:8b:25:98:b2:04:44:ff:66:8a:04:17:99:5f:3f -# SHA256 Fingerprint: 54:45:5f:71:29:c2:0b:14:47:c4:18:f9:97:16:8f:24:c5:8f:c5:02:3b:f5:da:5b:e2:eb:6e:1d:d8:90:2e:d5 ------BEGIN CERTIFICATE----- -MIIFwzCCA6ugAwIBAgIUCn6m30tEntpqJIWe5rgV0xZ/u7EwDQYJKoZIhvcNAQEL -BQAwRjELMAkGA1UEBhMCTFUxFjAUBgNVBAoMDUx1eFRydXN0IFMuQS4xHzAdBgNV -BAMMFkx1eFRydXN0IEdsb2JhbCBSb290IDIwHhcNMTUwMzA1MTMyMTU3WhcNMzUw -MzA1MTMyMTU3WjBGMQswCQYDVQQGEwJMVTEWMBQGA1UECgwNTHV4VHJ1c3QgUy5B -LjEfMB0GA1UEAwwWTHV4VHJ1c3QgR2xvYmFsIFJvb3QgMjCCAiIwDQYJKoZIhvcN -AQEBBQADggIPADCCAgoCggIBANeFl78RmOnwYoNMPIf5U2o3C/IPPIfOb9wmKb3F -ibrJgz337spbxm1Jc7TJRqMbNBM/wYlFV/TZsfs2ZUv7COJIcRHIbjuend+JZTem -hfY7RBi2xjcwYkSSl2l9QjAk5A0MiWtj3sXh306pFGxT4GHO9hcvHTy95iJMHZP1 -EMShduxq3sVs35a0VkBCwGKSMKEtFZSg0iAGCW5qbeXrt77U8PEVfIvmTroTzEsn -Xpk8F12PgX8zPU/TPxvsXD/wPEx1bvKm1Z3aLQdjAsZy6ZS8TEmVT4hSyNvoaYL4 -zDRbIvCGp4m9SAptZoFtyMhk+wHh9OHe2Z7d21vUKpkmFRseTJIpgp7VkoGSQXAZ -96Tlk0u8d2cx3Rz9MXANF5kM+Qw5GSoXtTBxVdUPrljhPS80m8+f9niFwpN6cj5m -j5wWEWCPnolvZ77gR1o7DJpni89Gxq44o/KnvObWhWszJHAiS8sIm7vI+AIpHb4g -DEa/a4ebsypmQjVGbKq6rfmYe+lQVRQxv7HaLe2ArWgk+2mr2HETMOZns4dA/Yl+ -8kPREd8vZS9kzl8UubG/Mb2HeFpZZYiq/FkySIbWTLkpS5XTdvN3JW1CHDiDTf2j -X5t/Lax5Gw5CMZdjpPuKadUiDTSQMC6otOBttpSsvItO13D8xTiOZCXhTTmQzsmH -hFhxAgMBAAGjgagwgaUwDwYDVR0TAQH/BAUwAwEB/zBCBgNVHSAEOzA5MDcGByuB -KwEBAQowLDAqBggrBgEFBQcCARYeaHR0cHM6Ly9yZXBvc2l0b3J5Lmx1eHRydXN0 -Lmx1MA4GA1UdDwEB/wQEAwIBBjAfBgNVHSMEGDAWgBT/GCh2+UgFLKGu8SsbK7JT -+Et8szAdBgNVHQ4EFgQU/xgodvlIBSyhrvErGyuyU/hLfLMwDQYJKoZIhvcNAQEL -BQADggIBAGoZFO1uecEsh9QNcH7X9njJCwROxLHOk3D+sFTAMs2ZMGQXvw/l4jP9 -BzZAcg4atmpZ1gDlaCDdLnINH2pkMSCEfUmmWjfrRcmF9dTHF5kH5ptV5AzoqbTO -jFu1EVzPig4N1qx3gf4ynCSecs5U89BvolbW7MM3LGVYvlcAGvI1+ut7MV3CwRI9 -loGIlonBWVx65n9wNOeD4rHh4bhY79SV5GCc8JaXcozrhAIuZY+kt9J/Z93I055c -qqmkoCUUBpvsT34tC38ddfEz2O3OuHVtPlu5mB0xDVbYQw8wkbIEa91WvpWAVWe+ -2M2D2RjuLg+GLZKecBPs3lHJQ3gCpU3I+V/EkVhGFndadKpAvAefMLmx9xIX3eP/ -JEAdemrRTxgKqpAd60Ae36EeRJIQmvKN4dFLRp7oRUKX6kWZ8+xm1QL68qZKJKre -zrnK+T+Tb/mjuuqlPpmt/f97mfVl7vBZKGfXkJWkE4SphMHozs51k2MavDzq1WQf -LSoSOcbDWjLtR5EWDrw4wVDej8oqkDQc7kGUnF4ZLvhFSZl0kbAEb+MEWrGrKqv+ -x9CWttrhSmQGbmBNvUJO/3jaJMobtNeWOWyu8Q6qp31IiyBMz2TWuJdGsE7RKlY6 -oJO9r4Ak4Ap+58rVyuiFVdw2KuGUaJPHZnJED4AhMmwlxyOAgwrr ------END CERTIFICATE----- - -# Issuer: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM -# Subject: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM -# Label: "TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1" -# Serial: 1 -# MD5 Fingerprint: dc:00:81:dc:69:2f:3e:2f:b0:3b:f6:3d:5a:91:8e:49 -# SHA1 Fingerprint: 31:43:64:9b:ec:ce:27:ec:ed:3a:3f:0b:8f:0d:e4:e8:91:dd:ee:ca -# SHA256 Fingerprint: 46:ed:c3:68:90:46:d5:3a:45:3f:b3:10:4a:b8:0d:ca:ec:65:8b:26:60:ea:16:29:dd:7e:86:79:90:64:87:16 ------BEGIN CERTIFICATE----- -MIIEYzCCA0ugAwIBAgIBATANBgkqhkiG9w0BAQsFADCB0jELMAkGA1UEBhMCVFIx -GDAWBgNVBAcTD0dlYnplIC0gS29jYWVsaTFCMEAGA1UEChM5VHVya2l5ZSBCaWxp -bXNlbCB2ZSBUZWtub2xvamlrIEFyYXN0aXJtYSBLdXJ1bXUgLSBUVUJJVEFLMS0w -KwYDVQQLEyRLYW11IFNlcnRpZmlrYXN5b24gTWVya2V6aSAtIEthbXUgU00xNjA0 -BgNVBAMTLVRVQklUQUsgS2FtdSBTTSBTU0wgS29rIFNlcnRpZmlrYXNpIC0gU3Vy -dW0gMTAeFw0xMzExMjUwODI1NTVaFw00MzEwMjUwODI1NTVaMIHSMQswCQYDVQQG 
-EwJUUjEYMBYGA1UEBxMPR2ViemUgLSBLb2NhZWxpMUIwQAYDVQQKEzlUdXJraXll -IEJpbGltc2VsIHZlIFRla25vbG9qaWsgQXJhc3Rpcm1hIEt1cnVtdSAtIFRVQklU -QUsxLTArBgNVBAsTJEthbXUgU2VydGlmaWthc3lvbiBNZXJrZXppIC0gS2FtdSBT -TTE2MDQGA1UEAxMtVFVCSVRBSyBLYW11IFNNIFNTTCBLb2sgU2VydGlmaWthc2kg -LSBTdXJ1bSAxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAr3UwM6q7 -a9OZLBI3hNmNe5eA027n/5tQlT6QlVZC1xl8JoSNkvoBHToP4mQ4t4y86Ij5iySr -LqP1N+RAjhgleYN1Hzv/bKjFxlb4tO2KRKOrbEz8HdDc72i9z+SqzvBV96I01INr -N3wcwv61A+xXzry0tcXtAA9TNypN9E8Mg/uGz8v+jE69h/mniyFXnHrfA2eJLJ2X -YacQuFWQfw4tJzh03+f92k4S400VIgLI4OD8D62K18lUUMw7D8oWgITQUVbDjlZ/ -iSIzL+aFCr2lqBs23tPcLG07xxO9WSMs5uWk99gL7eqQQESolbuT1dCANLZGeA4f -AJNG4e7p+exPFwIDAQABo0IwQDAdBgNVHQ4EFgQUZT/HiobGPN08VFw1+DrtUgxH -V8gwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEL -BQADggEBACo/4fEyjq7hmFxLXs9rHmoJ0iKpEsdeV31zVmSAhHqT5Am5EM2fKifh -AHe+SMg1qIGf5LgsyX8OsNJLN13qudULXjS99HMpw+0mFZx+CFOKWI3QSyjfwbPf -IPP54+M638yclNhOT8NrF7f3cuitZjO1JVOr4PhMqZ398g26rrnZqsZr+ZO7rqu4 -lzwDGrpDxpa5RXI4s6ehlj2Re37AIVNMh+3yC1SVUZPVIqUNivGTDj5UDrDYyU7c -8jEyVupk+eq1nRZmQnLzf9OxMUP8pI4X8W0jq5Rm+K37DwhuJi1/FwcJsoz7UMCf -lo3Ptv0AnVoUmr8CRPXBwp8iXqIPoeM= ------END CERTIFICATE----- - -# Issuer: CN=GDCA TrustAUTH R5 ROOT O=GUANG DONG CERTIFICATE AUTHORITY CO.,LTD. -# Subject: CN=GDCA TrustAUTH R5 ROOT O=GUANG DONG CERTIFICATE AUTHORITY CO.,LTD. -# Label: "GDCA TrustAUTH R5 ROOT" -# Serial: 9009899650740120186 -# MD5 Fingerprint: 63:cc:d9:3d:34:35:5c:6f:53:a3:e2:08:70:48:1f:b4 -# SHA1 Fingerprint: 0f:36:38:5b:81:1a:25:c3:9b:31:4e:83:ca:e9:34:66:70:cc:74:b4 -# SHA256 Fingerprint: bf:ff:8f:d0:44:33:48:7d:6a:8a:a6:0c:1a:29:76:7a:9f:c2:bb:b0:5e:42:0f:71:3a:13:b9:92:89:1d:38:93 ------BEGIN CERTIFICATE----- -MIIFiDCCA3CgAwIBAgIIfQmX/vBH6nowDQYJKoZIhvcNAQELBQAwYjELMAkGA1UE -BhMCQ04xMjAwBgNVBAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZ -IENPLixMVEQuMR8wHQYDVQQDDBZHRENBIFRydXN0QVVUSCBSNSBST09UMB4XDTE0 -MTEyNjA1MTMxNVoXDTQwMTIzMTE1NTk1OVowYjELMAkGA1UEBhMCQ04xMjAwBgNV -BAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZIENPLixMVEQuMR8w -HQYDVQQDDBZHRENBIFRydXN0QVVUSCBSNSBST09UMIICIjANBgkqhkiG9w0BAQEF -AAOCAg8AMIICCgKCAgEA2aMW8Mh0dHeb7zMNOwZ+Vfy1YI92hhJCfVZmPoiC7XJj -Dp6L3TQsAlFRwxn9WVSEyfFrs0yw6ehGXTjGoqcuEVe6ghWinI9tsJlKCvLriXBj -TnnEt1u9ol2x8kECK62pOqPseQrsXzrj/e+APK00mxqriCZ7VqKChh/rNYmDf1+u -KU49tm7srsHwJ5uu4/Ts765/94Y9cnrrpftZTqfrlYwiOXnhLQiPzLyRuEH3FMEj -qcOtmkVEs7LXLM3GKeJQEK5cy4KOFxg2fZfmiJqwTTQJ9Cy5WmYqsBebnh52nUpm -MUHfP/vFBu8btn4aRjb3ZGM74zkYI+dndRTVdVeSN72+ahsmUPI2JgaQxXABZG12 -ZuGR224HwGGALrIuL4xwp9E7PLOR5G62xDtw8mySlwnNR30YwPO7ng/Wi64HtloP -zgsMR6flPri9fcebNaBhlzpBdRfMK5Z3KpIhHtmVdiBnaM8Nvd/WHwlqmuLMc3Gk -L30SgLdTMEZeS1SZD2fJpcjyIMGC7J0R38IC+xo70e0gmu9lZJIQDSri3nDxGGeC -jGHeuLzRL5z7D9Ar7Rt2ueQ5Vfj4oR24qoAATILnsn8JuLwwoC8N9VKejveSswoA -HQBUlwbgsQfZxw9cZX08bVlX5O2ljelAU58VS6Bx9hoh49pwBiFYFIeFd3mqgnkC -AwEAAaNCMEAwHQYDVR0OBBYEFOLJQJ9NzuiaoXzPDj9lxSmIahlRMA8GA1UdEwEB -/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQDRSVfg -p8xoWLoBDysZzY2wYUWsEe1jUGn4H3++Fo/9nesLqjJHdtJnJO29fDMylyrHBYZm -DRd9FBUb1Ov9H5r2XpdptxolpAqzkT9fNqyL7FeoPueBihhXOYV0GkLH6VsTX4/5 -COmSdI31R9KrO9b7eGZONn356ZLpBN79SWP8bfsUcZNnL0dKt7n/HipzcEYwv1ry -L3ml4Y0M2fmyYzeMN2WFcGpcWwlyua1jPLHd+PwyvzeG5LuOmCd+uh8W4XAR8gPf -JWIyJyYYMoSf/wA6E7qaTfRPuBRwIrHKK5DOKcFw9C+df/KQHtZa37dG/OaG+svg -IHZ6uqbL9XzeYqWxi+7egmaKTjowHz+Ay60nugxe19CxVsp3cbK1daFQqUBDF8Io -2c9Si1vIY9RCPqAzekYu9wogRlR+ak8x8YF+QnQ4ZXMn7sZ8uI7XpTrXmKGcjBBV -09tL7ECQ8s1uV9JiDnxXk7Gnbc2dg7sq5+W2O3FYrf3RRbxake5TFW/TRQl1brqQ 
-XR4EzzffHqhmsYzmIGrv/EhOdJhCrylvLmrH+33RZjEizIYAfmaDDEL0vTSSwxrq -T8p+ck0LcIymSLumoRT2+1hEmRSuqguTaaApJUqlyyvdimYHFngVV3Eb7PVHhPOe -MTd61X8kreS8/f3MboPoDKi3QWwH3b08hpcv0g== ------END CERTIFICATE----- - -# Issuer: CN=TrustCor RootCert CA-1 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority -# Subject: CN=TrustCor RootCert CA-1 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority -# Label: "TrustCor RootCert CA-1" -# Serial: 15752444095811006489 -# MD5 Fingerprint: 6e:85:f1:dc:1a:00:d3:22:d5:b2:b2:ac:6b:37:05:45 -# SHA1 Fingerprint: ff:bd:cd:e7:82:c8:43:5e:3c:6f:26:86:5c:ca:a8:3a:45:5b:c3:0a -# SHA256 Fingerprint: d4:0e:9c:86:cd:8f:e4:68:c1:77:69:59:f4:9e:a7:74:fa:54:86:84:b6:c4:06:f3:90:92:61:f4:dc:e2:57:5c ------BEGIN CERTIFICATE----- -MIIEMDCCAxigAwIBAgIJANqb7HHzA7AZMA0GCSqGSIb3DQEBCwUAMIGkMQswCQYD -VQQGEwJQQTEPMA0GA1UECAwGUGFuYW1hMRQwEgYDVQQHDAtQYW5hbWEgQ2l0eTEk -MCIGA1UECgwbVHJ1c3RDb3IgU3lzdGVtcyBTLiBkZSBSLkwuMScwJQYDVQQLDB5U -cnVzdENvciBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxHzAdBgNVBAMMFlRydXN0Q29y -IFJvb3RDZXJ0IENBLTEwHhcNMTYwMjA0MTIzMjE2WhcNMjkxMjMxMTcyMzE2WjCB -pDELMAkGA1UEBhMCUEExDzANBgNVBAgMBlBhbmFtYTEUMBIGA1UEBwwLUGFuYW1h -IENpdHkxJDAiBgNVBAoMG1RydXN0Q29yIFN5c3RlbXMgUy4gZGUgUi5MLjEnMCUG -A1UECwweVHJ1c3RDb3IgQ2VydGlmaWNhdGUgQXV0aG9yaXR5MR8wHQYDVQQDDBZU -cnVzdENvciBSb290Q2VydCBDQS0xMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIB -CgKCAQEAv463leLCJhJrMxnHQFgKq1mqjQCj/IDHUHuO1CAmujIS2CNUSSUQIpid -RtLByZ5OGy4sDjjzGiVoHKZaBeYei0i/mJZ0PmnK6bV4pQa81QBeCQryJ3pS/C3V -seq0iWEk8xoT26nPUu0MJLq5nux+AHT6k61sKZKuUbS701e/s/OojZz0JEsq1pme -9J7+wH5COucLlVPat2gOkEz7cD+PSiyU8ybdY2mplNgQTsVHCJCZGxdNuWxu72CV -EY4hgLW9oHPY0LJ3xEXqWib7ZnZ2+AYfYW0PVcWDtxBWcgYHpfOxGgMFZA6dWorW -hnAbJN7+KIor0Gqw/Hqi3LJ5DotlDwIDAQABo2MwYTAdBgNVHQ4EFgQU7mtJPHo/ -DeOxCbeKyKsZn3MzUOcwHwYDVR0jBBgwFoAU7mtJPHo/DeOxCbeKyKsZn3MzUOcw -DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYwDQYJKoZIhvcNAQELBQAD -ggEBACUY1JGPE+6PHh0RU9otRCkZoB5rMZ5NDp6tPVxBb5UrJKF5mDo4Nvu7Zp5I -/5CQ7z3UuJu0h3U/IJvOcs+hVcFNZKIZBqEHMwwLKeXx6quj7LUKdJDHfXLy11yf -ke+Ri7fc7Waiz45mO7yfOgLgJ90WmMCV1Aqk5IGadZQ1nJBfiDcGrVmVCrDRZ9MZ -yonnMlo2HD6CqFqTvsbQZJG2z9m2GM/bftJlo6bEjhcxwft+dtvTheNYsnd6djts -L1Ac59v2Z3kf9YKVmgenFK+P3CghZwnS1k1aHBkcjndcw5QkPTJrS37UeJSDvjdN -zl/HHk484IkzlQsPpTLWPFp5LBk= ------END CERTIFICATE----- - -# Issuer: CN=TrustCor RootCert CA-2 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority -# Subject: CN=TrustCor RootCert CA-2 O=TrustCor Systems S. de R.L. 
OU=TrustCor Certificate Authority -# Label: "TrustCor RootCert CA-2" -# Serial: 2711694510199101698 -# MD5 Fingerprint: a2:e1:f8:18:0b:ba:45:d5:c7:41:2a:bb:37:52:45:64 -# SHA1 Fingerprint: b8:be:6d:cb:56:f1:55:b9:63:d4:12:ca:4e:06:34:c7:94:b2:1c:c0 -# SHA256 Fingerprint: 07:53:e9:40:37:8c:1b:d5:e3:83:6e:39:5d:ae:a5:cb:83:9e:50:46:f1:bd:0e:ae:19:51:cf:10:fe:c7:c9:65 ------BEGIN CERTIFICATE----- -MIIGLzCCBBegAwIBAgIIJaHfyjPLWQIwDQYJKoZIhvcNAQELBQAwgaQxCzAJBgNV -BAYTAlBBMQ8wDQYDVQQIDAZQYW5hbWExFDASBgNVBAcMC1BhbmFtYSBDaXR5MSQw -IgYDVQQKDBtUcnVzdENvciBTeXN0ZW1zIFMuIGRlIFIuTC4xJzAlBgNVBAsMHlRy -dXN0Q29yIENlcnRpZmljYXRlIEF1dGhvcml0eTEfMB0GA1UEAwwWVHJ1c3RDb3Ig -Um9vdENlcnQgQ0EtMjAeFw0xNjAyMDQxMjMyMjNaFw0zNDEyMzExNzI2MzlaMIGk -MQswCQYDVQQGEwJQQTEPMA0GA1UECAwGUGFuYW1hMRQwEgYDVQQHDAtQYW5hbWEg -Q2l0eTEkMCIGA1UECgwbVHJ1c3RDb3IgU3lzdGVtcyBTLiBkZSBSLkwuMScwJQYD -VQQLDB5UcnVzdENvciBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxHzAdBgNVBAMMFlRy -dXN0Q29yIFJvb3RDZXJ0IENBLTIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK -AoICAQCnIG7CKqJiJJWQdsg4foDSq8GbZQWU9MEKENUCrO2fk8eHyLAnK0IMPQo+ -QVqedd2NyuCb7GgypGmSaIwLgQ5WoD4a3SwlFIIvl9NkRvRUqdw6VC0xK5mC8tkq -1+9xALgxpL56JAfDQiDyitSSBBtlVkxs1Pu2YVpHI7TYabS3OtB0PAx1oYxOdqHp -2yqlO/rOsP9+aij9JxzIsekp8VduZLTQwRVtDr4uDkbIXvRR/u8OYzo7cbrPb1nK -DOObXUm4TOJXsZiKQlecdu/vvdFoqNL0Cbt3Nb4lggjEFixEIFapRBF37120Hape -az6LMvYHL1cEksr1/p3C6eizjkxLAjHZ5DxIgif3GIJ2SDpxsROhOdUuxTTCHWKF -3wP+TfSvPd9cW436cOGlfifHhi5qjxLGhF5DUVCcGZt45vz27Ud+ez1m7xMTiF88 -oWP7+ayHNZ/zgp6kPwqcMWmLmaSISo5uZk3vFsQPeSghYA2FFn3XVDjxklb9tTNM -g9zXEJ9L/cb4Qr26fHMC4P99zVvh1Kxhe1fVSntb1IVYJ12/+CtgrKAmrhQhJ8Z3 -mjOAPF5GP/fDsaOGM8boXg25NSyqRsGFAnWAoOsk+xWq5Gd/bnc/9ASKL3x74xdh -8N0JqSDIvgmk0H5Ew7IwSjiqqewYmgeCK9u4nBit2uBGF6zPXQIDAQABo2MwYTAd -BgNVHQ4EFgQU2f4hQG6UnrybPZx9mCAZ5YwwYrIwHwYDVR0jBBgwFoAU2f4hQG6U -nrybPZx9mCAZ5YwwYrIwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYw -DQYJKoZIhvcNAQELBQADggIBAJ5Fngw7tu/hOsh80QA9z+LqBrWyOrsGS2h60COX -dKcs8AjYeVrXWoSK2BKaG9l9XE1wxaX5q+WjiYndAfrs3fnpkpfbsEZC89NiqpX+ -MWcUaViQCqoL7jcjx1BRtPV+nuN79+TMQjItSQzL/0kMmx40/W5ulop5A7Zv2wnL -/V9lFDfhOPXzYRZY5LVtDQsEGz9QLX+zx3oaFoBg+Iof6Rsqxvm6ARppv9JYx1RX -CI/hOWB3S6xZhBqI8d3LT3jX5+EzLfzuQfogsL7L9ziUwOHQhQ+77Sxzq+3+knYa -ZH9bDTMJBzN7Bj8RpFxwPIXAz+OQqIN3+tvmxYxoZxBnpVIt8MSZj3+/0WvitUfW -2dCFmU2Umw9Lje4AWkcdEQOsQRivh7dvDDqPys/cA8GiCcjl/YBeyGBCARsaU1q7 -N6a3vLqE6R5sGtRk2tRD/pOLS/IseRYQ1JMLiI+h2IYURpFHmygk71dSTlxCnKr3 -Sewn6EAes6aJInKc9Q0ztFijMDvd1GpUk74aTfOTlPf8hAs/hCBcNANExdqtvArB -As8e5ZTZ845b2EzwnexhF7sUMlQMAimTHpKG9n/v55IFDlndmQguLvqcAFLTxWYp -5KeXRKQOKIETNcX2b2TmQcTVL8w0RSXPQQCWPUouwpaYT05KnJe32x+SMsj/D1Fu -1uwJ ------END CERTIFICATE----- - -# Issuer: CN=TrustCor ECA-1 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority -# Subject: CN=TrustCor ECA-1 O=TrustCor Systems S. de R.L. 
OU=TrustCor Certificate Authority -# Label: "TrustCor ECA-1" -# Serial: 9548242946988625984 -# MD5 Fingerprint: 27:92:23:1d:0a:f5:40:7c:e9:e6:6b:9d:d8:f5:e7:6c -# SHA1 Fingerprint: 58:d1:df:95:95:67:6b:63:c0:f0:5b:1c:17:4d:8b:84:0b:c8:78:bd -# SHA256 Fingerprint: 5a:88:5d:b1:9c:01:d9:12:c5:75:93:88:93:8c:af:bb:df:03:1a:b2:d4:8e:91:ee:15:58:9b:42:97:1d:03:9c ------BEGIN CERTIFICATE----- -MIIEIDCCAwigAwIBAgIJAISCLF8cYtBAMA0GCSqGSIb3DQEBCwUAMIGcMQswCQYD -VQQGEwJQQTEPMA0GA1UECAwGUGFuYW1hMRQwEgYDVQQHDAtQYW5hbWEgQ2l0eTEk -MCIGA1UECgwbVHJ1c3RDb3IgU3lzdGVtcyBTLiBkZSBSLkwuMScwJQYDVQQLDB5U -cnVzdENvciBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxFzAVBgNVBAMMDlRydXN0Q29y -IEVDQS0xMB4XDTE2MDIwNDEyMzIzM1oXDTI5MTIzMTE3MjgwN1owgZwxCzAJBgNV -BAYTAlBBMQ8wDQYDVQQIDAZQYW5hbWExFDASBgNVBAcMC1BhbmFtYSBDaXR5MSQw -IgYDVQQKDBtUcnVzdENvciBTeXN0ZW1zIFMuIGRlIFIuTC4xJzAlBgNVBAsMHlRy -dXN0Q29yIENlcnRpZmljYXRlIEF1dGhvcml0eTEXMBUGA1UEAwwOVHJ1c3RDb3Ig -RUNBLTEwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDPj+ARtZ+odnbb -3w9U73NjKYKtR8aja+3+XzP4Q1HpGjORMRegdMTUpwHmspI+ap3tDvl0mEDTPwOA -BoJA6LHip1GnHYMma6ve+heRK9jGrB6xnhkB1Zem6g23xFUfJ3zSCNV2HykVh0A5 -3ThFEXXQmqc04L/NyFIduUd+Dbi7xgz2c1cWWn5DkR9VOsZtRASqnKmcp0yJF4Ou -owReUoCLHhIlERnXDH19MURB6tuvsBzvgdAsxZohmz3tQjtQJvLsznFhBmIhVE5/ -wZ0+fyCMgMsq2JdiyIMzkX2woloPV+g7zPIlstR8L+xNxqE6FXrntl019fZISjZF -ZtS6mFjBAgMBAAGjYzBhMB0GA1UdDgQWBBREnkj1zG1I1KBLf/5ZJC+Dl5mahjAf -BgNVHSMEGDAWgBREnkj1zG1I1KBLf/5ZJC+Dl5mahjAPBgNVHRMBAf8EBTADAQH/ -MA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAQEABT41XBVwm8nHc2Fv -civUwo/yQ10CzsSUuZQRg2dd4mdsdXa/uwyqNsatR5Nj3B5+1t4u/ukZMjgDfxT2 -AHMsWbEhBuH7rBiVDKP/mZb3Kyeb1STMHd3BOuCYRLDE5D53sXOpZCz2HAF8P11F -hcCF5yWPldwX8zyfGm6wyuMdKulMY/okYWLW2n62HGz1Ah3UKt1VkOsqEUc8Ll50 -soIipX1TH0XsJ5F95yIW6MBoNtjG8U+ARDL54dHRHareqKucBK+tIA5kmE2la8BI -WJZpTdwHjFGTot+fDz2LYLSCjaoITmJF4PkL0uDgPFveXHEnJcLmA4GLEFPjx1Wi -tJ/X5g== ------END CERTIFICATE----- - -# Issuer: CN=SSL.com Root Certification Authority RSA O=SSL Corporation -# Subject: CN=SSL.com Root Certification Authority RSA O=SSL Corporation -# Label: "SSL.com Root Certification Authority RSA" -# Serial: 8875640296558310041 -# MD5 Fingerprint: 86:69:12:c0:70:f1:ec:ac:ac:c2:d5:bc:a5:5b:a1:29 -# SHA1 Fingerprint: b7:ab:33:08:d1:ea:44:77:ba:14:80:12:5a:6f:bd:a9:36:49:0c:bb -# SHA256 Fingerprint: 85:66:6a:56:2e:e0:be:5c:e9:25:c1:d8:89:0a:6f:76:a8:7e:c1:6d:4d:7d:5f:29:ea:74:19:cf:20:12:3b:69 ------BEGIN CERTIFICATE----- -MIIF3TCCA8WgAwIBAgIIeyyb0xaAMpkwDQYJKoZIhvcNAQELBQAwfDELMAkGA1UE -BhMCVVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQK -DA9TU0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZp -Y2F0aW9uIEF1dGhvcml0eSBSU0EwHhcNMTYwMjEyMTczOTM5WhcNNDEwMjEyMTcz -OTM5WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv -dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNv -bSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IFJTQTCCAiIwDQYJKoZIhvcN -AQEBBQADggIPADCCAgoCggIBAPkP3aMrfcvQKv7sZ4Wm5y4bunfh4/WvpOz6Sl2R -xFdHaxh3a3by/ZPkPQ/CFp4LZsNWlJ4Xg4XOVu/yFv0AYvUiCVToZRdOQbngT0aX -qhvIuG5iXmmxX9sqAn78bMrzQdjt0Oj8P2FI7bADFB0QDksZ4LtO7IZl/zbzXmcC -C52GVWH9ejjt/uIZALdvoVBidXQ8oPrIJZK0bnoix/geoeOy3ZExqysdBP+lSgQ3 -6YWkMyv94tZVNHwZpEpox7Ko07fKoZOI68GXvIz5HdkihCR0xwQ9aqkpk8zruFvh -/l8lqjRYyMEjVJ0bmBHDOJx+PYZspQ9AhnwC9FwCTyjLrnGfDzrIM/4RJTXq/LrF -YD3ZfBjVsqnTdXgDciLKOsMf7yzlLqn6niy2UUb9rwPW6mBo6oUWNmuF6R7As93E -JNyAKoFBbZQ+yODJgUEAnl6/f8UImKIYLEJAs/lvOCdLToD0PYFH4Ih86hzOtXVc -US4cK38acijnALXRdMbX5J+tB5O2UzU1/Dfkw/ZdFr4hc96SCvigY2q8lpJqPvi8 -ZVWb3vUNiSYE/CUapiVpy8JtynziWV+XrOvvLsi81xtZPCvM8hnIk2snYxnP/Okm 
-+Mpxm3+T/jRnhE6Z6/yzeAkzcLpmpnbtG3PrGqUNxCITIJRWCk4sbE6x/c+cCbqi -M+2HAgMBAAGjYzBhMB0GA1UdDgQWBBTdBAkHovV6fVJTEpKV7jiAJQ2mWTAPBgNV -HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFN0ECQei9Xp9UlMSkpXuOIAlDaZZMA4G -A1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAIBgRlCn7Jp0cHh5wYfGV -cpNxJK1ok1iOMq8bs3AD/CUrdIWQPXhq9LmLpZc7tRiRux6n+UBbkflVma8eEdBc -Hadm47GUBwwyOabqG7B52B2ccETjit3E+ZUfijhDPwGFpUenPUayvOUiaPd7nNgs -PgohyC0zrL/FgZkxdMF1ccW+sfAjRfSda/wZY52jvATGGAslu1OJD7OAUN5F7kR/ -q5R4ZJjT9ijdh9hwZXT7DrkT66cPYakylszeu+1jTBi7qUD3oFRuIIhxdRjqerQ0 -cuAjJ3dctpDqhiVAq+8zD8ufgr6iIPv2tS0a5sKFsXQP+8hlAqRSAUfdSSLBv9jr -a6x+3uxjMxW3IwiPxg+NQVrdjsW5j+VFP3jbutIbQLH+cU0/4IGiul607BXgk90I -H37hVZkLId6Tngr75qNJvTYw/ud3sqB1l7UtgYgXZSD32pAAn8lSzDLKNXz1PQ/Y -K9f1JmzJBjSWFupwWRoyeXkLtoh/D1JIPb9s2KJELtFOt3JY04kTlf5Eq/jXixtu -nLwsoFvVagCvXzfh1foQC5ichucmj87w7G6KVwuA406ywKBjYZC6VWg3dGq2ktuf -oYYitmUnDuy2n0Jg5GfCtdpBC8TTi2EbvPofkSvXRAdeuims2cXp71NIWuuA8ShY -Ic2wBlX7Jz9TkHCpBB5XJ7k= ------END CERTIFICATE----- - -# Issuer: CN=SSL.com Root Certification Authority ECC O=SSL Corporation -# Subject: CN=SSL.com Root Certification Authority ECC O=SSL Corporation -# Label: "SSL.com Root Certification Authority ECC" -# Serial: 8495723813297216424 -# MD5 Fingerprint: 2e:da:e4:39:7f:9c:8f:37:d1:70:9f:26:17:51:3a:8e -# SHA1 Fingerprint: c3:19:7c:39:24:e6:54:af:1b:c4:ab:20:95:7a:e2:c3:0e:13:02:6a -# SHA256 Fingerprint: 34:17:bb:06:cc:60:07:da:1b:96:1c:92:0b:8a:b4:ce:3f:ad:82:0e:4a:a3:0b:9a:cb:c4:a7:4e:bd:ce:bc:65 ------BEGIN CERTIFICATE----- -MIICjTCCAhSgAwIBAgIIdebfy8FoW6gwCgYIKoZIzj0EAwIwfDELMAkGA1UEBhMC -VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T -U0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZpY2F0 -aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNDAzWhcNNDEwMjEyMTgxNDAz -WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hvdXN0 -b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNvbSBS -b290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49AgEGBSuB -BAAiA2IABEVuqVDEpiM2nl8ojRfLliJkP9x6jh3MCLOicSS6jkm5BBtHllirLZXI -7Z4INcgn64mMU1jrYor+8FsPazFSY0E7ic3s7LaNGdM0B9y7xgZ/wkWV7Mt/qCPg -CemB+vNH06NjMGEwHQYDVR0OBBYEFILRhXMw5zUE044CkvvlpNHEIejNMA8GA1Ud -EwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUgtGFczDnNQTTjgKS++Wk0cQh6M0wDgYD -VR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2cAMGQCMG/n61kRpGDPYbCWe+0F+S8T -kdzt5fxQaxFGRrMcIQBiu77D5+jNB5n5DQtdcj7EqgIwH7y6C+IwJPt8bYBVCpk+ -gA0z5Wajs6O7pdWLjwkspl1+4vAHCGht0nxpbl/f5Wpl ------END CERTIFICATE----- - -# Issuer: CN=SSL.com EV Root Certification Authority RSA R2 O=SSL Corporation -# Subject: CN=SSL.com EV Root Certification Authority RSA R2 O=SSL Corporation -# Label: "SSL.com EV Root Certification Authority RSA R2" -# Serial: 6248227494352943350 -# MD5 Fingerprint: e1:1e:31:58:1a:ae:54:53:02:f6:17:6a:11:7b:4d:95 -# SHA1 Fingerprint: 74:3a:f0:52:9b:d0:32:a0:f4:4a:83:cd:d4:ba:a9:7b:7c:2e:c4:9a -# SHA256 Fingerprint: 2e:7b:f1:6c:c2:24:85:a7:bb:e2:aa:86:96:75:07:61:b0:ae:39:be:3b:2f:e9:d0:cc:6d:4e:f7:34:91:42:5c ------BEGIN CERTIFICATE----- -MIIF6zCCA9OgAwIBAgIIVrYpzTS8ePYwDQYJKoZIhvcNAQELBQAwgYIxCzAJBgNV -BAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4GA1UEBwwHSG91c3RvbjEYMBYGA1UE -CgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQDDC5TU0wuY29tIEVWIFJvb3QgQ2Vy -dGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIyMB4XDTE3MDUzMTE4MTQzN1oXDTQy -MDUzMDE4MTQzN1owgYIxCzAJBgNVBAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4G -A1UEBwwHSG91c3RvbjEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQD -DC5TU0wuY29tIEVWIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIy -MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAjzZlQOHWTcDXtOlG2mvq 
-M0fNTPl9fb69LT3w23jhhqXZuglXaO1XPqDQCEGD5yhBJB/jchXQARr7XnAjssuf -OePPxU7Gkm0mxnu7s9onnQqG6YE3Bf7wcXHswxzpY6IXFJ3vG2fThVUCAtZJycxa -4bH3bzKfydQ7iEGonL3Lq9ttewkfokxykNorCPzPPFTOZw+oz12WGQvE43LrrdF9 -HSfvkusQv1vrO6/PgN3B0pYEW3p+pKk8OHakYo6gOV7qd89dAFmPZiw+B6KjBSYR -aZfqhbcPlgtLyEDhULouisv3D5oi53+aNxPN8k0TayHRwMwi8qFG9kRpnMphNQcA -b9ZhCBHqurj26bNg5U257J8UZslXWNvNh2n4ioYSA0e/ZhN2rHd9NCSFg83XqpyQ -Gp8hLH94t2S42Oim9HizVcuE0jLEeK6jj2HdzghTreyI/BXkmg3mnxp3zkyPuBQV -PWKchjgGAGYS5Fl2WlPAApiiECtoRHuOec4zSnaqW4EWG7WK2NAAe15itAnWhmMO -pgWVSbooi4iTsjQc2KRVbrcc0N6ZVTsj9CLg+SlmJuwgUHfbSguPvuUCYHBBXtSu -UDkiFCbLsjtzdFVHB3mBOagwE0TlBIqulhMlQg+5U8Sb/M3kHN48+qvWBkofZ6aY -MBzdLNvcGJVXZsb/XItW9XcCAwEAAaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAfBgNV -HSMEGDAWgBT5YLvU49U09rj1BoAlp3PbRmmonjAdBgNVHQ4EFgQU+WC71OPVNPa4 -9QaAJadz20ZpqJ4wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQBW -s47LCp1Jjr+kxJG7ZhcFUZh1++VQLHqe8RT6q9OKPv+RKY9ji9i0qVQBDb6Thi/5 -Sm3HXvVX+cpVHBK+Rw82xd9qt9t1wkclf7nxY/hoLVUE0fKNsKTPvDxeH3jnpaAg -cLAExbf3cqfeIg29MyVGjGSSJuM+LmOW2puMPfgYCdcDzH2GguDKBAdRUNf/ktUM -79qGn5nX67evaOI5JpS6aLe/g9Pqemc9YmeuJeVy6OLk7K4S9ksrPJ/psEDzOFSz -/bdoyNrGj1E8svuR3Bznm53htw1yj+KkxKl4+esUrMZDBcJlOSgYAsOCsp0FvmXt -ll9ldDz7CTUue5wT/RsPXcdtgTpWD8w74a8CLyKsRspGPKAcTNZEtF4uXBVmCeEm -Kf7GUmG6sXP/wwyc5WxqlD8UykAWlYTzWamsX0xhk23RO8yilQwipmdnRC652dKK -QbNmC1r7fSOl8hqw/96bg5Qu0T/fkreRrwU7ZcegbLHNYhLDkBvjJc40vG93drEQ -w/cFGsDWr3RiSBd3kmmQYRzelYB0VI8YHMPzA9C/pEN1hlMYegouCRw2n5H9gooi -S9EOUCXdywMMF8mDAAhONU2Ki+3wApRmLER/y5UnlhetCTCstnEXbosX9hwJ1C07 -mKVx01QT2WDz9UtmT/rx7iASjbSsV7FFY6GsdqnC+w== ------END CERTIFICATE----- - -# Issuer: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation -# Subject: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation -# Label: "SSL.com EV Root Certification Authority ECC" -# Serial: 3182246526754555285 -# MD5 Fingerprint: 59:53:22:65:83:42:01:54:c0:ce:42:b9:5a:7c:f2:90 -# SHA1 Fingerprint: 4c:dd:51:a3:d1:f5:20:32:14:b0:c6:c5:32:23:03:91:c7:46:42:6d -# SHA256 Fingerprint: 22:a2:c1:f7:bd:ed:70:4c:c1:e7:01:b5:f4:08:c3:10:88:0f:e9:56:b5:de:2a:4a:44:f9:9c:87:3a:25:a7:c8 ------BEGIN CERTIFICATE----- -MIIClDCCAhqgAwIBAgIILCmcWxbtBZUwCgYIKoZIzj0EAwIwfzELMAkGA1UEBhMC -VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T -U0wgQ29ycG9yYXRpb24xNDAyBgNVBAMMK1NTTC5jb20gRVYgUm9vdCBDZXJ0aWZp -Y2F0aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNTIzWhcNNDEwMjEyMTgx -NTIzWjB/MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv -dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjE0MDIGA1UEAwwrU1NMLmNv -bSBFViBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49 -AgEGBSuBBAAiA2IABKoSR5CYG/vvw0AHgyBO8TCCogbR8pKGYfL2IWjKAMTH6kMA -VIbc/R/fALhBYlzccBYy3h+Z1MzFB8gIH2EWB1E9fVwHU+M1OIzfzZ/ZLg1Kthku -WnBaBu2+8KGwytAJKaNjMGEwHQYDVR0OBBYEFFvKXuXe0oGqzagtZFG22XKbl+ZP -MA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUW8pe5d7SgarNqC1kUbbZcpuX -5k8wDgYDVR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2gAMGUCMQCK5kCJN+vp1RPZ -ytRrJPOwPYdGWBrssd9v+1a6cGvHOMzosYxPD/fxZ3YOg9AeUY8CMD32IygmTMZg -h5Mmm7I1HrrW9zzRHM76JTymGoEVW/MSD2zuZYrJh6j5B+BimoxcSg== ------END CERTIFICATE----- - -# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R6 -# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R6 -# Label: "GlobalSign Root CA - R6" -# Serial: 1417766617973444989252670301619537 -# MD5 Fingerprint: 4f:dd:07:e4:d4:22:64:39:1e:0c:37:42:ea:d1:c6:ae -# SHA1 Fingerprint: 80:94:64:0e:b5:a7:a1:ca:11:9c:1f:dd:d5:9f:81:02:63:a7:fb:d1 -# SHA256 Fingerprint: 
2c:ab:ea:fe:37:d0:6c:a2:2a:ba:73:91:c0:03:3d:25:98:29:52:c4:53:64:73:49:76:3a:3a:b5:ad:6c:cf:69 ------BEGIN CERTIFICATE----- -MIIFgzCCA2ugAwIBAgIORea7A4Mzw4VlSOb/RVEwDQYJKoZIhvcNAQEMBQAwTDEg -MB4GA1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjYxEzARBgNVBAoTCkdsb2Jh -bFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMTQxMjEwMDAwMDAwWhcNMzQx -MjEwMDAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSNjET -MBEGA1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCAiIwDQYJ -KoZIhvcNAQEBBQADggIPADCCAgoCggIBAJUH6HPKZvnsFMp7PPcNCPG0RQssgrRI -xutbPK6DuEGSMxSkb3/pKszGsIhrxbaJ0cay/xTOURQh7ErdG1rG1ofuTToVBu1k -ZguSgMpE3nOUTvOniX9PeGMIyBJQbUJmL025eShNUhqKGoC3GYEOfsSKvGRMIRxD -aNc9PIrFsmbVkJq3MQbFvuJtMgamHvm566qjuL++gmNQ0PAYid/kD3n16qIfKtJw -LnvnvJO7bVPiSHyMEAc4/2ayd2F+4OqMPKq0pPbzlUoSB239jLKJz9CgYXfIWHSw -1CM69106yqLbnQneXUQtkPGBzVeS+n68UARjNN9rkxi+azayOeSsJDa38O+2HBNX -k7besvjihbdzorg1qkXy4J02oW9UivFyVm4uiMVRQkQVlO6jxTiWm05OWgtH8wY2 -SXcwvHE35absIQh1/OZhFj931dmRl4QKbNQCTXTAFO39OfuD8l4UoQSwC+n+7o/h -bguyCLNhZglqsQY6ZZZZwPA1/cnaKI0aEYdwgQqomnUdnjqGBQCe24DWJfncBZ4n -WUx2OVvq+aWh2IMP0f/fMBH5hc8zSPXKbWQULHpYT9NLCEnFlWQaYw55PfWzjMpY -rZxCRXluDocZXFSxZba/jJvcE+kNb7gu3GduyYsRtYQUigAZcIN5kZeR1Bonvzce -MgfYFGM8KEyvAgMBAAGjYzBhMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTAD -AQH/MB0GA1UdDgQWBBSubAWjkxPioufi1xzWx/B/yGdToDAfBgNVHSMEGDAWgBSu -bAWjkxPioufi1xzWx/B/yGdToDANBgkqhkiG9w0BAQwFAAOCAgEAgyXt6NH9lVLN -nsAEoJFp5lzQhN7craJP6Ed41mWYqVuoPId8AorRbrcWc+ZfwFSY1XS+wc3iEZGt -Ixg93eFyRJa0lV7Ae46ZeBZDE1ZXs6KzO7V33EByrKPrmzU+sQghoefEQzd5Mr61 -55wsTLxDKZmOMNOsIeDjHfrYBzN2VAAiKrlNIC5waNrlU/yDXNOd8v9EDERm8tLj -vUYAGm0CuiVdjaExUd1URhxN25mW7xocBFymFe944Hn+Xds+qkxV/ZoVqW/hpvvf -cDDpw+5CRu3CkwWJ+n1jez/QcYF8AOiYrg54NMMl+68KnyBr3TsTjxKM4kEaSHpz -oHdpx7Zcf4LIHv5YGygrqGytXm3ABdJ7t+uA/iU3/gKbaKxCXcPu9czc8FB10jZp -nOZ7BN9uBmm23goJSFmH63sUYHpkqmlD75HHTOwY3WzvUy2MmeFe8nI+z1TIvWfs -pA9MRf/TuTAjB0yPEL+GltmZWrSZVxykzLsViVO6LAUP5MSeGbEYNNVMnbrt9x+v -JJUEeKgDu+6B5dpffItKoZB0JaezPkvILFa9x8jvOOJckvB595yEunQtYQEgfn7R -8k8HWV+LLUNS60YMlOH1Zkd5d9VUWx+tJDfLRVpOoERIyNiwmcUVhAn21klJwGW4 -5hpxbqCo8YLoRT5s1gLXCmeDBVrJpBA= ------END CERTIFICATE----- - -# Issuer: CN=OISTE WISeKey Global Root GC CA O=WISeKey OU=OISTE Foundation Endorsed -# Subject: CN=OISTE WISeKey Global Root GC CA O=WISeKey OU=OISTE Foundation Endorsed -# Label: "OISTE WISeKey Global Root GC CA" -# Serial: 44084345621038548146064804565436152554 -# MD5 Fingerprint: a9:d6:b9:2d:2f:93:64:f8:a5:69:ca:91:e9:68:07:23 -# SHA1 Fingerprint: e0:11:84:5e:34:de:be:88:81:b9:9c:f6:16:26:d1:96:1f:c3:b9:31 -# SHA256 Fingerprint: 85:60:f9:1c:36:24:da:ba:95:70:b5:fe:a0:db:e3:6f:f1:1a:83:23:be:94:86:85:4f:b3:f3:4a:55:71:19:8d ------BEGIN CERTIFICATE----- -MIICaTCCAe+gAwIBAgIQISpWDK7aDKtARb8roi066jAKBggqhkjOPQQDAzBtMQsw -CQYDVQQGEwJDSDEQMA4GA1UEChMHV0lTZUtleTEiMCAGA1UECxMZT0lTVEUgRm91 -bmRhdGlvbiBFbmRvcnNlZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9iYWwg -Um9vdCBHQyBDQTAeFw0xNzA1MDkwOTQ4MzRaFw00MjA1MDkwOTU4MzNaMG0xCzAJ -BgNVBAYTAkNIMRAwDgYDVQQKEwdXSVNlS2V5MSIwIAYDVQQLExlPSVNURSBGb3Vu -ZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBXSVNlS2V5IEdsb2JhbCBS -b290IEdDIENBMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAETOlQwMYPchi82PG6s4ni -eUqjFqdrVCTbUf/q9Akkwwsin8tqJ4KBDdLArzHkdIJuyiXZjHWd8dvQmqJLIX4W -p2OQ0jnUsYd4XxiWD1AbNTcPasbc2RNNpI6QN+a9WzGRo1QwUjAOBgNVHQ8BAf8E -BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUSIcUrOPDnpBgOtfKie7T -rYy0UGYwEAYJKwYBBAGCNxUBBAMCAQAwCgYIKoZIzj0EAwMDaAAwZQIwJsdpW9zV -57LnyAyMjMPdeYwbY9XJUpROTYJKcx6ygISpJcBMWm1JKWB4E+J+SOtkAjEA2zQg -Mgj/mkkCtojeFK9dbJlxjRo/i9fgojaGHAeCOnZT/cKi7e97sIBPWA9LUzm9 ------END CERTIFICATE----- - -# 
Issuer: CN=GTS Root R1 O=Google Trust Services LLC -# Subject: CN=GTS Root R1 O=Google Trust Services LLC -# Label: "GTS Root R1" -# Serial: 146587175971765017618439757810265552097 -# MD5 Fingerprint: 82:1a:ef:d4:d2:4a:f2:9f:e2:3d:97:06:14:70:72:85 -# SHA1 Fingerprint: e1:c9:50:e6:ef:22:f8:4c:56:45:72:8b:92:20:60:d7:d5:a7:a3:e8 -# SHA256 Fingerprint: 2a:57:54:71:e3:13:40:bc:21:58:1c:bd:2c:f1:3e:15:84:63:20:3e:ce:94:bc:f9:d3:cc:19:6b:f0:9a:54:72 ------BEGIN CERTIFICATE----- -MIIFWjCCA0KgAwIBAgIQbkepxUtHDA3sM9CJuRz04TANBgkqhkiG9w0BAQwFADBH -MQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExM -QzEUMBIGA1UEAxMLR1RTIFJvb3QgUjEwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIy -MDAwMDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNl -cnZpY2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjEwggIiMA0GCSqGSIb3DQEB -AQUAA4ICDwAwggIKAoICAQC2EQKLHuOhd5s73L+UPreVp0A8of2C+X0yBoJx9vaM -f/vo27xqLpeXo4xL+Sv2sfnOhB2x+cWX3u+58qPpvBKJXqeqUqv4IyfLpLGcY9vX -mX7wCl7raKb0xlpHDU0QM+NOsROjyBhsS+z8CZDfnWQpJSMHobTSPS5g4M/SCYe7 -zUjwTcLCeoiKu7rPWRnWr4+wB7CeMfGCwcDfLqZtbBkOtdh+JhpFAz2weaSUKK0P -fyblqAj+lug8aJRT7oM6iCsVlgmy4HqMLnXWnOunVmSPlk9orj2XwoSPwLxAwAtc -vfaHszVsrBhQf4TgTM2S0yDpM7xSma8ytSmzJSq0SPly4cpk9+aCEI3oncKKiPo4 -Zor8Y/kB+Xj9e1x3+naH+uzfsQ55lVe0vSbv1gHR6xYKu44LtcXFilWr06zqkUsp -zBmkMiVOKvFlRNACzqrOSbTqn3yDsEB750Orp2yjj32JgfpMpf/VjsPOS+C12LOO -Rc92wO1AK/1TD7Cn1TsNsYqiA94xrcx36m97PtbfkSIS5r762DL8EGMUUXLeXdYW -k70paDPvOmbsB4om3xPXV2V4J95eSRQAogB/mqghtqmxlbCluQ0WEdrHbEg8QOB+ -DVrNVjzRlwW5y0vtOUucxD/SVRNuJLDWcfr0wbrM7Rv1/oFB2ACYPTrIrnqYNxgF -lQIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV -HQ4EFgQU5K8rJnEaK0gnhS9SZizv8IkTcT4wDQYJKoZIhvcNAQEMBQADggIBADiW -Cu49tJYeX++dnAsznyvgyv3SjgofQXSlfKqE1OXyHuY3UjKcC9FhHb8owbZEKTV1 -d5iyfNm9dKyKaOOpMQkpAWBz40d8U6iQSifvS9efk+eCNs6aaAyC58/UEBZvXw6Z -XPYfcX3v73svfuo21pdwCxXu11xWajOl40k4DLh9+42FpLFZXvRq4d2h9mREruZR -gyFmxhE+885H7pwoHyXa/6xmld01D1zvICxi/ZG6qcz8WpyTgYMpl0p8WnK0OdC3 -d8t5/Wk6kjftbjhlRn7pYL15iJdfOBL07q9bgsiG1eGZbYwE8na6SfZu6W0eX6Dv -J4J2QPim01hcDyxC2kLGe4g0x8HYRZvBPsVhHdljUEn2NIVq4BjFbkerQUIpm/Zg -DdIx02OYI5NaAIFItO/Nis3Jz5nu2Z6qNuFoS3FJFDYoOj0dzpqPJeaAcWErtXvM -+SUWgeExX6GjfhaknBZqlxi9dnKlC54dNuYvoS++cJEPqOba+MSSQGwlfnuzCdyy -F62ARPBopY+Udf90WuioAnwMCeKpSwughQtiue+hMZL77/ZRBIls6Kl0obsXs7X9 -SQ98POyDGCBDTtWTurQ0sR8WNh8M5mQ5Fkzc4P4dyKliPUDqysU0ArSuiYgzNdws -E3PYJ/HQcu51OyLemGhmW/HGY0dVHLqlCFF1pkgl ------END CERTIFICATE----- - -# Issuer: CN=GTS Root R2 O=Google Trust Services LLC -# Subject: CN=GTS Root R2 O=Google Trust Services LLC -# Label: "GTS Root R2" -# Serial: 146587176055767053814479386953112547951 -# MD5 Fingerprint: 44:ed:9a:0e:a4:09:3b:00:f2:ae:4c:a3:c6:61:b0:8b -# SHA1 Fingerprint: d2:73:96:2a:2a:5e:39:9f:73:3f:e1:c7:1e:64:3f:03:38:34:fc:4d -# SHA256 Fingerprint: c4:5d:7b:b0:8e:6d:67:e6:2e:42:35:11:0b:56:4e:5f:78:fd:92:ef:05:8c:84:0a:ea:4e:64:55:d7:58:5c:60 ------BEGIN CERTIFICATE----- -MIIFWjCCA0KgAwIBAgIQbkepxlqz5yDFMJo/aFLybzANBgkqhkiG9w0BAQwFADBH -MQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExM -QzEUMBIGA1UEAxMLR1RTIFJvb3QgUjIwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIy -MDAwMDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNl -cnZpY2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjIwggIiMA0GCSqGSIb3DQEB -AQUAA4ICDwAwggIKAoICAQDO3v2m++zsFDQ8BwZabFn3GTXd98GdVarTzTukk3Lv -CvptnfbwhYBboUhSnznFt+4orO/LdmgUud+tAWyZH8QiHZ/+cnfgLFuv5AS/T3Kg -GjSY6Dlo7JUle3ah5mm5hRm9iYz+re026nO8/4Piy33B0s5Ks40FnotJk9/BW9Bu -XvAuMC6C/Pq8tBcKSOWIm8Wba96wyrQD8Nr0kLhlZPdcTK3ofmZemde4wj7I0BOd -re7kRXuJVfeKH2JShBKzwkCX44ofR5GmdFrS+LFjKBC4swm4VndAoiaYecb+3yXu 
-PuWgf9RhD1FLPD+M2uFwdNjCaKH5wQzpoeJ/u1U8dgbuak7MkogwTZq9TwtImoS1 -mKPV+3PBV2HdKFZ1E66HjucMUQkQdYhMvI35ezzUIkgfKtzra7tEscszcTJGr61K -8YzodDqs5xoic4DSMPclQsciOzsSrZYuxsN2B6ogtzVJV+mSSeh2FnIxZyuWfoqj -x5RWIr9qS34BIbIjMt/kmkRtWVtd9QCgHJvGeJeNkP+byKq0rxFROV7Z+2et1VsR -nTKaG73VululycslaVNVJ1zgyjbLiGH7HrfQy+4W+9OmTN6SpdTi3/UGVN4unUu0 -kzCqgc7dGtxRcw1PcOnlthYhGXmy5okLdWTK1au8CcEYof/UVKGFPP0UJAOyh9Ok -twIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV -HQ4EFgQUu//KjiOfT5nK2+JopqUVJxce2Q4wDQYJKoZIhvcNAQEMBQADggIBALZp -8KZ3/p7uC4Gt4cCpx/k1HUCCq+YEtN/L9x0Pg/B+E02NjO7jMyLDOfxA325BS0JT -vhaI8dI4XsRomRyYUpOM52jtG2pzegVATX9lO9ZY8c6DR2Dj/5epnGB3GFW1fgiT -z9D2PGcDFWEJ+YF59exTpJ/JjwGLc8R3dtyDovUMSRqodt6Sm2T4syzFJ9MHwAiA -pJiS4wGWAqoC7o87xdFtCjMwc3i5T1QWvwsHoaRc5svJXISPD+AVdyx+Jn7axEvb -pxZ3B7DNdehyQtaVhJ2Gg/LkkM0JR9SLA3DaWsYDQvTtN6LwG1BUSw7YhN4ZKJmB -R64JGz9I0cNv4rBgF/XuIwKl2gBbbZCr7qLpGzvpx0QnRY5rn/WkhLx3+WuXrD5R -RaIRpsyF7gpo8j5QOHokYh4XIDdtak23CZvJ/KRY9bb7nE4Yu5UC56GtmwfuNmsk -0jmGwZODUNKBRqhfYlcsu2xkiAhu7xNUX90txGdj08+JN7+dIPT7eoOboB6BAFDC -5AwiWVIQ7UNWhwD4FFKnHYuTjKJNRn8nxnGbJN7k2oaLDX5rIMHAnuFl2GqjpuiF -izoHCBy69Y9Vmhh1fuXsgWbRIXOhNUQLgD1bnF5vKheW0YMjiGZt5obicDIvUiLn -yOd/xCxgXS/Dr55FBcOEArf9LAhST4Ldo/DUhgkC ------END CERTIFICATE----- - -# Issuer: CN=GTS Root R3 O=Google Trust Services LLC -# Subject: CN=GTS Root R3 O=Google Trust Services LLC -# Label: "GTS Root R3" -# Serial: 146587176140553309517047991083707763997 -# MD5 Fingerprint: 1a:79:5b:6b:04:52:9c:5d:c7:74:33:1b:25:9a:f9:25 -# SHA1 Fingerprint: 30:d4:24:6f:07:ff:db:91:89:8a:0b:e9:49:66:11:eb:8c:5e:46:e5 -# SHA256 Fingerprint: 15:d5:b8:77:46:19:ea:7d:54:ce:1c:a6:d0:b0:c4:03:e0:37:a9:17:f1:31:e8:a0:4e:1e:6b:7a:71:ba:bc:e5 ------BEGIN CERTIFICATE----- -MIICDDCCAZGgAwIBAgIQbkepx2ypcyRAiQ8DVd2NHTAKBggqhkjOPQQDAzBHMQsw -CQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEU -MBIGA1UEAxMLR1RTIFJvb3QgUjMwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAw -MDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZp -Y2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjMwdjAQBgcqhkjOPQIBBgUrgQQA -IgNiAAQfTzOHMymKoYTey8chWEGJ6ladK0uFxh1MJ7x/JlFyb+Kf1qPKzEUURout -736GjOyxfi//qXGdGIRFBEFVbivqJn+7kAHjSxm65FSWRQmx1WyRRK2EE46ajA2A -DDL24CejQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1Ud -DgQWBBTB8Sa6oC2uhYHP0/EqEr24Cmf9vDAKBggqhkjOPQQDAwNpADBmAjEAgFuk -fCPAlaUs3L6JbyO5o91lAFJekazInXJ0glMLfalAvWhgxeG4VDvBNhcl2MG9AjEA -njWSdIUlUfUk7GRSJFClH9voy8l27OyCbvWFGFPouOOaKaqW04MjyaR7YbPMAuhd ------END CERTIFICATE----- - -# Issuer: CN=GTS Root R4 O=Google Trust Services LLC -# Subject: CN=GTS Root R4 O=Google Trust Services LLC -# Label: "GTS Root R4" -# Serial: 146587176229350439916519468929765261721 -# MD5 Fingerprint: 5d:b6:6a:c4:60:17:24:6a:1a:99:a8:4b:ee:5e:b4:26 -# SHA1 Fingerprint: 2a:1d:60:27:d9:4a:b1:0a:1c:4d:91:5c:cd:33:a0:cb:3e:2d:54:cb -# SHA256 Fingerprint: 71:cc:a5:39:1f:9e:79:4b:04:80:25:30:b3:63:e1:21:da:8a:30:43:bb:26:66:2f:ea:4d:ca:7f:c9:51:a4:bd ------BEGIN CERTIFICATE----- -MIICCjCCAZGgAwIBAgIQbkepyIuUtui7OyrYorLBmTAKBggqhkjOPQQDAzBHMQsw -CQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEU -MBIGA1UEAxMLR1RTIFJvb3QgUjQwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAw -MDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZp -Y2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjQwdjAQBgcqhkjOPQIBBgUrgQQA -IgNiAATzdHOnaItgrkO4NcWBMHtLSZ37wWHO5t5GvWvVYRg1rkDdc/eJkTBa6zzu -hXyiQHY7qca4R9gq55KRanPpsXI5nymfopjTX15YhmUPoYRlBtHci8nHc8iMai/l -xKvRHYqjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1Ud 
-DgQWBBSATNbrdP9JNqPV2Py1PsVq8JQdjDAKBggqhkjOPQQDAwNnADBkAjBqUFJ0 -CMRw3J5QdCHojXohw0+WbhXRIjVhLfoIN+4Zba3bssx9BzT1YBkstTTZbyACMANx -sbqjYAuG7ZoIapVon+Kz4ZNkfF6Tpt95LY2F45TPI11xzPKwTdb+mciUqXWi4w== ------END CERTIFICATE----- - -# Issuer: CN=UCA Global G2 Root O=UniTrust -# Subject: CN=UCA Global G2 Root O=UniTrust -# Label: "UCA Global G2 Root" -# Serial: 124779693093741543919145257850076631279 -# MD5 Fingerprint: 80:fe:f0:c4:4a:f0:5c:62:32:9f:1c:ba:78:a9:50:f8 -# SHA1 Fingerprint: 28:f9:78:16:19:7a:ff:18:25:18:aa:44:fe:c1:a0:ce:5c:b6:4c:8a -# SHA256 Fingerprint: 9b:ea:11:c9:76:fe:01:47:64:c1:be:56:a6:f9:14:b5:a5:60:31:7a:bd:99:88:39:33:82:e5:16:1a:a0:49:3c ------BEGIN CERTIFICATE----- -MIIFRjCCAy6gAwIBAgIQXd+x2lqj7V2+WmUgZQOQ7zANBgkqhkiG9w0BAQsFADA9 -MQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxGzAZBgNVBAMMElVDQSBH -bG9iYWwgRzIgUm9vdDAeFw0xNjAzMTEwMDAwMDBaFw00MDEyMzEwMDAwMDBaMD0x -CzAJBgNVBAYTAkNOMREwDwYDVQQKDAhVbmlUcnVzdDEbMBkGA1UEAwwSVUNBIEds -b2JhbCBHMiBSb290MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxeYr -b3zvJgUno4Ek2m/LAfmZmqkywiKHYUGRO8vDaBsGxUypK8FnFyIdK+35KYmToni9 -kmugow2ifsqTs6bRjDXVdfkX9s9FxeV67HeToI8jrg4aA3++1NDtLnurRiNb/yzm -VHqUwCoV8MmNsHo7JOHXaOIxPAYzRrZUEaalLyJUKlgNAQLx+hVRZ2zA+te2G3/R -VogvGjqNO7uCEeBHANBSh6v7hn4PJGtAnTRnvI3HLYZveT6OqTwXS3+wmeOwcWDc -C/Vkw85DvG1xudLeJ1uK6NjGruFZfc8oLTW4lVYa8bJYS7cSN8h8s+1LgOGN+jIj -tm+3SJUIsUROhYw6AlQgL9+/V087OpAh18EmNVQg7Mc/R+zvWr9LesGtOxdQXGLY -D0tK3Cv6brxzks3sx1DoQZbXqX5t2Okdj4q1uViSukqSKwxW/YDrCPBeKW4bHAyv -j5OJrdu9o54hyokZ7N+1wxrrFv54NkzWbtA+FxyQF2smuvt6L78RHBgOLXMDj6Dl -NaBa4kx1HXHhOThTeEDMg5PXCp6dW4+K5OXgSORIskfNTip1KnvyIvbJvgmRlld6 -iIis7nCs+dwp4wwcOxJORNanTrAmyPPZGpeRaOrvjUYG0lZFWJo8DA+DuAUlwznP -O6Q0ibd5Ei9Hxeepl2n8pndntd978XplFeRhVmUCAwEAAaNCMEAwDgYDVR0PAQH/ -BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFIHEjMz15DD/pQwIX4wV -ZyF0Ad/fMA0GCSqGSIb3DQEBCwUAA4ICAQATZSL1jiutROTL/7lo5sOASD0Ee/oj -L3rtNtqyzm325p7lX1iPyzcyochltq44PTUbPrw7tgTQvPlJ9Zv3hcU2tsu8+Mg5 -1eRfB70VVJd0ysrtT7q6ZHafgbiERUlMjW+i67HM0cOU2kTC5uLqGOiiHycFutfl -1qnN3e92mI0ADs0b+gO3joBYDic/UvuUospeZcnWhNq5NXHzJsBPd+aBJ9J3O5oU -b3n09tDh05S60FdRvScFDcH9yBIw7m+NESsIndTUv4BFFJqIRNow6rSn4+7vW4LV -PtateJLbXDzz2K36uGt/xDYotgIVilQsnLAXc47QN6MUPJiVAAwpBVueSUmxX8fj -y88nZY41F7dXyDDZQVu5FLbowg+UMaeUmMxq67XhJ/UQqAHojhJi6IjMtX9Gl8Cb -EGY4GjZGXyJoPd/JxhMnq1MGrKI8hgZlb7F+sSlEmqO6SWkoaY/X5V+tBIZkbxqg -DMUIYs6Ao9Dz7GjevjPHF1t/gMRMTLGmhIrDO7gJzRSBuhjjVFc2/tsvfEehOjPI -+Vg7RE+xygKJBJYoaMVLuCaJu9YzL1DV/pqJuhgyklTGW+Cd+V7lDSKb9triyCGy -YiGqhkCyLmTTX8jjfhFnRR8F/uOi77Oos/N9j/gMHyIfLXC0uAE0djAA5SN4p1bX -UB+K+wb1whnw0A== ------END CERTIFICATE----- - -# Issuer: CN=UCA Extended Validation Root O=UniTrust -# Subject: CN=UCA Extended Validation Root O=UniTrust -# Label: "UCA Extended Validation Root" -# Serial: 106100277556486529736699587978573607008 -# MD5 Fingerprint: a1:f3:5f:43:c6:34:9b:da:bf:8c:7e:05:53:ad:96:e2 -# SHA1 Fingerprint: a3:a1:b0:6f:24:61:23:4a:e3:36:a5:c2:37:fc:a6:ff:dd:f0:d7:3a -# SHA256 Fingerprint: d4:3a:f9:b3:54:73:75:5c:96:84:fc:06:d7:d8:cb:70:ee:5c:28:e7:73:fb:29:4e:b4:1e:e7:17:22:92:4d:24 ------BEGIN CERTIFICATE----- -MIIFWjCCA0KgAwIBAgIQT9Irj/VkyDOeTzRYZiNwYDANBgkqhkiG9w0BAQsFADBH -MQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxJTAjBgNVBAMMHFVDQSBF -eHRlbmRlZCBWYWxpZGF0aW9uIFJvb3QwHhcNMTUwMzEzMDAwMDAwWhcNMzgxMjMx -MDAwMDAwWjBHMQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxJTAjBgNV -BAMMHFVDQSBFeHRlbmRlZCBWYWxpZGF0aW9uIFJvb3QwggIiMA0GCSqGSIb3DQEB -AQUAA4ICDwAwggIKAoICAQCpCQcoEwKwmeBkqh5DFnpzsZGgdT6o+uM4AHrsiWog -D4vFsJszA1qGxliG1cGFu0/GnEBNyr7uaZa4rYEwmnySBesFK5pI0Lh2PpbIILvS 
-sPGP2KxFRv+qZ2C0d35qHzwaUnoEPQc8hQ2E0B92CvdqFN9y4zR8V05WAT558aop -O2z6+I9tTcg1367r3CTueUWnhbYFiN6IXSV8l2RnCdm/WhUFhvMJHuxYMjMR83dk -sHYf5BA1FxvyDrFspCqjc/wJHx4yGVMR59mzLC52LqGj3n5qiAno8geK+LLNEOfi -c0CTuwjRP+H8C5SzJe98ptfRr5//lpr1kXuYC3fUfugH0mK1lTnj8/FtDw5lhIpj -VMWAtuCeS31HJqcBCF3RiJ7XwzJE+oJKCmhUfzhTA8ykADNkUVkLo4KRel7sFsLz -KuZi2irbWWIQJUoqgQtHB0MGcIfS+pMRKXpITeuUx3BNr2fVUbGAIAEBtHoIppB/ -TuDvB0GHr2qlXov7z1CymlSvw4m6WC31MJixNnI5fkkE/SmnTHnkBVfblLkWU41G -sx2VYVdWf6/wFlthWG82UBEL2KwrlRYaDh8IzTY0ZRBiZtWAXxQgXy0MoHgKaNYs -1+lvK9JKBZP8nm9rZ/+I8U6laUpSNwXqxhaN0sSZ0YIrO7o1dfdRUVjzyAfd5LQD -fwIDAQABo0IwQDAdBgNVHQ4EFgQU2XQ65DA9DfcS3H5aBZ8eNJr34RQwDwYDVR0T -AQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYwDQYJKoZIhvcNAQELBQADggIBADaN -l8xCFWQpN5smLNb7rhVpLGsaGvdftvkHTFnq88nIua7Mui563MD1sC3AO6+fcAUR -ap8lTwEpcOPlDOHqWnzcSbvBHiqB9RZLcpHIojG5qtr8nR/zXUACE/xOHAbKsxSQ -VBcZEhrxH9cMaVr2cXj0lH2RC47skFSOvG+hTKv8dGT9cZr4QQehzZHkPJrgmzI5 -c6sq1WnIeJEmMX3ixzDx/BR4dxIOE/TdFpS/S2d7cFOFyrC78zhNLJA5wA3CXWvp -4uXViI3WLL+rG761KIcSF3Ru/H38j9CHJrAb+7lsq+KePRXBOy5nAliRn+/4Qh8s -t2j1da3Ptfb/EX3C8CSlrdP6oDyp+l3cpaDvRKS+1ujl5BOWF3sGPjLtx7dCvHaj -2GU4Kzg1USEODm8uNBNA4StnDG1KQTAYI1oyVZnJF+A83vbsea0rWBmirSwiGpWO -vpaQXUJXxPkUAzUrHC1RVwinOt4/5Mi0A3PCwSaAuwtCH60NryZy2sy+s6ODWA2C -xR9GUeOcGMyNm43sSet1UNWMKFnKdDTajAshqx7qG+XH/RU+wBeq+yNuJkbL+vmx -cmtpzyKEC2IPrNkZAJSidjzULZrtBJ4tBmIQN1IchXIbJ+XMxjHsN+xjWZsLHXbM -fjKaiJUINlK73nZfdklJrX+9ZSCyycErdhh2n1ax ------END CERTIFICATE----- - -# Issuer: CN=Certigna Root CA O=Dhimyotis OU=0002 48146308100036 -# Subject: CN=Certigna Root CA O=Dhimyotis OU=0002 48146308100036 -# Label: "Certigna Root CA" -# Serial: 269714418870597844693661054334862075617 -# MD5 Fingerprint: 0e:5c:30:62:27:eb:5b:bc:d7:ae:62:ba:e9:d5:df:77 -# SHA1 Fingerprint: 2d:0d:52:14:ff:9e:ad:99:24:01:74:20:47:6e:6c:85:27:27:f5:43 -# SHA256 Fingerprint: d4:8d:3d:23:ee:db:50:a4:59:e5:51:97:60:1c:27:77:4b:9d:7b:18:c9:4d:5a:05:95:11:a1:02:50:b9:31:68 ------BEGIN CERTIFICATE----- -MIIGWzCCBEOgAwIBAgIRAMrpG4nxVQMNo+ZBbcTjpuEwDQYJKoZIhvcNAQELBQAw -WjELMAkGA1UEBhMCRlIxEjAQBgNVBAoMCURoaW15b3RpczEcMBoGA1UECwwTMDAw -MiA0ODE0NjMwODEwMDAzNjEZMBcGA1UEAwwQQ2VydGlnbmEgUm9vdCBDQTAeFw0x -MzEwMDEwODMyMjdaFw0zMzEwMDEwODMyMjdaMFoxCzAJBgNVBAYTAkZSMRIwEAYD -VQQKDAlEaGlteW90aXMxHDAaBgNVBAsMEzAwMDIgNDgxNDYzMDgxMDAwMzYxGTAX -BgNVBAMMEENlcnRpZ25hIFJvb3QgQ0EwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAw -ggIKAoICAQDNGDllGlmx6mQWDoyUJJV8g9PFOSbcDO8WV43X2KyjQn+Cyu3NW9sO -ty3tRQgXstmzy9YXUnIo245Onoq2C/mehJpNdt4iKVzSs9IGPjA5qXSjklYcoW9M -CiBtnyN6tMbaLOQdLNyzKNAT8kxOAkmhVECe5uUFoC2EyP+YbNDrihqECB63aCPu -I9Vwzm1RaRDuoXrC0SIxwoKF0vJVdlB8JXrJhFwLrN1CTivngqIkicuQstDuI7pm -TLtipPlTWmR7fJj6o0ieD5Wupxj0auwuA0Wv8HT4Ks16XdG+RCYyKfHx9WzMfgIh -C59vpD++nVPiz32pLHxYGpfhPTc3GGYo0kDFUYqMwy3OU4gkWGQwFsWq4NYKpkDf -ePb1BHxpE4S80dGnBs8B92jAqFe7OmGtBIyT46388NtEbVncSVmurJqZNjBBe3Yz -IoejwpKGbvlw7q6Hh5UbxHq9MfPU0uWZ/75I7HX1eBYdpnDBfzwboZL7z8g81sWT -Co/1VTp2lc5ZmIoJlXcymoO6LAQ6l73UL77XbJuiyn1tJslV1c/DeVIICZkHJC1k -JWumIWmbat10TWuXekG9qxf5kBdIjzb5LdXF2+6qhUVB+s06RbFo5jZMm5BX7CO5 -hwjCxAnxl4YqKE3idMDaxIzb3+KhF1nOJFl0Mdp//TBt2dzhauH8XwIDAQABo4IB -GjCCARYwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE -FBiHVuBud+4kNTxOc5of1uHieX4rMB8GA1UdIwQYMBaAFBiHVuBud+4kNTxOc5of -1uHieX4rMEQGA1UdIAQ9MDswOQYEVR0gADAxMC8GCCsGAQUFBwIBFiNodHRwczov -L3d3d3cuY2VydGlnbmEuZnIvYXV0b3JpdGVzLzBtBgNVHR8EZjBkMC+gLaArhilo -dHRwOi8vY3JsLmNlcnRpZ25hLmZyL2NlcnRpZ25hcm9vdGNhLmNybDAxoC+gLYYr -aHR0cDovL2NybC5kaGlteW90aXMuY29tL2NlcnRpZ25hcm9vdGNhLmNybDANBgkq -hkiG9w0BAQsFAAOCAgEAlLieT/DjlQgi581oQfccVdV8AOItOoldaDgvUSILSo3L 
-6btdPrtcPbEo/uRTVRPPoZAbAh1fZkYJMyjhDSSXcNMQH+pkV5a7XdrnxIxPTGRG -HVyH41neQtGbqH6mid2PHMkwgu07nM3A6RngatgCdTer9zQoKJHyBApPNeNgJgH6 -0BGM+RFq7q89w1DTj18zeTyGqHNFkIwgtnJzFyO+B2XleJINugHA64wcZr+shncB -lA2c5uk5jR+mUYyZDDl34bSb+hxnV29qao6pK0xXeXpXIs/NX2NGjVxZOob4Mkdi -o2cNGJHc+6Zr9UhhcyNZjgKnvETq9Emd8VRY+WCv2hikLyhF3HqgiIZd8zvn/yk1 -gPxkQ5Tm4xxvvq0OKmOZK8l+hfZx6AYDlf7ej0gcWtSS6Cvu5zHbugRqh5jnxV/v -faci9wHYTfmJ0A6aBVmknpjZbyvKcL5kwlWj9Omvw5Ip3IgWJJk8jSaYtlu3zM63 -Nwf9JtmYhST/WSMDmu2dnajkXjjO11INb9I/bbEFa0nOipFGc/T2L/Coc3cOZayh -jWZSaX5LaAzHHjcng6WMxwLkFM1JAbBzs/3GkDpv0mztO+7skb6iQ12LAEpmJURw -3kAP+HwV96LOPNdeE4yBFxgX0b3xdxA61GU5wSesVywlVP+i2k+KYTlerj1KjL0= ------END CERTIFICATE----- - -# Issuer: CN=emSign Root CA - G1 O=eMudhra Technologies Limited OU=emSign PKI -# Subject: CN=emSign Root CA - G1 O=eMudhra Technologies Limited OU=emSign PKI -# Label: "emSign Root CA - G1" -# Serial: 235931866688319308814040 -# MD5 Fingerprint: 9c:42:84:57:dd:cb:0b:a7:2e:95:ad:b6:f3:da:bc:ac -# SHA1 Fingerprint: 8a:c7:ad:8f:73:ac:4e:c1:b5:75:4d:a5:40:f4:fc:cf:7c:b5:8e:8c -# SHA256 Fingerprint: 40:f6:af:03:46:a9:9a:a1:cd:1d:55:5a:4e:9c:ce:62:c7:f9:63:46:03:ee:40:66:15:83:3d:c8:c8:d0:03:67 ------BEGIN CERTIFICATE----- -MIIDlDCCAnygAwIBAgIKMfXkYgxsWO3W2DANBgkqhkiG9w0BAQsFADBnMQswCQYD -VQQGEwJJTjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBU -ZWNobm9sb2dpZXMgTGltaXRlZDEcMBoGA1UEAxMTZW1TaWduIFJvb3QgQ0EgLSBH -MTAeFw0xODAyMTgxODMwMDBaFw00MzAyMTgxODMwMDBaMGcxCzAJBgNVBAYTAklO -MRMwEQYDVQQLEwplbVNpZ24gUEtJMSUwIwYDVQQKExxlTXVkaHJhIFRlY2hub2xv -Z2llcyBMaW1pdGVkMRwwGgYDVQQDExNlbVNpZ24gUm9vdCBDQSAtIEcxMIIBIjAN -BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAk0u76WaK7p1b1TST0Bsew+eeuGQz -f2N4aLTNLnF115sgxk0pvLZoYIr3IZpWNVrzdr3YzZr/k1ZLpVkGoZM0Kd0WNHVO -8oG0x5ZOrRkVUkr+PHB1cM2vK6sVmjM8qrOLqs1D/fXqcP/tzxE7lM5OMhbTI0Aq -d7OvPAEsbO2ZLIvZTmmYsvePQbAyeGHWDV/D+qJAkh1cF+ZwPjXnorfCYuKrpDhM -tTk1b+oDafo6VGiFbdbyL0NVHpENDtjVaqSW0RM8LHhQ6DqS0hdW5TUaQBw+jSzt -Od9C4INBdN+jzcKGYEho42kLVACL5HZpIQ15TjQIXhTCzLG3rdd8cIrHhQIDAQAB -o0IwQDAdBgNVHQ4EFgQU++8Nhp6w492pufEhF38+/PB3KxowDgYDVR0PAQH/BAQD -AgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQELBQADggEBAFn/8oz1h31x -PaOfG1vR2vjTnGs2vZupYeveFix0PZ7mddrXuqe8QhfnPZHr5X3dPpzxz5KsbEjM -wiI/aTvFthUvozXGaCocV685743QNcMYDHsAVhzNixl03r4PEuDQqqE/AjSxcM6d -GNYIAwlG7mDgfrbESQRRfXBgvKqy/3lyeqYdPV8q+Mri/Tm3R7nrft8EI6/6nAYH -6ftjk4BAtcZsCjEozgyfz7MjNYBBjWzEN3uBL4ChQEKF6dk4jeihU80Bv2noWgby -RQuQ+q7hv53yrlc8pa6yVvSLZUDp/TGBLPQ5Cdjua6e0ph0VpZj3AYHYhX3zUVxx -iN66zB+Afko= ------END CERTIFICATE----- - -# Issuer: CN=emSign ECC Root CA - G3 O=eMudhra Technologies Limited OU=emSign PKI -# Subject: CN=emSign ECC Root CA - G3 O=eMudhra Technologies Limited OU=emSign PKI -# Label: "emSign ECC Root CA - G3" -# Serial: 287880440101571086945156 -# MD5 Fingerprint: ce:0b:72:d1:9f:88:8e:d0:50:03:e8:e3:b8:8b:67:40 -# SHA1 Fingerprint: 30:43:fa:4f:f2:57:dc:a0:c3:80:ee:2e:58:ea:78:b2:3f:e6:bb:c1 -# SHA256 Fingerprint: 86:a1:ec:ba:08:9c:4a:8d:3b:be:27:34:c6:12:ba:34:1d:81:3e:04:3c:f9:e8:a8:62:cd:5c:57:a3:6b:be:6b ------BEGIN CERTIFICATE----- -MIICTjCCAdOgAwIBAgIKPPYHqWhwDtqLhDAKBggqhkjOPQQDAzBrMQswCQYDVQQG -EwJJTjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBUZWNo -bm9sb2dpZXMgTGltaXRlZDEgMB4GA1UEAxMXZW1TaWduIEVDQyBSb290IENBIC0g -RzMwHhcNMTgwMjE4MTgzMDAwWhcNNDMwMjE4MTgzMDAwWjBrMQswCQYDVQQGEwJJ -TjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBUZWNobm9s -b2dpZXMgTGltaXRlZDEgMB4GA1UEAxMXZW1TaWduIEVDQyBSb290IENBIC0gRzMw -djAQBgcqhkjOPQIBBgUrgQQAIgNiAAQjpQy4LRL1KPOxst3iAhKAnjlfSU2fySU0 
-WXTsuwYc58Byr+iuL+FBVIcUqEqy6HyC5ltqtdyzdc6LBtCGI79G1Y4PPwT01xyS -fvalY8L1X44uT6EYGQIrMgqCZH0Wk9GjQjBAMB0GA1UdDgQWBBR8XQKEE9TMipuB -zhccLikenEhjQjAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAKBggq -hkjOPQQDAwNpADBmAjEAvvNhzwIQHWSVB7gYboiFBS+DCBeQyh+KTOgNG3qxrdWB -CUfvO6wIBHxcmbHtRwfSAjEAnbpV/KlK6O3t5nYBQnvI+GDZjVGLVTv7jHvrZQnD -+JbNR6iC8hZVdyR+EhCVBCyj ------END CERTIFICATE----- - -# Issuer: CN=emSign Root CA - C1 O=eMudhra Inc OU=emSign PKI -# Subject: CN=emSign Root CA - C1 O=eMudhra Inc OU=emSign PKI -# Label: "emSign Root CA - C1" -# Serial: 825510296613316004955058 -# MD5 Fingerprint: d8:e3:5d:01:21:fa:78:5a:b0:df:ba:d2:ee:2a:5f:68 -# SHA1 Fingerprint: e7:2e:f1:df:fc:b2:09:28:cf:5d:d4:d5:67:37:b1:51:cb:86:4f:01 -# SHA256 Fingerprint: 12:56:09:aa:30:1d:a0:a2:49:b9:7a:82:39:cb:6a:34:21:6f:44:dc:ac:9f:39:54:b1:42:92:f2:e8:c8:60:8f ------BEGIN CERTIFICATE----- -MIIDczCCAlugAwIBAgILAK7PALrEzzL4Q7IwDQYJKoZIhvcNAQELBQAwVjELMAkG -A1UEBhMCVVMxEzARBgNVBAsTCmVtU2lnbiBQS0kxFDASBgNVBAoTC2VNdWRocmEg -SW5jMRwwGgYDVQQDExNlbVNpZ24gUm9vdCBDQSAtIEMxMB4XDTE4MDIxODE4MzAw -MFoXDTQzMDIxODE4MzAwMFowVjELMAkGA1UEBhMCVVMxEzARBgNVBAsTCmVtU2ln -biBQS0kxFDASBgNVBAoTC2VNdWRocmEgSW5jMRwwGgYDVQQDExNlbVNpZ24gUm9v -dCBDQSAtIEMxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAz+upufGZ -BczYKCFK83M0UYRWEPWgTywS4/oTmifQz/l5GnRfHXk5/Fv4cI7gklL35CX5VIPZ -HdPIWoU/Xse2B+4+wM6ar6xWQio5JXDWv7V7Nq2s9nPczdcdioOl+yuQFTdrHCZH -3DspVpNqs8FqOp099cGXOFgFixwR4+S0uF2FHYP+eF8LRWgYSKVGczQ7/g/IdrvH -GPMF0Ybzhe3nudkyrVWIzqa2kbBPrH4VI5b2P/AgNBbeCsbEBEV5f6f9vtKppa+c -xSMq9zwhbL2vj07FOrLzNBL834AaSaTUqZX3noleoomslMuoaJuvimUnzYnu3Yy1 -aylwQ6BpC+S5DwIDAQABo0IwQDAdBgNVHQ4EFgQU/qHgcB4qAzlSWkK+XJGFehiq -TbUwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEL -BQADggEBAMJKVvoVIXsoounlHfv4LcQ5lkFMOycsxGwYFYDGrK9HWS8mC+M2sO87 -/kOXSTKZEhVb3xEp/6tT+LvBeA+snFOvV71ojD1pM/CjoCNjO2RnIkSt1XHLVip4 -kqNPEjE2NuLe/gDEo2APJ62gsIq1NnpSob0n9CAnYuhNlCQT5AoE6TyrLshDCUrG -YQTlSTR+08TI9Q/Aqum6VF7zYytPT1DU/rl7mYw9wC68AivTxEDkigcxHpvOJpkT -+xHqmiIMERnHXhuBUDDIlhJu58tBf5E7oke3VIAb3ADMmpDqw8NQBmIMMMAVSKeo -WXzhriKi4gp6D/piq1JM4fHfyr6DDUI= ------END CERTIFICATE----- - -# Issuer: CN=emSign ECC Root CA - C3 O=eMudhra Inc OU=emSign PKI -# Subject: CN=emSign ECC Root CA - C3 O=eMudhra Inc OU=emSign PKI -# Label: "emSign ECC Root CA - C3" -# Serial: 582948710642506000014504 -# MD5 Fingerprint: 3e:53:b3:a3:81:ee:d7:10:f8:d3:b0:1d:17:92:f5:d5 -# SHA1 Fingerprint: b6:af:43:c2:9b:81:53:7d:f6:ef:6b:c3:1f:1f:60:15:0c:ee:48:66 -# SHA256 Fingerprint: bc:4d:80:9b:15:18:9d:78:db:3e:1d:8c:f4:f9:72:6a:79:5d:a1:64:3c:a5:f1:35:8e:1d:db:0e:dc:0d:7e:b3 ------BEGIN CERTIFICATE----- -MIICKzCCAbGgAwIBAgIKe3G2gla4EnycqDAKBggqhkjOPQQDAzBaMQswCQYDVQQG -EwJVUzETMBEGA1UECxMKZW1TaWduIFBLSTEUMBIGA1UEChMLZU11ZGhyYSBJbmMx -IDAeBgNVBAMTF2VtU2lnbiBFQ0MgUm9vdCBDQSAtIEMzMB4XDTE4MDIxODE4MzAw -MFoXDTQzMDIxODE4MzAwMFowWjELMAkGA1UEBhMCVVMxEzARBgNVBAsTCmVtU2ln -biBQS0kxFDASBgNVBAoTC2VNdWRocmEgSW5jMSAwHgYDVQQDExdlbVNpZ24gRUND -IFJvb3QgQ0EgLSBDMzB2MBAGByqGSM49AgEGBSuBBAAiA2IABP2lYa57JhAd6bci -MK4G9IGzsUJxlTm801Ljr6/58pc1kjZGDoeVjbk5Wum739D+yAdBPLtVb4Ojavti -sIGJAnB9SMVK4+kiVCJNk7tCDK93nCOmfddhEc5lx/h//vXyqaNCMEAwHQYDVR0O -BBYEFPtaSNCAIEDyqOkAB2kZd6fmw/TPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMB -Af8EBTADAQH/MAoGCCqGSM49BAMDA2gAMGUCMQC02C8Cif22TGK6Q04ThHK1rt0c -3ta13FaPWEBaLd4gTCKDypOofu4SQMfWh0/434UCMBwUZOR8loMRnLDRWmFLpg9J -0wD8ofzkpf9/rdcw0Md3f76BB1UwUCAU9Vc4CqgxUQ== ------END CERTIFICATE----- - -# Issuer: CN=Hongkong Post Root CA 3 O=Hongkong Post -# Subject: CN=Hongkong Post Root CA 3 O=Hongkong Post -# Label: 
"Hongkong Post Root CA 3" -# Serial: 46170865288971385588281144162979347873371282084 -# MD5 Fingerprint: 11:fc:9f:bd:73:30:02:8a:fd:3f:f3:58:b9:cb:20:f0 -# SHA1 Fingerprint: 58:a2:d0:ec:20:52:81:5b:c1:f3:f8:64:02:24:4e:c2:8e:02:4b:02 -# SHA256 Fingerprint: 5a:2f:c0:3f:0c:83:b0:90:bb:fa:40:60:4b:09:88:44:6c:76:36:18:3d:f9:84:6e:17:10:1a:44:7f:b8:ef:d6 ------BEGIN CERTIFICATE----- -MIIFzzCCA7egAwIBAgIUCBZfikyl7ADJk0DfxMauI7gcWqQwDQYJKoZIhvcNAQEL -BQAwbzELMAkGA1UEBhMCSEsxEjAQBgNVBAgTCUhvbmcgS29uZzESMBAGA1UEBxMJ -SG9uZyBLb25nMRYwFAYDVQQKEw1Ib25na29uZyBQb3N0MSAwHgYDVQQDExdIb25n -a29uZyBQb3N0IFJvb3QgQ0EgMzAeFw0xNzA2MDMwMjI5NDZaFw00MjA2MDMwMjI5 -NDZaMG8xCzAJBgNVBAYTAkhLMRIwEAYDVQQIEwlIb25nIEtvbmcxEjAQBgNVBAcT -CUhvbmcgS29uZzEWMBQGA1UEChMNSG9uZ2tvbmcgUG9zdDEgMB4GA1UEAxMXSG9u -Z2tvbmcgUG9zdCBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK -AoICAQCziNfqzg8gTr7m1gNt7ln8wlffKWihgw4+aMdoWJwcYEuJQwy51BWy7sFO -dem1p+/l6TWZ5Mwc50tfjTMwIDNT2aa71T4Tjukfh0mtUC1Qyhi+AViiE3CWu4mI -VoBc+L0sPOFMV4i707mV78vH9toxdCim5lSJ9UExyuUmGs2C4HDaOym71QP1mbpV -9WTRYA6ziUm4ii8F0oRFKHyPaFASePwLtVPLwpgchKOesL4jpNrcyCse2m5FHomY -2vkALgbpDDtw1VAliJnLzXNg99X/NWfFobxeq81KuEXryGgeDQ0URhLj0mRiikKY -vLTGCAj4/ahMZJx2Ab0vqWwzD9g/KLg8aQFChn5pwckGyuV6RmXpwtZQQS4/t+Tt -bNe/JgERohYpSms0BpDsE9K2+2p20jzt8NYt3eEV7KObLyzJPivkaTv/ciWxNoZb -x39ri1UbSsUgYT2uy1DhCDq+sI9jQVMwCFk8mB13umOResoQUGC/8Ne8lYePl8X+ -l2oBlKN8W4UdKjk60FSh0Tlxnf0h+bV78OLgAo9uliQlLKAeLKjEiafv7ZkGL7YK -TE/bosw3Gq9HhS2KX8Q0NEwA/RiTZxPRN+ZItIsGxVd7GYYKecsAyVKvQv83j+Gj -Hno9UKtjBucVtT+2RTeUN7F+8kjDf8V1/peNRY8apxpyKBpADwIDAQABo2MwYTAP -BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAfBgNVHSMEGDAWgBQXnc0e -i9Y5K3DTXNSguB+wAPzFYTAdBgNVHQ4EFgQUF53NHovWOStw01zUoLgfsAD8xWEw -DQYJKoZIhvcNAQELBQADggIBAFbVe27mIgHSQpsY1Q7XZiNc4/6gx5LS6ZStS6LG -7BJ8dNVI0lkUmcDrudHr9EgwW62nV3OZqdPlt9EuWSRY3GguLmLYauRwCy0gUCCk -MpXRAJi70/33MvJJrsZ64Ee+bs7Lo3I6LWldy8joRTnU+kLBEUx3XZL7av9YROXr -gZ6voJmtvqkBZss4HTzfQx/0TW60uhdG/H39h4F5ag0zD/ov+BS5gLNdTaqX4fnk -GMX41TiMJjz98iji7lpJiCzfeT2OnpA8vUFKOt1b9pq0zj8lMH8yfaIDlNDceqFS -3m6TjRgm/VWsvY+b0s+v54Ysyx8Jb6NvqYTUc79NoXQbTiNg8swOqn+knEwlqLJm -Ozj/2ZQw9nKEvmhVEA/GcywWaZMH/rFF7buiVWqw2rVKAiUnhde3t4ZEFolsgCs+ -l6mc1X5VTMbeRRAc6uk7nwNT7u56AQIWeNTowr5GdogTPyK7SBIdUgC0An4hGh6c -JfTzPV4e0hz5sy229zdcxsshTrD3mUcYhcErulWuBurQB7Lcq9CClnXO0lD+mefP -L5/ndtFhKvshuzHQqp9HpLIiyhY6UFfEW0NnxWViA0kB60PZ2Pierc+xYw5F9KBa -LJstxabArahH9CdMOA0uG0k7UvToiIMrVCjU8jVStDKDYmlkDJGcn5fqdBb9HxEG -mpv0 ------END CERTIFICATE----- diff --git a/venv/lib/python3.6/site-packages/certifi/core.py b/venv/lib/python3.6/site-packages/certifi/core.py deleted file mode 100644 index 7271acf..0000000 --- a/venv/lib/python3.6/site-packages/certifi/core.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- - -""" -certifi.py -~~~~~~~~~~ - -This module returns the installation location of cacert.pem. -""" -import os - - -def where(): - f = os.path.dirname(__file__) - - return os.path.join(f, 'cacert.pem') diff --git a/venv/lib/python3.6/site-packages/chardet-3.0.4.dist-info/DESCRIPTION.rst b/venv/lib/python3.6/site-packages/chardet-3.0.4.dist-info/DESCRIPTION.rst deleted file mode 100644 index c0f044d..0000000 --- a/venv/lib/python3.6/site-packages/chardet-3.0.4.dist-info/DESCRIPTION.rst +++ /dev/null @@ -1,70 +0,0 @@ -Chardet: The Universal Character Encoding Detector --------------------------------------------------- - -.. image:: https://img.shields.io/travis/chardet/chardet/stable.svg - :alt: Build status - :target: https://travis-ci.org/chardet/chardet - -.. 
image:: https://img.shields.io/coveralls/chardet/chardet/stable.svg - :target: https://coveralls.io/r/chardet/chardet - -.. image:: https://img.shields.io/pypi/v/chardet.svg - :target: https://warehouse.python.org/project/chardet/ - :alt: Latest version on PyPI - -.. image:: https://img.shields.io/pypi/l/chardet.svg - :alt: License - - -Detects - - ASCII, UTF-8, UTF-16 (2 variants), UTF-32 (4 variants) - - Big5, GB2312, EUC-TW, HZ-GB-2312, ISO-2022-CN (Traditional and Simplified Chinese) - - EUC-JP, SHIFT_JIS, CP932, ISO-2022-JP (Japanese) - - EUC-KR, ISO-2022-KR (Korean) - - KOI8-R, MacCyrillic, IBM855, IBM866, ISO-8859-5, windows-1251 (Cyrillic) - - ISO-8859-5, windows-1251 (Bulgarian) - - ISO-8859-1, windows-1252 (Western European languages) - - ISO-8859-7, windows-1253 (Greek) - - ISO-8859-8, windows-1255 (Visual and Logical Hebrew) - - TIS-620 (Thai) - -.. note:: - Our ISO-8859-2 and windows-1250 (Hungarian) probers have been temporarily - disabled until we can retrain the models. - -Requires Python 2.6, 2.7, or 3.3+. - -Installation ------------- - -Install from `PyPI `_:: - - pip install chardet - -Documentation -------------- - -For users, docs are now available at https://chardet.readthedocs.io/. - -Command-line Tool ------------------ - -chardet comes with a command-line script which reports on the encodings of one -or more files:: - - % chardetect somefile someotherfile - somefile: windows-1252 with confidence 0.5 - someotherfile: ascii with confidence 1.0 - -About ------ - -This is a continuation of Mark Pilgrim's excellent chardet. Previously, two -versions needed to be maintained: one that supported python 2.x and one that -supported python 3.x. We've recently merged with `Ian Cordasco `_'s -`charade `_ fork, so now we have one -coherent version that works for Python 2.6+. 
- -:maintainer: Dan Blanchard - - diff --git a/venv/lib/python3.6/site-packages/chardet-3.0.4.dist-info/INSTALLER b/venv/lib/python3.6/site-packages/chardet-3.0.4.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/venv/lib/python3.6/site-packages/chardet-3.0.4.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/venv/lib/python3.6/site-packages/chardet-3.0.4.dist-info/METADATA b/venv/lib/python3.6/site-packages/chardet-3.0.4.dist-info/METADATA deleted file mode 100644 index 1427867..0000000 --- a/venv/lib/python3.6/site-packages/chardet-3.0.4.dist-info/METADATA +++ /dev/null @@ -1,96 +0,0 @@ -Metadata-Version: 2.0 -Name: chardet -Version: 3.0.4 -Summary: Universal encoding detector for Python 2 and 3 -Home-page: https://github.com/chardet/chardet -Author: Daniel Blanchard -Author-email: dan.blanchard@gmail.com -License: LGPL -Keywords: encoding,i18n,xml -Platform: UNKNOWN -Classifier: Development Status :: 4 - Beta -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL) -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.6 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Text Processing :: Linguistic - -Chardet: The Universal Character Encoding Detector --------------------------------------------------- - -.. image:: https://img.shields.io/travis/chardet/chardet/stable.svg - :alt: Build status - :target: https://travis-ci.org/chardet/chardet - -.. image:: https://img.shields.io/coveralls/chardet/chardet/stable.svg - :target: https://coveralls.io/r/chardet/chardet - -.. image:: https://img.shields.io/pypi/v/chardet.svg - :target: https://warehouse.python.org/project/chardet/ - :alt: Latest version on PyPI - -.. image:: https://img.shields.io/pypi/l/chardet.svg - :alt: License - - -Detects - - ASCII, UTF-8, UTF-16 (2 variants), UTF-32 (4 variants) - - Big5, GB2312, EUC-TW, HZ-GB-2312, ISO-2022-CN (Traditional and Simplified Chinese) - - EUC-JP, SHIFT_JIS, CP932, ISO-2022-JP (Japanese) - - EUC-KR, ISO-2022-KR (Korean) - - KOI8-R, MacCyrillic, IBM855, IBM866, ISO-8859-5, windows-1251 (Cyrillic) - - ISO-8859-5, windows-1251 (Bulgarian) - - ISO-8859-1, windows-1252 (Western European languages) - - ISO-8859-7, windows-1253 (Greek) - - ISO-8859-8, windows-1255 (Visual and Logical Hebrew) - - TIS-620 (Thai) - -.. note:: - Our ISO-8859-2 and windows-1250 (Hungarian) probers have been temporarily - disabled until we can retrain the models. - -Requires Python 2.6, 2.7, or 3.3+. - -Installation ------------- - -Install from `PyPI `_:: - - pip install chardet - -Documentation -------------- - -For users, docs are now available at https://chardet.readthedocs.io/. - -Command-line Tool ------------------ - -chardet comes with a command-line script which reports on the encodings of one -or more files:: - - % chardetect somefile someotherfile - somefile: windows-1252 with confidence 0.5 - someotherfile: ascii with confidence 1.0 - -About ------ - -This is a continuation of Mark Pilgrim's excellent chardet. 
Previously, two -versions needed to be maintained: one that supported python 2.x and one that -supported python 3.x. We've recently merged with `Ian Cordasco `_'s -`charade `_ fork, so now we have one -coherent version that works for Python 2.6+. - -:maintainer: Dan Blanchard - - diff --git a/venv/lib/python3.6/site-packages/chardet-3.0.4.dist-info/RECORD b/venv/lib/python3.6/site-packages/chardet-3.0.4.dist-info/RECORD deleted file mode 100644 index 7914493..0000000 --- a/venv/lib/python3.6/site-packages/chardet-3.0.4.dist-info/RECORD +++ /dev/null @@ -1,91 +0,0 @@ -chardet/__init__.py,sha256=YsP5wQlsHJ2auF1RZJfypiSrCA7_bQiRm3ES_NI76-Y,1559 -chardet/big5freq.py,sha256=D_zK5GyzoVsRes0HkLJziltFQX0bKCLOrFe9_xDvO_8,31254 -chardet/big5prober.py,sha256=kBxHbdetBpPe7xrlb-e990iot64g_eGSLd32lB7_h3M,1757 -chardet/chardistribution.py,sha256=3woWS62KrGooKyqz4zQSnjFbJpa6V7g02daAibTwcl8,9411 -chardet/charsetgroupprober.py,sha256=6bDu8YIiRuScX4ca9Igb0U69TA2PGXXDej6Cc4_9kO4,3787 -chardet/charsetprober.py,sha256=KSmwJErjypyj0bRZmC5F5eM7c8YQgLYIjZXintZNstg,5110 -chardet/codingstatemachine.py,sha256=VYp_6cyyki5sHgXDSZnXW4q1oelHc3cu9AyQTX7uug8,3590 -chardet/compat.py,sha256=PKTzHkSbtbHDqS9PyujMbX74q1a8mMpeQTDVsQhZMRw,1134 -chardet/cp949prober.py,sha256=TZ434QX8zzBsnUvL_8wm4AQVTZ2ZkqEEQL_lNw9f9ow,1855 -chardet/enums.py,sha256=Aimwdb9as1dJKZaFNUH2OhWIVBVd6ZkJJ_WK5sNY8cU,1661 -chardet/escprober.py,sha256=kkyqVg1Yw3DIOAMJ2bdlyQgUFQhuHAW8dUGskToNWSc,3950 -chardet/escsm.py,sha256=RuXlgNvTIDarndvllNCk5WZBIpdCxQ0kcd9EAuxUh84,10510 -chardet/eucjpprober.py,sha256=iD8Jdp0ISRjgjiVN7f0e8xGeQJ5GM2oeZ1dA8nbSeUw,3749 -chardet/euckrfreq.py,sha256=-7GdmvgWez4-eO4SuXpa7tBiDi5vRXQ8WvdFAzVaSfo,13546 -chardet/euckrprober.py,sha256=MqFMTQXxW4HbzIpZ9lKDHB3GN8SP4yiHenTmf8g_PxY,1748 -chardet/euctwfreq.py,sha256=No1WyduFOgB5VITUA7PLyC5oJRNzRyMbBxaKI1l16MA,31621 -chardet/euctwprober.py,sha256=13p6EP4yRaxqnP4iHtxHOJ6R2zxHq1_m8hTRjzVZ95c,1747 -chardet/gb2312freq.py,sha256=JX8lsweKLmnCwmk8UHEQsLgkr_rP_kEbvivC4qPOrlc,20715 -chardet/gb2312prober.py,sha256=gGvIWi9WhDjE-xQXHvNIyrnLvEbMAYgyUSZ65HUfylw,1754 -chardet/hebrewprober.py,sha256=c3SZ-K7hvyzGY6JRAZxJgwJ_sUS9k0WYkvMY00YBYFo,13838 -chardet/jisfreq.py,sha256=vpmJv2Bu0J8gnMVRPHMFefTRvo_ha1mryLig8CBwgOg,25777 -chardet/jpcntx.py,sha256=PYlNqRUQT8LM3cT5FmHGP0iiscFlTWED92MALvBungo,19643 -chardet/langbulgarianmodel.py,sha256=1HqQS9Pbtnj1xQgxitJMvw8X6kKr5OockNCZWfEQrPE,12839 -chardet/langcyrillicmodel.py,sha256=LODajvsetH87yYDDQKA2CULXUH87tI223dhfjh9Zx9c,17948 -chardet/langgreekmodel.py,sha256=8YAW7bU8YwSJap0kIJSbPMw1BEqzGjWzqcqf0WgUKAA,12688 -chardet/langhebrewmodel.py,sha256=JSnqmE5E62tDLTPTvLpQsg5gOMO4PbdWRvV7Avkc0HA,11345 -chardet/langhungarianmodel.py,sha256=RhapYSG5l0ZaO-VV4Fan5sW0WRGQqhwBM61yx3yxyOA,12592 -chardet/langthaimodel.py,sha256=8l0173Gu_W6G8mxmQOTEF4ls2YdE7FxWf3QkSxEGXJQ,11290 -chardet/langturkishmodel.py,sha256=W22eRNJsqI6uWAfwXSKVWWnCerYqrI8dZQTm_M0lRFk,11102 -chardet/latin1prober.py,sha256=S2IoORhFk39FEFOlSFWtgVybRiP6h7BlLldHVclNkU8,5370 -chardet/mbcharsetprober.py,sha256=AR95eFH9vuqSfvLQZN-L5ijea25NOBCoXqw8s5O9xLQ,3413 -chardet/mbcsgroupprober.py,sha256=h6TRnnYq2OxG1WdD5JOyxcdVpn7dG0q-vB8nWr5mbh4,2012 -chardet/mbcssm.py,sha256=SY32wVIF3HzcjY3BaEspy9metbNSKxIIB0RKPn7tjpI,25481 -chardet/sbcharsetprober.py,sha256=LDSpCldDCFlYwUkGkwD2oFxLlPWIWXT09akH_2PiY74,5657 -chardet/sbcsgroupprober.py,sha256=1IprcCB_k1qfmnxGC6MBbxELlKqD3scW6S8YIwdeyXA,3546 -chardet/sjisprober.py,sha256=IIt-lZj0WJqK4rmUZzKZP4GJlE8KUEtFYVuY96ek5MQ,3774 
-chardet/universaldetector.py,sha256=qL0174lSZE442eB21nnktT9_VcAye07laFWUeUrjttY,12485 -chardet/utf8prober.py,sha256=IdD8v3zWOsB8OLiyPi-y_fqwipRFxV9Nc1eKBLSuIEw,2766 -chardet/version.py,sha256=sp3B08mrDXB-pf3K9fqJ_zeDHOCLC8RrngQyDFap_7g,242 -chardet/cli/__init__.py,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1 -chardet/cli/chardetect.py,sha256=YBO8L4mXo0WR6_-Fjh_8QxPBoEBNqB9oNxNrdc54AQs,2738 -chardet-3.0.4.dist-info/DESCRIPTION.rst,sha256=PQ4sBsMyKFZkjC6QpmbpLn0UtCNyeb-ZqvCGEgyZMGk,2174 -chardet-3.0.4.dist-info/METADATA,sha256=RV_2I4B1Z586DL8oVO5Kp7X5bUdQ5EuKAvNoAEF8wSw,3239 -chardet-3.0.4.dist-info/RECORD,, -chardet-3.0.4.dist-info/WHEEL,sha256=o2k-Qa-RMNIJmUdIc7KU6VWR_ErNRbWNlxDIpl7lm34,110 -chardet-3.0.4.dist-info/entry_points.txt,sha256=fAMmhu5eJ-zAJ-smfqQwRClQ3-nozOCmvJ6-E8lgGJo,60 -chardet-3.0.4.dist-info/metadata.json,sha256=0htbRM18ujyGZDdfowgAqj6Hq2eQtwzwyhaEveKntgo,1375 -chardet-3.0.4.dist-info/top_level.txt,sha256=AowzBbZy4x8EirABDdJSLJZMkJ_53iIag8xfKR6D7kI,8 -../../../bin/chardetect,sha256=tdd0S2YJVvT6yubIapfjDA-fyiySzs92Asb2iuXGH2U,254 -chardet-3.0.4.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -chardet/__pycache__/langgreekmodel.cpython-36.pyc,, -chardet/__pycache__/sjisprober.cpython-36.pyc,, -chardet/__pycache__/mbcharsetprober.cpython-36.pyc,, -chardet/__pycache__/eucjpprober.cpython-36.pyc,, -chardet/__pycache__/cp949prober.cpython-36.pyc,, -chardet/__pycache__/euctwfreq.cpython-36.pyc,, -chardet/__pycache__/gb2312prober.cpython-36.pyc,, -chardet/__pycache__/sbcsgroupprober.cpython-36.pyc,, -chardet/__pycache__/langturkishmodel.cpython-36.pyc,, -chardet/__pycache__/langhebrewmodel.cpython-36.pyc,, -chardet/__pycache__/version.cpython-36.pyc,, -chardet/__pycache__/euckrprober.cpython-36.pyc,, -chardet/__pycache__/euckrfreq.cpython-36.pyc,, -chardet/__pycache__/chardistribution.cpython-36.pyc,, -chardet/__pycache__/escsm.cpython-36.pyc,, -chardet/__pycache__/euctwprober.cpython-36.pyc,, -chardet/__pycache__/big5freq.cpython-36.pyc,, -chardet/__pycache__/compat.cpython-36.pyc,, -chardet/__pycache__/hebrewprober.cpython-36.pyc,, -chardet/__pycache__/latin1prober.cpython-36.pyc,, -chardet/__pycache__/universaldetector.cpython-36.pyc,, -chardet/__pycache__/escprober.cpython-36.pyc,, -chardet/__pycache__/codingstatemachine.cpython-36.pyc,, -chardet/__pycache__/utf8prober.cpython-36.pyc,, -chardet/__pycache__/langhungarianmodel.cpython-36.pyc,, -chardet/__pycache__/gb2312freq.cpython-36.pyc,, -chardet/__pycache__/enums.cpython-36.pyc,, -chardet/__pycache__/langthaimodel.cpython-36.pyc,, -chardet/__pycache__/jpcntx.cpython-36.pyc,, -chardet/__pycache__/mbcssm.cpython-36.pyc,, -chardet/__pycache__/big5prober.cpython-36.pyc,, -chardet/__pycache__/sbcharsetprober.cpython-36.pyc,, -chardet/__pycache__/langcyrillicmodel.cpython-36.pyc,, -chardet/__pycache__/__init__.cpython-36.pyc,, -chardet/__pycache__/jisfreq.cpython-36.pyc,, -chardet/__pycache__/charsetgroupprober.cpython-36.pyc,, -chardet/__pycache__/langbulgarianmodel.cpython-36.pyc,, -chardet/__pycache__/mbcsgroupprober.cpython-36.pyc,, -chardet/__pycache__/charsetprober.cpython-36.pyc,, -chardet/cli/__pycache__/chardetect.cpython-36.pyc,, -chardet/cli/__pycache__/__init__.cpython-36.pyc,, diff --git a/venv/lib/python3.6/site-packages/chardet-3.0.4.dist-info/WHEEL b/venv/lib/python3.6/site-packages/chardet-3.0.4.dist-info/WHEEL deleted file mode 100644 index 8b6dd1b..0000000 --- a/venv/lib/python3.6/site-packages/chardet-3.0.4.dist-info/WHEEL +++ /dev/null @@ -1,6 +0,0 @@ -Wheel-Version: 1.0 
-Generator: bdist_wheel (0.29.0) -Root-Is-Purelib: true -Tag: py2-none-any -Tag: py3-none-any - diff --git a/venv/lib/python3.6/site-packages/chardet-3.0.4.dist-info/entry_points.txt b/venv/lib/python3.6/site-packages/chardet-3.0.4.dist-info/entry_points.txt deleted file mode 100644 index a884269..0000000 --- a/venv/lib/python3.6/site-packages/chardet-3.0.4.dist-info/entry_points.txt +++ /dev/null @@ -1,3 +0,0 @@ -[console_scripts] -chardetect = chardet.cli.chardetect:main - diff --git a/venv/lib/python3.6/site-packages/chardet-3.0.4.dist-info/metadata.json b/venv/lib/python3.6/site-packages/chardet-3.0.4.dist-info/metadata.json deleted file mode 100644 index 8cdf025..0000000 --- a/venv/lib/python3.6/site-packages/chardet-3.0.4.dist-info/metadata.json +++ /dev/null @@ -1 +0,0 @@ -{"classifiers": ["Development Status :: 4 - Beta", "Intended Audience :: Developers", "License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)", "Operating System :: OS Independent", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Topic :: Software Development :: Libraries :: Python Modules", "Topic :: Text Processing :: Linguistic"], "extensions": {"python.commands": {"wrap_console": {"chardetect": "chardet.cli.chardetect:main"}}, "python.details": {"contacts": [{"email": "dan.blanchard@gmail.com", "name": "Daniel Blanchard", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}, "project_urls": {"Home": "https://github.com/chardet/chardet"}}, "python.exports": {"console_scripts": {"chardetect": "chardet.cli.chardetect:main"}}}, "generator": "bdist_wheel (0.29.0)", "keywords": ["encoding", "i18n", "xml"], "license": "LGPL", "metadata_version": "2.0", "name": "chardet", "summary": "Universal encoding detector for Python 2 and 3", "test_requires": [{"requires": ["hypothesis", "pytest"]}], "version": "3.0.4"} \ No newline at end of file diff --git a/venv/lib/python3.6/site-packages/chardet-3.0.4.dist-info/top_level.txt b/venv/lib/python3.6/site-packages/chardet-3.0.4.dist-info/top_level.txt deleted file mode 100644 index 79236f2..0000000 --- a/venv/lib/python3.6/site-packages/chardet-3.0.4.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -chardet diff --git a/venv/lib/python3.6/site-packages/chardet/__init__.py b/venv/lib/python3.6/site-packages/chardet/__init__.py deleted file mode 100644 index 0f9f820..0000000 --- a/venv/lib/python3.6/site-packages/chardet/__init__.py +++ /dev/null @@ -1,39 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - - -from .compat import PY2, PY3 -from .universaldetector import UniversalDetector -from .version import __version__, VERSION - - -def detect(byte_str): - """ - Detect the encoding of the given byte string. - - :param byte_str: The byte sequence to examine. - :type byte_str: ``bytes`` or ``bytearray`` - """ - if not isinstance(byte_str, bytearray): - if not isinstance(byte_str, bytes): - raise TypeError('Expected object of type bytes or bytearray, got: ' - '{0}'.format(type(byte_str))) - else: - byte_str = bytearray(byte_str) - detector = UniversalDetector() - detector.feed(byte_str) - return detector.close() diff --git a/venv/lib/python3.6/site-packages/chardet/big5freq.py b/venv/lib/python3.6/site-packages/chardet/big5freq.py deleted file mode 100644 index 38f3251..0000000 --- a/venv/lib/python3.6/site-packages/chardet/big5freq.py +++ /dev/null @@ -1,386 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Communicator client code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -# Big5 frequency table -# by Taiwan's Mandarin Promotion Council -# -# -# 128 --> 0.42261 -# 256 --> 0.57851 -# 512 --> 0.74851 -# 1024 --> 0.89384 -# 2048 --> 0.97583 -# -# Ideal Distribution Ratio = 0.74851/(1-0.74851) =2.98 -# Random Distribution Ration = 512/(5401-512)=0.105 -# -# Typical Distribution Ratio about 25% of Ideal one, still much higher than RDR - -BIG5_TYPICAL_DISTRIBUTION_RATIO = 0.75 - -#Char to FreqOrder table -BIG5_TABLE_SIZE = 5376 - -BIG5_CHAR_TO_FREQ_ORDER = ( - 1,1801,1506, 255,1431, 198, 9, 82, 6,5008, 177, 202,3681,1256,2821, 110, # 16 -3814, 33,3274, 261, 76, 44,2114, 16,2946,2187,1176, 659,3971, 26,3451,2653, # 32 -1198,3972,3350,4202, 410,2215, 302, 590, 361,1964, 8, 204, 58,4510,5009,1932, # 48 - 63,5010,5011, 317,1614, 75, 222, 159,4203,2417,1480,5012,3555,3091, 224,2822, # 64 -3682, 3, 10,3973,1471, 29,2787,1135,2866,1940, 873, 130,3275,1123, 312,5013, # 80 -4511,2052, 507, 252, 682,5014, 142,1915, 124, 206,2947, 34,3556,3204, 64, 604, # 96 -5015,2501,1977,1978, 155,1991, 645, 641,1606,5016,3452, 337, 72, 406,5017, 80, # 112 - 630, 238,3205,1509, 263, 939,1092,2654, 756,1440,1094,3453, 449, 69,2987, 591, # 128 - 179,2096, 471, 115,2035,1844, 60, 50,2988, 134, 806,1869, 734,2036,3454, 180, # 144 - 995,1607, 156, 537,2907, 688,5018, 319,1305, 779,2145, 514,2379, 298,4512, 359, # 160 -2502, 90,2716,1338, 663, 11, 906,1099,2553, 20,2441, 182, 532,1716,5019, 732, # 176 -1376,4204,1311,1420,3206, 25,2317,1056, 113, 399, 382,1950, 242,3455,2474, 529, # 192 -3276, 475,1447,3683,5020, 117, 21, 656, 810,1297,2300,2334,3557,5021, 126,4205, # 208 - 706, 456, 150, 613,4513, 71,1118,2037,4206, 145,3092, 85, 835, 486,2115,1246, # 224 -1426, 428, 727,1285,1015, 800, 106, 623, 303,1281,5022,2128,2359, 347,3815, 221, # 240 -3558,3135,5023,1956,1153,4207, 83, 296,1199,3093, 192, 624, 93,5024, 822,1898, # 256 -2823,3136, 795,2065, 991,1554,1542,1592, 27, 43,2867, 859, 139,1456, 860,4514, # 272 - 437, 712,3974, 164,2397,3137, 695, 211,3037,2097, 195,3975,1608,3559,3560,3684, # 288 -3976, 234, 811,2989,2098,3977,2233,1441,3561,1615,2380, 668,2077,1638, 305, 228, # 304 -1664,4515, 467, 415,5025, 262,2099,1593, 239, 108, 300, 200,1033, 512,1247,2078, # 320 -5026,5027,2176,3207,3685,2682, 593, 845,1062,3277, 88,1723,2038,3978,1951, 212, # 336 - 266, 152, 149, 468,1899,4208,4516, 77, 187,5028,3038, 37, 5,2990,5029,3979, # 352 -5030,5031, 39,2524,4517,2908,3208,2079, 55, 148, 74,4518, 545, 483,1474,1029, # 368 -1665, 217,1870,1531,3138,1104,2655,4209, 24, 172,3562, 900,3980,3563,3564,4519, # 384 - 32,1408,2824,1312, 329, 487,2360,2251,2717, 784,2683, 4,3039,3351,1427,1789, # 400 - 188, 109, 499,5032,3686,1717,1790, 888,1217,3040,4520,5033,3565,5034,3352,1520, # 416 -3687,3981, 196,1034, 775,5035,5036, 929,1816, 249, 439, 38,5037,1063,5038, 794, # 432 -3982,1435,2301, 46, 178,3278,2066,5039,2381,5040, 214,1709,4521, 804, 35, 707, # 448 - 324,3688,1601,2554, 140, 459,4210,5041,5042,1365, 839, 272, 978,2262,2580,3456, # 464 -2129,1363,3689,1423, 697, 100,3094, 48, 70,1231, 495,3139,2196,5043,1294,5044, # 480 -2080, 462, 586,1042,3279, 853, 256, 988, 185,2382,3457,1698, 434,1084,5045,3458, # 496 - 314,2625,2788,4522,2335,2336, 569,2285, 637,1817,2525, 757,1162,1879,1616,3459, # 512 - 287,1577,2116, 
768,4523,1671,2868,3566,2526,1321,3816, 909,2418,5046,4211, 933, # 528 -3817,4212,2053,2361,1222,4524, 765,2419,1322, 786,4525,5047,1920,1462,1677,2909, # 544 -1699,5048,4526,1424,2442,3140,3690,2600,3353,1775,1941,3460,3983,4213, 309,1369, # 560 -1130,2825, 364,2234,1653,1299,3984,3567,3985,3986,2656, 525,1085,3041, 902,2001, # 576 -1475, 964,4527, 421,1845,1415,1057,2286, 940,1364,3141, 376,4528,4529,1381, 7, # 592 -2527, 983,2383, 336,1710,2684,1846, 321,3461, 559,1131,3042,2752,1809,1132,1313, # 608 - 265,1481,1858,5049, 352,1203,2826,3280, 167,1089, 420,2827, 776, 792,1724,3568, # 624 -4214,2443,3281,5050,4215,5051, 446, 229, 333,2753, 901,3818,1200,1557,4530,2657, # 640 -1921, 395,2754,2685,3819,4216,1836, 125, 916,3209,2626,4531,5052,5053,3820,5054, # 656 -5055,5056,4532,3142,3691,1133,2555,1757,3462,1510,2318,1409,3569,5057,2146, 438, # 672 -2601,2910,2384,3354,1068, 958,3043, 461, 311,2869,2686,4217,1916,3210,4218,1979, # 688 - 383, 750,2755,2627,4219, 274, 539, 385,1278,1442,5058,1154,1965, 384, 561, 210, # 704 - 98,1295,2556,3570,5059,1711,2420,1482,3463,3987,2911,1257, 129,5060,3821, 642, # 720 - 523,2789,2790,2658,5061, 141,2235,1333, 68, 176, 441, 876, 907,4220, 603,2602, # 736 - 710, 171,3464, 404, 549, 18,3143,2398,1410,3692,1666,5062,3571,4533,2912,4534, # 752 -5063,2991, 368,5064, 146, 366, 99, 871,3693,1543, 748, 807,1586,1185, 22,2263, # 768 - 379,3822,3211,5065,3212, 505,1942,2628,1992,1382,2319,5066, 380,2362, 218, 702, # 784 -1818,1248,3465,3044,3572,3355,3282,5067,2992,3694, 930,3283,3823,5068, 59,5069, # 800 - 585, 601,4221, 497,3466,1112,1314,4535,1802,5070,1223,1472,2177,5071, 749,1837, # 816 - 690,1900,3824,1773,3988,1476, 429,1043,1791,2236,2117, 917,4222, 447,1086,1629, # 832 -5072, 556,5073,5074,2021,1654, 844,1090, 105, 550, 966,1758,2828,1008,1783, 686, # 848 -1095,5075,2287, 793,1602,5076,3573,2603,4536,4223,2948,2302,4537,3825, 980,2503, # 864 - 544, 353, 527,4538, 908,2687,2913,5077, 381,2629,1943,1348,5078,1341,1252, 560, # 880 -3095,5079,3467,2870,5080,2054, 973, 886,2081, 143,4539,5081,5082, 157,3989, 496, # 896 -4224, 57, 840, 540,2039,4540,4541,3468,2118,1445, 970,2264,1748,1966,2082,4225, # 912 -3144,1234,1776,3284,2829,3695, 773,1206,2130,1066,2040,1326,3990,1738,1725,4226, # 928 - 279,3145, 51,1544,2604, 423,1578,2131,2067, 173,4542,1880,5083,5084,1583, 264, # 944 - 610,3696,4543,2444, 280, 154,5085,5086,5087,1739, 338,1282,3096, 693,2871,1411, # 960 -1074,3826,2445,5088,4544,5089,5090,1240, 952,2399,5091,2914,1538,2688, 685,1483, # 976 -4227,2475,1436, 953,4228,2055,4545, 671,2400, 79,4229,2446,3285, 608, 567,2689, # 992 -3469,4230,4231,1691, 393,1261,1792,2401,5092,4546,5093,5094,5095,5096,1383,1672, # 1008 -3827,3213,1464, 522,1119, 661,1150, 216, 675,4547,3991,1432,3574, 609,4548,2690, # 1024 -2402,5097,5098,5099,4232,3045, 0,5100,2476, 315, 231,2447, 301,3356,4549,2385, # 1040 -5101, 233,4233,3697,1819,4550,4551,5102, 96,1777,1315,2083,5103, 257,5104,1810, # 1056 -3698,2718,1139,1820,4234,2022,1124,2164,2791,1778,2659,5105,3097, 363,1655,3214, # 1072 -5106,2993,5107,5108,5109,3992,1567,3993, 718, 103,3215, 849,1443, 341,3357,2949, # 1088 -1484,5110,1712, 127, 67, 339,4235,2403, 679,1412, 821,5111,5112, 834, 738, 351, # 1104 -2994,2147, 846, 235,1497,1881, 418,1993,3828,2719, 186,1100,2148,2756,3575,1545, # 1120 -1355,2950,2872,1377, 583,3994,4236,2581,2995,5113,1298,3699,1078,2557,3700,2363, # 1136 - 78,3829,3830, 267,1289,2100,2002,1594,4237, 348, 369,1274,2197,2178,1838,4552, # 1152 -1821,2830,3701,2757,2288,2003,4553,2951,2758, 144,3358, 
882,4554,3995,2759,3470, # 1168 -4555,2915,5114,4238,1726, 320,5115,3996,3046, 788,2996,5116,2831,1774,1327,2873, # 1184 -3997,2832,5117,1306,4556,2004,1700,3831,3576,2364,2660, 787,2023, 506, 824,3702, # 1200 - 534, 323,4557,1044,3359,2024,1901, 946,3471,5118,1779,1500,1678,5119,1882,4558, # 1216 - 165, 243,4559,3703,2528, 123, 683,4239, 764,4560, 36,3998,1793, 589,2916, 816, # 1232 - 626,1667,3047,2237,1639,1555,1622,3832,3999,5120,4000,2874,1370,1228,1933, 891, # 1248 -2084,2917, 304,4240,5121, 292,2997,2720,3577, 691,2101,4241,1115,4561, 118, 662, # 1264 -5122, 611,1156, 854,2386,1316,2875, 2, 386, 515,2918,5123,5124,3286, 868,2238, # 1280 -1486, 855,2661, 785,2216,3048,5125,1040,3216,3578,5126,3146, 448,5127,1525,5128, # 1296 -2165,4562,5129,3833,5130,4242,2833,3579,3147, 503, 818,4001,3148,1568, 814, 676, # 1312 -1444, 306,1749,5131,3834,1416,1030, 197,1428, 805,2834,1501,4563,5132,5133,5134, # 1328 -1994,5135,4564,5136,5137,2198, 13,2792,3704,2998,3149,1229,1917,5138,3835,2132, # 1344 -5139,4243,4565,2404,3580,5140,2217,1511,1727,1120,5141,5142, 646,3836,2448, 307, # 1360 -5143,5144,1595,3217,5145,5146,5147,3705,1113,1356,4002,1465,2529,2530,5148, 519, # 1376 -5149, 128,2133, 92,2289,1980,5150,4003,1512, 342,3150,2199,5151,2793,2218,1981, # 1392 -3360,4244, 290,1656,1317, 789, 827,2365,5152,3837,4566, 562, 581,4004,5153, 401, # 1408 -4567,2252, 94,4568,5154,1399,2794,5155,1463,2025,4569,3218,1944,5156, 828,1105, # 1424 -4245,1262,1394,5157,4246, 605,4570,5158,1784,2876,5159,2835, 819,2102, 578,2200, # 1440 -2952,5160,1502, 436,3287,4247,3288,2836,4005,2919,3472,3473,5161,2721,2320,5162, # 1456 -5163,2337,2068, 23,4571, 193, 826,3838,2103, 699,1630,4248,3098, 390,1794,1064, # 1472 -3581,5164,1579,3099,3100,1400,5165,4249,1839,1640,2877,5166,4572,4573, 137,4250, # 1488 - 598,3101,1967, 780, 104, 974,2953,5167, 278, 899, 253, 402, 572, 504, 493,1339, # 1504 -5168,4006,1275,4574,2582,2558,5169,3706,3049,3102,2253, 565,1334,2722, 863, 41, # 1520 -5170,5171,4575,5172,1657,2338, 19, 463,2760,4251, 606,5173,2999,3289,1087,2085, # 1536 -1323,2662,3000,5174,1631,1623,1750,4252,2691,5175,2878, 791,2723,2663,2339, 232, # 1552 -2421,5176,3001,1498,5177,2664,2630, 755,1366,3707,3290,3151,2026,1609, 119,1918, # 1568 -3474, 862,1026,4253,5178,4007,3839,4576,4008,4577,2265,1952,2477,5179,1125, 817, # 1584 -4254,4255,4009,1513,1766,2041,1487,4256,3050,3291,2837,3840,3152,5180,5181,1507, # 1600 -5182,2692, 733, 40,1632,1106,2879, 345,4257, 841,2531, 230,4578,3002,1847,3292, # 1616 -3475,5183,1263, 986,3476,5184, 735, 879, 254,1137, 857, 622,1300,1180,1388,1562, # 1632 -4010,4011,2954, 967,2761,2665,1349, 592,2134,1692,3361,3003,1995,4258,1679,4012, # 1648 -1902,2188,5185, 739,3708,2724,1296,1290,5186,4259,2201,2202,1922,1563,2605,2559, # 1664 -1871,2762,3004,5187, 435,5188, 343,1108, 596, 17,1751,4579,2239,3477,3709,5189, # 1680 -4580, 294,3582,2955,1693, 477, 979, 281,2042,3583, 643,2043,3710,2631,2795,2266, # 1696 -1031,2340,2135,2303,3584,4581, 367,1249,2560,5190,3585,5191,4582,1283,3362,2005, # 1712 - 240,1762,3363,4583,4584, 836,1069,3153, 474,5192,2149,2532, 268,3586,5193,3219, # 1728 -1521,1284,5194,1658,1546,4260,5195,3587,3588,5196,4261,3364,2693,1685,4262, 961, # 1744 -1673,2632, 190,2006,2203,3841,4585,4586,5197, 570,2504,3711,1490,5198,4587,2633, # 1760 -3293,1957,4588, 584,1514, 396,1045,1945,5199,4589,1968,2449,5200,5201,4590,4013, # 1776 - 619,5202,3154,3294, 215,2007,2796,2561,3220,4591,3221,4592, 763,4263,3842,4593, # 1792 -5203,5204,1958,1767,2956,3365,3712,1174, 
452,1477,4594,3366,3155,5205,2838,1253, # 1808 -2387,2189,1091,2290,4264, 492,5206, 638,1169,1825,2136,1752,4014, 648, 926,1021, # 1824 -1324,4595, 520,4596, 997, 847,1007, 892,4597,3843,2267,1872,3713,2405,1785,4598, # 1840 -1953,2957,3103,3222,1728,4265,2044,3714,4599,2008,1701,3156,1551, 30,2268,4266, # 1856 -5207,2027,4600,3589,5208, 501,5209,4267, 594,3478,2166,1822,3590,3479,3591,3223, # 1872 - 829,2839,4268,5210,1680,3157,1225,4269,5211,3295,4601,4270,3158,2341,5212,4602, # 1888 -4271,5213,4015,4016,5214,1848,2388,2606,3367,5215,4603, 374,4017, 652,4272,4273, # 1904 - 375,1140, 798,5216,5217,5218,2366,4604,2269, 546,1659, 138,3051,2450,4605,5219, # 1920 -2254, 612,1849, 910, 796,3844,1740,1371, 825,3845,3846,5220,2920,2562,5221, 692, # 1936 - 444,3052,2634, 801,4606,4274,5222,1491, 244,1053,3053,4275,4276, 340,5223,4018, # 1952 -1041,3005, 293,1168, 87,1357,5224,1539, 959,5225,2240, 721, 694,4277,3847, 219, # 1968 -1478, 644,1417,3368,2666,1413,1401,1335,1389,4019,5226,5227,3006,2367,3159,1826, # 1984 - 730,1515, 184,2840, 66,4607,5228,1660,2958, 246,3369, 378,1457, 226,3480, 975, # 2000 -4020,2959,1264,3592, 674, 696,5229, 163,5230,1141,2422,2167, 713,3593,3370,4608, # 2016 -4021,5231,5232,1186, 15,5233,1079,1070,5234,1522,3224,3594, 276,1050,2725, 758, # 2032 -1126, 653,2960,3296,5235,2342, 889,3595,4022,3104,3007, 903,1250,4609,4023,3481, # 2048 -3596,1342,1681,1718, 766,3297, 286, 89,2961,3715,5236,1713,5237,2607,3371,3008, # 2064 -5238,2962,2219,3225,2880,5239,4610,2505,2533, 181, 387,1075,4024, 731,2190,3372, # 2080 -5240,3298, 310, 313,3482,2304, 770,4278, 54,3054, 189,4611,3105,3848,4025,5241, # 2096 -1230,1617,1850, 355,3597,4279,4612,3373, 111,4280,3716,1350,3160,3483,3055,4281, # 2112 -2150,3299,3598,5242,2797,4026,4027,3009, 722,2009,5243,1071, 247,1207,2343,2478, # 2128 -1378,4613,2010, 864,1437,1214,4614, 373,3849,1142,2220, 667,4615, 442,2763,2563, # 2144 -3850,4028,1969,4282,3300,1840, 837, 170,1107, 934,1336,1883,5244,5245,2119,4283, # 2160 -2841, 743,1569,5246,4616,4284, 582,2389,1418,3484,5247,1803,5248, 357,1395,1729, # 2176 -3717,3301,2423,1564,2241,5249,3106,3851,1633,4617,1114,2086,4285,1532,5250, 482, # 2192 -2451,4618,5251,5252,1492, 833,1466,5253,2726,3599,1641,2842,5254,1526,1272,3718, # 2208 -4286,1686,1795, 416,2564,1903,1954,1804,5255,3852,2798,3853,1159,2321,5256,2881, # 2224 -4619,1610,1584,3056,2424,2764, 443,3302,1163,3161,5257,5258,4029,5259,4287,2506, # 2240 -3057,4620,4030,3162,2104,1647,3600,2011,1873,4288,5260,4289, 431,3485,5261, 250, # 2256 - 97, 81,4290,5262,1648,1851,1558, 160, 848,5263, 866, 740,1694,5264,2204,2843, # 2272 -3226,4291,4621,3719,1687, 950,2479, 426, 469,3227,3720,3721,4031,5265,5266,1188, # 2288 - 424,1996, 861,3601,4292,3854,2205,2694, 168,1235,3602,4293,5267,2087,1674,4622, # 2304 -3374,3303, 220,2565,1009,5268,3855, 670,3010, 332,1208, 717,5269,5270,3603,2452, # 2320 -4032,3375,5271, 513,5272,1209,2882,3376,3163,4623,1080,5273,5274,5275,5276,2534, # 2336 -3722,3604, 815,1587,4033,4034,5277,3605,3486,3856,1254,4624,1328,3058,1390,4035, # 2352 -1741,4036,3857,4037,5278, 236,3858,2453,3304,5279,5280,3723,3859,1273,3860,4625, # 2368 -5281, 308,5282,4626, 245,4627,1852,2480,1307,2583, 430, 715,2137,2454,5283, 270, # 2384 - 199,2883,4038,5284,3606,2727,1753, 761,1754, 725,1661,1841,4628,3487,3724,5285, # 2400 -5286, 587, 14,3305, 227,2608, 326, 480,2270, 943,2765,3607, 291, 650,1884,5287, # 2416 -1702,1226, 102,1547, 62,3488, 904,4629,3489,1164,4294,5288,5289,1224,1548,2766, # 2432 - 391, 
498,1493,5290,1386,1419,5291,2056,1177,4630, 813, 880,1081,2368, 566,1145, # 2448 -4631,2291,1001,1035,2566,2609,2242, 394,1286,5292,5293,2069,5294, 86,1494,1730, # 2464 -4039, 491,1588, 745, 897,2963, 843,3377,4040,2767,2884,3306,1768, 998,2221,2070, # 2480 - 397,1827,1195,1970,3725,3011,3378, 284,5295,3861,2507,2138,2120,1904,5296,4041, # 2496 -2151,4042,4295,1036,3490,1905, 114,2567,4296, 209,1527,5297,5298,2964,2844,2635, # 2512 -2390,2728,3164, 812,2568,5299,3307,5300,1559, 737,1885,3726,1210, 885, 28,2695, # 2528 -3608,3862,5301,4297,1004,1780,4632,5302, 346,1982,2222,2696,4633,3863,1742, 797, # 2544 -1642,4043,1934,1072,1384,2152, 896,4044,3308,3727,3228,2885,3609,5303,2569,1959, # 2560 -4634,2455,1786,5304,5305,5306,4045,4298,1005,1308,3728,4299,2729,4635,4636,1528, # 2576 -2610, 161,1178,4300,1983, 987,4637,1101,4301, 631,4046,1157,3229,2425,1343,1241, # 2592 -1016,2243,2570, 372, 877,2344,2508,1160, 555,1935, 911,4047,5307, 466,1170, 169, # 2608 -1051,2921,2697,3729,2481,3012,1182,2012,2571,1251,2636,5308, 992,2345,3491,1540, # 2624 -2730,1201,2071,2406,1997,2482,5309,4638, 528,1923,2191,1503,1874,1570,2369,3379, # 2640 -3309,5310, 557,1073,5311,1828,3492,2088,2271,3165,3059,3107, 767,3108,2799,4639, # 2656 -1006,4302,4640,2346,1267,2179,3730,3230, 778,4048,3231,2731,1597,2667,5312,4641, # 2672 -5313,3493,5314,5315,5316,3310,2698,1433,3311, 131, 95,1504,4049, 723,4303,3166, # 2688 -1842,3610,2768,2192,4050,2028,2105,3731,5317,3013,4051,1218,5318,3380,3232,4052, # 2704 -4304,2584, 248,1634,3864, 912,5319,2845,3732,3060,3865, 654, 53,5320,3014,5321, # 2720 -1688,4642, 777,3494,1032,4053,1425,5322, 191, 820,2121,2846, 971,4643, 931,3233, # 2736 - 135, 664, 783,3866,1998, 772,2922,1936,4054,3867,4644,2923,3234, 282,2732, 640, # 2752 -1372,3495,1127, 922, 325,3381,5323,5324, 711,2045,5325,5326,4055,2223,2800,1937, # 2768 -4056,3382,2224,2255,3868,2305,5327,4645,3869,1258,3312,4057,3235,2139,2965,4058, # 2784 -4059,5328,2225, 258,3236,4646, 101,1227,5329,3313,1755,5330,1391,3314,5331,2924, # 2800 -2057, 893,5332,5333,5334,1402,4305,2347,5335,5336,3237,3611,5337,5338, 878,1325, # 2816 -1781,2801,4647, 259,1385,2585, 744,1183,2272,4648,5339,4060,2509,5340, 684,1024, # 2832 -4306,5341, 472,3612,3496,1165,3315,4061,4062, 322,2153, 881, 455,1695,1152,1340, # 2848 - 660, 554,2154,4649,1058,4650,4307, 830,1065,3383,4063,4651,1924,5342,1703,1919, # 2864 -5343, 932,2273, 122,5344,4652, 947, 677,5345,3870,2637, 297,1906,1925,2274,4653, # 2880 -2322,3316,5346,5347,4308,5348,4309, 84,4310, 112, 989,5349, 547,1059,4064, 701, # 2896 -3613,1019,5350,4311,5351,3497, 942, 639, 457,2306,2456, 993,2966, 407, 851, 494, # 2912 -4654,3384, 927,5352,1237,5353,2426,3385, 573,4312, 680, 921,2925,1279,1875, 285, # 2928 - 790,1448,1984, 719,2168,5354,5355,4655,4065,4066,1649,5356,1541, 563,5357,1077, # 2944 -5358,3386,3061,3498, 511,3015,4067,4068,3733,4069,1268,2572,3387,3238,4656,4657, # 2960 -5359, 535,1048,1276,1189,2926,2029,3167,1438,1373,2847,2967,1134,2013,5360,4313, # 2976 -1238,2586,3109,1259,5361, 700,5362,2968,3168,3734,4314,5363,4315,1146,1876,1907, # 2992 -4658,2611,4070, 781,2427, 132,1589, 203, 147, 273,2802,2407, 898,1787,2155,4071, # 3008 -4072,5364,3871,2803,5365,5366,4659,4660,5367,3239,5368,1635,3872, 965,5369,1805, # 3024 -2699,1516,3614,1121,1082,1329,3317,4073,1449,3873, 65,1128,2848,2927,2769,1590, # 3040 -3874,5370,5371, 12,2668, 45, 976,2587,3169,4661, 517,2535,1013,1037,3240,5372, # 3056 -3875,2849,5373,3876,5374,3499,5375,2612, 614,1999,2323,3877,3110,2733,2638,5376, # 3072 
-2588,4316, 599,1269,5377,1811,3735,5378,2700,3111, 759,1060, 489,1806,3388,3318, # 3088 -1358,5379,5380,2391,1387,1215,2639,2256, 490,5381,5382,4317,1759,2392,2348,5383, # 3104 -4662,3878,1908,4074,2640,1807,3241,4663,3500,3319,2770,2349, 874,5384,5385,3501, # 3120 -3736,1859, 91,2928,3737,3062,3879,4664,5386,3170,4075,2669,5387,3502,1202,1403, # 3136 -3880,2969,2536,1517,2510,4665,3503,2511,5388,4666,5389,2701,1886,1495,1731,4076, # 3152 -2370,4667,5390,2030,5391,5392,4077,2702,1216, 237,2589,4318,2324,4078,3881,4668, # 3168 -4669,2703,3615,3504, 445,4670,5393,5394,5395,5396,2771, 61,4079,3738,1823,4080, # 3184 -5397, 687,2046, 935, 925, 405,2670, 703,1096,1860,2734,4671,4081,1877,1367,2704, # 3200 -3389, 918,2106,1782,2483, 334,3320,1611,1093,4672, 564,3171,3505,3739,3390, 945, # 3216 -2641,2058,4673,5398,1926, 872,4319,5399,3506,2705,3112, 349,4320,3740,4082,4674, # 3232 -3882,4321,3741,2156,4083,4675,4676,4322,4677,2408,2047, 782,4084, 400, 251,4323, # 3248 -1624,5400,5401, 277,3742, 299,1265, 476,1191,3883,2122,4324,4325,1109, 205,5402, # 3264 -2590,1000,2157,3616,1861,5403,5404,5405,4678,5406,4679,2573, 107,2484,2158,4085, # 3280 -3507,3172,5407,1533, 541,1301, 158, 753,4326,2886,3617,5408,1696, 370,1088,4327, # 3296 -4680,3618, 579, 327, 440, 162,2244, 269,1938,1374,3508, 968,3063, 56,1396,3113, # 3312 -2107,3321,3391,5409,1927,2159,4681,3016,5410,3619,5411,5412,3743,4682,2485,5413, # 3328 -2804,5414,1650,4683,5415,2613,5416,5417,4086,2671,3392,1149,3393,4087,3884,4088, # 3344 -5418,1076, 49,5419, 951,3242,3322,3323, 450,2850, 920,5420,1812,2805,2371,4328, # 3360 -1909,1138,2372,3885,3509,5421,3243,4684,1910,1147,1518,2428,4685,3886,5422,4686, # 3376 -2393,2614, 260,1796,3244,5423,5424,3887,3324, 708,5425,3620,1704,5426,3621,1351, # 3392 -1618,3394,3017,1887, 944,4329,3395,4330,3064,3396,4331,5427,3744, 422, 413,1714, # 3408 -3325, 500,2059,2350,4332,2486,5428,1344,1911, 954,5429,1668,5430,5431,4089,2409, # 3424 -4333,3622,3888,4334,5432,2307,1318,2512,3114, 133,3115,2887,4687, 629, 31,2851, # 3440 -2706,3889,4688, 850, 949,4689,4090,2970,1732,2089,4335,1496,1853,5433,4091, 620, # 3456 -3245, 981,1242,3745,3397,1619,3746,1643,3326,2140,2457,1971,1719,3510,2169,5434, # 3472 -3246,5435,5436,3398,1829,5437,1277,4690,1565,2048,5438,1636,3623,3116,5439, 869, # 3488 -2852, 655,3890,3891,3117,4092,3018,3892,1310,3624,4691,5440,5441,5442,1733, 558, # 3504 -4692,3747, 335,1549,3065,1756,4336,3748,1946,3511,1830,1291,1192, 470,2735,2108, # 3520 -2806, 913,1054,4093,5443,1027,5444,3066,4094,4693, 982,2672,3399,3173,3512,3247, # 3536 -3248,1947,2807,5445, 571,4694,5446,1831,5447,3625,2591,1523,2429,5448,2090, 984, # 3552 -4695,3749,1960,5449,3750, 852, 923,2808,3513,3751, 969,1519, 999,2049,2325,1705, # 3568 -5450,3118, 615,1662, 151, 597,4095,2410,2326,1049, 275,4696,3752,4337, 568,3753, # 3584 -3626,2487,4338,3754,5451,2430,2275, 409,3249,5452,1566,2888,3514,1002, 769,2853, # 3600 - 194,2091,3174,3755,2226,3327,4339, 628,1505,5453,5454,1763,2180,3019,4096, 521, # 3616 -1161,2592,1788,2206,2411,4697,4097,1625,4340,4341, 412, 42,3119, 464,5455,2642, # 3632 -4698,3400,1760,1571,2889,3515,2537,1219,2207,3893,2643,2141,2373,4699,4700,3328, # 3648 -1651,3401,3627,5456,5457,3628,2488,3516,5458,3756,5459,5460,2276,2092, 460,5461, # 3664 -4701,5462,3020, 962, 588,3629, 289,3250,2644,1116, 52,5463,3067,1797,5464,5465, # 3680 -5466,1467,5467,1598,1143,3757,4342,1985,1734,1067,4702,1280,3402, 465,4703,1572, # 3696 - 510,5468,1928,2245,1813,1644,3630,5469,4704,3758,5470,5471,2673,1573,1534,5472, # 3712 
-5473, 536,1808,1761,3517,3894,3175,2645,5474,5475,5476,4705,3518,2929,1912,2809, # 3728 -5477,3329,1122, 377,3251,5478, 360,5479,5480,4343,1529, 551,5481,2060,3759,1769, # 3744 -2431,5482,2930,4344,3330,3120,2327,2109,2031,4706,1404, 136,1468,1479, 672,1171, # 3760 -3252,2308, 271,3176,5483,2772,5484,2050, 678,2736, 865,1948,4707,5485,2014,4098, # 3776 -2971,5486,2737,2227,1397,3068,3760,4708,4709,1735,2931,3403,3631,5487,3895, 509, # 3792 -2854,2458,2890,3896,5488,5489,3177,3178,4710,4345,2538,4711,2309,1166,1010, 552, # 3808 - 681,1888,5490,5491,2972,2973,4099,1287,1596,1862,3179, 358, 453, 736, 175, 478, # 3824 -1117, 905,1167,1097,5492,1854,1530,5493,1706,5494,2181,3519,2292,3761,3520,3632, # 3840 -4346,2093,4347,5495,3404,1193,2489,4348,1458,2193,2208,1863,1889,1421,3331,2932, # 3856 -3069,2182,3521, 595,2123,5496,4100,5497,5498,4349,1707,2646, 223,3762,1359, 751, # 3872 -3121, 183,3522,5499,2810,3021, 419,2374, 633, 704,3897,2394, 241,5500,5501,5502, # 3888 - 838,3022,3763,2277,2773,2459,3898,1939,2051,4101,1309,3122,2246,1181,5503,1136, # 3904 -2209,3899,2375,1446,4350,2310,4712,5504,5505,4351,1055,2615, 484,3764,5506,4102, # 3920 - 625,4352,2278,3405,1499,4353,4103,5507,4104,4354,3253,2279,2280,3523,5508,5509, # 3936 -2774, 808,2616,3765,3406,4105,4355,3123,2539, 526,3407,3900,4356, 955,5510,1620, # 3952 -4357,2647,2432,5511,1429,3766,1669,1832, 994, 928,5512,3633,1260,5513,5514,5515, # 3968 -1949,2293, 741,2933,1626,4358,2738,2460, 867,1184, 362,3408,1392,5516,5517,4106, # 3984 -4359,1770,1736,3254,2934,4713,4714,1929,2707,1459,1158,5518,3070,3409,2891,1292, # 4000 -1930,2513,2855,3767,1986,1187,2072,2015,2617,4360,5519,2574,2514,2170,3768,2490, # 4016 -3332,5520,3769,4715,5521,5522, 666,1003,3023,1022,3634,4361,5523,4716,1814,2257, # 4032 - 574,3901,1603, 295,1535, 705,3902,4362, 283, 858, 417,5524,5525,3255,4717,4718, # 4048 -3071,1220,1890,1046,2281,2461,4107,1393,1599, 689,2575, 388,4363,5526,2491, 802, # 4064 -5527,2811,3903,2061,1405,2258,5528,4719,3904,2110,1052,1345,3256,1585,5529, 809, # 4080 -5530,5531,5532, 575,2739,3524, 956,1552,1469,1144,2328,5533,2329,1560,2462,3635, # 4096 -3257,4108, 616,2210,4364,3180,2183,2294,5534,1833,5535,3525,4720,5536,1319,3770, # 4112 -3771,1211,3636,1023,3258,1293,2812,5537,5538,5539,3905, 607,2311,3906, 762,2892, # 4128 -1439,4365,1360,4721,1485,3072,5540,4722,1038,4366,1450,2062,2648,4367,1379,4723, # 4144 -2593,5541,5542,4368,1352,1414,2330,2935,1172,5543,5544,3907,3908,4724,1798,1451, # 4160 -5545,5546,5547,5548,2936,4109,4110,2492,2351, 411,4111,4112,3637,3333,3124,4725, # 4176 -1561,2674,1452,4113,1375,5549,5550, 47,2974, 316,5551,1406,1591,2937,3181,5552, # 4192 -1025,2142,3125,3182, 354,2740, 884,2228,4369,2412, 508,3772, 726,3638, 996,2433, # 4208 -3639, 729,5553, 392,2194,1453,4114,4726,3773,5554,5555,2463,3640,2618,1675,2813, # 4224 - 919,2352,2975,2353,1270,4727,4115, 73,5556,5557, 647,5558,3259,2856,2259,1550, # 4240 -1346,3024,5559,1332, 883,3526,5560,5561,5562,5563,3334,2775,5564,1212, 831,1347, # 4256 -4370,4728,2331,3909,1864,3073, 720,3910,4729,4730,3911,5565,4371,5566,5567,4731, # 4272 -5568,5569,1799,4732,3774,2619,4733,3641,1645,2376,4734,5570,2938, 669,2211,2675, # 4288 -2434,5571,2893,5572,5573,1028,3260,5574,4372,2413,5575,2260,1353,5576,5577,4735, # 4304 -3183, 518,5578,4116,5579,4373,1961,5580,2143,4374,5581,5582,3025,2354,2355,3912, # 4320 - 516,1834,1454,4117,2708,4375,4736,2229,2620,1972,1129,3642,5583,2776,5584,2976, # 4336 -1422, 577,1470,3026,1524,3410,5585,5586, 432,4376,3074,3527,5587,2594,1455,2515, # 
4352 -2230,1973,1175,5588,1020,2741,4118,3528,4737,5589,2742,5590,1743,1361,3075,3529, # 4368 -2649,4119,4377,4738,2295, 895, 924,4378,2171, 331,2247,3076, 166,1627,3077,1098, # 4384 -5591,1232,2894,2231,3411,4739, 657, 403,1196,2377, 542,3775,3412,1600,4379,3530, # 4400 -5592,4740,2777,3261, 576, 530,1362,4741,4742,2540,2676,3776,4120,5593, 842,3913, # 4416 -5594,2814,2032,1014,4121, 213,2709,3413, 665, 621,4380,5595,3777,2939,2435,5596, # 4432 -2436,3335,3643,3414,4743,4381,2541,4382,4744,3644,1682,4383,3531,1380,5597, 724, # 4448 -2282, 600,1670,5598,1337,1233,4745,3126,2248,5599,1621,4746,5600, 651,4384,5601, # 4464 -1612,4385,2621,5602,2857,5603,2743,2312,3078,5604, 716,2464,3079, 174,1255,2710, # 4480 -4122,3645, 548,1320,1398, 728,4123,1574,5605,1891,1197,3080,4124,5606,3081,3082, # 4496 -3778,3646,3779, 747,5607, 635,4386,4747,5608,5609,5610,4387,5611,5612,4748,5613, # 4512 -3415,4749,2437, 451,5614,3780,2542,2073,4388,2744,4389,4125,5615,1764,4750,5616, # 4528 -4390, 350,4751,2283,2395,2493,5617,4391,4126,2249,1434,4127, 488,4752, 458,4392, # 4544 -4128,3781, 771,1330,2396,3914,2576,3184,2160,2414,1553,2677,3185,4393,5618,2494, # 4560 -2895,2622,1720,2711,4394,3416,4753,5619,2543,4395,5620,3262,4396,2778,5621,2016, # 4576 -2745,5622,1155,1017,3782,3915,5623,3336,2313, 201,1865,4397,1430,5624,4129,5625, # 4592 -5626,5627,5628,5629,4398,1604,5630, 414,1866, 371,2595,4754,4755,3532,2017,3127, # 4608 -4756,1708, 960,4399, 887, 389,2172,1536,1663,1721,5631,2232,4130,2356,2940,1580, # 4624 -5632,5633,1744,4757,2544,4758,4759,5634,4760,5635,2074,5636,4761,3647,3417,2896, # 4640 -4400,5637,4401,2650,3418,2815, 673,2712,2465, 709,3533,4131,3648,4402,5638,1148, # 4656 - 502, 634,5639,5640,1204,4762,3649,1575,4763,2623,3783,5641,3784,3128, 948,3263, # 4672 - 121,1745,3916,1110,5642,4403,3083,2516,3027,4132,3785,1151,1771,3917,1488,4133, # 4688 -1987,5643,2438,3534,5644,5645,2094,5646,4404,3918,1213,1407,2816, 531,2746,2545, # 4704 -3264,1011,1537,4764,2779,4405,3129,1061,5647,3786,3787,1867,2897,5648,2018, 120, # 4720 -4406,4407,2063,3650,3265,2314,3919,2678,3419,1955,4765,4134,5649,3535,1047,2713, # 4736 -1266,5650,1368,4766,2858, 649,3420,3920,2546,2747,1102,2859,2679,5651,5652,2000, # 4752 -5653,1111,3651,2977,5654,2495,3921,3652,2817,1855,3421,3788,5655,5656,3422,2415, # 4768 -2898,3337,3266,3653,5657,2577,5658,3654,2818,4135,1460, 856,5659,3655,5660,2899, # 4784 -2978,5661,2900,3922,5662,4408, 632,2517, 875,3923,1697,3924,2296,5663,5664,4767, # 4800 -3028,1239, 580,4768,4409,5665, 914, 936,2075,1190,4136,1039,2124,5666,5667,5668, # 4816 -5669,3423,1473,5670,1354,4410,3925,4769,2173,3084,4137, 915,3338,4411,4412,3339, # 4832 -1605,1835,5671,2748, 398,3656,4413,3926,4138, 328,1913,2860,4139,3927,1331,4414, # 4848 -3029, 937,4415,5672,3657,4140,4141,3424,2161,4770,3425, 524, 742, 538,3085,1012, # 4864 -5673,5674,3928,2466,5675, 658,1103, 225,3929,5676,5677,4771,5678,4772,5679,3267, # 4880 -1243,5680,4142, 963,2250,4773,5681,2714,3658,3186,5682,5683,2596,2332,5684,4774, # 4896 -5685,5686,5687,3536, 957,3426,2547,2033,1931,2941,2467, 870,2019,3659,1746,2780, # 4912 -2781,2439,2468,5688,3930,5689,3789,3130,3790,3537,3427,3791,5690,1179,3086,5691, # 4928 -3187,2378,4416,3792,2548,3188,3131,2749,4143,5692,3428,1556,2549,2297, 977,2901, # 4944 -2034,4144,1205,3429,5693,1765,3430,3189,2125,1271, 714,1689,4775,3538,5694,2333, # 4960 -3931, 533,4417,3660,2184, 617,5695,2469,3340,3539,2315,5696,5697,3190,5698,5699, # 4976 -3932,1988, 618, 
427,2651,3540,3431,5700,5701,1244,1690,5702,2819,4418,4776,5703, # 4992 -3541,4777,5704,2284,1576, 473,3661,4419,3432, 972,5705,3662,5706,3087,5707,5708, # 5008 -4778,4779,5709,3793,4145,4146,5710, 153,4780, 356,5711,1892,2902,4420,2144, 408, # 5024 - 803,2357,5712,3933,5713,4421,1646,2578,2518,4781,4782,3934,5714,3935,4422,5715, # 5040 -2416,3433, 752,5716,5717,1962,3341,2979,5718, 746,3030,2470,4783,4423,3794, 698, # 5056 -4784,1893,4424,3663,2550,4785,3664,3936,5719,3191,3434,5720,1824,1302,4147,2715, # 5072 -3937,1974,4425,5721,4426,3192, 823,1303,1288,1236,2861,3542,4148,3435, 774,3938, # 5088 -5722,1581,4786,1304,2862,3939,4787,5723,2440,2162,1083,3268,4427,4149,4428, 344, # 5104 -1173, 288,2316, 454,1683,5724,5725,1461,4788,4150,2597,5726,5727,4789, 985, 894, # 5120 -5728,3436,3193,5729,1914,2942,3795,1989,5730,2111,1975,5731,4151,5732,2579,1194, # 5136 - 425,5733,4790,3194,1245,3796,4429,5734,5735,2863,5736, 636,4791,1856,3940, 760, # 5152 -1800,5737,4430,2212,1508,4792,4152,1894,1684,2298,5738,5739,4793,4431,4432,2213, # 5168 - 479,5740,5741, 832,5742,4153,2496,5743,2980,2497,3797, 990,3132, 627,1815,2652, # 5184 -4433,1582,4434,2126,2112,3543,4794,5744, 799,4435,3195,5745,4795,2113,1737,3031, # 5200 -1018, 543, 754,4436,3342,1676,4796,4797,4154,4798,1489,5746,3544,5747,2624,2903, # 5216 -4155,5748,5749,2981,5750,5751,5752,5753,3196,4799,4800,2185,1722,5754,3269,3270, # 5232 -1843,3665,1715, 481, 365,1976,1857,5755,5756,1963,2498,4801,5757,2127,3666,3271, # 5248 - 433,1895,2064,2076,5758, 602,2750,5759,5760,5761,5762,5763,3032,1628,3437,5764, # 5264 -3197,4802,4156,2904,4803,2519,5765,2551,2782,5766,5767,5768,3343,4804,2905,5769, # 5280 -4805,5770,2864,4806,4807,1221,2982,4157,2520,5771,5772,5773,1868,1990,5774,5775, # 5296 -5776,1896,5777,5778,4808,1897,4158, 318,5779,2095,4159,4437,5780,5781, 485,5782, # 5312 - 938,3941, 553,2680, 116,5783,3942,3667,5784,3545,2681,2783,3438,3344,2820,5785, # 5328 -3668,2943,4160,1747,2944,2983,5786,5787, 207,5788,4809,5789,4810,2521,5790,3033, # 5344 - 890,3669,3943,5791,1878,3798,3439,5792,2186,2358,3440,1652,5793,5794,5795, 941, # 5360 -2299, 208,3546,4161,2020, 330,4438,3944,2906,2499,3799,4439,4811,5796,5797,5798, # 5376 -) - diff --git a/venv/lib/python3.6/site-packages/chardet/big5prober.py b/venv/lib/python3.6/site-packages/chardet/big5prober.py deleted file mode 100644 index 98f9970..0000000 --- a/venv/lib/python3.6/site-packages/chardet/big5prober.py +++ /dev/null @@ -1,47 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Communicator client code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .mbcharsetprober import MultiByteCharSetProber -from .codingstatemachine import CodingStateMachine -from .chardistribution import Big5DistributionAnalysis -from .mbcssm import BIG5_SM_MODEL - - -class Big5Prober(MultiByteCharSetProber): - def __init__(self): - super(Big5Prober, self).__init__() - self.coding_sm = CodingStateMachine(BIG5_SM_MODEL) - self.distribution_analyzer = Big5DistributionAnalysis() - self.reset() - - @property - def charset_name(self): - return "Big5" - - @property - def language(self): - return "Chinese" diff --git a/venv/lib/python3.6/site-packages/chardet/chardistribution.py b/venv/lib/python3.6/site-packages/chardet/chardistribution.py deleted file mode 100644 index c0395f4..0000000 --- a/venv/lib/python3.6/site-packages/chardet/chardistribution.py +++ /dev/null @@ -1,233 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Communicator client code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .euctwfreq import (EUCTW_CHAR_TO_FREQ_ORDER, EUCTW_TABLE_SIZE, - EUCTW_TYPICAL_DISTRIBUTION_RATIO) -from .euckrfreq import (EUCKR_CHAR_TO_FREQ_ORDER, EUCKR_TABLE_SIZE, - EUCKR_TYPICAL_DISTRIBUTION_RATIO) -from .gb2312freq import (GB2312_CHAR_TO_FREQ_ORDER, GB2312_TABLE_SIZE, - GB2312_TYPICAL_DISTRIBUTION_RATIO) -from .big5freq import (BIG5_CHAR_TO_FREQ_ORDER, BIG5_TABLE_SIZE, - BIG5_TYPICAL_DISTRIBUTION_RATIO) -from .jisfreq import (JIS_CHAR_TO_FREQ_ORDER, JIS_TABLE_SIZE, - JIS_TYPICAL_DISTRIBUTION_RATIO) - - -class CharDistributionAnalysis(object): - ENOUGH_DATA_THRESHOLD = 1024 - SURE_YES = 0.99 - SURE_NO = 0.01 - MINIMUM_DATA_THRESHOLD = 3 - - def __init__(self): - # Mapping table to get frequency order from char order (get from - # GetOrder()) - self._char_to_freq_order = None - self._table_size = None # Size of above table - # This is a constant value which varies from language to language, - # used in calculating confidence. See - # http://www.mozilla.org/projects/intl/UniversalCharsetDetection.html - # for further detail. 
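        # Worked illustration (not part of the original source): get_confidence() below
        # returns freq_chars / ((total_chars - freq_chars) * typical_distribution_ratio),
        # clamped to SURE_YES.  With a Big5-like ratio of 0.75, observing 900 frequent
        # characters out of 1000 two-byte characters gives 900 / (100 * 0.75) = 12.0,
        # reported as 0.99; 300 out of 1000 gives 300 / (700 * 0.75) ~= 0.57.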
- self.typical_distribution_ratio = None - self._done = None - self._total_chars = None - self._freq_chars = None - self.reset() - - def reset(self): - """reset analyser, clear any state""" - # If this flag is set to True, detection is done and conclusion has - # been made - self._done = False - self._total_chars = 0 # Total characters encountered - # The number of characters whose frequency order is less than 512 - self._freq_chars = 0 - - def feed(self, char, char_len): - """feed a character with known length""" - if char_len == 2: - # we only care about 2-bytes character in our distribution analysis - order = self.get_order(char) - else: - order = -1 - if order >= 0: - self._total_chars += 1 - # order is valid - if order < self._table_size: - if 512 > self._char_to_freq_order[order]: - self._freq_chars += 1 - - def get_confidence(self): - """return confidence based on existing data""" - # if we didn't receive any character in our consideration range, - # return negative answer - if self._total_chars <= 0 or self._freq_chars <= self.MINIMUM_DATA_THRESHOLD: - return self.SURE_NO - - if self._total_chars != self._freq_chars: - r = (self._freq_chars / ((self._total_chars - self._freq_chars) - * self.typical_distribution_ratio)) - if r < self.SURE_YES: - return r - - # normalize confidence (we don't want to be 100% sure) - return self.SURE_YES - - def got_enough_data(self): - # It is not necessary to receive all data to draw conclusion. - # For charset detection, certain amount of data is enough - return self._total_chars > self.ENOUGH_DATA_THRESHOLD - - def get_order(self, byte_str): - # We do not handle characters based on the original encoding string, - # but convert this encoding string to a number, here called order. - # This allows multiple encodings of a language to share one frequency - # table. - return -1 - - -class EUCTWDistributionAnalysis(CharDistributionAnalysis): - def __init__(self): - super(EUCTWDistributionAnalysis, self).__init__() - self._char_to_freq_order = EUCTW_CHAR_TO_FREQ_ORDER - self._table_size = EUCTW_TABLE_SIZE - self.typical_distribution_ratio = EUCTW_TYPICAL_DISTRIBUTION_RATIO - - def get_order(self, byte_str): - # for euc-TW encoding, we are interested - # first byte range: 0xc4 -- 0xfe - # second byte range: 0xa1 -- 0xfe - # no validation needed here. State machine has done that - first_char = byte_str[0] - if first_char >= 0xC4: - return 94 * (first_char - 0xC4) + byte_str[1] - 0xA1 - else: - return -1 - - -class EUCKRDistributionAnalysis(CharDistributionAnalysis): - def __init__(self): - super(EUCKRDistributionAnalysis, self).__init__() - self._char_to_freq_order = EUCKR_CHAR_TO_FREQ_ORDER - self._table_size = EUCKR_TABLE_SIZE - self.typical_distribution_ratio = EUCKR_TYPICAL_DISTRIBUTION_RATIO - - def get_order(self, byte_str): - # for euc-KR encoding, we are interested - # first byte range: 0xb0 -- 0xfe - # second byte range: 0xa1 -- 0xfe - # no validation needed here. 
State machine has done that - first_char = byte_str[0] - if first_char >= 0xB0: - return 94 * (first_char - 0xB0) + byte_str[1] - 0xA1 - else: - return -1 - - -class GB2312DistributionAnalysis(CharDistributionAnalysis): - def __init__(self): - super(GB2312DistributionAnalysis, self).__init__() - self._char_to_freq_order = GB2312_CHAR_TO_FREQ_ORDER - self._table_size = GB2312_TABLE_SIZE - self.typical_distribution_ratio = GB2312_TYPICAL_DISTRIBUTION_RATIO - - def get_order(self, byte_str): - # for GB2312 encoding, we are interested - # first byte range: 0xb0 -- 0xfe - # second byte range: 0xa1 -- 0xfe - # no validation needed here. State machine has done that - first_char, second_char = byte_str[0], byte_str[1] - if (first_char >= 0xB0) and (second_char >= 0xA1): - return 94 * (first_char - 0xB0) + second_char - 0xA1 - else: - return -1 - - -class Big5DistributionAnalysis(CharDistributionAnalysis): - def __init__(self): - super(Big5DistributionAnalysis, self).__init__() - self._char_to_freq_order = BIG5_CHAR_TO_FREQ_ORDER - self._table_size = BIG5_TABLE_SIZE - self.typical_distribution_ratio = BIG5_TYPICAL_DISTRIBUTION_RATIO - - def get_order(self, byte_str): - # for big5 encoding, we are interested - # first byte range: 0xa4 -- 0xfe - # second byte range: 0x40 -- 0x7e , 0xa1 -- 0xfe - # no validation needed here. State machine has done that - first_char, second_char = byte_str[0], byte_str[1] - if first_char >= 0xA4: - if second_char >= 0xA1: - return 157 * (first_char - 0xA4) + second_char - 0xA1 + 63 - else: - return 157 * (first_char - 0xA4) + second_char - 0x40 - else: - return -1 - - -class SJISDistributionAnalysis(CharDistributionAnalysis): - def __init__(self): - super(SJISDistributionAnalysis, self).__init__() - self._char_to_freq_order = JIS_CHAR_TO_FREQ_ORDER - self._table_size = JIS_TABLE_SIZE - self.typical_distribution_ratio = JIS_TYPICAL_DISTRIBUTION_RATIO - - def get_order(self, byte_str): - # for sjis encoding, we are interested - # first byte range: 0x81 -- 0x9f , 0xe0 -- 0xfe - # second byte range: 0x40 -- 0x7e, 0x81 -- oxfe - # no validation needed here. State machine has done that - first_char, second_char = byte_str[0], byte_str[1] - if (first_char >= 0x81) and (first_char <= 0x9F): - order = 188 * (first_char - 0x81) - elif (first_char >= 0xE0) and (first_char <= 0xEF): - order = 188 * (first_char - 0xE0 + 31) - else: - return -1 - order = order + second_char - 0x40 - if second_char > 0x7F: - order = -1 - return order - - -class EUCJPDistributionAnalysis(CharDistributionAnalysis): - def __init__(self): - super(EUCJPDistributionAnalysis, self).__init__() - self._char_to_freq_order = JIS_CHAR_TO_FREQ_ORDER - self._table_size = JIS_TABLE_SIZE - self.typical_distribution_ratio = JIS_TYPICAL_DISTRIBUTION_RATIO - - def get_order(self, byte_str): - # for euc-JP encoding, we are interested - # first byte range: 0xa0 -- 0xfe - # second byte range: 0xa1 -- 0xfe - # no validation needed here. State machine has done that - char = byte_str[0] - if char >= 0xA0: - return 94 * (char - 0xA1) + byte_str[1] - 0xa1 - else: - return -1 diff --git a/venv/lib/python3.6/site-packages/chardet/charsetgroupprober.py b/venv/lib/python3.6/site-packages/chardet/charsetgroupprober.py deleted file mode 100644 index 8b3738e..0000000 --- a/venv/lib/python3.6/site-packages/chardet/charsetgroupprober.py +++ /dev/null @@ -1,106 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Communicator client code. 
-# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .enums import ProbingState -from .charsetprober import CharSetProber - - -class CharSetGroupProber(CharSetProber): - def __init__(self, lang_filter=None): - super(CharSetGroupProber, self).__init__(lang_filter=lang_filter) - self._active_num = 0 - self.probers = [] - self._best_guess_prober = None - - def reset(self): - super(CharSetGroupProber, self).reset() - self._active_num = 0 - for prober in self.probers: - if prober: - prober.reset() - prober.active = True - self._active_num += 1 - self._best_guess_prober = None - - @property - def charset_name(self): - if not self._best_guess_prober: - self.get_confidence() - if not self._best_guess_prober: - return None - return self._best_guess_prober.charset_name - - @property - def language(self): - if not self._best_guess_prober: - self.get_confidence() - if not self._best_guess_prober: - return None - return self._best_guess_prober.language - - def feed(self, byte_str): - for prober in self.probers: - if not prober: - continue - if not prober.active: - continue - state = prober.feed(byte_str) - if not state: - continue - if state == ProbingState.FOUND_IT: - self._best_guess_prober = prober - return self.state - elif state == ProbingState.NOT_ME: - prober.active = False - self._active_num -= 1 - if self._active_num <= 0: - self._state = ProbingState.NOT_ME - return self.state - return self.state - - def get_confidence(self): - state = self.state - if state == ProbingState.FOUND_IT: - return 0.99 - elif state == ProbingState.NOT_ME: - return 0.01 - best_conf = 0.0 - self._best_guess_prober = None - for prober in self.probers: - if not prober: - continue - if not prober.active: - self.logger.debug('%s not active', prober.charset_name) - continue - conf = prober.get_confidence() - self.logger.debug('%s %s confidence = %s', prober.charset_name, prober.language, conf) - if best_conf < conf: - best_conf = conf - self._best_guess_prober = prober - if not self._best_guess_prober: - return 0.0 - return best_conf diff --git a/venv/lib/python3.6/site-packages/chardet/charsetprober.py b/venv/lib/python3.6/site-packages/chardet/charsetprober.py deleted file mode 100644 index eac4e59..0000000 --- a/venv/lib/python3.6/site-packages/chardet/charsetprober.py +++ /dev/null @@ -1,145 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Universal charset detector code. 
-# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 2001 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# Shy Shalom - original C code -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -import logging -import re - -from .enums import ProbingState - - -class CharSetProber(object): - - SHORTCUT_THRESHOLD = 0.95 - - def __init__(self, lang_filter=None): - self._state = None - self.lang_filter = lang_filter - self.logger = logging.getLogger(__name__) - - def reset(self): - self._state = ProbingState.DETECTING - - @property - def charset_name(self): - return None - - def feed(self, buf): - pass - - @property - def state(self): - return self._state - - def get_confidence(self): - return 0.0 - - @staticmethod - def filter_high_byte_only(buf): - buf = re.sub(b'([\x00-\x7F])+', b' ', buf) - return buf - - @staticmethod - def filter_international_words(buf): - """ - We define three types of bytes: - alphabet: english alphabets [a-zA-Z] - international: international characters [\x80-\xFF] - marker: everything else [^a-zA-Z\x80-\xFF] - - The input buffer can be thought to contain a series of words delimited - by markers. This function works to filter all words that contain at - least one international character. All contiguous sequences of markers - are replaced by a single space ascii character. - - This filter applies to all scripts which do not use English characters. - """ - filtered = bytearray() - - # This regex expression filters out only words that have at-least one - # international character. The word may include one marker character at - # the end. - words = re.findall(b'[a-zA-Z]*[\x80-\xFF]+[a-zA-Z]*[^a-zA-Z\x80-\xFF]?', - buf) - - for word in words: - filtered.extend(word[:-1]) - - # If the last character in the word is a marker, replace it with a - # space as markers shouldn't affect our analysis (they are used - # similarly across all languages and may thus have similar - # frequencies). - last_char = word[-1:] - if not last_char.isalpha() and last_char < b'\x80': - last_char = b' ' - filtered.extend(last_char) - - return filtered - - @staticmethod - def filter_with_english_letters(buf): - """ - Returns a copy of ``buf`` that retains only the sequences of English - alphabet and high byte characters that are not between <> characters. - Also retains English alphabet and high byte characters immediately - before occurrences of >. - - This filter can be applied to all scripts which contain both English - characters and extended ASCII characters, but is currently only used by - ``Latin1Prober``. 
- """ - filtered = bytearray() - in_tag = False - prev = 0 - - for curr in range(len(buf)): - # Slice here to get bytes instead of an int with Python 3 - buf_char = buf[curr:curr + 1] - # Check if we're coming out of or entering an HTML tag - if buf_char == b'>': - in_tag = False - elif buf_char == b'<': - in_tag = True - - # If current character is not extended-ASCII and not alphabetic... - if buf_char < b'\x80' and not buf_char.isalpha(): - # ...and we're not in a tag - if curr > prev and not in_tag: - # Keep everything after last non-extended-ASCII, - # non-alphabetic character - filtered.extend(buf[prev:curr]) - # Output a space to delimit stretch we kept - filtered.extend(b' ') - prev = curr + 1 - - # If we're not in a tag... - if not in_tag: - # Keep everything after last non-extended-ASCII, non-alphabetic - # character - filtered.extend(buf[prev:]) - - return filtered diff --git a/venv/lib/python3.6/site-packages/chardet/cli/__init__.py b/venv/lib/python3.6/site-packages/chardet/cli/__init__.py deleted file mode 100644 index 8b13789..0000000 --- a/venv/lib/python3.6/site-packages/chardet/cli/__init__.py +++ /dev/null @@ -1 +0,0 @@ - diff --git a/venv/lib/python3.6/site-packages/chardet/cli/chardetect.py b/venv/lib/python3.6/site-packages/chardet/cli/chardetect.py deleted file mode 100644 index f0a4cc5..0000000 --- a/venv/lib/python3.6/site-packages/chardet/cli/chardetect.py +++ /dev/null @@ -1,85 +0,0 @@ -#!/usr/bin/env python -""" -Script which takes one or more file paths and reports on their detected -encodings - -Example:: - - % chardetect somefile someotherfile - somefile: windows-1252 with confidence 0.5 - someotherfile: ascii with confidence 1.0 - -If no paths are provided, it takes its input from stdin. - -""" - -from __future__ import absolute_import, print_function, unicode_literals - -import argparse -import sys - -from chardet import __version__ -from chardet.compat import PY2 -from chardet.universaldetector import UniversalDetector - - -def description_of(lines, name='stdin'): - """ - Return a string describing the probable encoding of a file or - list of strings. - - :param lines: The lines to get the encoding of. - :type lines: Iterable of bytes - :param name: Name of file or collection of lines - :type name: str - """ - u = UniversalDetector() - for line in lines: - line = bytearray(line) - u.feed(line) - # shortcut out of the loop to save reading further - particularly useful if we read a BOM. - if u.done: - break - u.close() - result = u.result - if PY2: - name = name.decode(sys.getfilesystemencoding(), 'ignore') - if result['encoding']: - return '{0}: {1} with confidence {2}'.format(name, result['encoding'], - result['confidence']) - else: - return '{0}: no result'.format(name) - - -def main(argv=None): - """ - Handles command line arguments and gets things started. - - :param argv: List of arguments, as if specified on the command-line. - If None, ``sys.argv[1:]`` is used instead. - :type argv: list of str - """ - # Get command line arguments - parser = argparse.ArgumentParser( - description="Takes one or more file paths and reports their detected \ - encodings") - parser.add_argument('input', - help='File whose encoding we would like to determine. 
\ - (default: stdin)', - type=argparse.FileType('rb'), nargs='*', - default=[sys.stdin if PY2 else sys.stdin.buffer]) - parser.add_argument('--version', action='version', - version='%(prog)s {0}'.format(__version__)) - args = parser.parse_args(argv) - - for f in args.input: - if f.isatty(): - print("You are running chardetect interactively. Press " + - "CTRL-D twice at the start of a blank line to signal the " + - "end of your input. If you want help, run chardetect " + - "--help\n", file=sys.stderr) - print(description_of(f, f.name)) - - -if __name__ == '__main__': - main() diff --git a/venv/lib/python3.6/site-packages/chardet/codingstatemachine.py b/venv/lib/python3.6/site-packages/chardet/codingstatemachine.py deleted file mode 100644 index 68fba44..0000000 --- a/venv/lib/python3.6/site-packages/chardet/codingstatemachine.py +++ /dev/null @@ -1,88 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is mozilla.org code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -import logging - -from .enums import MachineState - - -class CodingStateMachine(object): - """ - A state machine to verify a byte sequence for a particular encoding. For - each byte the detector receives, it will feed that byte to every active - state machine available, one byte at a time. The state machine changes its - state based on its previous state and the byte it receives. There are 3 - states in a state machine that are of interest to an auto-detector: - - START state: This is the state to start with, or a legal byte sequence - (i.e. a valid code point) for character has been identified. - - ME state: This indicates that the state machine identified a byte sequence - that is specific to the charset it is designed for and that - there is no other possible encoding which can contain this byte - sequence. This will to lead to an immediate positive answer for - the detector. - - ERROR state: This indicates the state machine identified an illegal byte - sequence for that encoding. This will lead to an immediate - negative answer for this encoding. Detector will exclude this - encoding from consideration from here on. 
- """ - def __init__(self, sm): - self._model = sm - self._curr_byte_pos = 0 - self._curr_char_len = 0 - self._curr_state = None - self.logger = logging.getLogger(__name__) - self.reset() - - def reset(self): - self._curr_state = MachineState.START - - def next_state(self, c): - # for each byte we get its class - # if it is first byte, we also get byte length - byte_class = self._model['class_table'][c] - if self._curr_state == MachineState.START: - self._curr_byte_pos = 0 - self._curr_char_len = self._model['char_len_table'][byte_class] - # from byte's class and state_table, we get its next state - curr_state = (self._curr_state * self._model['class_factor'] - + byte_class) - self._curr_state = self._model['state_table'][curr_state] - self._curr_byte_pos += 1 - return self._curr_state - - def get_current_charlen(self): - return self._curr_char_len - - def get_coding_state_machine(self): - return self._model['name'] - - @property - def language(self): - return self._model['language'] diff --git a/venv/lib/python3.6/site-packages/chardet/compat.py b/venv/lib/python3.6/site-packages/chardet/compat.py deleted file mode 100644 index ddd7468..0000000 --- a/venv/lib/python3.6/site-packages/chardet/compat.py +++ /dev/null @@ -1,34 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# Contributor(s): -# Dan Blanchard -# Ian Cordasco -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -import sys - - -if sys.version_info < (3, 0): - PY2 = True - PY3 = False - base_str = (str, unicode) - text_type = unicode -else: - PY2 = False - PY3 = True - base_str = (bytes, str) - text_type = str diff --git a/venv/lib/python3.6/site-packages/chardet/cp949prober.py b/venv/lib/python3.6/site-packages/chardet/cp949prober.py deleted file mode 100644 index efd793a..0000000 --- a/venv/lib/python3.6/site-packages/chardet/cp949prober.py +++ /dev/null @@ -1,49 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is mozilla.org code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .chardistribution import EUCKRDistributionAnalysis -from .codingstatemachine import CodingStateMachine -from .mbcharsetprober import MultiByteCharSetProber -from .mbcssm import CP949_SM_MODEL - - -class CP949Prober(MultiByteCharSetProber): - def __init__(self): - super(CP949Prober, self).__init__() - self.coding_sm = CodingStateMachine(CP949_SM_MODEL) - # NOTE: CP949 is a superset of EUC-KR, so the distribution should be - # not different. - self.distribution_analyzer = EUCKRDistributionAnalysis() - self.reset() - - @property - def charset_name(self): - return "CP949" - - @property - def language(self): - return "Korean" diff --git a/venv/lib/python3.6/site-packages/chardet/enums.py b/venv/lib/python3.6/site-packages/chardet/enums.py deleted file mode 100644 index 0451207..0000000 --- a/venv/lib/python3.6/site-packages/chardet/enums.py +++ /dev/null @@ -1,76 +0,0 @@ -""" -All of the Enums that are used throughout the chardet package. - -:author: Dan Blanchard (dan.blanchard@gmail.com) -""" - - -class InputState(object): - """ - This enum represents the different states a universal detector can be in. - """ - PURE_ASCII = 0 - ESC_ASCII = 1 - HIGH_BYTE = 2 - - -class LanguageFilter(object): - """ - This enum represents the different language filters we can apply to a - ``UniversalDetector``. - """ - CHINESE_SIMPLIFIED = 0x01 - CHINESE_TRADITIONAL = 0x02 - JAPANESE = 0x04 - KOREAN = 0x08 - NON_CJK = 0x10 - ALL = 0x1F - CHINESE = CHINESE_SIMPLIFIED | CHINESE_TRADITIONAL - CJK = CHINESE | JAPANESE | KOREAN - - -class ProbingState(object): - """ - This enum represents the different states a prober can be in. - """ - DETECTING = 0 - FOUND_IT = 1 - NOT_ME = 2 - - -class MachineState(object): - """ - This enum represents the different states a state machine can be in. - """ - START = 0 - ERROR = 1 - ITS_ME = 2 - - -class SequenceLikelihood(object): - """ - This enum represents the likelihood of a character following the previous one. - """ - NEGATIVE = 0 - UNLIKELY = 1 - LIKELY = 2 - POSITIVE = 3 - - @classmethod - def get_num_categories(cls): - """:returns: The number of likelihood categories in the enum.""" - return 4 - - -class CharacterCategory(object): - """ - This enum represents the different categories language models for - ``SingleByteCharsetProber`` put characters into. - - Anything less than CONTROL is considered a letter. - """ - UNDEFINED = 255 - LINE_BREAK = 254 - SYMBOL = 253 - DIGIT = 252 - CONTROL = 251 diff --git a/venv/lib/python3.6/site-packages/chardet/escprober.py b/venv/lib/python3.6/site-packages/chardet/escprober.py deleted file mode 100644 index c70493f..0000000 --- a/venv/lib/python3.6/site-packages/chardet/escprober.py +++ /dev/null @@ -1,101 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is mozilla.org code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. 
-# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .charsetprober import CharSetProber -from .codingstatemachine import CodingStateMachine -from .enums import LanguageFilter, ProbingState, MachineState -from .escsm import (HZ_SM_MODEL, ISO2022CN_SM_MODEL, ISO2022JP_SM_MODEL, - ISO2022KR_SM_MODEL) - - -class EscCharSetProber(CharSetProber): - """ - This CharSetProber uses a "code scheme" approach for detecting encodings, - whereby easily recognizable escape or shift sequences are relied on to - identify these encodings. - """ - - def __init__(self, lang_filter=None): - super(EscCharSetProber, self).__init__(lang_filter=lang_filter) - self.coding_sm = [] - if self.lang_filter & LanguageFilter.CHINESE_SIMPLIFIED: - self.coding_sm.append(CodingStateMachine(HZ_SM_MODEL)) - self.coding_sm.append(CodingStateMachine(ISO2022CN_SM_MODEL)) - if self.lang_filter & LanguageFilter.JAPANESE: - self.coding_sm.append(CodingStateMachine(ISO2022JP_SM_MODEL)) - if self.lang_filter & LanguageFilter.KOREAN: - self.coding_sm.append(CodingStateMachine(ISO2022KR_SM_MODEL)) - self.active_sm_count = None - self._detected_charset = None - self._detected_language = None - self._state = None - self.reset() - - def reset(self): - super(EscCharSetProber, self).reset() - for coding_sm in self.coding_sm: - if not coding_sm: - continue - coding_sm.active = True - coding_sm.reset() - self.active_sm_count = len(self.coding_sm) - self._detected_charset = None - self._detected_language = None - - @property - def charset_name(self): - return self._detected_charset - - @property - def language(self): - return self._detected_language - - def get_confidence(self): - if self._detected_charset: - return 0.99 - else: - return 0.00 - - def feed(self, byte_str): - for c in byte_str: - for coding_sm in self.coding_sm: - if not coding_sm or not coding_sm.active: - continue - coding_state = coding_sm.next_state(c) - if coding_state == MachineState.ERROR: - coding_sm.active = False - self.active_sm_count -= 1 - if self.active_sm_count <= 0: - self._state = ProbingState.NOT_ME - return self.state - elif coding_state == MachineState.ITS_ME: - self._state = ProbingState.FOUND_IT - self._detected_charset = coding_sm.get_coding_state_machine() - self._detected_language = coding_sm.language - return self.state - - return self.state diff --git a/venv/lib/python3.6/site-packages/chardet/escsm.py b/venv/lib/python3.6/site-packages/chardet/escsm.py deleted file mode 100644 index 0069523..0000000 --- a/venv/lib/python3.6/site-packages/chardet/escsm.py +++ /dev/null @@ -1,246 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is mozilla.org code. 
-# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .enums import MachineState - -HZ_CLS = ( -1,0,0,0,0,0,0,0, # 00 - 07 -0,0,0,0,0,0,0,0, # 08 - 0f -0,0,0,0,0,0,0,0, # 10 - 17 -0,0,0,1,0,0,0,0, # 18 - 1f -0,0,0,0,0,0,0,0, # 20 - 27 -0,0,0,0,0,0,0,0, # 28 - 2f -0,0,0,0,0,0,0,0, # 30 - 37 -0,0,0,0,0,0,0,0, # 38 - 3f -0,0,0,0,0,0,0,0, # 40 - 47 -0,0,0,0,0,0,0,0, # 48 - 4f -0,0,0,0,0,0,0,0, # 50 - 57 -0,0,0,0,0,0,0,0, # 58 - 5f -0,0,0,0,0,0,0,0, # 60 - 67 -0,0,0,0,0,0,0,0, # 68 - 6f -0,0,0,0,0,0,0,0, # 70 - 77 -0,0,0,4,0,5,2,0, # 78 - 7f -1,1,1,1,1,1,1,1, # 80 - 87 -1,1,1,1,1,1,1,1, # 88 - 8f -1,1,1,1,1,1,1,1, # 90 - 97 -1,1,1,1,1,1,1,1, # 98 - 9f -1,1,1,1,1,1,1,1, # a0 - a7 -1,1,1,1,1,1,1,1, # a8 - af -1,1,1,1,1,1,1,1, # b0 - b7 -1,1,1,1,1,1,1,1, # b8 - bf -1,1,1,1,1,1,1,1, # c0 - c7 -1,1,1,1,1,1,1,1, # c8 - cf -1,1,1,1,1,1,1,1, # d0 - d7 -1,1,1,1,1,1,1,1, # d8 - df -1,1,1,1,1,1,1,1, # e0 - e7 -1,1,1,1,1,1,1,1, # e8 - ef -1,1,1,1,1,1,1,1, # f0 - f7 -1,1,1,1,1,1,1,1, # f8 - ff -) - -HZ_ST = ( -MachineState.START,MachineState.ERROR, 3,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,# 00-07 -MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,# 08-0f -MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START, 4,MachineState.ERROR,# 10-17 - 5,MachineState.ERROR, 6,MachineState.ERROR, 5, 5, 4,MachineState.ERROR,# 18-1f - 4,MachineState.ERROR, 4, 4, 4,MachineState.ERROR, 4,MachineState.ERROR,# 20-27 - 4,MachineState.ITS_ME,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,# 28-2f -) - -HZ_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0) - -HZ_SM_MODEL = {'class_table': HZ_CLS, - 'class_factor': 6, - 'state_table': HZ_ST, - 'char_len_table': HZ_CHAR_LEN_TABLE, - 'name': "HZ-GB-2312", - 'language': 'Chinese'} - -ISO2022CN_CLS = ( -2,0,0,0,0,0,0,0, # 00 - 07 -0,0,0,0,0,0,0,0, # 08 - 0f -0,0,0,0,0,0,0,0, # 10 - 17 -0,0,0,1,0,0,0,0, # 18 - 1f -0,0,0,0,0,0,0,0, # 20 - 27 -0,3,0,0,0,0,0,0, # 28 - 2f -0,0,0,0,0,0,0,0, # 30 - 37 -0,0,0,0,0,0,0,0, # 38 - 3f -0,0,0,4,0,0,0,0, # 40 - 47 -0,0,0,0,0,0,0,0, # 48 - 4f -0,0,0,0,0,0,0,0, # 50 - 57 -0,0,0,0,0,0,0,0, # 58 - 5f -0,0,0,0,0,0,0,0, # 60 - 67 -0,0,0,0,0,0,0,0, # 68 - 6f -0,0,0,0,0,0,0,0, # 70 - 77 -0,0,0,0,0,0,0,0, # 78 - 7f -2,2,2,2,2,2,2,2, # 80 - 87 -2,2,2,2,2,2,2,2, # 88 - 8f 
-2,2,2,2,2,2,2,2, # 90 - 97 -2,2,2,2,2,2,2,2, # 98 - 9f -2,2,2,2,2,2,2,2, # a0 - a7 -2,2,2,2,2,2,2,2, # a8 - af -2,2,2,2,2,2,2,2, # b0 - b7 -2,2,2,2,2,2,2,2, # b8 - bf -2,2,2,2,2,2,2,2, # c0 - c7 -2,2,2,2,2,2,2,2, # c8 - cf -2,2,2,2,2,2,2,2, # d0 - d7 -2,2,2,2,2,2,2,2, # d8 - df -2,2,2,2,2,2,2,2, # e0 - e7 -2,2,2,2,2,2,2,2, # e8 - ef -2,2,2,2,2,2,2,2, # f0 - f7 -2,2,2,2,2,2,2,2, # f8 - ff -) - -ISO2022CN_ST = ( -MachineState.START, 3,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,# 00-07 -MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 08-0f -MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,# 10-17 -MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 4,MachineState.ERROR,# 18-1f -MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 20-27 - 5, 6,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 28-2f -MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 30-37 -MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.START,# 38-3f -) - -ISO2022CN_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0, 0, 0, 0) - -ISO2022CN_SM_MODEL = {'class_table': ISO2022CN_CLS, - 'class_factor': 9, - 'state_table': ISO2022CN_ST, - 'char_len_table': ISO2022CN_CHAR_LEN_TABLE, - 'name': "ISO-2022-CN", - 'language': 'Chinese'} - -ISO2022JP_CLS = ( -2,0,0,0,0,0,0,0, # 00 - 07 -0,0,0,0,0,0,2,2, # 08 - 0f -0,0,0,0,0,0,0,0, # 10 - 17 -0,0,0,1,0,0,0,0, # 18 - 1f -0,0,0,0,7,0,0,0, # 20 - 27 -3,0,0,0,0,0,0,0, # 28 - 2f -0,0,0,0,0,0,0,0, # 30 - 37 -0,0,0,0,0,0,0,0, # 38 - 3f -6,0,4,0,8,0,0,0, # 40 - 47 -0,9,5,0,0,0,0,0, # 48 - 4f -0,0,0,0,0,0,0,0, # 50 - 57 -0,0,0,0,0,0,0,0, # 58 - 5f -0,0,0,0,0,0,0,0, # 60 - 67 -0,0,0,0,0,0,0,0, # 68 - 6f -0,0,0,0,0,0,0,0, # 70 - 77 -0,0,0,0,0,0,0,0, # 78 - 7f -2,2,2,2,2,2,2,2, # 80 - 87 -2,2,2,2,2,2,2,2, # 88 - 8f -2,2,2,2,2,2,2,2, # 90 - 97 -2,2,2,2,2,2,2,2, # 98 - 9f -2,2,2,2,2,2,2,2, # a0 - a7 -2,2,2,2,2,2,2,2, # a8 - af -2,2,2,2,2,2,2,2, # b0 - b7 -2,2,2,2,2,2,2,2, # b8 - bf -2,2,2,2,2,2,2,2, # c0 - c7 -2,2,2,2,2,2,2,2, # c8 - cf -2,2,2,2,2,2,2,2, # d0 - d7 -2,2,2,2,2,2,2,2, # d8 - df -2,2,2,2,2,2,2,2, # e0 - e7 -2,2,2,2,2,2,2,2, # e8 - ef -2,2,2,2,2,2,2,2, # f0 - f7 -2,2,2,2,2,2,2,2, # f8 - ff -) - -ISO2022JP_ST = ( -MachineState.START, 3,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,# 00-07 -MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 08-0f -MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,# 10-17 -MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,# 18-1f -MachineState.ERROR, 
5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 4,MachineState.ERROR,MachineState.ERROR,# 20-27 -MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 6,MachineState.ITS_ME,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,# 28-2f -MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,# 30-37 -MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 38-3f -MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.START,MachineState.START,# 40-47 -) - -ISO2022JP_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0, 0, 0, 0, 0) - -ISO2022JP_SM_MODEL = {'class_table': ISO2022JP_CLS, - 'class_factor': 10, - 'state_table': ISO2022JP_ST, - 'char_len_table': ISO2022JP_CHAR_LEN_TABLE, - 'name': "ISO-2022-JP", - 'language': 'Japanese'} - -ISO2022KR_CLS = ( -2,0,0,0,0,0,0,0, # 00 - 07 -0,0,0,0,0,0,0,0, # 08 - 0f -0,0,0,0,0,0,0,0, # 10 - 17 -0,0,0,1,0,0,0,0, # 18 - 1f -0,0,0,0,3,0,0,0, # 20 - 27 -0,4,0,0,0,0,0,0, # 28 - 2f -0,0,0,0,0,0,0,0, # 30 - 37 -0,0,0,0,0,0,0,0, # 38 - 3f -0,0,0,5,0,0,0,0, # 40 - 47 -0,0,0,0,0,0,0,0, # 48 - 4f -0,0,0,0,0,0,0,0, # 50 - 57 -0,0,0,0,0,0,0,0, # 58 - 5f -0,0,0,0,0,0,0,0, # 60 - 67 -0,0,0,0,0,0,0,0, # 68 - 6f -0,0,0,0,0,0,0,0, # 70 - 77 -0,0,0,0,0,0,0,0, # 78 - 7f -2,2,2,2,2,2,2,2, # 80 - 87 -2,2,2,2,2,2,2,2, # 88 - 8f -2,2,2,2,2,2,2,2, # 90 - 97 -2,2,2,2,2,2,2,2, # 98 - 9f -2,2,2,2,2,2,2,2, # a0 - a7 -2,2,2,2,2,2,2,2, # a8 - af -2,2,2,2,2,2,2,2, # b0 - b7 -2,2,2,2,2,2,2,2, # b8 - bf -2,2,2,2,2,2,2,2, # c0 - c7 -2,2,2,2,2,2,2,2, # c8 - cf -2,2,2,2,2,2,2,2, # d0 - d7 -2,2,2,2,2,2,2,2, # d8 - df -2,2,2,2,2,2,2,2, # e0 - e7 -2,2,2,2,2,2,2,2, # e8 - ef -2,2,2,2,2,2,2,2, # f0 - f7 -2,2,2,2,2,2,2,2, # f8 - ff -) - -ISO2022KR_ST = ( -MachineState.START, 3,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,# 00-07 -MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,# 08-0f -MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 4,MachineState.ERROR,MachineState.ERROR,# 10-17 -MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 18-1f -MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.START,MachineState.START,MachineState.START,MachineState.START,# 20-27 -) - -ISO2022KR_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0) - -ISO2022KR_SM_MODEL = {'class_table': ISO2022KR_CLS, - 'class_factor': 6, - 'state_table': ISO2022KR_ST, - 'char_len_table': ISO2022KR_CHAR_LEN_TABLE, - 'name': "ISO-2022-KR", - 'language': 'Korean'} - - diff --git a/venv/lib/python3.6/site-packages/chardet/eucjpprober.py b/venv/lib/python3.6/site-packages/chardet/eucjpprober.py deleted file mode 100644 index 20ce8f7..0000000 --- a/venv/lib/python3.6/site-packages/chardet/eucjpprober.py +++ /dev/null @@ -1,92 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is mozilla.org code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. 
All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .enums import ProbingState, MachineState -from .mbcharsetprober import MultiByteCharSetProber -from .codingstatemachine import CodingStateMachine -from .chardistribution import EUCJPDistributionAnalysis -from .jpcntx import EUCJPContextAnalysis -from .mbcssm import EUCJP_SM_MODEL - - -class EUCJPProber(MultiByteCharSetProber): - def __init__(self): - super(EUCJPProber, self).__init__() - self.coding_sm = CodingStateMachine(EUCJP_SM_MODEL) - self.distribution_analyzer = EUCJPDistributionAnalysis() - self.context_analyzer = EUCJPContextAnalysis() - self.reset() - - def reset(self): - super(EUCJPProber, self).reset() - self.context_analyzer.reset() - - @property - def charset_name(self): - return "EUC-JP" - - @property - def language(self): - return "Japanese" - - def feed(self, byte_str): - for i in range(len(byte_str)): - # PY3K: byte_str is a byte array, so byte_str[i] is an int, not a byte - coding_state = self.coding_sm.next_state(byte_str[i]) - if coding_state == MachineState.ERROR: - self.logger.debug('%s %s prober hit error at byte %s', - self.charset_name, self.language, i) - self._state = ProbingState.NOT_ME - break - elif coding_state == MachineState.ITS_ME: - self._state = ProbingState.FOUND_IT - break - elif coding_state == MachineState.START: - char_len = self.coding_sm.get_current_charlen() - if i == 0: - self._last_char[1] = byte_str[0] - self.context_analyzer.feed(self._last_char, char_len) - self.distribution_analyzer.feed(self._last_char, char_len) - else: - self.context_analyzer.feed(byte_str[i - 1:i + 1], - char_len) - self.distribution_analyzer.feed(byte_str[i - 1:i + 1], - char_len) - - self._last_char[0] = byte_str[-1] - - if self.state == ProbingState.DETECTING: - if (self.context_analyzer.got_enough_data() and - (self.get_confidence() > self.SHORTCUT_THRESHOLD)): - self._state = ProbingState.FOUND_IT - - return self.state - - def get_confidence(self): - context_conf = self.context_analyzer.get_confidence() - distrib_conf = self.distribution_analyzer.get_confidence() - return max(context_conf, distrib_conf) diff --git a/venv/lib/python3.6/site-packages/chardet/euckrfreq.py b/venv/lib/python3.6/site-packages/chardet/euckrfreq.py deleted file mode 100644 index b68078c..0000000 --- a/venv/lib/python3.6/site-packages/chardet/euckrfreq.py +++ /dev/null @@ -1,195 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Communicator client code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. 
All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -# Sampling from about 20M text materials include literature and computer technology - -# 128 --> 0.79 -# 256 --> 0.92 -# 512 --> 0.986 -# 1024 --> 0.99944 -# 2048 --> 0.99999 -# -# Idea Distribution Ratio = 0.98653 / (1-0.98653) = 73.24 -# Random Distribution Ration = 512 / (2350-512) = 0.279. -# -# Typical Distribution Ratio - -EUCKR_TYPICAL_DISTRIBUTION_RATIO = 6.0 - -EUCKR_TABLE_SIZE = 2352 - -# Char to FreqOrder table , -EUCKR_CHAR_TO_FREQ_ORDER = ( - 13, 130, 120,1396, 481,1719,1720, 328, 609, 212,1721, 707, 400, 299,1722, 87, -1397,1723, 104, 536,1117,1203,1724,1267, 685,1268, 508,1725,1726,1727,1728,1398, -1399,1729,1730,1731, 141, 621, 326,1057, 368,1732, 267, 488, 20,1733,1269,1734, - 945,1400,1735, 47, 904,1270,1736,1737, 773, 248,1738, 409, 313, 786, 429,1739, - 116, 987, 813,1401, 683, 75,1204, 145,1740,1741,1742,1743, 16, 847, 667, 622, - 708,1744,1745,1746, 966, 787, 304, 129,1747, 60, 820, 123, 676,1748,1749,1750, -1751, 617,1752, 626,1753,1754,1755,1756, 653,1757,1758,1759,1760,1761,1762, 856, - 344,1763,1764,1765,1766, 89, 401, 418, 806, 905, 848,1767,1768,1769, 946,1205, - 709,1770,1118,1771, 241,1772,1773,1774,1271,1775, 569,1776, 999,1777,1778,1779, -1780, 337, 751,1058, 28, 628, 254,1781, 177, 906, 270, 349, 891,1079,1782, 19, -1783, 379,1784, 315,1785, 629, 754,1402, 559,1786, 636, 203,1206,1787, 710, 567, -1788, 935, 814,1789,1790,1207, 766, 528,1791,1792,1208,1793,1794,1795,1796,1797, -1403,1798,1799, 533,1059,1404,1405,1156,1406, 936, 884,1080,1800, 351,1801,1802, -1803,1804,1805, 801,1806,1807,1808,1119,1809,1157, 714, 474,1407,1810, 298, 899, - 885,1811,1120, 802,1158,1812, 892,1813,1814,1408, 659,1815,1816,1121,1817,1818, -1819,1820,1821,1822, 319,1823, 594, 545,1824, 815, 937,1209,1825,1826, 573,1409, -1022,1827,1210,1828,1829,1830,1831,1832,1833, 556, 722, 807,1122,1060,1834, 697, -1835, 900, 557, 715,1836,1410, 540,1411, 752,1159, 294, 597,1211, 976, 803, 770, -1412,1837,1838, 39, 794,1413, 358,1839, 371, 925,1840, 453, 661, 788, 531, 723, - 544,1023,1081, 869, 91,1841, 392, 430, 790, 602,1414, 677,1082, 457,1415,1416, -1842,1843, 475, 327,1024,1417, 795, 121,1844, 733, 403,1418,1845,1846,1847, 300, - 119, 711,1212, 627,1848,1272, 207,1849,1850, 796,1213, 382,1851, 519,1852,1083, - 893,1853,1854,1855, 367, 809, 487, 671,1856, 663,1857,1858, 956, 471, 306, 857, -1859,1860,1160,1084,1861,1862,1863,1864,1865,1061,1866,1867,1868,1869,1870,1871, - 282, 96, 574,1872, 502,1085,1873,1214,1874, 907,1875,1876, 827, 977,1419,1420, -1421, 268,1877,1422,1878,1879,1880, 308,1881, 2, 537,1882,1883,1215,1884,1885, - 127, 791,1886,1273,1423,1887, 34, 336, 404, 643,1888, 571, 654, 894, 840,1889, - 0, 
886,1274, 122, 575, 260, 908, 938,1890,1275, 410, 316,1891,1892, 100,1893, -1894,1123, 48,1161,1124,1025,1895, 633, 901,1276,1896,1897, 115, 816,1898, 317, -1899, 694,1900, 909, 734,1424, 572, 866,1425, 691, 85, 524,1010, 543, 394, 841, -1901,1902,1903,1026,1904,1905,1906,1907,1908,1909, 30, 451, 651, 988, 310,1910, -1911,1426, 810,1216, 93,1912,1913,1277,1217,1914, 858, 759, 45, 58, 181, 610, - 269,1915,1916, 131,1062, 551, 443,1000, 821,1427, 957, 895,1086,1917,1918, 375, -1919, 359,1920, 687,1921, 822,1922, 293,1923,1924, 40, 662, 118, 692, 29, 939, - 887, 640, 482, 174,1925, 69,1162, 728,1428, 910,1926,1278,1218,1279, 386, 870, - 217, 854,1163, 823,1927,1928,1929,1930, 834,1931, 78,1932, 859,1933,1063,1934, -1935,1936,1937, 438,1164, 208, 595,1938,1939,1940,1941,1219,1125,1942, 280, 888, -1429,1430,1220,1431,1943,1944,1945,1946,1947,1280, 150, 510,1432,1948,1949,1950, -1951,1952,1953,1954,1011,1087,1955,1433,1043,1956, 881,1957, 614, 958,1064,1065, -1221,1958, 638,1001, 860, 967, 896,1434, 989, 492, 553,1281,1165,1959,1282,1002, -1283,1222,1960,1961,1962,1963, 36, 383, 228, 753, 247, 454,1964, 876, 678,1965, -1966,1284, 126, 464, 490, 835, 136, 672, 529, 940,1088,1435, 473,1967,1968, 467, - 50, 390, 227, 587, 279, 378, 598, 792, 968, 240, 151, 160, 849, 882,1126,1285, - 639,1044, 133, 140, 288, 360, 811, 563,1027, 561, 142, 523,1969,1970,1971, 7, - 103, 296, 439, 407, 506, 634, 990,1972,1973,1974,1975, 645,1976,1977,1978,1979, -1980,1981, 236,1982,1436,1983,1984,1089, 192, 828, 618, 518,1166, 333,1127,1985, - 818,1223,1986,1987,1988,1989,1990,1991,1992,1993, 342,1128,1286, 746, 842,1994, -1995, 560, 223,1287, 98, 8, 189, 650, 978,1288,1996,1437,1997, 17, 345, 250, - 423, 277, 234, 512, 226, 97, 289, 42, 167,1998, 201,1999,2000, 843, 836, 824, - 532, 338, 783,1090, 182, 576, 436,1438,1439, 527, 500,2001, 947, 889,2002,2003, -2004,2005, 262, 600, 314, 447,2006, 547,2007, 693, 738,1129,2008, 71,1440, 745, - 619, 688,2009, 829,2010,2011, 147,2012, 33, 948,2013,2014, 74, 224,2015, 61, - 191, 918, 399, 637,2016,1028,1130, 257, 902,2017,2018,2019,2020,2021,2022,2023, -2024,2025,2026, 837,2027,2028,2029,2030, 179, 874, 591, 52, 724, 246,2031,2032, -2033,2034,1167, 969,2035,1289, 630, 605, 911,1091,1168,2036,2037,2038,1441, 912, -2039, 623,2040,2041, 253,1169,1290,2042,1442, 146, 620, 611, 577, 433,2043,1224, - 719,1170, 959, 440, 437, 534, 84, 388, 480,1131, 159, 220, 198, 679,2044,1012, - 819,1066,1443, 113,1225, 194, 318,1003,1029,2045,2046,2047,2048,1067,2049,2050, -2051,2052,2053, 59, 913, 112,2054, 632,2055, 455, 144, 739,1291,2056, 273, 681, - 499,2057, 448,2058,2059, 760,2060,2061, 970, 384, 169, 245,1132,2062,2063, 414, -1444,2064,2065, 41, 235,2066, 157, 252, 877, 568, 919, 789, 580,2067, 725,2068, -2069,1292,2070,2071,1445,2072,1446,2073,2074, 55, 588, 66,1447, 271,1092,2075, -1226,2076, 960,1013, 372,2077,2078,2079,2080,2081,1293,2082,2083,2084,2085, 850, -2086,2087,2088,2089,2090, 186,2091,1068, 180,2092,2093,2094, 109,1227, 522, 606, -2095, 867,1448,1093, 991,1171, 926, 353,1133,2096, 581,2097,2098,2099,1294,1449, -1450,2100, 596,1172,1014,1228,2101,1451,1295,1173,1229,2102,2103,1296,1134,1452, - 949,1135,2104,2105,1094,1453,1454,1455,2106,1095,2107,2108,2109,2110,2111,2112, -2113,2114,2115,2116,2117, 804,2118,2119,1230,1231, 805,1456, 405,1136,2120,2121, -2122,2123,2124, 720, 701,1297, 992,1457, 927,1004,2125,2126,2127,2128,2129,2130, - 22, 417,2131, 303,2132, 385,2133, 971, 520, 513,2134,1174, 73,1096, 231, 274, - 962,1458, 673,2135,1459,2136, 
152,1137,2137,2138,2139,2140,1005,1138,1460,1139, -2141,2142,2143,2144, 11, 374, 844,2145, 154,1232, 46,1461,2146, 838, 830, 721, -1233, 106,2147, 90, 428, 462, 578, 566,1175, 352,2148,2149, 538,1234, 124,1298, -2150,1462, 761, 565,2151, 686,2152, 649,2153, 72, 173,2154, 460, 415,2155,1463, -2156,1235, 305,2157,2158,2159,2160,2161,2162, 579,2163,2164,2165,2166,2167, 747, -2168,2169,2170,2171,1464, 669,2172,2173,2174,2175,2176,1465,2177, 23, 530, 285, -2178, 335, 729,2179, 397,2180,2181,2182,1030,2183,2184, 698,2185,2186, 325,2187, -2188, 369,2189, 799,1097,1015, 348,2190,1069, 680,2191, 851,1466,2192,2193, 10, -2194, 613, 424,2195, 979, 108, 449, 589, 27, 172, 81,1031, 80, 774, 281, 350, -1032, 525, 301, 582,1176,2196, 674,1045,2197,2198,1467, 730, 762,2199,2200,2201, -2202,1468,2203, 993,2204,2205, 266,1070, 963,1140,2206,2207,2208, 664,1098, 972, -2209,2210,2211,1177,1469,1470, 871,2212,2213,2214,2215,2216,1471,2217,2218,2219, -2220,2221,2222,2223,2224,2225,2226,2227,1472,1236,2228,2229,2230,2231,2232,2233, -2234,2235,1299,2236,2237, 200,2238, 477, 373,2239,2240, 731, 825, 777,2241,2242, -2243, 521, 486, 548,2244,2245,2246,1473,1300, 53, 549, 137, 875, 76, 158,2247, -1301,1474, 469, 396,1016, 278, 712,2248, 321, 442, 503, 767, 744, 941,1237,1178, -1475,2249, 82, 178,1141,1179, 973,2250,1302,2251, 297,2252,2253, 570,2254,2255, -2256, 18, 450, 206,2257, 290, 292,1142,2258, 511, 162, 99, 346, 164, 735,2259, -1476,1477, 4, 554, 343, 798,1099,2260,1100,2261, 43, 171,1303, 139, 215,2262, -2263, 717, 775,2264,1033, 322, 216,2265, 831,2266, 149,2267,1304,2268,2269, 702, -1238, 135, 845, 347, 309,2270, 484,2271, 878, 655, 238,1006,1478,2272, 67,2273, - 295,2274,2275, 461,2276, 478, 942, 412,2277,1034,2278,2279,2280, 265,2281, 541, -2282,2283,2284,2285,2286, 70, 852,1071,2287,2288,2289,2290, 21, 56, 509, 117, - 432,2291,2292, 331, 980, 552,1101, 148, 284, 105, 393,1180,1239, 755,2293, 187, -2294,1046,1479,2295, 340,2296, 63,1047, 230,2297,2298,1305, 763,1306, 101, 800, - 808, 494,2299,2300,2301, 903,2302, 37,1072, 14, 5,2303, 79, 675,2304, 312, -2305,2306,2307,2308,2309,1480, 6,1307,2310,2311,2312, 1, 470, 35, 24, 229, -2313, 695, 210, 86, 778, 15, 784, 592, 779, 32, 77, 855, 964,2314, 259,2315, - 501, 380,2316,2317, 83, 981, 153, 689,1308,1481,1482,1483,2318,2319, 716,1484, -2320,2321,2322,2323,2324,2325,1485,2326,2327, 128, 57, 68, 261,1048, 211, 170, -1240, 31,2328, 51, 435, 742,2329,2330,2331, 635,2332, 264, 456,2333,2334,2335, - 425,2336,1486, 143, 507, 263, 943,2337, 363, 920,1487, 256,1488,1102, 243, 601, -1489,2338,2339,2340,2341,2342,2343,2344, 861,2345,2346,2347,2348,2349,2350, 395, -2351,1490,1491, 62, 535, 166, 225,2352,2353, 668, 419,1241, 138, 604, 928,2354, -1181,2355,1492,1493,2356,2357,2358,1143,2359, 696,2360, 387, 307,1309, 682, 476, -2361,2362, 332, 12, 222, 156,2363, 232,2364, 641, 276, 656, 517,1494,1495,1035, - 416, 736,1496,2365,1017, 586,2366,2367,2368,1497,2369, 242,2370,2371,2372,1498, -2373, 965, 713,2374,2375,2376,2377, 740, 982,1499, 944,1500,1007,2378,2379,1310, -1501,2380,2381,2382, 785, 329,2383,2384,1502,2385,2386,2387, 932,2388,1503,2389, -2390,2391,2392,1242,2393,2394,2395,2396,2397, 994, 950,2398,2399,2400,2401,1504, -1311,2402,2403,2404,2405,1049, 749,2406,2407, 853, 718,1144,1312,2408,1182,1505, -2409,2410, 255, 516, 479, 564, 550, 214,1506,1507,1313, 413, 239, 444, 339,1145, -1036,1508,1509,1314,1037,1510,1315,2411,1511,2412,2413,2414, 176, 703, 497, 624, - 593, 921, 302,2415, 341, 165,1103,1512,2416,1513,2417,2418,2419, 376,2420, 700, -2421,2422,2423, 258, 
768,1316,2424,1183,2425, 995, 608,2426,2427,2428,2429, 221, -2430,2431,2432,2433,2434,2435,2436,2437, 195, 323, 726, 188, 897, 983,1317, 377, - 644,1050, 879,2438, 452,2439,2440,2441,2442,2443,2444, 914,2445,2446,2447,2448, - 915, 489,2449,1514,1184,2450,2451, 515, 64, 427, 495,2452, 583,2453, 483, 485, -1038, 562, 213,1515, 748, 666,2454,2455,2456,2457, 334,2458, 780, 996,1008, 705, -1243,2459,2460,2461,2462,2463, 114,2464, 493,1146, 366, 163,1516, 961,1104,2465, - 291,2466,1318,1105,2467,1517, 365,2468, 355, 951,1244,2469,1319,2470, 631,2471, -2472, 218,1320, 364, 320, 756,1518,1519,1321,1520,1322,2473,2474,2475,2476, 997, -2477,2478,2479,2480, 665,1185,2481, 916,1521,2482,2483,2484, 584, 684,2485,2486, - 797,2487,1051,1186,2488,2489,2490,1522,2491,2492, 370,2493,1039,1187, 65,2494, - 434, 205, 463,1188,2495, 125, 812, 391, 402, 826, 699, 286, 398, 155, 781, 771, - 585,2496, 590, 505,1073,2497, 599, 244, 219, 917,1018, 952, 646,1523,2498,1323, -2499,2500, 49, 984, 354, 741,2501, 625,2502,1324,2503,1019, 190, 357, 757, 491, - 95, 782, 868,2504,2505,2506,2507,2508,2509, 134,1524,1074, 422,1525, 898,2510, - 161,2511,2512,2513,2514, 769,2515,1526,2516,2517, 411,1325,2518, 472,1527,2519, -2520,2521,2522,2523,2524, 985,2525,2526,2527,2528,2529,2530, 764,2531,1245,2532, -2533, 25, 204, 311,2534, 496,2535,1052,2536,2537,2538,2539,2540,2541,2542, 199, - 704, 504, 468, 758, 657,1528, 196, 44, 839,1246, 272, 750,2543, 765, 862,2544, -2545,1326,2546, 132, 615, 933,2547, 732,2548,2549,2550,1189,1529,2551, 283,1247, -1053, 607, 929,2552,2553,2554, 930, 183, 872, 616,1040,1147,2555,1148,1020, 441, - 249,1075,2556,2557,2558, 466, 743,2559,2560,2561, 92, 514, 426, 420, 526,2562, -2563,2564,2565,2566,2567,2568, 185,2569,2570,2571,2572, 776,1530, 658,2573, 362, -2574, 361, 922,1076, 793,2575,2576,2577,2578,2579,2580,1531, 251,2581,2582,2583, -2584,1532, 54, 612, 237,1327,2585,2586, 275, 408, 647, 111,2587,1533,1106, 465, - 3, 458, 9, 38,2588, 107, 110, 890, 209, 26, 737, 498,2589,1534,2590, 431, - 202, 88,1535, 356, 287,1107, 660,1149,2591, 381,1536, 986,1150, 445,1248,1151, - 974,2592,2593, 846,2594, 446, 953, 184,1249,1250, 727,2595, 923, 193, 883,2596, -2597,2598, 102, 324, 539, 817,2599, 421,1041,2600, 832,2601, 94, 175, 197, 406, -2602, 459,2603,2604,2605,2606,2607, 330, 555,2608,2609,2610, 706,1108, 389,2611, -2612,2613,2614, 233,2615, 833, 558, 931, 954,1251,2616,2617,1537, 546,2618,2619, -1009,2620,2621,2622,1538, 690,1328,2623, 955,2624,1539,2625,2626, 772,2627,2628, -2629,2630,2631, 924, 648, 863, 603,2632,2633, 934,1540, 864, 865,2634, 642,1042, - 670,1190,2635,2636,2637,2638, 168,2639, 652, 873, 542,1054,1541,2640,2641,2642, # 512, 256 -) - diff --git a/venv/lib/python3.6/site-packages/chardet/euckrprober.py b/venv/lib/python3.6/site-packages/chardet/euckrprober.py deleted file mode 100644 index 345a060..0000000 --- a/venv/lib/python3.6/site-packages/chardet/euckrprober.py +++ /dev/null @@ -1,47 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is mozilla.org code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. 
-#
-# Contributor(s):
-# Mark Pilgrim - port to Python
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
-# 02110-1301 USA
-######################### END LICENSE BLOCK #########################
-
-from .mbcharsetprober import MultiByteCharSetProber
-from .codingstatemachine import CodingStateMachine
-from .chardistribution import EUCKRDistributionAnalysis
-from .mbcssm import EUCKR_SM_MODEL
-
-
-class EUCKRProber(MultiByteCharSetProber):
-    def __init__(self):
-        super(EUCKRProber, self).__init__()
-        self.coding_sm = CodingStateMachine(EUCKR_SM_MODEL)
-        self.distribution_analyzer = EUCKRDistributionAnalysis()
-        self.reset()
-
-    @property
-    def charset_name(self):
-        return "EUC-KR"
-
-    @property
-    def language(self):
-        return "Korean"
diff --git a/venv/lib/python3.6/site-packages/chardet/euctwfreq.py b/venv/lib/python3.6/site-packages/chardet/euctwfreq.py
deleted file mode 100644
index ed7a995..0000000
--- a/venv/lib/python3.6/site-packages/chardet/euctwfreq.py
+++ /dev/null
@@ -1,387 +0,0 @@
-######################## BEGIN LICENSE BLOCK ########################
-# The Original Code is Mozilla Communicator client code.
-#
-# The Initial Developer of the Original Code is
-# Netscape Communications Corporation.
-# Portions created by the Initial Developer are Copyright (C) 1998
-# the Initial Developer. All Rights Reserved.
-#
-# Contributor(s):
-# Mark Pilgrim - port to Python
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-# Lesser General Public License for more details.
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -# EUCTW frequency table -# Converted from big5 work -# by Taiwan's Mandarin Promotion Council -# - -# 128 --> 0.42261 -# 256 --> 0.57851 -# 512 --> 0.74851 -# 1024 --> 0.89384 -# 2048 --> 0.97583 -# -# Idea Distribution Ratio = 0.74851/(1-0.74851) =2.98 -# Random Distribution Ration = 512/(5401-512)=0.105 -# -# Typical Distribution Ratio about 25% of Ideal one, still much higher than RDR - -EUCTW_TYPICAL_DISTRIBUTION_RATIO = 0.75 - -# Char to FreqOrder table , -EUCTW_TABLE_SIZE = 5376 - -EUCTW_CHAR_TO_FREQ_ORDER = ( - 1,1800,1506, 255,1431, 198, 9, 82, 6,7310, 177, 202,3615,1256,2808, 110, # 2742 -3735, 33,3241, 261, 76, 44,2113, 16,2931,2184,1176, 659,3868, 26,3404,2643, # 2758 -1198,3869,3313,4060, 410,2211, 302, 590, 361,1963, 8, 204, 58,4296,7311,1931, # 2774 - 63,7312,7313, 317,1614, 75, 222, 159,4061,2412,1480,7314,3500,3068, 224,2809, # 2790 -3616, 3, 10,3870,1471, 29,2774,1135,2852,1939, 873, 130,3242,1123, 312,7315, # 2806 -4297,2051, 507, 252, 682,7316, 142,1914, 124, 206,2932, 34,3501,3173, 64, 604, # 2822 -7317,2494,1976,1977, 155,1990, 645, 641,1606,7318,3405, 337, 72, 406,7319, 80, # 2838 - 630, 238,3174,1509, 263, 939,1092,2644, 756,1440,1094,3406, 449, 69,2969, 591, # 2854 - 179,2095, 471, 115,2034,1843, 60, 50,2970, 134, 806,1868, 734,2035,3407, 180, # 2870 - 995,1607, 156, 537,2893, 688,7320, 319,1305, 779,2144, 514,2374, 298,4298, 359, # 2886 -2495, 90,2707,1338, 663, 11, 906,1099,2545, 20,2436, 182, 532,1716,7321, 732, # 2902 -1376,4062,1311,1420,3175, 25,2312,1056, 113, 399, 382,1949, 242,3408,2467, 529, # 2918 -3243, 475,1447,3617,7322, 117, 21, 656, 810,1297,2295,2329,3502,7323, 126,4063, # 2934 - 706, 456, 150, 613,4299, 71,1118,2036,4064, 145,3069, 85, 835, 486,2114,1246, # 2950 -1426, 428, 727,1285,1015, 800, 106, 623, 303,1281,7324,2127,2354, 347,3736, 221, # 2966 -3503,3110,7325,1955,1153,4065, 83, 296,1199,3070, 192, 624, 93,7326, 822,1897, # 2982 -2810,3111, 795,2064, 991,1554,1542,1592, 27, 43,2853, 859, 139,1456, 860,4300, # 2998 - 437, 712,3871, 164,2392,3112, 695, 211,3017,2096, 195,3872,1608,3504,3505,3618, # 3014 -3873, 234, 811,2971,2097,3874,2229,1441,3506,1615,2375, 668,2076,1638, 305, 228, # 3030 -1664,4301, 467, 415,7327, 262,2098,1593, 239, 108, 300, 200,1033, 512,1247,2077, # 3046 -7328,7329,2173,3176,3619,2673, 593, 845,1062,3244, 88,1723,2037,3875,1950, 212, # 3062 - 266, 152, 149, 468,1898,4066,4302, 77, 187,7330,3018, 37, 5,2972,7331,3876, # 3078 -7332,7333, 39,2517,4303,2894,3177,2078, 55, 148, 74,4304, 545, 483,1474,1029, # 3094 -1665, 217,1869,1531,3113,1104,2645,4067, 24, 172,3507, 900,3877,3508,3509,4305, # 3110 - 32,1408,2811,1312, 329, 487,2355,2247,2708, 784,2674, 4,3019,3314,1427,1788, # 3126 - 188, 109, 499,7334,3620,1717,1789, 888,1217,3020,4306,7335,3510,7336,3315,1520, # 3142 -3621,3878, 196,1034, 775,7337,7338, 929,1815, 249, 439, 38,7339,1063,7340, 794, # 3158 -3879,1435,2296, 46, 178,3245,2065,7341,2376,7342, 214,1709,4307, 804, 35, 707, # 3174 - 324,3622,1601,2546, 140, 459,4068,7343,7344,1365, 839, 272, 978,2257,2572,3409, # 3190 -2128,1363,3623,1423, 697, 100,3071, 48, 70,1231, 495,3114,2193,7345,1294,7346, # 3206 -2079, 462, 586,1042,3246, 853, 256, 988, 185,2377,3410,1698, 434,1084,7347,3411, # 3222 - 314,2615,2775,4308,2330,2331, 
569,2280, 637,1816,2518, 757,1162,1878,1616,3412, # 3238 - 287,1577,2115, 768,4309,1671,2854,3511,2519,1321,3737, 909,2413,7348,4069, 933, # 3254 -3738,7349,2052,2356,1222,4310, 765,2414,1322, 786,4311,7350,1919,1462,1677,2895, # 3270 -1699,7351,4312,1424,2437,3115,3624,2590,3316,1774,1940,3413,3880,4070, 309,1369, # 3286 -1130,2812, 364,2230,1653,1299,3881,3512,3882,3883,2646, 525,1085,3021, 902,2000, # 3302 -1475, 964,4313, 421,1844,1415,1057,2281, 940,1364,3116, 376,4314,4315,1381, 7, # 3318 -2520, 983,2378, 336,1710,2675,1845, 321,3414, 559,1131,3022,2742,1808,1132,1313, # 3334 - 265,1481,1857,7352, 352,1203,2813,3247, 167,1089, 420,2814, 776, 792,1724,3513, # 3350 -4071,2438,3248,7353,4072,7354, 446, 229, 333,2743, 901,3739,1200,1557,4316,2647, # 3366 -1920, 395,2744,2676,3740,4073,1835, 125, 916,3178,2616,4317,7355,7356,3741,7357, # 3382 -7358,7359,4318,3117,3625,1133,2547,1757,3415,1510,2313,1409,3514,7360,2145, 438, # 3398 -2591,2896,2379,3317,1068, 958,3023, 461, 311,2855,2677,4074,1915,3179,4075,1978, # 3414 - 383, 750,2745,2617,4076, 274, 539, 385,1278,1442,7361,1154,1964, 384, 561, 210, # 3430 - 98,1295,2548,3515,7362,1711,2415,1482,3416,3884,2897,1257, 129,7363,3742, 642, # 3446 - 523,2776,2777,2648,7364, 141,2231,1333, 68, 176, 441, 876, 907,4077, 603,2592, # 3462 - 710, 171,3417, 404, 549, 18,3118,2393,1410,3626,1666,7365,3516,4319,2898,4320, # 3478 -7366,2973, 368,7367, 146, 366, 99, 871,3627,1543, 748, 807,1586,1185, 22,2258, # 3494 - 379,3743,3180,7368,3181, 505,1941,2618,1991,1382,2314,7369, 380,2357, 218, 702, # 3510 -1817,1248,3418,3024,3517,3318,3249,7370,2974,3628, 930,3250,3744,7371, 59,7372, # 3526 - 585, 601,4078, 497,3419,1112,1314,4321,1801,7373,1223,1472,2174,7374, 749,1836, # 3542 - 690,1899,3745,1772,3885,1476, 429,1043,1790,2232,2116, 917,4079, 447,1086,1629, # 3558 -7375, 556,7376,7377,2020,1654, 844,1090, 105, 550, 966,1758,2815,1008,1782, 686, # 3574 -1095,7378,2282, 793,1602,7379,3518,2593,4322,4080,2933,2297,4323,3746, 980,2496, # 3590 - 544, 353, 527,4324, 908,2678,2899,7380, 381,2619,1942,1348,7381,1341,1252, 560, # 3606 -3072,7382,3420,2856,7383,2053, 973, 886,2080, 143,4325,7384,7385, 157,3886, 496, # 3622 -4081, 57, 840, 540,2038,4326,4327,3421,2117,1445, 970,2259,1748,1965,2081,4082, # 3638 -3119,1234,1775,3251,2816,3629, 773,1206,2129,1066,2039,1326,3887,1738,1725,4083, # 3654 - 279,3120, 51,1544,2594, 423,1578,2130,2066, 173,4328,1879,7386,7387,1583, 264, # 3670 - 610,3630,4329,2439, 280, 154,7388,7389,7390,1739, 338,1282,3073, 693,2857,1411, # 3686 -1074,3747,2440,7391,4330,7392,7393,1240, 952,2394,7394,2900,1538,2679, 685,1483, # 3702 -4084,2468,1436, 953,4085,2054,4331, 671,2395, 79,4086,2441,3252, 608, 567,2680, # 3718 -3422,4087,4088,1691, 393,1261,1791,2396,7395,4332,7396,7397,7398,7399,1383,1672, # 3734 -3748,3182,1464, 522,1119, 661,1150, 216, 675,4333,3888,1432,3519, 609,4334,2681, # 3750 -2397,7400,7401,7402,4089,3025, 0,7403,2469, 315, 231,2442, 301,3319,4335,2380, # 3766 -7404, 233,4090,3631,1818,4336,4337,7405, 96,1776,1315,2082,7406, 257,7407,1809, # 3782 -3632,2709,1139,1819,4091,2021,1124,2163,2778,1777,2649,7408,3074, 363,1655,3183, # 3798 -7409,2975,7410,7411,7412,3889,1567,3890, 718, 103,3184, 849,1443, 341,3320,2934, # 3814 -1484,7413,1712, 127, 67, 339,4092,2398, 679,1412, 821,7414,7415, 834, 738, 351, # 3830 -2976,2146, 846, 235,1497,1880, 418,1992,3749,2710, 186,1100,2147,2746,3520,1545, # 3846 -1355,2935,2858,1377, 583,3891,4093,2573,2977,7416,1298,3633,1078,2549,3634,2358, # 3862 - 78,3750,3751, 
267,1289,2099,2001,1594,4094, 348, 369,1274,2194,2175,1837,4338, # 3878 -1820,2817,3635,2747,2283,2002,4339,2936,2748, 144,3321, 882,4340,3892,2749,3423, # 3894 -4341,2901,7417,4095,1726, 320,7418,3893,3026, 788,2978,7419,2818,1773,1327,2859, # 3910 -3894,2819,7420,1306,4342,2003,1700,3752,3521,2359,2650, 787,2022, 506, 824,3636, # 3926 - 534, 323,4343,1044,3322,2023,1900, 946,3424,7421,1778,1500,1678,7422,1881,4344, # 3942 - 165, 243,4345,3637,2521, 123, 683,4096, 764,4346, 36,3895,1792, 589,2902, 816, # 3958 - 626,1667,3027,2233,1639,1555,1622,3753,3896,7423,3897,2860,1370,1228,1932, 891, # 3974 -2083,2903, 304,4097,7424, 292,2979,2711,3522, 691,2100,4098,1115,4347, 118, 662, # 3990 -7425, 611,1156, 854,2381,1316,2861, 2, 386, 515,2904,7426,7427,3253, 868,2234, # 4006 -1486, 855,2651, 785,2212,3028,7428,1040,3185,3523,7429,3121, 448,7430,1525,7431, # 4022 -2164,4348,7432,3754,7433,4099,2820,3524,3122, 503, 818,3898,3123,1568, 814, 676, # 4038 -1444, 306,1749,7434,3755,1416,1030, 197,1428, 805,2821,1501,4349,7435,7436,7437, # 4054 -1993,7438,4350,7439,7440,2195, 13,2779,3638,2980,3124,1229,1916,7441,3756,2131, # 4070 -7442,4100,4351,2399,3525,7443,2213,1511,1727,1120,7444,7445, 646,3757,2443, 307, # 4086 -7446,7447,1595,3186,7448,7449,7450,3639,1113,1356,3899,1465,2522,2523,7451, 519, # 4102 -7452, 128,2132, 92,2284,1979,7453,3900,1512, 342,3125,2196,7454,2780,2214,1980, # 4118 -3323,7455, 290,1656,1317, 789, 827,2360,7456,3758,4352, 562, 581,3901,7457, 401, # 4134 -4353,2248, 94,4354,1399,2781,7458,1463,2024,4355,3187,1943,7459, 828,1105,4101, # 4150 -1262,1394,7460,4102, 605,4356,7461,1783,2862,7462,2822, 819,2101, 578,2197,2937, # 4166 -7463,1502, 436,3254,4103,3255,2823,3902,2905,3425,3426,7464,2712,2315,7465,7466, # 4182 -2332,2067, 23,4357, 193, 826,3759,2102, 699,1630,4104,3075, 390,1793,1064,3526, # 4198 -7467,1579,3076,3077,1400,7468,4105,1838,1640,2863,7469,4358,4359, 137,4106, 598, # 4214 -3078,1966, 780, 104, 974,2938,7470, 278, 899, 253, 402, 572, 504, 493,1339,7471, # 4230 -3903,1275,4360,2574,2550,7472,3640,3029,3079,2249, 565,1334,2713, 863, 41,7473, # 4246 -7474,4361,7475,1657,2333, 19, 463,2750,4107, 606,7476,2981,3256,1087,2084,1323, # 4262 -2652,2982,7477,1631,1623,1750,4108,2682,7478,2864, 791,2714,2653,2334, 232,2416, # 4278 -7479,2983,1498,7480,2654,2620, 755,1366,3641,3257,3126,2025,1609, 119,1917,3427, # 4294 - 862,1026,4109,7481,3904,3760,4362,3905,4363,2260,1951,2470,7482,1125, 817,4110, # 4310 -4111,3906,1513,1766,2040,1487,4112,3030,3258,2824,3761,3127,7483,7484,1507,7485, # 4326 -2683, 733, 40,1632,1106,2865, 345,4113, 841,2524, 230,4364,2984,1846,3259,3428, # 4342 -7486,1263, 986,3429,7487, 735, 879, 254,1137, 857, 622,1300,1180,1388,1562,3907, # 4358 -3908,2939, 967,2751,2655,1349, 592,2133,1692,3324,2985,1994,4114,1679,3909,1901, # 4374 -2185,7488, 739,3642,2715,1296,1290,7489,4115,2198,2199,1921,1563,2595,2551,1870, # 4390 -2752,2986,7490, 435,7491, 343,1108, 596, 17,1751,4365,2235,3430,3643,7492,4366, # 4406 - 294,3527,2940,1693, 477, 979, 281,2041,3528, 643,2042,3644,2621,2782,2261,1031, # 4422 -2335,2134,2298,3529,4367, 367,1249,2552,7493,3530,7494,4368,1283,3325,2004, 240, # 4438 -1762,3326,4369,4370, 836,1069,3128, 474,7495,2148,2525, 268,3531,7496,3188,1521, # 4454 -1284,7497,1658,1546,4116,7498,3532,3533,7499,4117,3327,2684,1685,4118, 961,1673, # 4470 -2622, 190,2005,2200,3762,4371,4372,7500, 570,2497,3645,1490,7501,4373,2623,3260, # 4486 -1956,4374, 584,1514, 396,1045,1944,7502,4375,1967,2444,7503,7504,4376,3910, 619, # 4502 -7505,3129,3261, 
215,2006,2783,2553,3189,4377,3190,4378, 763,4119,3763,4379,7506, # 4518 -7507,1957,1767,2941,3328,3646,1174, 452,1477,4380,3329,3130,7508,2825,1253,2382, # 4534 -2186,1091,2285,4120, 492,7509, 638,1169,1824,2135,1752,3911, 648, 926,1021,1324, # 4550 -4381, 520,4382, 997, 847,1007, 892,4383,3764,2262,1871,3647,7510,2400,1784,4384, # 4566 -1952,2942,3080,3191,1728,4121,2043,3648,4385,2007,1701,3131,1551, 30,2263,4122, # 4582 -7511,2026,4386,3534,7512, 501,7513,4123, 594,3431,2165,1821,3535,3432,3536,3192, # 4598 - 829,2826,4124,7514,1680,3132,1225,4125,7515,3262,4387,4126,3133,2336,7516,4388, # 4614 -4127,7517,3912,3913,7518,1847,2383,2596,3330,7519,4389, 374,3914, 652,4128,4129, # 4630 - 375,1140, 798,7520,7521,7522,2361,4390,2264, 546,1659, 138,3031,2445,4391,7523, # 4646 -2250, 612,1848, 910, 796,3765,1740,1371, 825,3766,3767,7524,2906,2554,7525, 692, # 4662 - 444,3032,2624, 801,4392,4130,7526,1491, 244,1053,3033,4131,4132, 340,7527,3915, # 4678 -1041,2987, 293,1168, 87,1357,7528,1539, 959,7529,2236, 721, 694,4133,3768, 219, # 4694 -1478, 644,1417,3331,2656,1413,1401,1335,1389,3916,7530,7531,2988,2362,3134,1825, # 4710 - 730,1515, 184,2827, 66,4393,7532,1660,2943, 246,3332, 378,1457, 226,3433, 975, # 4726 -3917,2944,1264,3537, 674, 696,7533, 163,7534,1141,2417,2166, 713,3538,3333,4394, # 4742 -3918,7535,7536,1186, 15,7537,1079,1070,7538,1522,3193,3539, 276,1050,2716, 758, # 4758 -1126, 653,2945,3263,7539,2337, 889,3540,3919,3081,2989, 903,1250,4395,3920,3434, # 4774 -3541,1342,1681,1718, 766,3264, 286, 89,2946,3649,7540,1713,7541,2597,3334,2990, # 4790 -7542,2947,2215,3194,2866,7543,4396,2498,2526, 181, 387,1075,3921, 731,2187,3335, # 4806 -7544,3265, 310, 313,3435,2299, 770,4134, 54,3034, 189,4397,3082,3769,3922,7545, # 4822 -1230,1617,1849, 355,3542,4135,4398,3336, 111,4136,3650,1350,3135,3436,3035,4137, # 4838 -2149,3266,3543,7546,2784,3923,3924,2991, 722,2008,7547,1071, 247,1207,2338,2471, # 4854 -1378,4399,2009, 864,1437,1214,4400, 373,3770,1142,2216, 667,4401, 442,2753,2555, # 4870 -3771,3925,1968,4138,3267,1839, 837, 170,1107, 934,1336,1882,7548,7549,2118,4139, # 4886 -2828, 743,1569,7550,4402,4140, 582,2384,1418,3437,7551,1802,7552, 357,1395,1729, # 4902 -3651,3268,2418,1564,2237,7553,3083,3772,1633,4403,1114,2085,4141,1532,7554, 482, # 4918 -2446,4404,7555,7556,1492, 833,1466,7557,2717,3544,1641,2829,7558,1526,1272,3652, # 4934 -4142,1686,1794, 416,2556,1902,1953,1803,7559,3773,2785,3774,1159,2316,7560,2867, # 4950 -4405,1610,1584,3036,2419,2754, 443,3269,1163,3136,7561,7562,3926,7563,4143,2499, # 4966 -3037,4406,3927,3137,2103,1647,3545,2010,1872,4144,7564,4145, 431,3438,7565, 250, # 4982 - 97, 81,4146,7566,1648,1850,1558, 160, 848,7567, 866, 740,1694,7568,2201,2830, # 4998 -3195,4147,4407,3653,1687, 950,2472, 426, 469,3196,3654,3655,3928,7569,7570,1188, # 5014 - 424,1995, 861,3546,4148,3775,2202,2685, 168,1235,3547,4149,7571,2086,1674,4408, # 5030 -3337,3270, 220,2557,1009,7572,3776, 670,2992, 332,1208, 717,7573,7574,3548,2447, # 5046 -3929,3338,7575, 513,7576,1209,2868,3339,3138,4409,1080,7577,7578,7579,7580,2527, # 5062 -3656,3549, 815,1587,3930,3931,7581,3550,3439,3777,1254,4410,1328,3038,1390,3932, # 5078 -1741,3933,3778,3934,7582, 236,3779,2448,3271,7583,7584,3657,3780,1273,3781,4411, # 5094 -7585, 308,7586,4412, 245,4413,1851,2473,1307,2575, 430, 715,2136,2449,7587, 270, # 5110 - 199,2869,3935,7588,3551,2718,1753, 761,1754, 725,1661,1840,4414,3440,3658,7589, # 5126 -7590, 587, 14,3272, 227,2598, 326, 480,2265, 943,2755,3552, 291, 650,1883,7591, # 5142 -1702,1226, 
102,1547, 62,3441, 904,4415,3442,1164,4150,7592,7593,1224,1548,2756, # 5158 - 391, 498,1493,7594,1386,1419,7595,2055,1177,4416, 813, 880,1081,2363, 566,1145, # 5174 -4417,2286,1001,1035,2558,2599,2238, 394,1286,7596,7597,2068,7598, 86,1494,1730, # 5190 -3936, 491,1588, 745, 897,2948, 843,3340,3937,2757,2870,3273,1768, 998,2217,2069, # 5206 - 397,1826,1195,1969,3659,2993,3341, 284,7599,3782,2500,2137,2119,1903,7600,3938, # 5222 -2150,3939,4151,1036,3443,1904, 114,2559,4152, 209,1527,7601,7602,2949,2831,2625, # 5238 -2385,2719,3139, 812,2560,7603,3274,7604,1559, 737,1884,3660,1210, 885, 28,2686, # 5254 -3553,3783,7605,4153,1004,1779,4418,7606, 346,1981,2218,2687,4419,3784,1742, 797, # 5270 -1642,3940,1933,1072,1384,2151, 896,3941,3275,3661,3197,2871,3554,7607,2561,1958, # 5286 -4420,2450,1785,7608,7609,7610,3942,4154,1005,1308,3662,4155,2720,4421,4422,1528, # 5302 -2600, 161,1178,4156,1982, 987,4423,1101,4157, 631,3943,1157,3198,2420,1343,1241, # 5318 -1016,2239,2562, 372, 877,2339,2501,1160, 555,1934, 911,3944,7611, 466,1170, 169, # 5334 -1051,2907,2688,3663,2474,2994,1182,2011,2563,1251,2626,7612, 992,2340,3444,1540, # 5350 -2721,1201,2070,2401,1996,2475,7613,4424, 528,1922,2188,1503,1873,1570,2364,3342, # 5366 -3276,7614, 557,1073,7615,1827,3445,2087,2266,3140,3039,3084, 767,3085,2786,4425, # 5382 -1006,4158,4426,2341,1267,2176,3664,3199, 778,3945,3200,2722,1597,2657,7616,4427, # 5398 -7617,3446,7618,7619,7620,3277,2689,1433,3278, 131, 95,1504,3946, 723,4159,3141, # 5414 -1841,3555,2758,2189,3947,2027,2104,3665,7621,2995,3948,1218,7622,3343,3201,3949, # 5430 -4160,2576, 248,1634,3785, 912,7623,2832,3666,3040,3786, 654, 53,7624,2996,7625, # 5446 -1688,4428, 777,3447,1032,3950,1425,7626, 191, 820,2120,2833, 971,4429, 931,3202, # 5462 - 135, 664, 783,3787,1997, 772,2908,1935,3951,3788,4430,2909,3203, 282,2723, 640, # 5478 -1372,3448,1127, 922, 325,3344,7627,7628, 711,2044,7629,7630,3952,2219,2787,1936, # 5494 -3953,3345,2220,2251,3789,2300,7631,4431,3790,1258,3279,3954,3204,2138,2950,3955, # 5510 -3956,7632,2221, 258,3205,4432, 101,1227,7633,3280,1755,7634,1391,3281,7635,2910, # 5526 -2056, 893,7636,7637,7638,1402,4161,2342,7639,7640,3206,3556,7641,7642, 878,1325, # 5542 -1780,2788,4433, 259,1385,2577, 744,1183,2267,4434,7643,3957,2502,7644, 684,1024, # 5558 -4162,7645, 472,3557,3449,1165,3282,3958,3959, 322,2152, 881, 455,1695,1152,1340, # 5574 - 660, 554,2153,4435,1058,4436,4163, 830,1065,3346,3960,4437,1923,7646,1703,1918, # 5590 -7647, 932,2268, 122,7648,4438, 947, 677,7649,3791,2627, 297,1905,1924,2269,4439, # 5606 -2317,3283,7650,7651,4164,7652,4165, 84,4166, 112, 989,7653, 547,1059,3961, 701, # 5622 -3558,1019,7654,4167,7655,3450, 942, 639, 457,2301,2451, 993,2951, 407, 851, 494, # 5638 -4440,3347, 927,7656,1237,7657,2421,3348, 573,4168, 680, 921,2911,1279,1874, 285, # 5654 - 790,1448,1983, 719,2167,7658,7659,4441,3962,3963,1649,7660,1541, 563,7661,1077, # 5670 -7662,3349,3041,3451, 511,2997,3964,3965,3667,3966,1268,2564,3350,3207,4442,4443, # 5686 -7663, 535,1048,1276,1189,2912,2028,3142,1438,1373,2834,2952,1134,2012,7664,4169, # 5702 -1238,2578,3086,1259,7665, 700,7666,2953,3143,3668,4170,7667,4171,1146,1875,1906, # 5718 -4444,2601,3967, 781,2422, 132,1589, 203, 147, 273,2789,2402, 898,1786,2154,3968, # 5734 -3969,7668,3792,2790,7669,7670,4445,4446,7671,3208,7672,1635,3793, 965,7673,1804, # 5750 -2690,1516,3559,1121,1082,1329,3284,3970,1449,3794, 65,1128,2835,2913,2759,1590, # 5766 -3795,7674,7675, 12,2658, 45, 976,2579,3144,4447, 517,2528,1013,1037,3209,7676, # 5782 
-3796,2836,7677,3797,7678,3452,7679,2602, 614,1998,2318,3798,3087,2724,2628,7680, # 5798 -2580,4172, 599,1269,7681,1810,3669,7682,2691,3088, 759,1060, 489,1805,3351,3285, # 5814 -1358,7683,7684,2386,1387,1215,2629,2252, 490,7685,7686,4173,1759,2387,2343,7687, # 5830 -4448,3799,1907,3971,2630,1806,3210,4449,3453,3286,2760,2344, 874,7688,7689,3454, # 5846 -3670,1858, 91,2914,3671,3042,3800,4450,7690,3145,3972,2659,7691,3455,1202,1403, # 5862 -3801,2954,2529,1517,2503,4451,3456,2504,7692,4452,7693,2692,1885,1495,1731,3973, # 5878 -2365,4453,7694,2029,7695,7696,3974,2693,1216, 237,2581,4174,2319,3975,3802,4454, # 5894 -4455,2694,3560,3457, 445,4456,7697,7698,7699,7700,2761, 61,3976,3672,1822,3977, # 5910 -7701, 687,2045, 935, 925, 405,2660, 703,1096,1859,2725,4457,3978,1876,1367,2695, # 5926 -3352, 918,2105,1781,2476, 334,3287,1611,1093,4458, 564,3146,3458,3673,3353, 945, # 5942 -2631,2057,4459,7702,1925, 872,4175,7703,3459,2696,3089, 349,4176,3674,3979,4460, # 5958 -3803,4177,3675,2155,3980,4461,4462,4178,4463,2403,2046, 782,3981, 400, 251,4179, # 5974 -1624,7704,7705, 277,3676, 299,1265, 476,1191,3804,2121,4180,4181,1109, 205,7706, # 5990 -2582,1000,2156,3561,1860,7707,7708,7709,4464,7710,4465,2565, 107,2477,2157,3982, # 6006 -3460,3147,7711,1533, 541,1301, 158, 753,4182,2872,3562,7712,1696, 370,1088,4183, # 6022 -4466,3563, 579, 327, 440, 162,2240, 269,1937,1374,3461, 968,3043, 56,1396,3090, # 6038 -2106,3288,3354,7713,1926,2158,4467,2998,7714,3564,7715,7716,3677,4468,2478,7717, # 6054 -2791,7718,1650,4469,7719,2603,7720,7721,3983,2661,3355,1149,3356,3984,3805,3985, # 6070 -7722,1076, 49,7723, 951,3211,3289,3290, 450,2837, 920,7724,1811,2792,2366,4184, # 6086 -1908,1138,2367,3806,3462,7725,3212,4470,1909,1147,1518,2423,4471,3807,7726,4472, # 6102 -2388,2604, 260,1795,3213,7727,7728,3808,3291, 708,7729,3565,1704,7730,3566,1351, # 6118 -1618,3357,2999,1886, 944,4185,3358,4186,3044,3359,4187,7731,3678, 422, 413,1714, # 6134 -3292, 500,2058,2345,4188,2479,7732,1344,1910, 954,7733,1668,7734,7735,3986,2404, # 6150 -4189,3567,3809,4190,7736,2302,1318,2505,3091, 133,3092,2873,4473, 629, 31,2838, # 6166 -2697,3810,4474, 850, 949,4475,3987,2955,1732,2088,4191,1496,1852,7737,3988, 620, # 6182 -3214, 981,1242,3679,3360,1619,3680,1643,3293,2139,2452,1970,1719,3463,2168,7738, # 6198 -3215,7739,7740,3361,1828,7741,1277,4476,1565,2047,7742,1636,3568,3093,7743, 869, # 6214 -2839, 655,3811,3812,3094,3989,3000,3813,1310,3569,4477,7744,7745,7746,1733, 558, # 6230 -4478,3681, 335,1549,3045,1756,4192,3682,1945,3464,1829,1291,1192, 470,2726,2107, # 6246 -2793, 913,1054,3990,7747,1027,7748,3046,3991,4479, 982,2662,3362,3148,3465,3216, # 6262 -3217,1946,2794,7749, 571,4480,7750,1830,7751,3570,2583,1523,2424,7752,2089, 984, # 6278 -4481,3683,1959,7753,3684, 852, 923,2795,3466,3685, 969,1519, 999,2048,2320,1705, # 6294 -7754,3095, 615,1662, 151, 597,3992,2405,2321,1049, 275,4482,3686,4193, 568,3687, # 6310 -3571,2480,4194,3688,7755,2425,2270, 409,3218,7756,1566,2874,3467,1002, 769,2840, # 6326 - 194,2090,3149,3689,2222,3294,4195, 628,1505,7757,7758,1763,2177,3001,3993, 521, # 6342 -1161,2584,1787,2203,2406,4483,3994,1625,4196,4197, 412, 42,3096, 464,7759,2632, # 6358 -4484,3363,1760,1571,2875,3468,2530,1219,2204,3814,2633,2140,2368,4485,4486,3295, # 6374 -1651,3364,3572,7760,7761,3573,2481,3469,7762,3690,7763,7764,2271,2091, 460,7765, # 6390 -4487,7766,3002, 962, 588,3574, 289,3219,2634,1116, 52,7767,3047,1796,7768,7769, # 6406 -7770,1467,7771,1598,1143,3691,4198,1984,1734,1067,4488,1280,3365, 465,4489,1572, # 6422 - 
510,7772,1927,2241,1812,1644,3575,7773,4490,3692,7774,7775,2663,1573,1534,7776, # 6438 -7777,4199, 536,1807,1761,3470,3815,3150,2635,7778,7779,7780,4491,3471,2915,1911, # 6454 -2796,7781,3296,1122, 377,3220,7782, 360,7783,7784,4200,1529, 551,7785,2059,3693, # 6470 -1769,2426,7786,2916,4201,3297,3097,2322,2108,2030,4492,1404, 136,1468,1479, 672, # 6486 -1171,3221,2303, 271,3151,7787,2762,7788,2049, 678,2727, 865,1947,4493,7789,2013, # 6502 -3995,2956,7790,2728,2223,1397,3048,3694,4494,4495,1735,2917,3366,3576,7791,3816, # 6518 - 509,2841,2453,2876,3817,7792,7793,3152,3153,4496,4202,2531,4497,2304,1166,1010, # 6534 - 552, 681,1887,7794,7795,2957,2958,3996,1287,1596,1861,3154, 358, 453, 736, 175, # 6550 - 478,1117, 905,1167,1097,7796,1853,1530,7797,1706,7798,2178,3472,2287,3695,3473, # 6566 -3577,4203,2092,4204,7799,3367,1193,2482,4205,1458,2190,2205,1862,1888,1421,3298, # 6582 -2918,3049,2179,3474, 595,2122,7800,3997,7801,7802,4206,1707,2636, 223,3696,1359, # 6598 - 751,3098, 183,3475,7803,2797,3003, 419,2369, 633, 704,3818,2389, 241,7804,7805, # 6614 -7806, 838,3004,3697,2272,2763,2454,3819,1938,2050,3998,1309,3099,2242,1181,7807, # 6630 -1136,2206,3820,2370,1446,4207,2305,4498,7808,7809,4208,1055,2605, 484,3698,7810, # 6646 -3999, 625,4209,2273,3368,1499,4210,4000,7811,4001,4211,3222,2274,2275,3476,7812, # 6662 -7813,2764, 808,2606,3699,3369,4002,4212,3100,2532, 526,3370,3821,4213, 955,7814, # 6678 -1620,4214,2637,2427,7815,1429,3700,1669,1831, 994, 928,7816,3578,1260,7817,7818, # 6694 -7819,1948,2288, 741,2919,1626,4215,2729,2455, 867,1184, 362,3371,1392,7820,7821, # 6710 -4003,4216,1770,1736,3223,2920,4499,4500,1928,2698,1459,1158,7822,3050,3372,2877, # 6726 -1292,1929,2506,2842,3701,1985,1187,2071,2014,2607,4217,7823,2566,2507,2169,3702, # 6742 -2483,3299,7824,3703,4501,7825,7826, 666,1003,3005,1022,3579,4218,7827,4502,1813, # 6758 -2253, 574,3822,1603, 295,1535, 705,3823,4219, 283, 858, 417,7828,7829,3224,4503, # 6774 -4504,3051,1220,1889,1046,2276,2456,4004,1393,1599, 689,2567, 388,4220,7830,2484, # 6790 - 802,7831,2798,3824,2060,1405,2254,7832,4505,3825,2109,1052,1345,3225,1585,7833, # 6806 - 809,7834,7835,7836, 575,2730,3477, 956,1552,1469,1144,2323,7837,2324,1560,2457, # 6822 -3580,3226,4005, 616,2207,3155,2180,2289,7838,1832,7839,3478,4506,7840,1319,3704, # 6838 -3705,1211,3581,1023,3227,1293,2799,7841,7842,7843,3826, 607,2306,3827, 762,2878, # 6854 -1439,4221,1360,7844,1485,3052,7845,4507,1038,4222,1450,2061,2638,4223,1379,4508, # 6870 -2585,7846,7847,4224,1352,1414,2325,2921,1172,7848,7849,3828,3829,7850,1797,1451, # 6886 -7851,7852,7853,7854,2922,4006,4007,2485,2346, 411,4008,4009,3582,3300,3101,4509, # 6902 -1561,2664,1452,4010,1375,7855,7856, 47,2959, 316,7857,1406,1591,2923,3156,7858, # 6918 -1025,2141,3102,3157, 354,2731, 884,2224,4225,2407, 508,3706, 726,3583, 996,2428, # 6934 -3584, 729,7859, 392,2191,1453,4011,4510,3707,7860,7861,2458,3585,2608,1675,2800, # 6950 - 919,2347,2960,2348,1270,4511,4012, 73,7862,7863, 647,7864,3228,2843,2255,1550, # 6966 -1346,3006,7865,1332, 883,3479,7866,7867,7868,7869,3301,2765,7870,1212, 831,1347, # 6982 -4226,4512,2326,3830,1863,3053, 720,3831,4513,4514,3832,7871,4227,7872,7873,4515, # 6998 -7874,7875,1798,4516,3708,2609,4517,3586,1645,2371,7876,7877,2924, 669,2208,2665, # 7014 -2429,7878,2879,7879,7880,1028,3229,7881,4228,2408,7882,2256,1353,7883,7884,4518, # 7030 -3158, 518,7885,4013,7886,4229,1960,7887,2142,4230,7888,7889,3007,2349,2350,3833, # 7046 - 516,1833,1454,4014,2699,4231,4519,2225,2610,1971,1129,3587,7890,2766,7891,2961, # 
7062 -1422, 577,1470,3008,1524,3373,7892,7893, 432,4232,3054,3480,7894,2586,1455,2508, # 7078 -2226,1972,1175,7895,1020,2732,4015,3481,4520,7896,2733,7897,1743,1361,3055,3482, # 7094 -2639,4016,4233,4521,2290, 895, 924,4234,2170, 331,2243,3056, 166,1627,3057,1098, # 7110 -7898,1232,2880,2227,3374,4522, 657, 403,1196,2372, 542,3709,3375,1600,4235,3483, # 7126 -7899,4523,2767,3230, 576, 530,1362,7900,4524,2533,2666,3710,4017,7901, 842,3834, # 7142 -7902,2801,2031,1014,4018, 213,2700,3376, 665, 621,4236,7903,3711,2925,2430,7904, # 7158 -2431,3302,3588,3377,7905,4237,2534,4238,4525,3589,1682,4239,3484,1380,7906, 724, # 7174 -2277, 600,1670,7907,1337,1233,4526,3103,2244,7908,1621,4527,7909, 651,4240,7910, # 7190 -1612,4241,2611,7911,2844,7912,2734,2307,3058,7913, 716,2459,3059, 174,1255,2701, # 7206 -4019,3590, 548,1320,1398, 728,4020,1574,7914,1890,1197,3060,4021,7915,3061,3062, # 7222 -3712,3591,3713, 747,7916, 635,4242,4528,7917,7918,7919,4243,7920,7921,4529,7922, # 7238 -3378,4530,2432, 451,7923,3714,2535,2072,4244,2735,4245,4022,7924,1764,4531,7925, # 7254 -4246, 350,7926,2278,2390,2486,7927,4247,4023,2245,1434,4024, 488,4532, 458,4248, # 7270 -4025,3715, 771,1330,2391,3835,2568,3159,2159,2409,1553,2667,3160,4249,7928,2487, # 7286 -2881,2612,1720,2702,4250,3379,4533,7929,2536,4251,7930,3231,4252,2768,7931,2015, # 7302 -2736,7932,1155,1017,3716,3836,7933,3303,2308, 201,1864,4253,1430,7934,4026,7935, # 7318 -7936,7937,7938,7939,4254,1604,7940, 414,1865, 371,2587,4534,4535,3485,2016,3104, # 7334 -4536,1708, 960,4255, 887, 389,2171,1536,1663,1721,7941,2228,4027,2351,2926,1580, # 7350 -7942,7943,7944,1744,7945,2537,4537,4538,7946,4539,7947,2073,7948,7949,3592,3380, # 7366 -2882,4256,7950,4257,2640,3381,2802, 673,2703,2460, 709,3486,4028,3593,4258,7951, # 7382 -1148, 502, 634,7952,7953,1204,4540,3594,1575,4541,2613,3717,7954,3718,3105, 948, # 7398 -3232, 121,1745,3837,1110,7955,4259,3063,2509,3009,4029,3719,1151,1771,3838,1488, # 7414 -4030,1986,7956,2433,3487,7957,7958,2093,7959,4260,3839,1213,1407,2803, 531,2737, # 7430 -2538,3233,1011,1537,7960,2769,4261,3106,1061,7961,3720,3721,1866,2883,7962,2017, # 7446 - 120,4262,4263,2062,3595,3234,2309,3840,2668,3382,1954,4542,7963,7964,3488,1047, # 7462 -2704,1266,7965,1368,4543,2845, 649,3383,3841,2539,2738,1102,2846,2669,7966,7967, # 7478 -1999,7968,1111,3596,2962,7969,2488,3842,3597,2804,1854,3384,3722,7970,7971,3385, # 7494 -2410,2884,3304,3235,3598,7972,2569,7973,3599,2805,4031,1460, 856,7974,3600,7975, # 7510 -2885,2963,7976,2886,3843,7977,4264, 632,2510, 875,3844,1697,3845,2291,7978,7979, # 7526 -4544,3010,1239, 580,4545,4265,7980, 914, 936,2074,1190,4032,1039,2123,7981,7982, # 7542 -7983,3386,1473,7984,1354,4266,3846,7985,2172,3064,4033, 915,3305,4267,4268,3306, # 7558 -1605,1834,7986,2739, 398,3601,4269,3847,4034, 328,1912,2847,4035,3848,1331,4270, # 7574 -3011, 937,4271,7987,3602,4036,4037,3387,2160,4546,3388, 524, 742, 538,3065,1012, # 7590 -7988,7989,3849,2461,7990, 658,1103, 225,3850,7991,7992,4547,7993,4548,7994,3236, # 7606 -1243,7995,4038, 963,2246,4549,7996,2705,3603,3161,7997,7998,2588,2327,7999,4550, # 7622 -8000,8001,8002,3489,3307, 957,3389,2540,2032,1930,2927,2462, 870,2018,3604,1746, # 7638 -2770,2771,2434,2463,8003,3851,8004,3723,3107,3724,3490,3390,3725,8005,1179,3066, # 7654 -8006,3162,2373,4272,3726,2541,3163,3108,2740,4039,8007,3391,1556,2542,2292, 977, # 7670 -2887,2033,4040,1205,3392,8008,1765,3393,3164,2124,1271,1689, 714,4551,3491,8009, # 7686 -2328,3852, 533,4273,3605,2181, 
617,8010,2464,3308,3492,2310,8011,8012,3165,8013, # 7702 -8014,3853,1987, 618, 427,2641,3493,3394,8015,8016,1244,1690,8017,2806,4274,4552, # 7718 -8018,3494,8019,8020,2279,1576, 473,3606,4275,3395, 972,8021,3607,8022,3067,8023, # 7734 -8024,4553,4554,8025,3727,4041,4042,8026, 153,4555, 356,8027,1891,2888,4276,2143, # 7750 - 408, 803,2352,8028,3854,8029,4277,1646,2570,2511,4556,4557,3855,8030,3856,4278, # 7766 -8031,2411,3396, 752,8032,8033,1961,2964,8034, 746,3012,2465,8035,4279,3728, 698, # 7782 -4558,1892,4280,3608,2543,4559,3609,3857,8036,3166,3397,8037,1823,1302,4043,2706, # 7798 -3858,1973,4281,8038,4282,3167, 823,1303,1288,1236,2848,3495,4044,3398, 774,3859, # 7814 -8039,1581,4560,1304,2849,3860,4561,8040,2435,2161,1083,3237,4283,4045,4284, 344, # 7830 -1173, 288,2311, 454,1683,8041,8042,1461,4562,4046,2589,8043,8044,4563, 985, 894, # 7846 -8045,3399,3168,8046,1913,2928,3729,1988,8047,2110,1974,8048,4047,8049,2571,1194, # 7862 - 425,8050,4564,3169,1245,3730,4285,8051,8052,2850,8053, 636,4565,1855,3861, 760, # 7878 -1799,8054,4286,2209,1508,4566,4048,1893,1684,2293,8055,8056,8057,4287,4288,2210, # 7894 - 479,8058,8059, 832,8060,4049,2489,8061,2965,2490,3731, 990,3109, 627,1814,2642, # 7910 -4289,1582,4290,2125,2111,3496,4567,8062, 799,4291,3170,8063,4568,2112,1737,3013, # 7926 -1018, 543, 754,4292,3309,1676,4569,4570,4050,8064,1489,8065,3497,8066,2614,2889, # 7942 -4051,8067,8068,2966,8069,8070,8071,8072,3171,4571,4572,2182,1722,8073,3238,3239, # 7958 -1842,3610,1715, 481, 365,1975,1856,8074,8075,1962,2491,4573,8076,2126,3611,3240, # 7974 - 433,1894,2063,2075,8077, 602,2741,8078,8079,8080,8081,8082,3014,1628,3400,8083, # 7990 -3172,4574,4052,2890,4575,2512,8084,2544,2772,8085,8086,8087,3310,4576,2891,8088, # 8006 -4577,8089,2851,4578,4579,1221,2967,4053,2513,8090,8091,8092,1867,1989,8093,8094, # 8022 -8095,1895,8096,8097,4580,1896,4054, 318,8098,2094,4055,4293,8099,8100, 485,8101, # 8038 - 938,3862, 553,2670, 116,8102,3863,3612,8103,3498,2671,2773,3401,3311,2807,8104, # 8054 -3613,2929,4056,1747,2930,2968,8105,8106, 207,8107,8108,2672,4581,2514,8109,3015, # 8070 - 890,3614,3864,8110,1877,3732,3402,8111,2183,2353,3403,1652,8112,8113,8114, 941, # 8086 -2294, 208,3499,4057,2019, 330,4294,3865,2892,2492,3733,4295,8115,8116,8117,8118, # 8102 -) - diff --git a/venv/lib/python3.6/site-packages/chardet/euctwprober.py b/venv/lib/python3.6/site-packages/chardet/euctwprober.py deleted file mode 100644 index 35669cc..0000000 --- a/venv/lib/python3.6/site-packages/chardet/euctwprober.py +++ /dev/null @@ -1,46 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is mozilla.org code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
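The `N --> x` lines in the EUC-TW table header above give the share of running text covered by the N most frequent characters, and both quoted ratios come from the 512-character cutoff. A quick, purely illustrative check of that arithmetic (the 5401 figure is the character count stated in the header, not something defined in this patch):

    coverage_512 = 0.74851                       # "512 --> 0.74851" in the header above
    observed_chars = 5401                        # distinct characters in the source corpus, per the header

    ideal_ratio = coverage_512 / (1 - coverage_512)    # ~2.98: frequent vs. rare chars in real text
    random_ratio = 512 / (observed_chars - 512)         # ~0.105: expected for random byte pairs
    print(round(ideal_ratio, 2), round(random_ratio, 3))

EUCTW_TYPICAL_DISTRIBUTION_RATIO is then set to 0.75, roughly 25% of the ideal figure, so genuine EUC-TW text clears the threshold comfortably while random data does not.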
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .mbcharsetprober import MultiByteCharSetProber -from .codingstatemachine import CodingStateMachine -from .chardistribution import EUCTWDistributionAnalysis -from .mbcssm import EUCTW_SM_MODEL - -class EUCTWProber(MultiByteCharSetProber): - def __init__(self): - super(EUCTWProber, self).__init__() - self.coding_sm = CodingStateMachine(EUCTW_SM_MODEL) - self.distribution_analyzer = EUCTWDistributionAnalysis() - self.reset() - - @property - def charset_name(self): - return "EUC-TW" - - @property - def language(self): - return "Taiwan" diff --git a/venv/lib/python3.6/site-packages/chardet/gb2312freq.py b/venv/lib/python3.6/site-packages/chardet/gb2312freq.py deleted file mode 100644 index 697837b..0000000 --- a/venv/lib/python3.6/site-packages/chardet/gb2312freq.py +++ /dev/null @@ -1,283 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Communicator client code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
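In normal use the prober removed above runs behind chardet.detect() / UniversalDetector rather than being instantiated by hand, but it can also be exercised directly against the installed chardet package. A minimal sketch; the sample file name is a placeholder, not part of this repository:

    import chardet
    from chardet.euctwprober import EUCTWProber

    raw = open("sample-euctw.txt", "rb").read()   # placeholder input, assumed to contain EUC-TW bytes

    # High-level API most callers use:
    print(chardet.detect(raw))                    # {'encoding': ..., 'confidence': ..., 'language': ...}

    # Driving the single prober directly:
    prober = EUCTWProber()
    prober.feed(raw)
    print(prober.charset_name, prober.get_confidence())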
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -# GB2312 most frequently used character table -# -# Char to FreqOrder table , from hz6763 - -# 512 --> 0.79 -- 0.79 -# 1024 --> 0.92 -- 0.13 -# 2048 --> 0.98 -- 0.06 -# 6768 --> 1.00 -- 0.02 -# -# Ideal Distribution Ratio = 0.79135/(1-0.79135) = 3.79 -# Random Distribution Ration = 512 / (3755 - 512) = 0.157 -# -# Typical Distribution Ratio about 25% of Ideal one, still much higher that RDR - -GB2312_TYPICAL_DISTRIBUTION_RATIO = 0.9 - -GB2312_TABLE_SIZE = 3760 - -GB2312_CHAR_TO_FREQ_ORDER = ( -1671, 749,1443,2364,3924,3807,2330,3921,1704,3463,2691,1511,1515, 572,3191,2205, -2361, 224,2558, 479,1711, 963,3162, 440,4060,1905,2966,2947,3580,2647,3961,3842, -2204, 869,4207, 970,2678,5626,2944,2956,1479,4048, 514,3595, 588,1346,2820,3409, - 249,4088,1746,1873,2047,1774, 581,1813, 358,1174,3590,1014,1561,4844,2245, 670, -1636,3112, 889,1286, 953, 556,2327,3060,1290,3141, 613, 185,3477,1367, 850,3820, -1715,2428,2642,2303,2732,3041,2562,2648,3566,3946,1349, 388,3098,2091,1360,3585, - 152,1687,1539, 738,1559, 59,1232,2925,2267,1388,1249,1741,1679,2960, 151,1566, -1125,1352,4271, 924,4296, 385,3166,4459, 310,1245,2850, 70,3285,2729,3534,3575, -2398,3298,3466,1960,2265, 217,3647, 864,1909,2084,4401,2773,1010,3269,5152, 853, -3051,3121,1244,4251,1895, 364,1499,1540,2313,1180,3655,2268, 562, 715,2417,3061, - 544, 336,3768,2380,1752,4075, 950, 280,2425,4382, 183,2759,3272, 333,4297,2155, -1688,2356,1444,1039,4540, 736,1177,3349,2443,2368,2144,2225, 565, 196,1482,3406, - 927,1335,4147, 692, 878,1311,1653,3911,3622,1378,4200,1840,2969,3149,2126,1816, -2534,1546,2393,2760, 737,2494, 13, 447, 245,2747, 38,2765,2129,2589,1079, 606, - 360, 471,3755,2890, 404, 848, 699,1785,1236, 370,2221,1023,3746,2074,2026,2023, -2388,1581,2119, 812,1141,3091,2536,1519, 804,2053, 406,1596,1090, 784, 548,4414, -1806,2264,2936,1100, 343,4114,5096, 622,3358, 743,3668,1510,1626,5020,3567,2513, -3195,4115,5627,2489,2991, 24,2065,2697,1087,2719, 48,1634, 315, 68, 985,2052, - 198,2239,1347,1107,1439, 597,2366,2172, 871,3307, 919,2487,2790,1867, 236,2570, -1413,3794, 906,3365,3381,1701,1982,1818,1524,2924,1205, 616,2586,2072,2004, 575, - 253,3099, 32,1365,1182, 197,1714,2454,1201, 554,3388,3224,2748, 756,2587, 250, -2567,1507,1517,3529,1922,2761,2337,3416,1961,1677,2452,2238,3153, 615, 911,1506, -1474,2495,1265,1906,2749,3756,3280,2161, 898,2714,1759,3450,2243,2444, 563, 26, -3286,2266,3769,3344,2707,3677, 611,1402, 531,1028,2871,4548,1375, 261,2948, 835, -1190,4134, 353, 840,2684,1900,3082,1435,2109,1207,1674, 329,1872,2781,4055,2686, -2104, 608,3318,2423,2957,2768,1108,3739,3512,3271,3985,2203,1771,3520,1418,2054, -1681,1153, 225,1627,2929, 162,2050,2511,3687,1954, 124,1859,2431,1684,3032,2894, - 585,4805,3969,2869,2704,2088,2032,2095,3656,2635,4362,2209, 256, 518,2042,2105, -3777,3657, 643,2298,1148,1779, 190, 989,3544, 414, 11,2135,2063,2979,1471, 403, -3678, 126, 770,1563, 671,2499,3216,2877, 600,1179, 307,2805,4937,1268,1297,2694, - 252,4032,1448,1494,1331,1394, 127,2256, 222,1647,1035,1481,3056,1915,1048, 873, -3651, 210, 33,1608,2516, 200,1520, 415, 102, 0,3389,1287, 817, 91,3299,2940, - 836,1814, 549,2197,1396,1669,2987,3582,2297,2848,4528,1070, 687, 20,1819, 121, -1552,1364,1461,1968,2617,3540,2824,2083, 177, 948,4938,2291, 
110,4549,2066, 648, -3359,1755,2110,2114,4642,4845,1693,3937,3308,1257,1869,2123, 208,1804,3159,2992, -2531,2549,3361,2418,1350,2347,2800,2568,1291,2036,2680, 72, 842,1990, 212,1233, -1154,1586, 75,2027,3410,4900,1823,1337,2710,2676, 728,2810,1522,3026,4995, 157, - 755,1050,4022, 710, 785,1936,2194,2085,1406,2777,2400, 150,1250,4049,1206, 807, -1910, 534, 529,3309,1721,1660, 274, 39,2827, 661,2670,1578, 925,3248,3815,1094, -4278,4901,4252, 41,1150,3747,2572,2227,4501,3658,4902,3813,3357,3617,2884,2258, - 887, 538,4187,3199,1294,2439,3042,2329,2343,2497,1255, 107, 543,1527, 521,3478, -3568, 194,5062, 15, 961,3870,1241,1192,2664, 66,5215,3260,2111,1295,1127,2152, -3805,4135, 901,1164,1976, 398,1278, 530,1460, 748, 904,1054,1966,1426, 53,2909, - 509, 523,2279,1534, 536,1019, 239,1685, 460,2353, 673,1065,2401,3600,4298,2272, -1272,2363, 284,1753,3679,4064,1695, 81, 815,2677,2757,2731,1386, 859, 500,4221, -2190,2566, 757,1006,2519,2068,1166,1455, 337,2654,3203,1863,1682,1914,3025,1252, -1409,1366, 847, 714,2834,2038,3209, 964,2970,1901, 885,2553,1078,1756,3049, 301, -1572,3326, 688,2130,1996,2429,1805,1648,2930,3421,2750,3652,3088, 262,1158,1254, - 389,1641,1812, 526,1719, 923,2073,1073,1902, 468, 489,4625,1140, 857,2375,3070, -3319,2863, 380, 116,1328,2693,1161,2244, 273,1212,1884,2769,3011,1775,1142, 461, -3066,1200,2147,2212, 790, 702,2695,4222,1601,1058, 434,2338,5153,3640, 67,2360, -4099,2502, 618,3472,1329, 416,1132, 830,2782,1807,2653,3211,3510,1662, 192,2124, - 296,3979,1739,1611,3684, 23, 118, 324, 446,1239,1225, 293,2520,3814,3795,2535, -3116, 17,1074, 467,2692,2201, 387,2922, 45,1326,3055,1645,3659,2817, 958, 243, -1903,2320,1339,2825,1784,3289, 356, 576, 865,2315,2381,3377,3916,1088,3122,1713, -1655, 935, 628,4689,1034,1327, 441, 800, 720, 894,1979,2183,1528,5289,2702,1071, -4046,3572,2399,1571,3281, 79, 761,1103, 327, 134, 758,1899,1371,1615, 879, 442, - 215,2605,2579, 173,2048,2485,1057,2975,3317,1097,2253,3801,4263,1403,1650,2946, - 814,4968,3487,1548,2644,1567,1285, 2, 295,2636, 97, 946,3576, 832, 141,4257, -3273, 760,3821,3521,3156,2607, 949,1024,1733,1516,1803,1920,2125,2283,2665,3180, -1501,2064,3560,2171,1592, 803,3518,1416, 732,3897,4258,1363,1362,2458, 119,1427, - 602,1525,2608,1605,1639,3175, 694,3064, 10, 465, 76,2000,4846,4208, 444,3781, -1619,3353,2206,1273,3796, 740,2483, 320,1723,2377,3660,2619,1359,1137,1762,1724, -2345,2842,1850,1862, 912, 821,1866, 612,2625,1735,2573,3369,1093, 844, 89, 937, - 930,1424,3564,2413,2972,1004,3046,3019,2011, 711,3171,1452,4178, 428, 801,1943, - 432, 445,2811, 206,4136,1472, 730, 349, 73, 397,2802,2547, 998,1637,1167, 789, - 396,3217, 154,1218, 716,1120,1780,2819,4826,1931,3334,3762,2139,1215,2627, 552, -3664,3628,3232,1405,2383,3111,1356,2652,3577,3320,3101,1703, 640,1045,1370,1246, -4996, 371,1575,2436,1621,2210, 984,4033,1734,2638, 16,4529, 663,2755,3255,1451, -3917,2257,1253,1955,2234,1263,2951, 214,1229, 617, 485, 359,1831,1969, 473,2310, - 750,2058, 165, 80,2864,2419, 361,4344,2416,2479,1134, 796,3726,1266,2943, 860, -2715, 938, 390,2734,1313,1384, 248, 202, 877,1064,2854, 522,3907, 279,1602, 297, -2357, 395,3740, 137,2075, 944,4089,2584,1267,3802, 62,1533,2285, 178, 176, 780, -2440, 201,3707, 590, 478,1560,4354,2117,1075, 30, 74,4643,4004,1635,1441,2745, - 776,2596, 238,1077,1692,1912,2844, 605, 499,1742,3947, 241,3053, 980,1749, 936, -2640,4511,2582, 515,1543,2162,5322,2892,2993, 890,2148,1924, 665,1827,3581,1032, - 968,3163, 339,1044,1896, 270, 583,1791,1720,4367,1194,3488,3669, 43,2523,1657, - 163,2167, 290,1209,1622,3378, 
550, 634,2508,2510, 695,2634,2384,2512,1476,1414, - 220,1469,2341,2138,2852,3183,2900,4939,2865,3502,1211,3680, 854,3227,1299,2976, -3172, 186,2998,1459, 443,1067,3251,1495, 321,1932,3054, 909, 753,1410,1828, 436, -2441,1119,1587,3164,2186,1258, 227, 231,1425,1890,3200,3942, 247, 959, 725,5254, -2741, 577,2158,2079, 929, 120, 174, 838,2813, 591,1115, 417,2024, 40,3240,1536, -1037, 291,4151,2354, 632,1298,2406,2500,3535,1825,1846,3451, 205,1171, 345,4238, - 18,1163, 811, 685,2208,1217, 425,1312,1508,1175,4308,2552,1033, 587,1381,3059, -2984,3482, 340,1316,4023,3972, 792,3176, 519, 777,4690, 918, 933,4130,2981,3741, - 90,3360,2911,2200,5184,4550, 609,3079,2030, 272,3379,2736, 363,3881,1130,1447, - 286, 779, 357,1169,3350,3137,1630,1220,2687,2391, 747,1277,3688,2618,2682,2601, -1156,3196,5290,4034,3102,1689,3596,3128, 874, 219,2783, 798, 508,1843,2461, 269, -1658,1776,1392,1913,2983,3287,2866,2159,2372, 829,4076, 46,4253,2873,1889,1894, - 915,1834,1631,2181,2318, 298, 664,2818,3555,2735, 954,3228,3117, 527,3511,2173, - 681,2712,3033,2247,2346,3467,1652, 155,2164,3382, 113,1994, 450, 899, 494, 994, -1237,2958,1875,2336,1926,3727, 545,1577,1550, 633,3473, 204,1305,3072,2410,1956, -2471, 707,2134, 841,2195,2196,2663,3843,1026,4940, 990,3252,4997, 368,1092, 437, -3212,3258,1933,1829, 675,2977,2893, 412, 943,3723,4644,3294,3283,2230,2373,5154, -2389,2241,2661,2323,1404,2524, 593, 787, 677,3008,1275,2059, 438,2709,2609,2240, -2269,2246,1446, 36,1568,1373,3892,1574,2301,1456,3962, 693,2276,5216,2035,1143, -2720,1919,1797,1811,2763,4137,2597,1830,1699,1488,1198,2090, 424,1694, 312,3634, -3390,4179,3335,2252,1214, 561,1059,3243,2295,2561, 975,5155,2321,2751,3772, 472, -1537,3282,3398,1047,2077,2348,2878,1323,3340,3076, 690,2906, 51, 369, 170,3541, -1060,2187,2688,3670,2541,1083,1683, 928,3918, 459, 109,4427, 599,3744,4286, 143, -2101,2730,2490, 82,1588,3036,2121, 281,1860, 477,4035,1238,2812,3020,2716,3312, -1530,2188,2055,1317, 843, 636,1808,1173,3495, 649, 181,1002, 147,3641,1159,2414, -3750,2289,2795, 813,3123,2610,1136,4368, 5,3391,4541,2174, 420, 429,1728, 754, -1228,2115,2219, 347,2223,2733, 735,1518,3003,2355,3134,1764,3948,3329,1888,2424, -1001,1234,1972,3321,3363,1672,1021,1450,1584, 226, 765, 655,2526,3404,3244,2302, -3665, 731, 594,2184, 319,1576, 621, 658,2656,4299,2099,3864,1279,2071,2598,2739, - 795,3086,3699,3908,1707,2352,2402,1382,3136,2475,1465,4847,3496,3865,1085,3004, -2591,1084, 213,2287,1963,3565,2250, 822, 793,4574,3187,1772,1789,3050, 595,1484, -1959,2770,1080,2650, 456, 422,2996, 940,3322,4328,4345,3092,2742, 965,2784, 739, -4124, 952,1358,2498,2949,2565, 332,2698,2378, 660,2260,2473,4194,3856,2919, 535, -1260,2651,1208,1428,1300,1949,1303,2942, 433,2455,2450,1251,1946, 614,1269, 641, -1306,1810,2737,3078,2912, 564,2365,1419,1415,1497,4460,2367,2185,1379,3005,1307, -3218,2175,1897,3063, 682,1157,4040,4005,1712,1160,1941,1399, 394, 402,2952,1573, -1151,2986,2404, 862, 299,2033,1489,3006, 346, 171,2886,3401,1726,2932, 168,2533, - 47,2507,1030,3735,1145,3370,1395,1318,1579,3609,4560,2857,4116,1457,2529,1965, - 504,1036,2690,2988,2405, 745,5871, 849,2397,2056,3081, 863,2359,3857,2096, 99, -1397,1769,2300,4428,1643,3455,1978,1757,3718,1440, 35,4879,3742,1296,4228,2280, - 160,5063,1599,2013, 166, 520,3479,1646,3345,3012, 490,1937,1545,1264,2182,2505, -1096,1188,1369,1436,2421,1667,2792,2460,1270,2122, 727,3167,2143, 806,1706,1012, -1800,3037, 960,2218,1882, 805, 139,2456,1139,1521, 851,1052,3093,3089, 342,2039, - 744,5097,1468,1502,1585,2087, 223, 939, 326,2140,2577, 
892,2481,1623,4077, 982, -3708, 135,2131, 87,2503,3114,2326,1106, 876,1616, 547,2997,2831,2093,3441,4530, -4314, 9,3256,4229,4148, 659,1462,1986,1710,2046,2913,2231,4090,4880,5255,3392, -3274,1368,3689,4645,1477, 705,3384,3635,1068,1529,2941,1458,3782,1509, 100,1656, -2548, 718,2339, 408,1590,2780,3548,1838,4117,3719,1345,3530, 717,3442,2778,3220, -2898,1892,4590,3614,3371,2043,1998,1224,3483, 891, 635, 584,2559,3355, 733,1766, -1729,1172,3789,1891,2307, 781,2982,2271,1957,1580,5773,2633,2005,4195,3097,1535, -3213,1189,1934,5693,3262, 586,3118,1324,1598, 517,1564,2217,1868,1893,4445,3728, -2703,3139,1526,1787,1992,3882,2875,1549,1199,1056,2224,1904,2711,5098,4287, 338, -1993,3129,3489,2689,1809,2815,1997, 957,1855,3898,2550,3275,3057,1105,1319, 627, -1505,1911,1883,3526, 698,3629,3456,1833,1431, 746, 77,1261,2017,2296,1977,1885, - 125,1334,1600, 525,1798,1109,2222,1470,1945, 559,2236,1186,3443,2476,1929,1411, -2411,3135,1777,3372,2621,1841,1613,3229, 668,1430,1839,2643,2916, 195,1989,2671, -2358,1387, 629,3205,2293,5256,4439, 123,1310, 888,1879,4300,3021,3605,1003,1162, -3192,2910,2010, 140,2395,2859, 55,1082,2012,2901, 662, 419,2081,1438, 680,2774, -4654,3912,1620,1731,1625,5035,4065,2328, 512,1344, 802,5443,2163,2311,2537, 524, -3399, 98,1155,2103,1918,2606,3925,2816,1393,2465,1504,3773,2177,3963,1478,4346, - 180,1113,4655,3461,2028,1698, 833,2696,1235,1322,1594,4408,3623,3013,3225,2040, -3022, 541,2881, 607,3632,2029,1665,1219, 639,1385,1686,1099,2803,3231,1938,3188, -2858, 427, 676,2772,1168,2025, 454,3253,2486,3556, 230,1950, 580, 791,1991,1280, -1086,1974,2034, 630, 257,3338,2788,4903,1017, 86,4790, 966,2789,1995,1696,1131, - 259,3095,4188,1308, 179,1463,5257, 289,4107,1248, 42,3413,1725,2288, 896,1947, - 774,4474,4254, 604,3430,4264, 392,2514,2588, 452, 237,1408,3018, 988,4531,1970, -3034,3310, 540,2370,1562,1288,2990, 502,4765,1147, 4,1853,2708, 207, 294,2814, -4078,2902,2509, 684, 34,3105,3532,2551, 644, 709,2801,2344, 573,1727,3573,3557, -2021,1081,3100,4315,2100,3681, 199,2263,1837,2385, 146,3484,1195,2776,3949, 997, -1939,3973,1008,1091,1202,1962,1847,1149,4209,5444,1076, 493, 117,5400,2521, 972, -1490,2934,1796,4542,2374,1512,2933,2657, 413,2888,1135,2762,2314,2156,1355,2369, - 766,2007,2527,2170,3124,2491,2593,2632,4757,2437, 234,3125,3591,1898,1750,1376, -1942,3468,3138, 570,2127,2145,3276,4131, 962, 132,1445,4196, 19, 941,3624,3480, -3366,1973,1374,4461,3431,2629, 283,2415,2275, 808,2887,3620,2112,2563,1353,3610, - 955,1089,3103,1053, 96, 88,4097, 823,3808,1583, 399, 292,4091,3313, 421,1128, - 642,4006, 903,2539,1877,2082, 596, 29,4066,1790, 722,2157, 130, 995,1569, 769, -1485, 464, 513,2213, 288,1923,1101,2453,4316, 133, 486,2445, 50, 625, 487,2207, - 57, 423, 481,2962, 159,3729,1558, 491, 303, 482, 501, 240,2837, 112,3648,2392, -1783, 362, 8,3433,3422, 610,2793,3277,1390,1284,1654, 21,3823, 734, 367, 623, - 193, 287, 374,1009,1483, 816, 476, 313,2255,2340,1262,2150,2899,1146,2581, 782, -2116,1659,2018,1880, 255,3586,3314,1110,2867,2137,2564, 986,2767,5185,2006, 650, - 158, 926, 762, 881,3157,2717,2362,3587, 306,3690,3245,1542,3077,2427,1691,2478, -2118,2985,3490,2438, 539,2305, 983, 129,1754, 355,4201,2386, 827,2923, 104,1773, -2838,2771, 411,2905,3919, 376, 767, 122,1114, 828,2422,1817,3506, 266,3460,1007, -1609,4998, 945,2612,4429,2274, 726,1247,1964,2914,2199,2070,4002,4108, 657,3323, -1422, 579, 455,2764,4737,1222,2895,1670, 824,1223,1487,2525, 558, 861,3080, 598, -2659,2515,1967, 752,2583,2376,2214,4180, 977, 704,2464,4999,2622,4109,1210,2961, - 819,1541, 142,2284, 
44, 418, 457,1126,3730,4347,4626,1644,1876,3671,1864, 302, -1063,5694, 624, 723,1984,3745,1314,1676,2488,1610,1449,3558,3569,2166,2098, 409, -1011,2325,3704,2306, 818,1732,1383,1824,1844,3757, 999,2705,3497,1216,1423,2683, -2426,2954,2501,2726,2229,1475,2554,5064,1971,1794,1666,2014,1343, 783, 724, 191, -2434,1354,2220,5065,1763,2752,2472,4152, 131, 175,2885,3434, 92,1466,4920,2616, -3871,3872,3866, 128,1551,1632, 669,1854,3682,4691,4125,1230, 188,2973,3290,1302, -1213, 560,3266, 917, 763,3909,3249,1760, 868,1958, 764,1782,2097, 145,2277,3774, -4462, 64,1491,3062, 971,2132,3606,2442, 221,1226,1617, 218, 323,1185,3207,3147, - 571, 619,1473,1005,1744,2281, 449,1887,2396,3685, 275, 375,3816,1743,3844,3731, - 845,1983,2350,4210,1377, 773, 967,3499,3052,3743,2725,4007,1697,1022,3943,1464, -3264,2855,2722,1952,1029,2839,2467, 84,4383,2215, 820,1391,2015,2448,3672, 377, -1948,2168, 797,2545,3536,2578,2645, 94,2874,1678, 405,1259,3071, 771, 546,1315, - 470,1243,3083, 895,2468, 981, 969,2037, 846,4181, 653,1276,2928, 14,2594, 557, -3007,2474, 156, 902,1338,1740,2574, 537,2518, 973,2282,2216,2433,1928, 138,2903, -1293,2631,1612, 646,3457, 839,2935, 111, 496,2191,2847, 589,3186, 149,3994,2060, -4031,2641,4067,3145,1870, 37,3597,2136,1025,2051,3009,3383,3549,1121,1016,3261, -1301, 251,2446,2599,2153, 872,3246, 637, 334,3705, 831, 884, 921,3065,3140,4092, -2198,1944, 246,2964, 108,2045,1152,1921,2308,1031, 203,3173,4170,1907,3890, 810, -1401,2003,1690, 506, 647,1242,2828,1761,1649,3208,2249,1589,3709,2931,5156,1708, - 498, 666,2613, 834,3817,1231, 184,2851,1124, 883,3197,2261,3710,1765,1553,2658, -1178,2639,2351, 93,1193, 942,2538,2141,4402, 235,1821, 870,1591,2192,1709,1871, -3341,1618,4126,2595,2334, 603, 651, 69, 701, 268,2662,3411,2555,1380,1606, 503, - 448, 254,2371,2646, 574,1187,2309,1770, 322,2235,1292,1801, 305, 566,1133, 229, -2067,2057, 706, 167, 483,2002,2672,3295,1820,3561,3067, 316, 378,2746,3452,1112, - 136,1981, 507,1651,2917,1117, 285,4591, 182,2580,3522,1304, 335,3303,1835,2504, -1795,1792,2248, 674,1018,2106,2449,1857,2292,2845, 976,3047,1781,2600,2727,1389, -1281, 52,3152, 153, 265,3950, 672,3485,3951,4463, 430,1183, 365, 278,2169, 27, -1407,1336,2304, 209,1340,1730,2202,1852,2403,2883, 979,1737,1062, 631,2829,2542, -3876,2592, 825,2086,2226,3048,3625, 352,1417,3724, 542, 991, 431,1351,3938,1861, -2294, 826,1361,2927,3142,3503,1738, 463,2462,2723, 582,1916,1595,2808, 400,3845, -3891,2868,3621,2254, 58,2492,1123, 910,2160,2614,1372,1603,1196,1072,3385,1700, -3267,1980, 696, 480,2430, 920, 799,1570,2920,1951,2041,4047,2540,1321,4223,2469, -3562,2228,1271,2602, 401,2833,3351,2575,5157, 907,2312,1256, 410, 263,3507,1582, - 996, 678,1849,2316,1480, 908,3545,2237, 703,2322, 667,1826,2849,1531,2604,2999, -2407,3146,2151,2630,1786,3711, 469,3542, 497,3899,2409, 858, 837,4446,3393,1274, - 786, 620,1845,2001,3311, 484, 308,3367,1204,1815,3691,2332,1532,2557,1842,2020, -2724,1927,2333,4440, 567, 22,1673,2728,4475,1987,1858,1144,1597, 101,1832,3601, - 12, 974,3783,4391, 951,1412, 1,3720, 453,4608,4041, 528,1041,1027,3230,2628, -1129, 875,1051,3291,1203,2262,1069,2860,2799,2149,2615,3278, 144,1758,3040, 31, - 475,1680, 366,2685,3184, 311,1642,4008,2466,5036,1593,1493,2809, 216,1420,1668, - 233, 304,2128,3284, 232,1429,1768,1040,2008,3407,2740,2967,2543, 242,2133, 778, -1565,2022,2620, 505,2189,2756,1098,2273, 372,1614, 708, 553,2846,2094,2278, 169, -3626,2835,4161, 228,2674,3165, 809,1454,1309, 466,1705,1095, 900,3423, 880,2667, -3751,5258,2317,3109,2571,4317,2766,1503,1342, 866,4447,1118, 
63,2076, 314,1881, -1348,1061, 172, 978,3515,1747, 532, 511,3970, 6, 601, 905,2699,3300,1751, 276, -1467,3725,2668, 65,4239,2544,2779,2556,1604, 578,2451,1802, 992,2331,2624,1320, -3446, 713,1513,1013, 103,2786,2447,1661, 886,1702, 916, 654,3574,2031,1556, 751, -2178,2821,2179,1498,1538,2176, 271, 914,2251,2080,1325, 638,1953,2937,3877,2432, -2754, 95,3265,1716, 260,1227,4083, 775, 106,1357,3254, 426,1607, 555,2480, 772, -1985, 244,2546, 474, 495,1046,2611,1851,2061, 71,2089,1675,2590, 742,3758,2843, -3222,1433, 267,2180,2576,2826,2233,2092,3913,2435, 956,1745,3075, 856,2113,1116, - 451, 3,1988,2896,1398, 993,2463,1878,2049,1341,2718,2721,2870,2108, 712,2904, -4363,2753,2324, 277,2872,2349,2649, 384, 987, 435, 691,3000, 922, 164,3939, 652, -1500,1184,4153,2482,3373,2165,4848,2335,3775,3508,3154,2806,2830,1554,2102,1664, -2530,1434,2408, 893,1547,2623,3447,2832,2242,2532,3169,2856,3223,2078, 49,3770, -3469, 462, 318, 656,2259,3250,3069, 679,1629,2758, 344,1138,1104,3120,1836,1283, -3115,2154,1437,4448, 934, 759,1999, 794,2862,1038, 533,2560,1722,2342, 855,2626, -1197,1663,4476,3127, 85,4240,2528, 25,1111,1181,3673, 407,3470,4561,2679,2713, - 768,1925,2841,3986,1544,1165, 932, 373,1240,2146,1930,2673, 721,4766, 354,4333, - 391,2963, 187, 61,3364,1442,1102, 330,1940,1767, 341,3809,4118, 393,2496,2062, -2211, 105, 331, 300, 439, 913,1332, 626, 379,3304,1557, 328, 689,3952, 309,1555, - 931, 317,2517,3027, 325, 569, 686,2107,3084, 60,1042,1333,2794, 264,3177,4014, -1628, 258,3712, 7,4464,1176,1043,1778, 683, 114,1975, 78,1492, 383,1886, 510, - 386, 645,5291,2891,2069,3305,4138,3867,2939,2603,2493,1935,1066,1848,3588,1015, -1282,1289,4609, 697,1453,3044,2666,3611,1856,2412, 54, 719,1330, 568,3778,2459, -1748, 788, 492, 551,1191,1000, 488,3394,3763, 282,1799, 348,2016,1523,3155,2390, -1049, 382,2019,1788,1170, 729,2968,3523, 897,3926,2785,2938,3292, 350,2319,3238, -1718,1717,2655,3453,3143,4465, 161,2889,2980,2009,1421, 56,1908,1640,2387,2232, -1917,1874,2477,4921, 148, 83,3438, 592,4245,2882,1822,1055, 741, 115,1496,1624, - 381,1638,4592,1020, 516,3214, 458, 947,4575,1432, 211,1514,2926,1865,2142, 189, - 852,1221,1400,1486, 882,2299,4036, 351, 28,1122, 700,6479,6480,6481,6482,6483, #last 512 -) - diff --git a/venv/lib/python3.6/site-packages/chardet/gb2312prober.py b/venv/lib/python3.6/site-packages/chardet/gb2312prober.py deleted file mode 100644 index 8446d2d..0000000 --- a/venv/lib/python3.6/site-packages/chardet/gb2312prober.py +++ /dev/null @@ -1,46 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is mozilla.org code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
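The GB2312 table that closes just above is indexed by a character's position in the GB2312 hanzi grid rather than by raw byte values. A simplified illustration of that mapping, not chardet's exact lookup (which lives in chardistribution.py); the offsets below reflect the GB2312 layout, where hanzi rows start at 0xB0 and each row holds 94 cells:

    GB2312_TABLE_SIZE = 3760  # as defined in the deleted module above

    def gb2312_freq_index(first, second):
        """Map a two-byte GB2312 code to an index into GB2312_CHAR_TO_FREQ_ORDER.

        Codes outside the hanzi area (symbol and kana rows below 0xB0) carry no
        frequency information and are skipped.
        """
        if first >= 0xB0 and 0xA1 <= second <= 0xFE:
            index = 94 * (first - 0xB0) + (second - 0xA1)
            return index if index < GB2312_TABLE_SIZE else -1
        return -1

    gb2312_freq_index(0xB0, 0xA1)   # 0: first cell of the first hanzi row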
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .mbcharsetprober import MultiByteCharSetProber -from .codingstatemachine import CodingStateMachine -from .chardistribution import GB2312DistributionAnalysis -from .mbcssm import GB2312_SM_MODEL - -class GB2312Prober(MultiByteCharSetProber): - def __init__(self): - super(GB2312Prober, self).__init__() - self.coding_sm = CodingStateMachine(GB2312_SM_MODEL) - self.distribution_analyzer = GB2312DistributionAnalysis() - self.reset() - - @property - def charset_name(self): - return "GB2312" - - @property - def language(self): - return "Chinese" diff --git a/venv/lib/python3.6/site-packages/chardet/hebrewprober.py b/venv/lib/python3.6/site-packages/chardet/hebrewprober.py deleted file mode 100644 index b0e1bf4..0000000 --- a/venv/lib/python3.6/site-packages/chardet/hebrewprober.py +++ /dev/null @@ -1,292 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Universal charset detector code. -# -# The Initial Developer of the Original Code is -# Shy Shalom -# Portions created by the Initial Developer are Copyright (C) 2005 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .charsetprober import CharSetProber -from .enums import ProbingState - -# This prober doesn't actually recognize a language or a charset. -# It is a helper prober for the use of the Hebrew model probers - -### General ideas of the Hebrew charset recognition ### -# -# Four main charsets exist in Hebrew: -# "ISO-8859-8" - Visual Hebrew -# "windows-1255" - Logical Hebrew -# "ISO-8859-8-I" - Logical Hebrew -# "x-mac-hebrew" - ?? Logical Hebrew ?? -# -# Both "ISO" charsets use a completely identical set of code points, whereas -# "windows-1255" and "x-mac-hebrew" are two different proper supersets of -# these code points. windows-1255 defines additional characters in the range -# 0x80-0x9F as some misc punctuation marks as well as some Hebrew-specific -# diacritics and additional 'Yiddish' ligature letters in the range 0xc0-0xd6. -# x-mac-hebrew defines similar additional code points but with a different -# mapping. -# -# As far as an average Hebrew text with no diacritics is concerned, all four -# charsets are identical with respect to code points. Meaning that for the -# main Hebrew alphabet, all four map the same values to all 27 Hebrew letters -# (including final letters). 
-# -# The dominant difference between these charsets is their directionality. -# "Visual" directionality means that the text is ordered as if the renderer is -# not aware of a BIDI rendering algorithm. The renderer sees the text and -# draws it from left to right. The text itself when ordered naturally is read -# backwards. A buffer of Visual Hebrew generally looks like so: -# "[last word of first line spelled backwards] [whole line ordered backwards -# and spelled backwards] [first word of first line spelled backwards] -# [end of line] [last word of second line] ... etc' " -# adding punctuation marks, numbers and English text to visual text is -# naturally also "visual" and from left to right. -# -# "Logical" directionality means the text is ordered "naturally" according to -# the order it is read. It is the responsibility of the renderer to display -# the text from right to left. A BIDI algorithm is used to place general -# punctuation marks, numbers and English text in the text. -# -# Texts in x-mac-hebrew are almost impossible to find on the Internet. From -# what little evidence I could find, it seems that its general directionality -# is Logical. -# -# To sum up all of the above, the Hebrew probing mechanism knows about two -# charsets: -# Visual Hebrew - "ISO-8859-8" - backwards text - Words and sentences are -# backwards while line order is natural. For charset recognition purposes -# the line order is unimportant (In fact, for this implementation, even -# word order is unimportant). -# Logical Hebrew - "windows-1255" - normal, naturally ordered text. -# -# "ISO-8859-8-I" is a subset of windows-1255 and doesn't need to be -# specifically identified. -# "x-mac-hebrew" is also identified as windows-1255. A text in x-mac-hebrew -# that contain special punctuation marks or diacritics is displayed with -# some unconverted characters showing as question marks. This problem might -# be corrected using another model prober for x-mac-hebrew. Due to the fact -# that x-mac-hebrew texts are so rare, writing another model prober isn't -# worth the effort and performance hit. -# -#### The Prober #### -# -# The prober is divided between two SBCharSetProbers and a HebrewProber, -# all of which are managed, created, fed data, inquired and deleted by the -# SBCSGroupProber. The two SBCharSetProbers identify that the text is in -# fact some kind of Hebrew, Logical or Visual. The final decision about which -# one is it is made by the HebrewProber by combining final-letter scores -# with the scores of the two SBCharSetProbers to produce a final answer. -# -# The SBCSGroupProber is responsible for stripping the original text of HTML -# tags, English characters, numbers, low-ASCII punctuation characters, spaces -# and new lines. It reduces any sequence of such characters to a single space. -# The buffer fed to each prober in the SBCS group prober is pure text in -# high-ASCII. -# The two SBCharSetProbers (model probers) share the same language model: -# Win1255Model. -# The first SBCharSetProber uses the model normally as any other -# SBCharSetProber does, to recognize windows-1255, upon which this model was -# built. The second SBCharSetProber is told to make the pair-of-letter -# lookup in the language model backwards. This in practice exactly simulates -# a visual Hebrew model using the windows-1255 logical Hebrew model. -# -# The HebrewProber is not using any language model. All it does is look for -# final-letter evidence suggesting the text is either logical Hebrew or visual -# Hebrew. 
Disjointed from the model probers, the results of the HebrewProber -# alone are meaningless. HebrewProber always returns 0.00 as confidence -# since it never identifies a charset by itself. Instead, the pointer to the -# HebrewProber is passed to the model probers as a helper "Name Prober". -# When the Group prober receives a positive identification from any prober, -# it asks for the name of the charset identified. If the prober queried is a -# Hebrew model prober, the model prober forwards the call to the -# HebrewProber to make the final decision. In the HebrewProber, the -# decision is made according to the final-letters scores maintained and Both -# model probers scores. The answer is returned in the form of the name of the -# charset identified, either "windows-1255" or "ISO-8859-8". - -class HebrewProber(CharSetProber): - # windows-1255 / ISO-8859-8 code points of interest - FINAL_KAF = 0xea - NORMAL_KAF = 0xeb - FINAL_MEM = 0xed - NORMAL_MEM = 0xee - FINAL_NUN = 0xef - NORMAL_NUN = 0xf0 - FINAL_PE = 0xf3 - NORMAL_PE = 0xf4 - FINAL_TSADI = 0xf5 - NORMAL_TSADI = 0xf6 - - # Minimum Visual vs Logical final letter score difference. - # If the difference is below this, don't rely solely on the final letter score - # distance. - MIN_FINAL_CHAR_DISTANCE = 5 - - # Minimum Visual vs Logical model score difference. - # If the difference is below this, don't rely at all on the model score - # distance. - MIN_MODEL_DISTANCE = 0.01 - - VISUAL_HEBREW_NAME = "ISO-8859-8" - LOGICAL_HEBREW_NAME = "windows-1255" - - def __init__(self): - super(HebrewProber, self).__init__() - self._final_char_logical_score = None - self._final_char_visual_score = None - self._prev = None - self._before_prev = None - self._logical_prober = None - self._visual_prober = None - self.reset() - - def reset(self): - self._final_char_logical_score = 0 - self._final_char_visual_score = 0 - # The two last characters seen in the previous buffer, - # mPrev and mBeforePrev are initialized to space in order to simulate - # a word delimiter at the beginning of the data - self._prev = ' ' - self._before_prev = ' ' - # These probers are owned by the group prober. - - def set_model_probers(self, logicalProber, visualProber): - self._logical_prober = logicalProber - self._visual_prober = visualProber - - def is_final(self, c): - return c in [self.FINAL_KAF, self.FINAL_MEM, self.FINAL_NUN, - self.FINAL_PE, self.FINAL_TSADI] - - def is_non_final(self, c): - # The normal Tsadi is not a good Non-Final letter due to words like - # 'lechotet' (to chat) containing an apostrophe after the tsadi. This - # apostrophe is converted to a space in FilterWithoutEnglishLetters - # causing the Non-Final tsadi to appear at an end of a word even - # though this is not the case in the original text. - # The letters Pe and Kaf rarely display a related behavior of not being - # a good Non-Final letter. Words like 'Pop', 'Winamp' and 'Mubarak' - # for example legally end with a Non-Final Pe or Kaf. However, the - # benefit of these letters as Non-Final letters outweighs the damage - # since these words are quite rare. - return c in [self.NORMAL_KAF, self.NORMAL_MEM, - self.NORMAL_NUN, self.NORMAL_PE] - - def feed(self, byte_str): - # Final letter analysis for logical-visual decision. - # Look for evidence that the received buffer is either logical Hebrew - # or visual Hebrew. - # The following cases are checked: - # 1) A word longer than 1 letter, ending with a final letter. 
This is - # an indication that the text is laid out "naturally" since the - # final letter really appears at the end. +1 for logical score. - # 2) A word longer than 1 letter, ending with a Non-Final letter. In - # normal Hebrew, words ending with Kaf, Mem, Nun, Pe or Tsadi, - # should not end with the Non-Final form of that letter. Exceptions - # to this rule are mentioned above in isNonFinal(). This is an - # indication that the text is laid out backwards. +1 for visual - # score - # 3) A word longer than 1 letter, starting with a final letter. Final - # letters should not appear at the beginning of a word. This is an - # indication that the text is laid out backwards. +1 for visual - # score. - # - # The visual score and logical score are accumulated throughout the - # text and are finally checked against each other in GetCharSetName(). - # No checking for final letters in the middle of words is done since - # that case is not an indication for either Logical or Visual text. - # - # We automatically filter out all 7-bit characters (replace them with - # spaces) so the word boundary detection works properly. [MAP] - - if self.state == ProbingState.NOT_ME: - # Both model probers say it's not them. No reason to continue. - return ProbingState.NOT_ME - - byte_str = self.filter_high_byte_only(byte_str) - - for cur in byte_str: - if cur == ' ': - # We stand on a space - a word just ended - if self._before_prev != ' ': - # next-to-last char was not a space so self._prev is not a - # 1 letter word - if self.is_final(self._prev): - # case (1) [-2:not space][-1:final letter][cur:space] - self._final_char_logical_score += 1 - elif self.is_non_final(self._prev): - # case (2) [-2:not space][-1:Non-Final letter][ - # cur:space] - self._final_char_visual_score += 1 - else: - # Not standing on a space - if ((self._before_prev == ' ') and - (self.is_final(self._prev)) and (cur != ' ')): - # case (3) [-2:space][-1:final letter][cur:not space] - self._final_char_visual_score += 1 - self._before_prev = self._prev - self._prev = cur - - # Forever detecting, till the end or until both model probers return - # ProbingState.NOT_ME (handled above) - return ProbingState.DETECTING - - @property - def charset_name(self): - # Make the decision: is it Logical or Visual? - # If the final letter score distance is dominant enough, rely on it. - finalsub = self._final_char_logical_score - self._final_char_visual_score - if finalsub >= self.MIN_FINAL_CHAR_DISTANCE: - return self.LOGICAL_HEBREW_NAME - if finalsub <= -self.MIN_FINAL_CHAR_DISTANCE: - return self.VISUAL_HEBREW_NAME - - # It's not dominant enough, try to rely on the model scores instead. - modelsub = (self._logical_prober.get_confidence() - - self._visual_prober.get_confidence()) - if modelsub > self.MIN_MODEL_DISTANCE: - return self.LOGICAL_HEBREW_NAME - if modelsub < -self.MIN_MODEL_DISTANCE: - return self.VISUAL_HEBREW_NAME - - # Still no good, back to final letter distance, maybe it'll save the - # day. - if finalsub < 0.0: - return self.VISUAL_HEBREW_NAME - - # (finalsub > 0 - Logical) or (don't know what to do) default to - # Logical. - return self.LOGICAL_HEBREW_NAME - - @property - def language(self): - return 'Hebrew' - - @property - def state(self): - # Remain active as long as any of the model probers are active. 
- if (self._logical_prober.state == ProbingState.NOT_ME) and \ - (self._visual_prober.state == ProbingState.NOT_ME): - return ProbingState.NOT_ME - return ProbingState.DETECTING diff --git a/venv/lib/python3.6/site-packages/chardet/jisfreq.py b/venv/lib/python3.6/site-packages/chardet/jisfreq.py deleted file mode 100644 index 83fc082..0000000 --- a/venv/lib/python3.6/site-packages/chardet/jisfreq.py +++ /dev/null @@ -1,325 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Communicator client code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -# Sampling from about 20M text materials include literature and computer technology -# -# Japanese frequency table, applied to both S-JIS and EUC-JP -# They are sorted in order. 
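The decision cascade documented in the removed hebrewprober.py hunk above can be restated compactly. The sketch below is illustrative only and is not chardet code; the thresholds (5 for the final-letter distance, 0.01 for the model-score distance), the charset names, and the fallback order are taken directly from the constants and the charset_name property shown above, while the helper name decide() is hypothetical.

# Illustrative restatement of the HebrewProber decision cascade (not part of chardet's API).
MIN_FINAL_CHAR_DISTANCE = 5    # minimum final-letter score gap needed to decide outright
MIN_MODEL_DISTANCE = 0.01      # minimum model-confidence gap needed to decide
LOGICAL = "windows-1255"       # logical (naturally ordered) Hebrew
VISUAL = "ISO-8859-8"          # visual (reversed) Hebrew

def decide(final_logical, final_visual, logical_conf, visual_conf):
    """Pick a charset name from the two final-letter scores and the
    confidences reported by the two windows-1255 model probers."""
    finalsub = final_logical - final_visual
    if finalsub >= MIN_FINAL_CHAR_DISTANCE:
        return LOGICAL
    if finalsub <= -MIN_FINAL_CHAR_DISTANCE:
        return VISUAL
    modelsub = logical_conf - visual_conf
    if modelsub > MIN_MODEL_DISTANCE:
        return LOGICAL
    if modelsub < -MIN_MODEL_DISTANCE:
        return VISUAL
    # Weak evidence: fall back to the sign of the final-letter distance,
    # defaulting to logical Hebrew.
    return VISUAL if finalsub < 0 else LOGICAL

# Example: the final-letter gap (2 vs 0) is below the threshold of 5,
# so the model confidences (0.42 vs 0.31) make the call.
print(decide(2, 0, 0.42, 0.31))  # -> "windows-1255"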
- -# 128 --> 0.77094 -# 256 --> 0.85710 -# 512 --> 0.92635 -# 1024 --> 0.97130 -# 2048 --> 0.99431 -# -# Ideal Distribution Ratio = 0.92635 / (1-0.92635) = 12.58 -# Random Distribution Ration = 512 / (2965+62+83+86-512) = 0.191 -# -# Typical Distribution Ratio, 25% of IDR - -JIS_TYPICAL_DISTRIBUTION_RATIO = 3.0 - -# Char to FreqOrder table , -JIS_TABLE_SIZE = 4368 - -JIS_CHAR_TO_FREQ_ORDER = ( - 40, 1, 6, 182, 152, 180, 295,2127, 285, 381,3295,4304,3068,4606,3165,3510, # 16 -3511,1822,2785,4607,1193,2226,5070,4608, 171,2996,1247, 18, 179,5071, 856,1661, # 32 -1262,5072, 619, 127,3431,3512,3230,1899,1700, 232, 228,1294,1298, 284, 283,2041, # 48 -2042,1061,1062, 48, 49, 44, 45, 433, 434,1040,1041, 996, 787,2997,1255,4305, # 64 -2108,4609,1684,1648,5073,5074,5075,5076,5077,5078,3687,5079,4610,5080,3927,3928, # 80 -5081,3296,3432, 290,2285,1471,2187,5082,2580,2825,1303,2140,1739,1445,2691,3375, # 96 -1691,3297,4306,4307,4611, 452,3376,1182,2713,3688,3069,4308,5083,5084,5085,5086, # 112 -5087,5088,5089,5090,5091,5092,5093,5094,5095,5096,5097,5098,5099,5100,5101,5102, # 128 -5103,5104,5105,5106,5107,5108,5109,5110,5111,5112,4097,5113,5114,5115,5116,5117, # 144 -5118,5119,5120,5121,5122,5123,5124,5125,5126,5127,5128,5129,5130,5131,5132,5133, # 160 -5134,5135,5136,5137,5138,5139,5140,5141,5142,5143,5144,5145,5146,5147,5148,5149, # 176 -5150,5151,5152,4612,5153,5154,5155,5156,5157,5158,5159,5160,5161,5162,5163,5164, # 192 -5165,5166,5167,5168,5169,5170,5171,5172,5173,5174,5175,1472, 598, 618, 820,1205, # 208 -1309,1412,1858,1307,1692,5176,5177,5178,5179,5180,5181,5182,1142,1452,1234,1172, # 224 -1875,2043,2149,1793,1382,2973, 925,2404,1067,1241, 960,1377,2935,1491, 919,1217, # 240 -1865,2030,1406,1499,2749,4098,5183,5184,5185,5186,5187,5188,2561,4099,3117,1804, # 256 -2049,3689,4309,3513,1663,5189,3166,3118,3298,1587,1561,3433,5190,3119,1625,2998, # 272 -3299,4613,1766,3690,2786,4614,5191,5192,5193,5194,2161, 26,3377, 2,3929, 20, # 288 -3691, 47,4100, 50, 17, 16, 35, 268, 27, 243, 42, 155, 24, 154, 29, 184, # 304 - 4, 91, 14, 92, 53, 396, 33, 289, 9, 37, 64, 620, 21, 39, 321, 5, # 320 - 12, 11, 52, 13, 3, 208, 138, 0, 7, 60, 526, 141, 151,1069, 181, 275, # 336 -1591, 83, 132,1475, 126, 331, 829, 15, 69, 160, 59, 22, 157, 55,1079, 312, # 352 - 109, 38, 23, 25, 10, 19, 79,5195, 61, 382,1124, 8, 30,5196,5197,5198, # 368 -5199,5200,5201,5202,5203,5204,5205,5206, 89, 62, 74, 34,2416, 112, 139, 196, # 384 - 271, 149, 84, 607, 131, 765, 46, 88, 153, 683, 76, 874, 101, 258, 57, 80, # 400 - 32, 364, 121,1508, 169,1547, 68, 235, 145,2999, 41, 360,3027, 70, 63, 31, # 416 - 43, 259, 262,1383, 99, 533, 194, 66, 93, 846, 217, 192, 56, 106, 58, 565, # 432 - 280, 272, 311, 256, 146, 82, 308, 71, 100, 128, 214, 655, 110, 261, 104,1140, # 448 - 54, 51, 36, 87, 67,3070, 185,2618,2936,2020, 28,1066,2390,2059,5207,5208, # 464 -5209,5210,5211,5212,5213,5214,5215,5216,4615,5217,5218,5219,5220,5221,5222,5223, # 480 -5224,5225,5226,5227,5228,5229,5230,5231,5232,5233,5234,5235,5236,3514,5237,5238, # 496 -5239,5240,5241,5242,5243,5244,2297,2031,4616,4310,3692,5245,3071,5246,3598,5247, # 512 -4617,3231,3515,5248,4101,4311,4618,3808,4312,4102,5249,4103,4104,3599,5250,5251, # 528 -5252,5253,5254,5255,5256,5257,5258,5259,5260,5261,5262,5263,5264,5265,5266,5267, # 544 -5268,5269,5270,5271,5272,5273,5274,5275,5276,5277,5278,5279,5280,5281,5282,5283, # 560 -5284,5285,5286,5287,5288,5289,5290,5291,5292,5293,5294,5295,5296,5297,5298,5299, # 576 -5300,5301,5302,5303,5304,5305,5306,5307,5308,5309,5310,5311,5312,5313,5314,5315, # 592 
-5316,5317,5318,5319,5320,5321,5322,5323,5324,5325,5326,5327,5328,5329,5330,5331, # 608 -5332,5333,5334,5335,5336,5337,5338,5339,5340,5341,5342,5343,5344,5345,5346,5347, # 624 -5348,5349,5350,5351,5352,5353,5354,5355,5356,5357,5358,5359,5360,5361,5362,5363, # 640 -5364,5365,5366,5367,5368,5369,5370,5371,5372,5373,5374,5375,5376,5377,5378,5379, # 656 -5380,5381, 363, 642,2787,2878,2788,2789,2316,3232,2317,3434,2011, 165,1942,3930, # 672 -3931,3932,3933,5382,4619,5383,4620,5384,5385,5386,5387,5388,5389,5390,5391,5392, # 688 -5393,5394,5395,5396,5397,5398,5399,5400,5401,5402,5403,5404,5405,5406,5407,5408, # 704 -5409,5410,5411,5412,5413,5414,5415,5416,5417,5418,5419,5420,5421,5422,5423,5424, # 720 -5425,5426,5427,5428,5429,5430,5431,5432,5433,5434,5435,5436,5437,5438,5439,5440, # 736 -5441,5442,5443,5444,5445,5446,5447,5448,5449,5450,5451,5452,5453,5454,5455,5456, # 752 -5457,5458,5459,5460,5461,5462,5463,5464,5465,5466,5467,5468,5469,5470,5471,5472, # 768 -5473,5474,5475,5476,5477,5478,5479,5480,5481,5482,5483,5484,5485,5486,5487,5488, # 784 -5489,5490,5491,5492,5493,5494,5495,5496,5497,5498,5499,5500,5501,5502,5503,5504, # 800 -5505,5506,5507,5508,5509,5510,5511,5512,5513,5514,5515,5516,5517,5518,5519,5520, # 816 -5521,5522,5523,5524,5525,5526,5527,5528,5529,5530,5531,5532,5533,5534,5535,5536, # 832 -5537,5538,5539,5540,5541,5542,5543,5544,5545,5546,5547,5548,5549,5550,5551,5552, # 848 -5553,5554,5555,5556,5557,5558,5559,5560,5561,5562,5563,5564,5565,5566,5567,5568, # 864 -5569,5570,5571,5572,5573,5574,5575,5576,5577,5578,5579,5580,5581,5582,5583,5584, # 880 -5585,5586,5587,5588,5589,5590,5591,5592,5593,5594,5595,5596,5597,5598,5599,5600, # 896 -5601,5602,5603,5604,5605,5606,5607,5608,5609,5610,5611,5612,5613,5614,5615,5616, # 912 -5617,5618,5619,5620,5621,5622,5623,5624,5625,5626,5627,5628,5629,5630,5631,5632, # 928 -5633,5634,5635,5636,5637,5638,5639,5640,5641,5642,5643,5644,5645,5646,5647,5648, # 944 -5649,5650,5651,5652,5653,5654,5655,5656,5657,5658,5659,5660,5661,5662,5663,5664, # 960 -5665,5666,5667,5668,5669,5670,5671,5672,5673,5674,5675,5676,5677,5678,5679,5680, # 976 -5681,5682,5683,5684,5685,5686,5687,5688,5689,5690,5691,5692,5693,5694,5695,5696, # 992 -5697,5698,5699,5700,5701,5702,5703,5704,5705,5706,5707,5708,5709,5710,5711,5712, # 1008 -5713,5714,5715,5716,5717,5718,5719,5720,5721,5722,5723,5724,5725,5726,5727,5728, # 1024 -5729,5730,5731,5732,5733,5734,5735,5736,5737,5738,5739,5740,5741,5742,5743,5744, # 1040 -5745,5746,5747,5748,5749,5750,5751,5752,5753,5754,5755,5756,5757,5758,5759,5760, # 1056 -5761,5762,5763,5764,5765,5766,5767,5768,5769,5770,5771,5772,5773,5774,5775,5776, # 1072 -5777,5778,5779,5780,5781,5782,5783,5784,5785,5786,5787,5788,5789,5790,5791,5792, # 1088 -5793,5794,5795,5796,5797,5798,5799,5800,5801,5802,5803,5804,5805,5806,5807,5808, # 1104 -5809,5810,5811,5812,5813,5814,5815,5816,5817,5818,5819,5820,5821,5822,5823,5824, # 1120 -5825,5826,5827,5828,5829,5830,5831,5832,5833,5834,5835,5836,5837,5838,5839,5840, # 1136 -5841,5842,5843,5844,5845,5846,5847,5848,5849,5850,5851,5852,5853,5854,5855,5856, # 1152 -5857,5858,5859,5860,5861,5862,5863,5864,5865,5866,5867,5868,5869,5870,5871,5872, # 1168 -5873,5874,5875,5876,5877,5878,5879,5880,5881,5882,5883,5884,5885,5886,5887,5888, # 1184 -5889,5890,5891,5892,5893,5894,5895,5896,5897,5898,5899,5900,5901,5902,5903,5904, # 1200 -5905,5906,5907,5908,5909,5910,5911,5912,5913,5914,5915,5916,5917,5918,5919,5920, # 1216 -5921,5922,5923,5924,5925,5926,5927,5928,5929,5930,5931,5932,5933,5934,5935,5936, # 1232 
-5937,5938,5939,5940,5941,5942,5943,5944,5945,5946,5947,5948,5949,5950,5951,5952, # 1248 -5953,5954,5955,5956,5957,5958,5959,5960,5961,5962,5963,5964,5965,5966,5967,5968, # 1264 -5969,5970,5971,5972,5973,5974,5975,5976,5977,5978,5979,5980,5981,5982,5983,5984, # 1280 -5985,5986,5987,5988,5989,5990,5991,5992,5993,5994,5995,5996,5997,5998,5999,6000, # 1296 -6001,6002,6003,6004,6005,6006,6007,6008,6009,6010,6011,6012,6013,6014,6015,6016, # 1312 -6017,6018,6019,6020,6021,6022,6023,6024,6025,6026,6027,6028,6029,6030,6031,6032, # 1328 -6033,6034,6035,6036,6037,6038,6039,6040,6041,6042,6043,6044,6045,6046,6047,6048, # 1344 -6049,6050,6051,6052,6053,6054,6055,6056,6057,6058,6059,6060,6061,6062,6063,6064, # 1360 -6065,6066,6067,6068,6069,6070,6071,6072,6073,6074,6075,6076,6077,6078,6079,6080, # 1376 -6081,6082,6083,6084,6085,6086,6087,6088,6089,6090,6091,6092,6093,6094,6095,6096, # 1392 -6097,6098,6099,6100,6101,6102,6103,6104,6105,6106,6107,6108,6109,6110,6111,6112, # 1408 -6113,6114,2044,2060,4621, 997,1235, 473,1186,4622, 920,3378,6115,6116, 379,1108, # 1424 -4313,2657,2735,3934,6117,3809, 636,3233, 573,1026,3693,3435,2974,3300,2298,4105, # 1440 - 854,2937,2463, 393,2581,2417, 539, 752,1280,2750,2480, 140,1161, 440, 708,1569, # 1456 - 665,2497,1746,1291,1523,3000, 164,1603, 847,1331, 537,1997, 486, 508,1693,2418, # 1472 -1970,2227, 878,1220, 299,1030, 969, 652,2751, 624,1137,3301,2619, 65,3302,2045, # 1488 -1761,1859,3120,1930,3694,3516, 663,1767, 852, 835,3695, 269, 767,2826,2339,1305, # 1504 - 896,1150, 770,1616,6118, 506,1502,2075,1012,2519, 775,2520,2975,2340,2938,4314, # 1520 -3028,2086,1224,1943,2286,6119,3072,4315,2240,1273,1987,3935,1557, 175, 597, 985, # 1536 -3517,2419,2521,1416,3029, 585, 938,1931,1007,1052,1932,1685,6120,3379,4316,4623, # 1552 - 804, 599,3121,1333,2128,2539,1159,1554,2032,3810, 687,2033,2904, 952, 675,1467, # 1568 -3436,6121,2241,1096,1786,2440,1543,1924, 980,1813,2228, 781,2692,1879, 728,1918, # 1584 -3696,4624, 548,1950,4625,1809,1088,1356,3303,2522,1944, 502, 972, 373, 513,2827, # 1600 - 586,2377,2391,1003,1976,1631,6122,2464,1084, 648,1776,4626,2141, 324, 962,2012, # 1616 -2177,2076,1384, 742,2178,1448,1173,1810, 222, 102, 301, 445, 125,2420, 662,2498, # 1632 - 277, 200,1476,1165,1068, 224,2562,1378,1446, 450,1880, 659, 791, 582,4627,2939, # 1648 -3936,1516,1274, 555,2099,3697,1020,1389,1526,3380,1762,1723,1787,2229, 412,2114, # 1664 -1900,2392,3518, 512,2597, 427,1925,2341,3122,1653,1686,2465,2499, 697, 330, 273, # 1680 - 380,2162, 951, 832, 780, 991,1301,3073, 965,2270,3519, 668,2523,2636,1286, 535, # 1696 -1407, 518, 671, 957,2658,2378, 267, 611,2197,3030,6123, 248,2299, 967,1799,2356, # 1712 - 850,1418,3437,1876,1256,1480,2828,1718,6124,6125,1755,1664,2405,6126,4628,2879, # 1728 -2829, 499,2179, 676,4629, 557,2329,2214,2090, 325,3234, 464, 811,3001, 992,2342, # 1744 -2481,1232,1469, 303,2242, 466,1070,2163, 603,1777,2091,4630,2752,4631,2714, 322, # 1760 -2659,1964,1768, 481,2188,1463,2330,2857,3600,2092,3031,2421,4632,2318,2070,1849, # 1776 -2598,4633,1302,2254,1668,1701,2422,3811,2905,3032,3123,2046,4106,1763,1694,4634, # 1792 -1604, 943,1724,1454, 917, 868,2215,1169,2940, 552,1145,1800,1228,1823,1955, 316, # 1808 -1080,2510, 361,1807,2830,4107,2660,3381,1346,1423,1134,4108,6127, 541,1263,1229, # 1824 -1148,2540, 545, 465,1833,2880,3438,1901,3074,2482, 816,3937, 713,1788,2500, 122, # 1840 -1575, 195,1451,2501,1111,6128, 859, 374,1225,2243,2483,4317, 390,1033,3439,3075, # 1856 -2524,1687, 266, 793,1440,2599, 946, 779, 802, 507, 897,1081, 528,2189,1292, 711, # 
1872 -1866,1725,1167,1640, 753, 398,2661,1053, 246, 348,4318, 137,1024,3440,1600,2077, # 1888 -2129, 825,4319, 698, 238, 521, 187,2300,1157,2423,1641,1605,1464,1610,1097,2541, # 1904 -1260,1436, 759,2255,1814,2150, 705,3235, 409,2563,3304, 561,3033,2005,2564, 726, # 1920 -1956,2343,3698,4109, 949,3812,3813,3520,1669, 653,1379,2525, 881,2198, 632,2256, # 1936 -1027, 778,1074, 733,1957, 514,1481,2466, 554,2180, 702,3938,1606,1017,1398,6129, # 1952 -1380,3521, 921, 993,1313, 594, 449,1489,1617,1166, 768,1426,1360, 495,1794,3601, # 1968 -1177,3602,1170,4320,2344, 476, 425,3167,4635,3168,1424, 401,2662,1171,3382,1998, # 1984 -1089,4110, 477,3169, 474,6130,1909, 596,2831,1842, 494, 693,1051,1028,1207,3076, # 2000 - 606,2115, 727,2790,1473,1115, 743,3522, 630, 805,1532,4321,2021, 366,1057, 838, # 2016 - 684,1114,2142,4322,2050,1492,1892,1808,2271,3814,2424,1971,1447,1373,3305,1090, # 2032 -1536,3939,3523,3306,1455,2199, 336, 369,2331,1035, 584,2393, 902, 718,2600,6131, # 2048 -2753, 463,2151,1149,1611,2467, 715,1308,3124,1268, 343,1413,3236,1517,1347,2663, # 2064 -2093,3940,2022,1131,1553,2100,2941,1427,3441,2942,1323,2484,6132,1980, 872,2368, # 2080 -2441,2943, 320,2369,2116,1082, 679,1933,3941,2791,3815, 625,1143,2023, 422,2200, # 2096 -3816,6133, 730,1695, 356,2257,1626,2301,2858,2637,1627,1778, 937, 883,2906,2693, # 2112 -3002,1769,1086, 400,1063,1325,3307,2792,4111,3077, 456,2345,1046, 747,6134,1524, # 2128 - 884,1094,3383,1474,2164,1059, 974,1688,2181,2258,1047, 345,1665,1187, 358, 875, # 2144 -3170, 305, 660,3524,2190,1334,1135,3171,1540,1649,2542,1527, 927, 968,2793, 885, # 2160 -1972,1850, 482, 500,2638,1218,1109,1085,2543,1654,2034, 876, 78,2287,1482,1277, # 2176 - 861,1675,1083,1779, 724,2754, 454, 397,1132,1612,2332, 893, 672,1237, 257,2259, # 2192 -2370, 135,3384, 337,2244, 547, 352, 340, 709,2485,1400, 788,1138,2511, 540, 772, # 2208 -1682,2260,2272,2544,2013,1843,1902,4636,1999,1562,2288,4637,2201,1403,1533, 407, # 2224 - 576,3308,1254,2071, 978,3385, 170, 136,1201,3125,2664,3172,2394, 213, 912, 873, # 2240 -3603,1713,2202, 699,3604,3699, 813,3442, 493, 531,1054, 468,2907,1483, 304, 281, # 2256 -4112,1726,1252,2094, 339,2319,2130,2639, 756,1563,2944, 748, 571,2976,1588,2425, # 2272 -2715,1851,1460,2426,1528,1392,1973,3237, 288,3309, 685,3386, 296, 892,2716,2216, # 2288 -1570,2245, 722,1747,2217, 905,3238,1103,6135,1893,1441,1965, 251,1805,2371,3700, # 2304 -2601,1919,1078, 75,2182,1509,1592,1270,2640,4638,2152,6136,3310,3817, 524, 706, # 2320 -1075, 292,3818,1756,2602, 317, 98,3173,3605,3525,1844,2218,3819,2502, 814, 567, # 2336 - 385,2908,1534,6137, 534,1642,3239, 797,6138,1670,1529, 953,4323, 188,1071, 538, # 2352 - 178, 729,3240,2109,1226,1374,2000,2357,2977, 731,2468,1116,2014,2051,6139,1261, # 2368 -1593, 803,2859,2736,3443, 556, 682, 823,1541,6140,1369,2289,1706,2794, 845, 462, # 2384 -2603,2665,1361, 387, 162,2358,1740, 739,1770,1720,1304,1401,3241,1049, 627,1571, # 2400 -2427,3526,1877,3942,1852,1500, 431,1910,1503, 677, 297,2795, 286,1433,1038,1198, # 2416 -2290,1133,1596,4113,4639,2469,1510,1484,3943,6141,2442, 108, 712,4640,2372, 866, # 2432 -3701,2755,3242,1348, 834,1945,1408,3527,2395,3243,1811, 824, 994,1179,2110,1548, # 2448 -1453, 790,3003, 690,4324,4325,2832,2909,3820,1860,3821, 225,1748, 310, 346,1780, # 2464 -2470, 821,1993,2717,2796, 828, 877,3528,2860,2471,1702,2165,2910,2486,1789, 453, # 2480 - 359,2291,1676, 73,1164,1461,1127,3311, 421, 604, 314,1037, 589, 116,2487, 737, # 2496 - 837,1180, 111, 244, 735,6142,2261,1861,1362, 986, 523, 418, 581,2666,3822, 103, 
# 2512 - 855, 503,1414,1867,2488,1091, 657,1597, 979, 605,1316,4641,1021,2443,2078,2001, # 2528 -1209, 96, 587,2166,1032, 260,1072,2153, 173, 94, 226,3244, 819,2006,4642,4114, # 2544 -2203, 231,1744, 782, 97,2667, 786,3387, 887, 391, 442,2219,4326,1425,6143,2694, # 2560 - 633,1544,1202, 483,2015, 592,2052,1958,2472,1655, 419, 129,4327,3444,3312,1714, # 2576 -1257,3078,4328,1518,1098, 865,1310,1019,1885,1512,1734, 469,2444, 148, 773, 436, # 2592 -1815,1868,1128,1055,4329,1245,2756,3445,2154,1934,1039,4643, 579,1238, 932,2320, # 2608 - 353, 205, 801, 115,2428, 944,2321,1881, 399,2565,1211, 678, 766,3944, 335,2101, # 2624 -1459,1781,1402,3945,2737,2131,1010, 844, 981,1326,1013, 550,1816,1545,2620,1335, # 2640 -1008, 371,2881, 936,1419,1613,3529,1456,1395,2273,1834,2604,1317,2738,2503, 416, # 2656 -1643,4330, 806,1126, 229, 591,3946,1314,1981,1576,1837,1666, 347,1790, 977,3313, # 2672 - 764,2861,1853, 688,2429,1920,1462, 77, 595, 415,2002,3034, 798,1192,4115,6144, # 2688 -2978,4331,3035,2695,2582,2072,2566, 430,2430,1727, 842,1396,3947,3702, 613, 377, # 2704 - 278, 236,1417,3388,3314,3174, 757,1869, 107,3530,6145,1194, 623,2262, 207,1253, # 2720 -2167,3446,3948, 492,1117,1935, 536,1838,2757,1246,4332, 696,2095,2406,1393,1572, # 2736 -3175,1782, 583, 190, 253,1390,2230, 830,3126,3389, 934,3245,1703,1749,2979,1870, # 2752 -2545,1656,2204, 869,2346,4116,3176,1817, 496,1764,4644, 942,1504, 404,1903,1122, # 2768 -1580,3606,2945,1022, 515, 372,1735, 955,2431,3036,6146,2797,1110,2302,2798, 617, # 2784 -6147, 441, 762,1771,3447,3607,3608,1904, 840,3037, 86, 939,1385, 572,1370,2445, # 2800 -1336, 114,3703, 898, 294, 203,3315, 703,1583,2274, 429, 961,4333,1854,1951,3390, # 2816 -2373,3704,4334,1318,1381, 966,1911,2322,1006,1155, 309, 989, 458,2718,1795,1372, # 2832 -1203, 252,1689,1363,3177, 517,1936, 168,1490, 562, 193,3823,1042,4117,1835, 551, # 2848 - 470,4645, 395, 489,3448,1871,1465,2583,2641, 417,1493, 279,1295, 511,1236,1119, # 2864 - 72,1231,1982,1812,3004, 871,1564, 984,3449,1667,2696,2096,4646,2347,2833,1673, # 2880 -3609, 695,3246,2668, 807,1183,4647, 890, 388,2333,1801,1457,2911,1765,1477,1031, # 2896 -3316,3317,1278,3391,2799,2292,2526, 163,3450,4335,2669,1404,1802,6148,2323,2407, # 2912 -1584,1728,1494,1824,1269, 298, 909,3318,1034,1632, 375, 776,1683,2061, 291, 210, # 2928 -1123, 809,1249,1002,2642,3038, 206,1011,2132, 144, 975, 882,1565, 342, 667, 754, # 2944 -1442,2143,1299,2303,2062, 447, 626,2205,1221,2739,2912,1144,1214,2206,2584, 760, # 2960 -1715, 614, 950,1281,2670,2621, 810, 577,1287,2546,4648, 242,2168, 250,2643, 691, # 2976 - 123,2644, 647, 313,1029, 689,1357,2946,1650, 216, 771,1339,1306, 808,2063, 549, # 2992 - 913,1371,2913,2914,6149,1466,1092,1174,1196,1311,2605,2396,1783,1796,3079, 406, # 3008 -2671,2117,3949,4649, 487,1825,2220,6150,2915, 448,2348,1073,6151,2397,1707, 130, # 3024 - 900,1598, 329, 176,1959,2527,1620,6152,2275,4336,3319,1983,2191,3705,3610,2155, # 3040 -3706,1912,1513,1614,6153,1988, 646, 392,2304,1589,3320,3039,1826,1239,1352,1340, # 3056 -2916, 505,2567,1709,1437,2408,2547, 906,6154,2672, 384,1458,1594,1100,1329, 710, # 3072 - 423,3531,2064,2231,2622,1989,2673,1087,1882, 333, 841,3005,1296,2882,2379, 580, # 3088 -1937,1827,1293,2585, 601, 574, 249,1772,4118,2079,1120, 645, 901,1176,1690, 795, # 3104 -2207, 478,1434, 516,1190,1530, 761,2080, 930,1264, 355, 435,1552, 644,1791, 987, # 3120 - 220,1364,1163,1121,1538, 306,2169,1327,1222, 546,2645, 218, 241, 610,1704,3321, # 3136 -1984,1839,1966,2528, 451,6155,2586,3707,2568, 907,3178, 254,2947, 186,1845,4650, 
# 3152 - 745, 432,1757, 428,1633, 888,2246,2221,2489,3611,2118,1258,1265, 956,3127,1784, # 3168 -4337,2490, 319, 510, 119, 457,3612, 274,2035,2007,4651,1409,3128, 970,2758, 590, # 3184 -2800, 661,2247,4652,2008,3950,1420,1549,3080,3322,3951,1651,1375,2111, 485,2491, # 3200 -1429,1156,6156,2548,2183,1495, 831,1840,2529,2446, 501,1657, 307,1894,3247,1341, # 3216 - 666, 899,2156,1539,2549,1559, 886, 349,2208,3081,2305,1736,3824,2170,2759,1014, # 3232 -1913,1386, 542,1397,2948, 490, 368, 716, 362, 159, 282,2569,1129,1658,1288,1750, # 3248 -2674, 276, 649,2016, 751,1496, 658,1818,1284,1862,2209,2087,2512,3451, 622,2834, # 3264 - 376, 117,1060,2053,1208,1721,1101,1443, 247,1250,3179,1792,3952,2760,2398,3953, # 3280 -6157,2144,3708, 446,2432,1151,2570,3452,2447,2761,2835,1210,2448,3082, 424,2222, # 3296 -1251,2449,2119,2836, 504,1581,4338, 602, 817, 857,3825,2349,2306, 357,3826,1470, # 3312 -1883,2883, 255, 958, 929,2917,3248, 302,4653,1050,1271,1751,2307,1952,1430,2697, # 3328 -2719,2359, 354,3180, 777, 158,2036,4339,1659,4340,4654,2308,2949,2248,1146,2232, # 3344 -3532,2720,1696,2623,3827,6158,3129,1550,2698,1485,1297,1428, 637, 931,2721,2145, # 3360 - 914,2550,2587, 81,2450, 612, 827,2646,1242,4655,1118,2884, 472,1855,3181,3533, # 3376 -3534, 569,1353,2699,1244,1758,2588,4119,2009,2762,2171,3709,1312,1531,6159,1152, # 3392 -1938, 134,1830, 471,3710,2276,1112,1535,3323,3453,3535, 982,1337,2950, 488, 826, # 3408 - 674,1058,1628,4120,2017, 522,2399, 211, 568,1367,3454, 350, 293,1872,1139,3249, # 3424 -1399,1946,3006,1300,2360,3324, 588, 736,6160,2606, 744, 669,3536,3828,6161,1358, # 3440 - 199, 723, 848, 933, 851,1939,1505,1514,1338,1618,1831,4656,1634,3613, 443,2740, # 3456 -3829, 717,1947, 491,1914,6162,2551,1542,4121,1025,6163,1099,1223, 198,3040,2722, # 3472 - 370, 410,1905,2589, 998,1248,3182,2380, 519,1449,4122,1710, 947, 928,1153,4341, # 3488 -2277, 344,2624,1511, 615, 105, 161,1212,1076,1960,3130,2054,1926,1175,1906,2473, # 3504 - 414,1873,2801,6164,2309, 315,1319,3325, 318,2018,2146,2157, 963, 631, 223,4342, # 3520 -4343,2675, 479,3711,1197,2625,3712,2676,2361,6165,4344,4123,6166,2451,3183,1886, # 3536 -2184,1674,1330,1711,1635,1506, 799, 219,3250,3083,3954,1677,3713,3326,2081,3614, # 3552 -1652,2073,4657,1147,3041,1752, 643,1961, 147,1974,3955,6167,1716,2037, 918,3007, # 3568 -1994, 120,1537, 118, 609,3184,4345, 740,3455,1219, 332,1615,3830,6168,1621,2980, # 3584 -1582, 783, 212, 553,2350,3714,1349,2433,2082,4124, 889,6169,2310,1275,1410, 973, # 3600 - 166,1320,3456,1797,1215,3185,2885,1846,2590,2763,4658, 629, 822,3008, 763, 940, # 3616 -1990,2862, 439,2409,1566,1240,1622, 926,1282,1907,2764, 654,2210,1607, 327,1130, # 3632 -3956,1678,1623,6170,2434,2192, 686, 608,3831,3715, 903,3957,3042,6171,2741,1522, # 3648 -1915,1105,1555,2552,1359, 323,3251,4346,3457, 738,1354,2553,2311,2334,1828,2003, # 3664 -3832,1753,2351,1227,6172,1887,4125,1478,6173,2410,1874,1712,1847, 520,1204,2607, # 3680 - 264,4659, 836,2677,2102, 600,4660,3833,2278,3084,6174,4347,3615,1342, 640, 532, # 3696 - 543,2608,1888,2400,2591,1009,4348,1497, 341,1737,3616,2723,1394, 529,3252,1321, # 3712 - 983,4661,1515,2120, 971,2592, 924, 287,1662,3186,4349,2700,4350,1519, 908,1948, # 3728 -2452, 156, 796,1629,1486,2223,2055, 694,4126,1259,1036,3392,1213,2249,2742,1889, # 3744 -1230,3958,1015, 910, 408, 559,3617,4662, 746, 725, 935,4663,3959,3009,1289, 563, # 3760 - 867,4664,3960,1567,2981,2038,2626, 988,2263,2381,4351, 143,2374, 704,1895,6175, # 3776 -1188,3716,2088, 673,3085,2362,4352, 484,1608,1921,2765,2918, 215, 
904,3618,3537, # 3792 - 894, 509, 976,3043,2701,3961,4353,2837,2982, 498,6176,6177,1102,3538,1332,3393, # 3808 -1487,1636,1637, 233, 245,3962, 383, 650, 995,3044, 460,1520,1206,2352, 749,3327, # 3824 - 530, 700, 389,1438,1560,1773,3963,2264, 719,2951,2724,3834, 870,1832,1644,1000, # 3840 - 839,2474,3717, 197,1630,3394, 365,2886,3964,1285,2133, 734, 922, 818,1106, 732, # 3856 - 480,2083,1774,3458, 923,2279,1350, 221,3086, 85,2233,2234,3835,1585,3010,2147, # 3872 -1387,1705,2382,1619,2475, 133, 239,2802,1991,1016,2084,2383, 411,2838,1113, 651, # 3888 -1985,1160,3328, 990,1863,3087,1048,1276,2647, 265,2627,1599,3253,2056, 150, 638, # 3904 -2019, 656, 853, 326,1479, 680,1439,4354,1001,1759, 413,3459,3395,2492,1431, 459, # 3920 -4355,1125,3329,2265,1953,1450,2065,2863, 849, 351,2678,3131,3254,3255,1104,1577, # 3936 - 227,1351,1645,2453,2193,1421,2887, 812,2121, 634, 95,2435, 201,2312,4665,1646, # 3952 -1671,2743,1601,2554,2702,2648,2280,1315,1366,2089,3132,1573,3718,3965,1729,1189, # 3968 - 328,2679,1077,1940,1136, 558,1283, 964,1195, 621,2074,1199,1743,3460,3619,1896, # 3984 -1916,1890,3836,2952,1154,2112,1064, 862, 378,3011,2066,2113,2803,1568,2839,6178, # 4000 -3088,2919,1941,1660,2004,1992,2194, 142, 707,1590,1708,1624,1922,1023,1836,1233, # 4016 -1004,2313, 789, 741,3620,6179,1609,2411,1200,4127,3719,3720,4666,2057,3721, 593, # 4032 -2840, 367,2920,1878,6180,3461,1521, 628,1168, 692,2211,2649, 300, 720,2067,2571, # 4048 -2953,3396, 959,2504,3966,3539,3462,1977, 701,6181, 954,1043, 800, 681, 183,3722, # 4064 -1803,1730,3540,4128,2103, 815,2314, 174, 467, 230,2454,1093,2134, 755,3541,3397, # 4080 -1141,1162,6182,1738,2039, 270,3256,2513,1005,1647,2185,3837, 858,1679,1897,1719, # 4096 -2954,2324,1806, 402, 670, 167,4129,1498,2158,2104, 750,6183, 915, 189,1680,1551, # 4112 - 455,4356,1501,2455, 405,1095,2955, 338,1586,1266,1819, 570, 641,1324, 237,1556, # 4128 -2650,1388,3723,6184,1368,2384,1343,1978,3089,2436, 879,3724, 792,1191, 758,3012, # 4144 -1411,2135,1322,4357, 240,4667,1848,3725,1574,6185, 420,3045,1546,1391, 714,4358, # 4160 -1967, 941,1864, 863, 664, 426, 560,1731,2680,1785,2864,1949,2363, 403,3330,1415, # 4176 -1279,2136,1697,2335, 204, 721,2097,3838, 90,6186,2085,2505, 191,3967, 124,2148, # 4192 -1376,1798,1178,1107,1898,1405, 860,4359,1243,1272,2375,2983,1558,2456,1638, 113, # 4208 -3621, 578,1923,2609, 880, 386,4130, 784,2186,2266,1422,2956,2172,1722, 497, 263, # 4224 -2514,1267,2412,2610, 177,2703,3542, 774,1927,1344, 616,1432,1595,1018, 172,4360, # 4240 -2325, 911,4361, 438,1468,3622, 794,3968,2024,2173,1681,1829,2957, 945, 895,3090, # 4256 - 575,2212,2476, 475,2401,2681, 785,2744,1745,2293,2555,1975,3133,2865, 394,4668, # 4272 -3839, 635,4131, 639, 202,1507,2195,2766,1345,1435,2572,3726,1908,1184,1181,2457, # 4288 -3727,3134,4362, 843,2611, 437, 916,4669, 234, 769,1884,3046,3047,3623, 833,6187, # 4304 -1639,2250,2402,1355,1185,2010,2047, 999, 525,1732,1290,1488,2612, 948,1578,3728, # 4320 -2413,2477,1216,2725,2159, 334,3840,1328,3624,2921,1525,4132, 564,1056, 891,4363, # 4336 -1444,1698,2385,2251,3729,1365,2281,2235,1717,6188, 864,3841,2515, 444, 527,2767, # 4352 -2922,3625, 544, 461,6189, 566, 209,2437,3398,2098,1065,2068,3331,3626,3257,2137, # 4368 #last 512 -) - - diff --git a/venv/lib/python3.6/site-packages/chardet/jpcntx.py b/venv/lib/python3.6/site-packages/chardet/jpcntx.py deleted file mode 100644 index 20044e4..0000000 --- a/venv/lib/python3.6/site-packages/chardet/jpcntx.py +++ /dev/null @@ -1,233 +0,0 @@ -######################## BEGIN LICENSE BLOCK 
######################## -# The Original Code is Mozilla Communicator client code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - - -# This is hiragana 2-char sequence table, the number in each cell represents its frequency category -jp2CharContext = ( -(0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1), -(2,4,0,4,0,3,0,4,0,3,4,4,4,2,4,3,3,4,3,2,3,3,4,2,3,3,3,2,4,1,4,3,3,1,5,4,3,4,3,4,3,5,3,0,3,5,4,2,0,3,1,0,3,3,0,3,3,0,1,1,0,4,3,0,3,3,0,4,0,2,0,3,5,5,5,5,4,0,4,1,0,3,4), -(0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2), -(0,4,0,5,0,5,0,4,0,4,5,4,4,3,5,3,5,1,5,3,4,3,4,4,3,4,3,3,4,3,5,4,4,3,5,5,3,5,5,5,3,5,5,3,4,5,5,3,1,3,2,0,3,4,0,4,2,0,4,2,1,5,3,2,3,5,0,4,0,2,0,5,4,4,5,4,5,0,4,0,0,4,4), -(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0), -(0,3,0,4,0,3,0,3,0,4,5,4,3,3,3,3,4,3,5,4,4,3,5,4,4,3,4,3,4,4,4,4,5,3,4,4,3,4,5,5,4,5,5,1,4,5,4,3,0,3,3,1,3,3,0,4,4,0,3,3,1,5,3,3,3,5,0,4,0,3,0,4,4,3,4,3,3,0,4,1,1,3,4), -(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0), -(0,4,0,3,0,3,0,4,0,3,4,4,3,2,2,1,2,1,3,1,3,3,3,3,3,4,3,1,3,3,5,3,3,0,4,3,0,5,4,3,3,5,4,4,3,4,4,5,0,1,2,0,1,2,0,2,2,0,1,0,0,5,2,2,1,4,0,3,0,1,0,4,4,3,5,4,3,0,2,1,0,4,3), -(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0), -(0,3,0,5,0,4,0,2,1,4,4,2,4,1,4,2,4,2,4,3,3,3,4,3,3,3,3,1,4,2,3,3,3,1,4,4,1,1,1,4,3,3,2,0,2,4,3,2,0,3,3,0,3,1,1,0,0,0,3,3,0,4,2,2,3,4,0,4,0,3,0,4,4,5,3,4,4,0,3,0,0,1,4), -(1,4,0,4,0,4,0,4,0,3,5,4,4,3,4,3,5,4,3,3,4,3,5,4,4,4,4,3,4,2,4,3,3,1,5,4,3,2,4,5,4,5,5,4,4,5,4,4,0,3,2,2,3,3,0,4,3,1,3,2,1,4,3,3,4,5,0,3,0,2,0,4,5,5,4,5,4,0,4,0,0,5,4), -(0,5,0,5,0,4,0,3,0,4,4,3,4,3,3,3,4,0,4,4,4,3,4,3,4,3,3,1,4,2,4,3,4,0,5,4,1,4,5,4,4,5,3,2,4,3,4,3,2,4,1,3,3,3,2,3,2,0,4,3,3,4,3,3,3,4,0,4,0,3,0,4,5,4,4,4,3,0,4,1,0,1,3), -(0,3,1,4,0,3,0,2,0,3,4,4,3,1,4,2,3,3,4,3,4,3,4,3,4,4,3,2,3,1,5,4,4,1,4,4,3,5,4,4,3,5,5,4,3,4,4,3,1,2,3,1,2,2,0,3,2,0,3,1,0,5,3,3,3,4,3,3,3,3,4,4,4,4,5,4,2,0,3,3,2,4,3), 
-(0,2,0,3,0,1,0,1,0,0,3,2,0,0,2,0,1,0,2,1,3,3,3,1,2,3,1,0,1,0,4,2,1,1,3,3,0,4,3,3,1,4,3,3,0,3,3,2,0,0,0,0,1,0,0,2,0,0,0,0,0,4,1,0,2,3,2,2,2,1,3,3,3,4,4,3,2,0,3,1,0,3,3), -(0,4,0,4,0,3,0,3,0,4,4,4,3,3,3,3,3,3,4,3,4,2,4,3,4,3,3,2,4,3,4,5,4,1,4,5,3,5,4,5,3,5,4,0,3,5,5,3,1,3,3,2,2,3,0,3,4,1,3,3,2,4,3,3,3,4,0,4,0,3,0,4,5,4,4,5,3,0,4,1,0,3,4), -(0,2,0,3,0,3,0,0,0,2,2,2,1,0,1,0,0,0,3,0,3,0,3,0,1,3,1,0,3,1,3,3,3,1,3,3,3,0,1,3,1,3,4,0,0,3,1,1,0,3,2,0,0,0,0,1,3,0,1,0,0,3,3,2,0,3,0,0,0,0,0,3,4,3,4,3,3,0,3,0,0,2,3), -(2,3,0,3,0,2,0,1,0,3,3,4,3,1,3,1,1,1,3,1,4,3,4,3,3,3,0,0,3,1,5,4,3,1,4,3,2,5,5,4,4,4,4,3,3,4,4,4,0,2,1,1,3,2,0,1,2,0,0,1,0,4,1,3,3,3,0,3,0,1,0,4,4,4,5,5,3,0,2,0,0,4,4), -(0,2,0,1,0,3,1,3,0,2,3,3,3,0,3,1,0,0,3,0,3,2,3,1,3,2,1,1,0,0,4,2,1,0,2,3,1,4,3,2,0,4,4,3,1,3,1,3,0,1,0,0,1,0,0,0,1,0,0,0,0,4,1,1,1,2,0,3,0,0,0,3,4,2,4,3,2,0,1,0,0,3,3), -(0,1,0,4,0,5,0,4,0,2,4,4,2,3,3,2,3,3,5,3,3,3,4,3,4,2,3,0,4,3,3,3,4,1,4,3,2,1,5,5,3,4,5,1,3,5,4,2,0,3,3,0,1,3,0,4,2,0,1,3,1,4,3,3,3,3,0,3,0,1,0,3,4,4,4,5,5,0,3,0,1,4,5), -(0,2,0,3,0,3,0,0,0,2,3,1,3,0,4,0,1,1,3,0,3,4,3,2,3,1,0,3,3,2,3,1,3,0,2,3,0,2,1,4,1,2,2,0,0,3,3,0,0,2,0,0,0,1,0,0,0,0,2,2,0,3,2,1,3,3,0,2,0,2,0,0,3,3,1,2,4,0,3,0,2,2,3), -(2,4,0,5,0,4,0,4,0,2,4,4,4,3,4,3,3,3,1,2,4,3,4,3,4,4,5,0,3,3,3,3,2,0,4,3,1,4,3,4,1,4,4,3,3,4,4,3,1,2,3,0,4,2,0,4,1,0,3,3,0,4,3,3,3,4,0,4,0,2,0,3,5,3,4,5,2,0,3,0,0,4,5), -(0,3,0,4,0,1,0,1,0,1,3,2,2,1,3,0,3,0,2,0,2,0,3,0,2,0,0,0,1,0,1,1,0,0,3,1,0,0,0,4,0,3,1,0,2,1,3,0,0,0,0,0,0,3,0,0,0,0,0,0,0,4,2,2,3,1,0,3,0,0,0,1,4,4,4,3,0,0,4,0,0,1,4), -(1,4,1,5,0,3,0,3,0,4,5,4,4,3,5,3,3,4,4,3,4,1,3,3,3,3,2,1,4,1,5,4,3,1,4,4,3,5,4,4,3,5,4,3,3,4,4,4,0,3,3,1,2,3,0,3,1,0,3,3,0,5,4,4,4,4,4,4,3,3,5,4,4,3,3,5,4,0,3,2,0,4,4), -(0,2,0,3,0,1,0,0,0,1,3,3,3,2,4,1,3,0,3,1,3,0,2,2,1,1,0,0,2,0,4,3,1,0,4,3,0,4,4,4,1,4,3,1,1,3,3,1,0,2,0,0,1,3,0,0,0,0,2,0,0,4,3,2,4,3,5,4,3,3,3,4,3,3,4,3,3,0,2,1,0,3,3), -(0,2,0,4,0,3,0,2,0,2,5,5,3,4,4,4,4,1,4,3,3,0,4,3,4,3,1,3,3,2,4,3,0,3,4,3,0,3,4,4,2,4,4,0,4,5,3,3,2,2,1,1,1,2,0,1,5,0,3,3,2,4,3,3,3,4,0,3,0,2,0,4,4,3,5,5,0,0,3,0,2,3,3), -(0,3,0,4,0,3,0,1,0,3,4,3,3,1,3,3,3,0,3,1,3,0,4,3,3,1,1,0,3,0,3,3,0,0,4,4,0,1,5,4,3,3,5,0,3,3,4,3,0,2,0,1,1,1,0,1,3,0,1,2,1,3,3,2,3,3,0,3,0,1,0,1,3,3,4,4,1,0,1,2,2,1,3), -(0,1,0,4,0,4,0,3,0,1,3,3,3,2,3,1,1,0,3,0,3,3,4,3,2,4,2,0,1,0,4,3,2,0,4,3,0,5,3,3,2,4,4,4,3,3,3,4,0,1,3,0,0,1,0,0,1,0,0,0,0,4,2,3,3,3,0,3,0,0,0,4,4,4,5,3,2,0,3,3,0,3,5), -(0,2,0,3,0,0,0,3,0,1,3,0,2,0,0,0,1,0,3,1,1,3,3,0,0,3,0,0,3,0,2,3,1,0,3,1,0,3,3,2,0,4,2,2,0,2,0,0,0,4,0,0,0,0,0,0,0,0,0,0,0,2,1,2,0,1,0,1,0,0,0,1,3,1,2,0,0,0,1,0,0,1,4), -(0,3,0,3,0,5,0,1,0,2,4,3,1,3,3,2,1,1,5,2,1,0,5,1,2,0,0,0,3,3,2,2,3,2,4,3,0,0,3,3,1,3,3,0,2,5,3,4,0,3,3,0,1,2,0,2,2,0,3,2,0,2,2,3,3,3,0,2,0,1,0,3,4,4,2,5,4,0,3,0,0,3,5), -(0,3,0,3,0,3,0,1,0,3,3,3,3,0,3,0,2,0,2,1,1,0,2,0,1,0,0,0,2,1,0,0,1,0,3,2,0,0,3,3,1,2,3,1,0,3,3,0,0,1,0,0,0,0,0,2,0,0,0,0,0,2,3,1,2,3,0,3,0,1,0,3,2,1,0,4,3,0,1,1,0,3,3), -(0,4,0,5,0,3,0,3,0,4,5,5,4,3,5,3,4,3,5,3,3,2,5,3,4,4,4,3,4,3,4,5,5,3,4,4,3,4,4,5,4,4,4,3,4,5,5,4,2,3,4,2,3,4,0,3,3,1,4,3,2,4,3,3,5,5,0,3,0,3,0,5,5,5,5,4,4,0,4,0,1,4,4), -(0,4,0,4,0,3,0,3,0,3,5,4,4,2,3,2,5,1,3,2,5,1,4,2,3,2,3,3,4,3,3,3,3,2,5,4,1,3,3,5,3,4,4,0,4,4,3,1,1,3,1,0,2,3,0,2,3,0,3,0,0,4,3,1,3,4,0,3,0,2,0,4,4,4,3,4,5,0,4,0,0,3,4), -(0,3,0,3,0,3,1,2,0,3,4,4,3,3,3,0,2,2,4,3,3,1,3,3,3,1,1,0,3,1,4,3,2,3,4,4,2,4,4,4,3,4,4,3,2,4,4,3,1,3,3,1,3,3,0,4,1,0,2,2,1,4,3,2,3,3,5,4,3,3,5,4,4,3,3,0,4,0,3,2,2,4,4), 
-(0,2,0,1,0,0,0,0,0,1,2,1,3,0,0,0,0,0,2,0,1,2,1,0,0,1,0,0,0,0,3,0,0,1,0,1,1,3,1,0,0,0,1,1,0,1,1,0,0,0,0,0,2,0,0,0,0,0,0,0,0,1,1,2,2,0,3,4,0,0,0,1,1,0,0,1,0,0,0,0,0,1,1), -(0,1,0,0,0,1,0,0,0,0,4,0,4,1,4,0,3,0,4,0,3,0,4,0,3,0,3,0,4,1,5,1,4,0,0,3,0,5,0,5,2,0,1,0,0,0,2,1,4,0,1,3,0,0,3,0,0,3,1,1,4,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0), -(1,4,0,5,0,3,0,2,0,3,5,4,4,3,4,3,5,3,4,3,3,0,4,3,3,3,3,3,3,2,4,4,3,1,3,4,4,5,4,4,3,4,4,1,3,5,4,3,3,3,1,2,2,3,3,1,3,1,3,3,3,5,3,3,4,5,0,3,0,3,0,3,4,3,4,4,3,0,3,0,2,4,3), -(0,1,0,4,0,0,0,0,0,1,4,0,4,1,4,2,4,0,3,0,1,0,1,0,0,0,0,0,2,0,3,1,1,1,0,3,0,0,0,1,2,1,0,0,1,1,1,1,0,1,0,0,0,1,0,0,3,0,0,0,0,3,2,0,2,2,0,1,0,0,0,2,3,2,3,3,0,0,0,0,2,1,0), -(0,5,1,5,0,3,0,3,0,5,4,4,5,1,5,3,3,0,4,3,4,3,5,3,4,3,3,2,4,3,4,3,3,0,3,3,1,4,4,3,4,4,4,3,4,5,5,3,2,3,1,1,3,3,1,3,1,1,3,3,2,4,5,3,3,5,0,4,0,3,0,4,4,3,5,3,3,0,3,4,0,4,3), -(0,5,0,5,0,3,0,2,0,4,4,3,5,2,4,3,3,3,4,4,4,3,5,3,5,3,3,1,4,0,4,3,3,0,3,3,0,4,4,4,4,5,4,3,3,5,5,3,2,3,1,2,3,2,0,1,0,0,3,2,2,4,4,3,1,5,0,4,0,3,0,4,3,1,3,2,1,0,3,3,0,3,3), -(0,4,0,5,0,5,0,4,0,4,5,5,5,3,4,3,3,2,5,4,4,3,5,3,5,3,4,0,4,3,4,4,3,2,4,4,3,4,5,4,4,5,5,0,3,5,5,4,1,3,3,2,3,3,1,3,1,0,4,3,1,4,4,3,4,5,0,4,0,2,0,4,3,4,4,3,3,0,4,0,0,5,5), -(0,4,0,4,0,5,0,1,1,3,3,4,4,3,4,1,3,0,5,1,3,0,3,1,3,1,1,0,3,0,3,3,4,0,4,3,0,4,4,4,3,4,4,0,3,5,4,1,0,3,0,0,2,3,0,3,1,0,3,1,0,3,2,1,3,5,0,3,0,1,0,3,2,3,3,4,4,0,2,2,0,4,4), -(2,4,0,5,0,4,0,3,0,4,5,5,4,3,5,3,5,3,5,3,5,2,5,3,4,3,3,4,3,4,5,3,2,1,5,4,3,2,3,4,5,3,4,1,2,5,4,3,0,3,3,0,3,2,0,2,3,0,4,1,0,3,4,3,3,5,0,3,0,1,0,4,5,5,5,4,3,0,4,2,0,3,5), -(0,5,0,4,0,4,0,2,0,5,4,3,4,3,4,3,3,3,4,3,4,2,5,3,5,3,4,1,4,3,4,4,4,0,3,5,0,4,4,4,4,5,3,1,3,4,5,3,3,3,3,3,3,3,0,2,2,0,3,3,2,4,3,3,3,5,3,4,1,3,3,5,3,2,0,0,0,0,4,3,1,3,3), -(0,1,0,3,0,3,0,1,0,1,3,3,3,2,3,3,3,0,3,0,0,0,3,1,3,0,0,0,2,2,2,3,0,0,3,2,0,1,2,4,1,3,3,0,0,3,3,3,0,1,0,0,2,1,0,0,3,0,3,1,0,3,0,0,1,3,0,2,0,1,0,3,3,1,3,3,0,0,1,1,0,3,3), -(0,2,0,3,0,2,1,4,0,2,2,3,1,1,3,1,1,0,2,0,3,1,2,3,1,3,0,0,1,0,4,3,2,3,3,3,1,4,2,3,3,3,3,1,0,3,1,4,0,1,1,0,1,2,0,1,1,0,1,1,0,3,1,3,2,2,0,1,0,0,0,2,3,3,3,1,0,0,0,0,0,2,3), -(0,5,0,4,0,5,0,2,0,4,5,5,3,3,4,3,3,1,5,4,4,2,4,4,4,3,4,2,4,3,5,5,4,3,3,4,3,3,5,5,4,5,5,1,3,4,5,3,1,4,3,1,3,3,0,3,3,1,4,3,1,4,5,3,3,5,0,4,0,3,0,5,3,3,1,4,3,0,4,0,1,5,3), -(0,5,0,5,0,4,0,2,0,4,4,3,4,3,3,3,3,3,5,4,4,4,4,4,4,5,3,3,5,2,4,4,4,3,4,4,3,3,4,4,5,5,3,3,4,3,4,3,3,4,3,3,3,3,1,2,2,1,4,3,3,5,4,4,3,4,0,4,0,3,0,4,4,4,4,4,1,0,4,2,0,2,4), -(0,4,0,4,0,3,0,1,0,3,5,2,3,0,3,0,2,1,4,2,3,3,4,1,4,3,3,2,4,1,3,3,3,0,3,3,0,0,3,3,3,5,3,3,3,3,3,2,0,2,0,0,2,0,0,2,0,0,1,0,0,3,1,2,2,3,0,3,0,2,0,4,4,3,3,4,1,0,3,0,0,2,4), -(0,0,0,4,0,0,0,0,0,0,1,0,1,0,2,0,0,0,0,0,1,0,2,0,1,0,0,0,0,0,3,1,3,0,3,2,0,0,0,1,0,3,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,4,0,2,0,0,0,0,0,0,2), -(0,2,1,3,0,2,0,2,0,3,3,3,3,1,3,1,3,3,3,3,3,3,4,2,2,1,2,1,4,0,4,3,1,3,3,3,2,4,3,5,4,3,3,3,3,3,3,3,0,1,3,0,2,0,0,1,0,0,1,0,0,4,2,0,2,3,0,3,3,0,3,3,4,2,3,1,4,0,1,2,0,2,3), -(0,3,0,3,0,1,0,3,0,2,3,3,3,0,3,1,2,0,3,3,2,3,3,2,3,2,3,1,3,0,4,3,2,0,3,3,1,4,3,3,2,3,4,3,1,3,3,1,1,0,1,1,0,1,0,1,0,1,0,0,0,4,1,1,0,3,0,3,1,0,2,3,3,3,3,3,1,0,0,2,0,3,3), -(0,0,0,0,0,0,0,0,0,0,3,0,2,0,3,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,3,0,3,0,3,1,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,2,0,2,3,0,0,0,0,0,0,0,0,3), -(0,2,0,3,1,3,0,3,0,2,3,3,3,1,3,1,3,1,3,1,3,3,3,1,3,0,2,3,1,1,4,3,3,2,3,3,1,2,2,4,1,3,3,0,1,4,2,3,0,1,3,0,3,0,0,1,3,0,2,0,0,3,3,2,1,3,0,3,0,2,0,3,4,4,4,3,1,0,3,0,0,3,3), 
-(0,2,0,1,0,2,0,0,0,1,3,2,2,1,3,0,1,1,3,0,3,2,3,1,2,0,2,0,1,1,3,3,3,0,3,3,1,1,2,3,2,3,3,1,2,3,2,0,0,1,0,0,0,0,0,0,3,0,1,0,0,2,1,2,1,3,0,3,0,0,0,3,4,4,4,3,2,0,2,0,0,2,4), -(0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,2,2,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,3,1,0,0,0,0,0,0,0,3), -(0,3,0,3,0,2,0,3,0,3,3,3,2,3,2,2,2,0,3,1,3,3,3,2,3,3,0,0,3,0,3,2,2,0,2,3,1,4,3,4,3,3,2,3,1,5,4,4,0,3,1,2,1,3,0,3,1,1,2,0,2,3,1,3,1,3,0,3,0,1,0,3,3,4,4,2,1,0,2,1,0,2,4), -(0,1,0,3,0,1,0,2,0,1,4,2,5,1,4,0,2,0,2,1,3,1,4,0,2,1,0,0,2,1,4,1,1,0,3,3,0,5,1,3,2,3,3,1,0,3,2,3,0,1,0,0,0,0,0,0,1,0,0,0,0,4,0,1,0,3,0,2,0,1,0,3,3,3,4,3,3,0,0,0,0,2,3), -(0,0,0,1,0,0,0,0,0,0,2,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,1,0,0,1,0,0,0,0,0,3), -(0,1,0,3,0,4,0,3,0,2,4,3,1,0,3,2,2,1,3,1,2,2,3,1,1,1,2,1,3,0,1,2,0,1,3,2,1,3,0,5,5,1,0,0,1,3,2,1,0,3,0,0,1,0,0,0,0,0,3,4,0,1,1,1,3,2,0,2,0,1,0,2,3,3,1,2,3,0,1,0,1,0,4), -(0,0,0,1,0,3,0,3,0,2,2,1,0,0,4,0,3,0,3,1,3,0,3,0,3,0,1,0,3,0,3,1,3,0,3,3,0,0,1,2,1,1,1,0,1,2,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,2,2,1,2,0,0,2,0,0,0,0,2,3,3,3,3,0,0,0,0,1,4), -(0,0,0,3,0,3,0,0,0,0,3,1,1,0,3,0,1,0,2,0,1,0,0,0,0,0,0,0,1,0,3,0,2,0,2,3,0,0,2,2,3,1,2,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,2,0,0,0,0,2,3), -(2,4,0,5,0,5,0,4,0,3,4,3,3,3,4,3,3,3,4,3,4,4,5,4,5,5,5,2,3,0,5,5,4,1,5,4,3,1,5,4,3,4,4,3,3,4,3,3,0,3,2,0,2,3,0,3,0,0,3,3,0,5,3,2,3,3,0,3,0,3,0,3,4,5,4,5,3,0,4,3,0,3,4), -(0,3,0,3,0,3,0,3,0,3,3,4,3,2,3,2,3,0,4,3,3,3,3,3,3,3,3,0,3,2,4,3,3,1,3,4,3,4,4,4,3,4,4,3,2,4,4,1,0,2,0,0,1,1,0,2,0,0,3,1,0,5,3,2,1,3,0,3,0,1,2,4,3,2,4,3,3,0,3,2,0,4,4), -(0,3,0,3,0,1,0,0,0,1,4,3,3,2,3,1,3,1,4,2,3,2,4,2,3,4,3,0,2,2,3,3,3,0,3,3,3,0,3,4,1,3,3,0,3,4,3,3,0,1,1,0,1,0,0,0,4,0,3,0,0,3,1,2,1,3,0,4,0,1,0,4,3,3,4,3,3,0,2,0,0,3,3), -(0,3,0,4,0,1,0,3,0,3,4,3,3,0,3,3,3,1,3,1,3,3,4,3,3,3,0,0,3,1,5,3,3,1,3,3,2,5,4,3,3,4,5,3,2,5,3,4,0,1,0,0,0,0,0,2,0,0,1,1,0,4,2,2,1,3,0,3,0,2,0,4,4,3,5,3,2,0,1,1,0,3,4), -(0,5,0,4,0,5,0,2,0,4,4,3,3,2,3,3,3,1,4,3,4,1,5,3,4,3,4,0,4,2,4,3,4,1,5,4,0,4,4,4,4,5,4,1,3,5,4,2,1,4,1,1,3,2,0,3,1,0,3,2,1,4,3,3,3,4,0,4,0,3,0,4,4,4,3,3,3,0,4,2,0,3,4), -(1,4,0,4,0,3,0,1,0,3,3,3,1,1,3,3,2,2,3,3,1,0,3,2,2,1,2,0,3,1,2,1,2,0,3,2,0,2,2,3,3,4,3,0,3,3,1,2,0,1,1,3,1,2,0,0,3,0,1,1,0,3,2,2,3,3,0,3,0,0,0,2,3,3,4,3,3,0,1,0,0,1,4), -(0,4,0,4,0,4,0,0,0,3,4,4,3,1,4,2,3,2,3,3,3,1,4,3,4,0,3,0,4,2,3,3,2,2,5,4,2,1,3,4,3,4,3,1,3,3,4,2,0,2,1,0,3,3,0,0,2,0,3,1,0,4,4,3,4,3,0,4,0,1,0,2,4,4,4,4,4,0,3,2,0,3,3), -(0,0,0,1,0,4,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,3,2,0,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,2), -(0,2,0,3,0,4,0,4,0,1,3,3,3,0,4,0,2,1,2,1,1,1,2,0,3,1,1,0,1,0,3,1,0,0,3,3,2,0,1,1,0,0,0,0,0,1,0,2,0,2,2,0,3,1,0,0,1,0,1,1,0,1,2,0,3,0,0,0,0,1,0,0,3,3,4,3,1,0,1,0,3,0,2), -(0,0,0,3,0,5,0,0,0,0,1,0,2,0,3,1,0,1,3,0,0,0,2,0,0,0,1,0,0,0,1,1,0,0,4,0,0,0,2,3,0,1,4,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,1,0,0,0,0,0,0,0,2,0,0,3,0,0,0,0,0,3), -(0,2,0,5,0,5,0,1,0,2,4,3,3,2,5,1,3,2,3,3,3,0,4,1,2,0,3,0,4,0,2,2,1,1,5,3,0,0,1,4,2,3,2,0,3,3,3,2,0,2,4,1,1,2,0,1,1,0,3,1,0,1,3,1,2,3,0,2,0,0,0,1,3,5,4,4,4,0,3,0,0,1,3), -(0,4,0,5,0,4,0,4,0,4,5,4,3,3,4,3,3,3,4,3,4,4,5,3,4,5,4,2,4,2,3,4,3,1,4,4,1,3,5,4,4,5,5,4,4,5,5,5,2,3,3,1,4,3,1,3,3,0,3,3,1,4,3,4,4,4,0,3,0,4,0,3,3,4,4,5,0,0,4,3,0,4,5), 
-(0,4,0,4,0,3,0,3,0,3,4,4,4,3,3,2,4,3,4,3,4,3,5,3,4,3,2,1,4,2,4,4,3,1,3,4,2,4,5,5,3,4,5,4,1,5,4,3,0,3,2,2,3,2,1,3,1,0,3,3,3,5,3,3,3,5,4,4,2,3,3,4,3,3,3,2,1,0,3,2,1,4,3), -(0,4,0,5,0,4,0,3,0,3,5,5,3,2,4,3,4,0,5,4,4,1,4,4,4,3,3,3,4,3,5,5,2,3,3,4,1,2,5,5,3,5,5,2,3,5,5,4,0,3,2,0,3,3,1,1,5,1,4,1,0,4,3,2,3,5,0,4,0,3,0,5,4,3,4,3,0,0,4,1,0,4,4), -(1,3,0,4,0,2,0,2,0,2,5,5,3,3,3,3,3,0,4,2,3,4,4,4,3,4,0,0,3,4,5,4,3,3,3,3,2,5,5,4,5,5,5,4,3,5,5,5,1,3,1,0,1,0,0,3,2,0,4,2,0,5,2,3,2,4,1,3,0,3,0,4,5,4,5,4,3,0,4,2,0,5,4), -(0,3,0,4,0,5,0,3,0,3,4,4,3,2,3,2,3,3,3,3,3,2,4,3,3,2,2,0,3,3,3,3,3,1,3,3,3,0,4,4,3,4,4,1,1,4,4,2,0,3,1,0,1,1,0,4,1,0,2,3,1,3,3,1,3,4,0,3,0,1,0,3,1,3,0,0,1,0,2,0,0,4,4), -(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0), -(0,3,0,3,0,2,0,3,0,1,5,4,3,3,3,1,4,2,1,2,3,4,4,2,4,4,5,0,3,1,4,3,4,0,4,3,3,3,2,3,2,5,3,4,3,2,2,3,0,0,3,0,2,1,0,1,2,0,0,0,0,2,1,1,3,1,0,2,0,4,0,3,4,4,4,5,2,0,2,0,0,1,3), -(0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,1,1,0,0,1,1,0,0,0,4,2,1,1,0,1,0,3,2,0,0,3,1,1,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,1,0,0,0,2,0,0,0,1,4,0,4,2,1,0,0,0,0,0,1), -(0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,0,0,0,1,0,1,0,0,0,0,3,1,0,0,0,2,0,2,1,0,0,1,2,1,0,1,1,0,0,3,0,0,0,0,0,0,0,0,0,0,0,1,3,1,0,0,0,0,0,1,0,0,2,1,0,0,0,0,0,0,0,0,2), -(0,4,0,4,0,4,0,3,0,4,4,3,4,2,4,3,2,0,4,4,4,3,5,3,5,3,3,2,4,2,4,3,4,3,1,4,0,2,3,4,4,4,3,3,3,4,4,4,3,4,1,3,4,3,2,1,2,1,3,3,3,4,4,3,3,5,0,4,0,3,0,4,3,3,3,2,1,0,3,0,0,3,3), -(0,4,0,3,0,3,0,3,0,3,5,5,3,3,3,3,4,3,4,3,3,3,4,4,4,3,3,3,3,4,3,5,3,3,1,3,2,4,5,5,5,5,4,3,4,5,5,3,2,2,3,3,3,3,2,3,3,1,2,3,2,4,3,3,3,4,0,4,0,2,0,4,3,2,2,1,2,0,3,0,0,4,1), -) - -class JapaneseContextAnalysis(object): - NUM_OF_CATEGORY = 6 - DONT_KNOW = -1 - ENOUGH_REL_THRESHOLD = 100 - MAX_REL_THRESHOLD = 1000 - MINIMUM_DATA_THRESHOLD = 4 - - def __init__(self): - self._total_rel = None - self._rel_sample = None - self._need_to_skip_char_num = None - self._last_char_order = None - self._done = None - self.reset() - - def reset(self): - self._total_rel = 0 # total sequence received - # category counters, each integer counts sequence in its category - self._rel_sample = [0] * self.NUM_OF_CATEGORY - # if last byte in current buffer is not the last byte of a character, - # we need to know how many bytes to skip in next buffer - self._need_to_skip_char_num = 0 - self._last_char_order = -1 # The order of previous char - # If this flag is set to True, detection is done and conclusion has - # been made - self._done = False - - def feed(self, byte_str, num_bytes): - if self._done: - return - - # The buffer we got is byte oriented, and a character may span in more than one - # buffers. In case the last one or two byte in last buffer is not - # complete, we record how many byte needed to complete that character - # and skip these bytes here. We can choose to record those bytes as - # well and analyse the character once it is complete, but since a - # character will not make much difference, by simply skipping - # this character will simply our logic and improve performance. 
- i = self._need_to_skip_char_num - while i < num_bytes: - order, char_len = self.get_order(byte_str[i:i + 2]) - i += char_len - if i > num_bytes: - self._need_to_skip_char_num = i - num_bytes - self._last_char_order = -1 - else: - if (order != -1) and (self._last_char_order != -1): - self._total_rel += 1 - if self._total_rel > self.MAX_REL_THRESHOLD: - self._done = True - break - self._rel_sample[jp2CharContext[self._last_char_order][order]] += 1 - self._last_char_order = order - - def got_enough_data(self): - return self._total_rel > self.ENOUGH_REL_THRESHOLD - - def get_confidence(self): - # This is just one way to calculate confidence. It works well for me. - if self._total_rel > self.MINIMUM_DATA_THRESHOLD: - return (self._total_rel - self._rel_sample[0]) / self._total_rel - else: - return self.DONT_KNOW - - def get_order(self, byte_str): - return -1, 1 - -class SJISContextAnalysis(JapaneseContextAnalysis): - def __init__(self): - super(SJISContextAnalysis, self).__init__() - self._charset_name = "SHIFT_JIS" - - @property - def charset_name(self): - return self._charset_name - - def get_order(self, byte_str): - if not byte_str: - return -1, 1 - # find out current char's byte length - first_char = byte_str[0] - if (0x81 <= first_char <= 0x9F) or (0xE0 <= first_char <= 0xFC): - char_len = 2 - if (first_char == 0x87) or (0xFA <= first_char <= 0xFC): - self._charset_name = "CP932" - else: - char_len = 1 - - # return its order if it is hiragana - if len(byte_str) > 1: - second_char = byte_str[1] - if (first_char == 202) and (0x9F <= second_char <= 0xF1): - return second_char - 0x9F, char_len - - return -1, char_len - -class EUCJPContextAnalysis(JapaneseContextAnalysis): - def get_order(self, byte_str): - if not byte_str: - return -1, 1 - # find out current char's byte length - first_char = byte_str[0] - if (first_char == 0x8E) or (0xA1 <= first_char <= 0xFE): - char_len = 2 - elif first_char == 0x8F: - char_len = 3 - else: - char_len = 1 - - # return its order if it is hiragana - if len(byte_str) > 1: - second_char = byte_str[1] - if (first_char == 0xA4) and (0xA1 <= second_char <= 0xF3): - return second_char - 0xA1, char_len - - return -1, char_len - - diff --git a/venv/lib/python3.6/site-packages/chardet/langbulgarianmodel.py b/venv/lib/python3.6/site-packages/chardet/langbulgarianmodel.py deleted file mode 100644 index 2aa4fb2..0000000 --- a/venv/lib/python3.6/site-packages/chardet/langbulgarianmodel.py +++ /dev/null @@ -1,228 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Communicator client code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -# 255: Control characters that usually does not exist in any text -# 254: Carriage/Return -# 253: symbol (punctuation) that does not belong to word -# 252: 0 - 9 - -# Character Mapping Table: -# this table is modified base on win1251BulgarianCharToOrderMap, so -# only number <64 is sure valid - -Latin5_BulgarianCharToOrderMap = ( -255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 -253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 -252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 -253, 77, 90, 99,100, 72,109,107,101, 79,185, 81,102, 76, 94, 82, # 40 -110,186,108, 91, 74,119, 84, 96,111,187,115,253,253,253,253,253, # 50 -253, 65, 69, 70, 66, 63, 68,112,103, 92,194,104, 95, 86, 87, 71, # 60 -116,195, 85, 93, 97,113,196,197,198,199,200,253,253,253,253,253, # 70 -194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209, # 80 -210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225, # 90 - 81,226,227,228,229,230,105,231,232,233,234,235,236, 45,237,238, # a0 - 31, 32, 35, 43, 37, 44, 55, 47, 40, 59, 33, 46, 38, 36, 41, 30, # b0 - 39, 28, 34, 51, 48, 49, 53, 50, 54, 57, 61,239, 67,240, 60, 56, # c0 - 1, 18, 9, 20, 11, 3, 23, 15, 2, 26, 12, 10, 14, 6, 4, 13, # d0 - 7, 8, 5, 19, 29, 25, 22, 21, 27, 24, 17, 75, 52,241, 42, 16, # e0 - 62,242,243,244, 58,245, 98,246,247,248,249,250,251, 91,252,253, # f0 -) - -win1251BulgarianCharToOrderMap = ( -255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 -253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 -252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 -253, 77, 90, 99,100, 72,109,107,101, 79,185, 81,102, 76, 94, 82, # 40 -110,186,108, 91, 74,119, 84, 96,111,187,115,253,253,253,253,253, # 50 -253, 65, 69, 70, 66, 63, 68,112,103, 92,194,104, 95, 86, 87, 71, # 60 -116,195, 85, 93, 97,113,196,197,198,199,200,253,253,253,253,253, # 70 -206,207,208,209,210,211,212,213,120,214,215,216,217,218,219,220, # 80 -221, 78, 64, 83,121, 98,117,105,222,223,224,225,226,227,228,229, # 90 - 88,230,231,232,233,122, 89,106,234,235,236,237,238, 45,239,240, # a0 - 73, 80,118,114,241,242,243,244,245, 62, 58,246,247,248,249,250, # b0 - 31, 32, 35, 43, 37, 44, 55, 47, 40, 59, 33, 46, 38, 36, 41, 30, # c0 - 39, 28, 34, 51, 48, 49, 53, 50, 54, 57, 61,251, 67,252, 60, 56, # d0 - 1, 18, 9, 20, 11, 3, 23, 15, 2, 26, 12, 10, 14, 6, 4, 13, # e0 - 7, 8, 5, 19, 29, 25, 22, 21, 27, 24, 17, 75, 52,253, 42, 16, # f0 -) - -# Model Table: -# total sequences: 100% -# first 512 sequences: 96.9392% -# first 1024 sequences:3.0618% -# rest sequences: 0.2992% -# negative sequences: 0.0020% -BulgarianLangModel = ( -0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,2,3,3,3,3,3, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,0,3,3,3,2,2,3,2,2,1,2,2, -3,1,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,0,3,3,3,3,3,3,3,3,3,3,0,3,0,1, -0,0,0,0,0,0,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,2,3,3,3,3,3,3,3,3,0,3,1,0, -0,1,0,0,0,0,0,0,0,0,1,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, 
-3,2,2,2,3,3,3,3,3,3,3,3,3,3,3,3,3,1,3,2,3,3,3,3,3,3,3,3,0,3,0,0, -0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,2,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,1,3,2,3,3,3,3,3,3,3,3,0,3,0,0, -0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,3,3,3,3,3,2,3,2,2,1,3,3,3,3,2,2,2,1,1,2,0,1,0,1,0,0, -0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1, -3,3,3,3,3,3,3,2,3,2,2,3,3,1,1,2,3,3,2,3,3,3,3,2,1,2,0,2,0,3,0,0, -0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1, -3,3,3,3,3,3,3,1,3,3,3,3,3,2,3,2,3,3,3,3,3,2,3,3,1,3,0,3,0,2,0,0, -0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, -3,3,3,3,3,3,3,3,1,3,3,2,3,3,3,1,3,3,2,3,2,2,2,0,0,2,0,2,0,2,0,0, -0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1, -3,3,3,3,3,3,3,3,3,0,3,3,3,2,2,3,3,3,1,2,2,3,2,1,1,2,0,2,0,0,0,0, -1,0,0,0,0,0,0,0,0,0,2,0,0,1,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, -3,3,3,3,3,3,3,2,3,3,1,2,3,2,2,2,3,3,3,3,3,2,2,3,1,2,0,2,1,2,0,0, -0,0,0,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1, -3,3,3,3,3,1,3,3,3,3,3,2,3,3,3,2,3,3,2,3,2,2,2,3,1,2,0,1,0,1,0,0, -0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, -3,3,3,3,3,3,3,3,3,3,3,1,1,1,2,2,1,3,1,3,2,2,3,0,0,1,0,1,0,1,0,0, -0,0,0,1,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, -3,3,3,3,3,2,2,3,2,2,3,1,2,1,1,1,2,3,1,3,1,2,2,0,1,1,1,1,0,1,0,0, -0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, -3,3,3,3,3,1,3,2,2,3,3,1,2,3,1,1,3,3,3,3,1,2,2,1,1,1,0,2,0,2,0,1, -0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,2,2,3,3,3,2,2,1,1,2,0,2,0,1,0,0, -0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, -3,0,1,2,1,3,3,2,3,3,3,3,3,2,3,2,1,0,3,1,2,1,2,1,2,3,2,1,0,1,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,1,1,2,3,3,3,3,3,3,3,3,3,3,3,3,0,0,3,1,3,3,2,3,3,2,2,2,0,1,0,0, -0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,3,3,3,3,0,3,3,3,3,3,2,1,1,2,1,3,3,0,3,1,1,1,1,3,2,0,1,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, -3,3,2,2,2,3,3,3,3,3,3,3,3,3,3,3,1,1,3,1,3,3,2,3,2,2,2,3,0,2,0,0, -0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,2,3,3,2,2,3,2,1,1,1,1,1,3,1,3,1,1,0,0,0,1,0,0,0,1,0,0, -0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,2,3,2,0,3,2,0,3,0,2,0,0,2,1,3,1,0,0,1,0,0,0,1,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, -3,3,3,3,2,1,1,1,1,2,1,1,2,1,1,1,2,2,1,2,1,1,1,0,1,1,0,1,0,1,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, -3,3,3,3,2,1,3,1,1,2,1,3,2,1,1,0,1,2,3,2,1,1,1,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,3,3,3,3,2,2,1,0,1,0,0,1,0,0,0,2,1,0,3,0,0,1,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, -3,3,3,2,3,2,3,3,1,3,2,1,1,1,2,1,1,2,1,3,0,1,0,0,0,1,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,1,1,2,2,3,3,2,3,2,2,2,3,1,2,2,1,1,2,1,1,2,2,0,1,1,0,1,0,2,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,2,1,3,1,0,2,2,1,3,2,1,0,0,2,0,2,0,1,0,0,0,0,0,0,0,1,0,0, -0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, -3,3,3,3,3,3,1,2,0,2,3,1,2,3,2,0,1,3,1,2,1,1,1,0,0,1,0,0,2,2,2,3, -2,2,2,2,1,2,1,1,2,2,1,1,2,0,1,1,1,0,0,1,1,0,0,1,1,0,0,0,1,1,0,1, -3,3,3,3,3,2,1,2,2,1,2,0,2,0,1,0,1,2,1,2,1,1,0,0,0,1,0,1,0,0,0,0, 
-0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1, -3,3,2,3,3,1,1,3,1,0,3,2,1,0,0,0,1,2,0,2,0,1,0,0,0,1,0,1,2,1,2,2, -1,1,1,1,1,1,1,2,2,2,1,1,1,1,1,1,1,0,1,2,1,1,1,0,0,0,0,0,1,1,0,0, -3,1,0,1,0,2,3,2,2,2,3,2,2,2,2,2,1,0,2,1,2,1,1,1,0,1,2,1,2,2,2,1, -1,1,2,2,2,2,1,2,1,1,0,1,2,1,2,2,2,1,1,1,0,1,1,1,1,2,0,1,0,0,0,0, -2,3,2,3,3,0,0,2,1,0,2,1,0,0,0,0,2,3,0,2,0,0,0,0,0,1,0,0,2,0,1,2, -2,1,2,1,2,2,1,1,1,2,1,1,1,0,1,2,2,1,1,1,1,1,0,1,1,1,0,0,1,2,0,0, -3,3,2,2,3,0,2,3,1,1,2,0,0,0,1,0,0,2,0,2,0,0,0,1,0,1,0,1,2,0,2,2, -1,1,1,1,2,1,0,1,2,2,2,1,1,1,1,1,1,1,0,1,1,1,0,0,0,0,0,0,1,1,0,0, -2,3,2,3,3,0,0,3,0,1,1,0,1,0,0,0,2,2,1,2,0,0,0,0,0,0,0,0,2,0,1,2, -2,2,1,1,1,1,1,2,2,2,1,0,2,0,1,0,1,0,0,1,0,1,0,0,1,0,0,0,0,1,0,0, -3,3,3,3,2,2,2,2,2,0,2,1,1,1,1,2,1,2,1,1,0,2,0,1,0,1,0,0,2,0,1,2, -1,1,1,1,1,1,1,2,2,1,1,0,2,0,1,0,2,0,0,1,1,1,0,0,2,0,0,0,1,1,0,0, -2,3,3,3,3,1,0,0,0,0,0,0,0,0,0,0,2,0,0,1,1,0,0,0,0,0,0,1,2,0,1,2, -2,2,2,1,1,2,1,1,2,2,2,1,2,0,1,1,1,1,1,1,0,1,1,1,1,0,0,1,1,1,0,0, -2,3,3,3,3,0,2,2,0,2,1,0,0,0,1,1,1,2,0,2,0,0,0,3,0,0,0,0,2,0,2,2, -1,1,1,2,1,2,1,1,2,2,2,1,2,0,1,1,1,0,1,1,1,1,0,2,1,0,0,0,1,1,0,0, -2,3,3,3,3,0,2,1,0,0,2,0,0,0,0,0,1,2,0,2,0,0,0,0,0,0,0,0,2,0,1,2, -1,1,1,2,1,1,1,1,2,2,2,0,1,0,1,1,1,0,0,1,1,1,0,0,1,0,0,0,0,1,0,0, -3,3,2,2,3,0,1,0,1,0,0,0,0,0,0,0,1,1,0,3,0,0,0,0,0,0,0,0,1,0,2,2, -1,1,1,1,1,2,1,1,2,2,1,2,2,1,0,1,1,1,1,1,0,1,0,0,1,0,0,0,1,1,0,0, -3,1,0,1,0,2,2,2,2,3,2,1,1,1,2,3,0,0,1,0,2,1,1,0,1,1,1,1,2,1,1,1, -1,2,2,1,2,1,2,2,1,1,0,1,2,1,2,2,1,1,1,0,0,1,1,1,2,1,0,1,0,0,0,0, -2,1,0,1,0,3,1,2,2,2,2,1,2,2,1,1,1,0,2,1,2,2,1,1,2,1,1,0,2,1,1,1, -1,2,2,2,2,2,2,2,1,2,0,1,1,0,2,1,1,1,1,1,0,0,1,1,1,1,0,1,0,0,0,0, -2,1,1,1,1,2,2,2,2,1,2,2,2,1,2,2,1,1,2,1,2,3,2,2,1,1,1,1,0,1,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,2,2,3,2,0,1,2,0,1,2,1,1,0,1,0,1,2,1,2,0,0,0,1,1,0,0,0,1,0,0,2, -1,1,0,0,1,1,0,1,1,1,1,0,2,0,1,1,1,0,0,1,1,0,0,0,0,1,0,0,0,1,0,0, -2,0,0,0,0,1,2,2,2,2,2,2,2,1,2,1,1,1,1,1,1,1,0,1,1,1,1,1,2,1,1,1, -1,2,2,2,2,1,1,2,1,2,1,1,1,0,2,1,2,1,1,1,0,2,1,1,1,1,0,1,0,0,0,0, -3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0, -1,1,0,1,0,1,1,1,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,2,2,3,2,0,0,0,0,1,0,0,0,0,0,0,1,1,0,2,0,0,0,0,0,0,0,0,1,0,1,2, -1,1,1,1,1,1,0,0,2,2,2,2,2,0,1,1,0,1,1,1,1,1,0,0,1,0,0,0,1,1,0,1, -2,3,1,2,1,0,1,1,0,2,2,2,0,0,1,0,0,1,1,1,1,0,0,0,0,0,0,0,1,0,1,2, -1,1,1,1,2,1,1,1,1,1,1,1,1,0,1,1,0,1,0,1,0,1,0,0,1,0,0,0,0,1,0,0, -2,2,2,2,2,0,0,2,0,0,2,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,2,0,2,2, -1,1,1,1,1,0,0,1,2,1,1,0,1,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0, -1,2,2,2,2,0,0,2,0,1,1,0,0,0,1,0,0,2,0,2,0,0,0,0,0,0,0,0,0,0,1,1, -0,0,0,1,1,1,1,1,1,1,1,1,1,0,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0, -1,2,2,3,2,0,0,1,0,0,1,0,0,0,0,0,0,1,0,2,0,0,0,1,0,0,0,0,0,0,0,2, -1,1,0,0,1,0,0,0,1,1,0,0,1,0,1,1,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0, -2,1,2,2,2,1,2,1,2,2,1,1,2,1,1,1,0,1,1,1,1,2,0,1,0,1,1,1,1,0,1,1, -1,1,2,1,1,1,1,1,1,0,0,1,2,1,1,1,1,1,1,0,0,1,1,1,0,0,0,0,0,0,0,0, -1,0,0,1,3,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,2,2,2,1,0,0,1,0,2,0,0,0,0,0,1,1,1,0,1,0,0,0,0,0,0,0,0,2,0,0,1, -0,2,0,1,0,0,1,1,2,0,1,0,1,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0, -1,2,2,2,2,0,1,1,0,2,1,0,1,1,1,0,0,1,0,2,0,1,0,0,0,0,0,0,0,0,0,1, -0,1,0,0,1,0,0,0,1,1,0,0,1,0,0,1,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0, -2,2,2,2,2,0,0,1,0,0,0,1,0,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,1, -0,1,0,1,1,1,0,0,1,1,1,0,1,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0, 
-2,0,1,0,0,1,2,1,1,1,1,1,1,2,2,1,0,0,1,0,1,0,0,0,0,1,1,1,1,0,0,0, -1,1,2,1,1,1,1,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,2,1,2,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,0,0,0,1, -0,0,0,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,0,0,1,2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0, -0,1,1,0,1,1,1,0,0,1,0,0,1,0,1,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0, -1,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,2,0,0,2,0,1,0,0,1,0,0,1, -1,1,0,0,1,1,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0, -1,1,1,1,1,1,1,2,0,0,0,0,0,0,2,1,0,1,1,0,0,1,1,1,0,1,0,0,0,0,0,0, -2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,0,1,1,0,1,1,1,1,1,0,1,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, -) - -Latin5BulgarianModel = { - 'char_to_order_map': Latin5_BulgarianCharToOrderMap, - 'precedence_matrix': BulgarianLangModel, - 'typical_positive_ratio': 0.969392, - 'keep_english_letter': False, - 'charset_name': "ISO-8859-5", - 'language': 'Bulgairan', -} - -Win1251BulgarianModel = { - 'char_to_order_map': win1251BulgarianCharToOrderMap, - 'precedence_matrix': BulgarianLangModel, - 'typical_positive_ratio': 0.969392, - 'keep_english_letter': False, - 'charset_name': "windows-1251", - 'language': 'Bulgarian', -} diff --git a/venv/lib/python3.6/site-packages/chardet/langcyrillicmodel.py b/venv/lib/python3.6/site-packages/chardet/langcyrillicmodel.py deleted file mode 100644 index e5f9a1f..0000000 --- a/venv/lib/python3.6/site-packages/chardet/langcyrillicmodel.py +++ /dev/null @@ -1,333 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Communicator client code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -# KOI8-R language model -# Character Mapping Table: -KOI8R_char_to_order_map = ( -255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 -253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 -252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 -253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 -155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 -253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 - 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 -191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, # 80 -207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, # 90 -223,224,225, 68,226,227,228,229,230,231,232,233,234,235,236,237, # a0 -238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253, # b0 - 27, 3, 21, 28, 13, 2, 39, 19, 26, 4, 23, 11, 8, 12, 5, 1, # c0 - 15, 16, 9, 7, 6, 14, 24, 10, 17, 18, 20, 25, 30, 29, 22, 54, # d0 - 59, 37, 44, 58, 41, 48, 53, 46, 55, 42, 60, 36, 49, 38, 31, 34, # e0 - 35, 43, 45, 32, 40, 52, 56, 33, 61, 62, 51, 57, 47, 63, 50, 70, # f0 -) - -win1251_char_to_order_map = ( -255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 -253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 -252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 -253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 -155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 -253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 - 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 -191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, -207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, -223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238, -239,240,241,242,243,244,245,246, 68,247,248,249,250,251,252,253, - 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35, - 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43, - 3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15, - 9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16, -) - -latin5_char_to_order_map = ( -255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 -253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 -252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 -253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 -155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 -253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 - 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 -191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, -207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, -223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238, - 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35, - 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 
43, - 3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15, - 9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16, -239, 68,240,241,242,243,244,245,246,247,248,249,250,251,252,255, -) - -macCyrillic_char_to_order_map = ( -255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 -253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 -252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 -253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 -155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 -253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 - 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 - 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35, - 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43, -191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, -207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, -223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238, -239,240,241,242,243,244,245,246,247,248,249,250,251,252, 68, 16, - 3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15, - 9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27,255, -) - -IBM855_char_to_order_map = ( -255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 -253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 -252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 -253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 -155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 -253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 - 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 -191,192,193,194, 68,195,196,197,198,199,200,201,202,203,204,205, -206,207,208,209,210,211,212,213,214,215,216,217, 27, 59, 54, 70, - 3, 37, 21, 44, 28, 58, 13, 41, 2, 48, 39, 53, 19, 46,218,219, -220,221,222,223,224, 26, 55, 4, 42,225,226,227,228, 23, 60,229, -230,231,232,233,234,235, 11, 36,236,237,238,239,240,241,242,243, - 8, 49, 12, 38, 5, 31, 1, 34, 15,244,245,246,247, 35, 16,248, - 43, 9, 45, 7, 32, 6, 40, 14, 52, 24, 56, 10, 33, 17, 61,249, -250, 18, 62, 20, 51, 25, 57, 30, 47, 29, 63, 22, 50,251,252,255, -) - -IBM866_char_to_order_map = ( -255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 -253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 -252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 -253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 -155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 -253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 - 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 - 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35, - 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43, - 3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15, -191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, -207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, -223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238, - 9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16, -239, 
68,240,241,242,243,244,245,246,247,248,249,250,251,252,255, -) - -# Model Table: -# total sequences: 100% -# first 512 sequences: 97.6601% -# first 1024 sequences: 2.3389% -# rest sequences: 0.1237% -# negative sequences: 0.0009% -RussianLangModel = ( -0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,1,3,3,3,3,1,3,3,3,2,3,2,3,3, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,0,3,2,2,2,2,2,0,0,2, -3,3,3,2,3,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,3,2,3,2,0, -0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,2,2,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,2,3,3,1,0, -0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,2,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,0,0,3,3,3,3,3,3,3,3,3,3,3,2,1, -0,0,0,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,3,3,3,2,1, -0,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,3,3,2,2,2,3,1,3,3,1,3,3,3,3,2,2,3,0,2,2,2,3,3,2,1,0, -0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,2,3,3,3,3,3,2,2,3,2,3,3,3,2,1,2,2,0,1,2,2,2,2,2,2,0, -0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,3,0,2,2,3,3,2,1,2,0, -0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,2,3,3,1,2,3,2,2,3,2,3,3,3,3,2,2,3,0,3,2,2,3,1,1,1,0, -0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,3,3,2,2,3,3,3,3,3,2,3,3,3,3,2,2,2,0,3,3,3,2,2,2,2,0, -0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,3,3,3,3,2,3,2,3,3,3,3,3,3,2,3,2,2,0,1,3,2,1,2,2,1,0, -0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,3,3,3,3,3,2,1,1,3,0,1,1,1,1,2,1,1,0,2,2,2,1,2,0,1,0, -0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,2,3,3,2,2,2,2,1,3,2,3,2,3,2,1,2,2,0,1,1,2,1,2,1,2,0, -0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,3,3,3,3,3,3,2,2,3,2,3,3,3,2,2,2,2,0,2,2,2,2,3,1,1,0, -0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, -3,2,3,2,2,3,3,3,3,3,3,3,3,3,1,3,2,0,0,3,3,3,3,2,3,3,3,3,2,3,2,0, -0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,3,3,3,3,3,2,2,3,3,0,2,1,0,3,2,3,2,3,0,0,1,2,0,0,1,0,1,2,1,1,0, -0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,0,3,0,2,3,3,3,3,2,3,3,3,3,1,2,2,0,0,2,3,2,2,2,3,2,3,2,2,3,0,0, -0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,2,3,0,2,3,2,3,0,1,2,3,3,2,0,2,3,0,0,2,3,2,2,0,1,3,1,3,2,2,1,0, -0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,1,3,0,2,3,3,3,3,3,3,3,3,2,1,3,2,0,0,2,2,3,3,3,2,3,3,0,2,2,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,2,2,3,3,2,2,2,3,3,0,0,1,1,1,1,1,2,0,0,1,1,1,1,0,1,0, -0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,2,2,3,3,3,3,3,3,3,0,3,2,3,3,2,3,2,0,2,1,0,1,1,0,1,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,2,3,3,3,2,2,2,2,3,1,3,2,3,1,1,2,1,0,2,2,2,2,1,3,1,0, -0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, -2,2,3,3,3,3,3,1,2,2,1,3,1,0,3,0,0,3,0,0,0,1,1,0,1,2,1,0,0,0,0,0, -0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,2,2,1,1,3,3,3,2,2,1,2,2,3,1,1,2,0,0,2,2,1,3,0,0,2,1,1,2,1,1,0, -0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,2,3,3,3,3,1,2,2,2,1,2,1,3,3,1,1,2,1,2,1,2,2,0,2,0,0,1,1,0,1,0, -0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
-2,3,3,3,3,3,2,1,3,2,2,3,2,0,3,2,0,3,0,1,0,1,1,0,0,1,1,1,1,0,1,0, -0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,2,3,3,3,2,2,2,3,3,1,2,1,2,1,0,1,0,1,1,0,1,0,0,2,1,1,1,0,1,0, -0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0, -3,1,1,2,1,2,3,3,2,2,1,2,2,3,0,2,1,0,0,2,2,3,2,1,2,2,2,2,2,3,1,0, -0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,1,1,0,1,1,2,2,1,1,3,0,0,1,3,1,1,1,0,0,0,1,0,1,1,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,1,3,3,3,2,0,0,0,2,1,0,1,0,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,0,1,0,0,2,3,2,2,2,1,2,2,2,1,2,1,0,0,1,1,1,0,2,0,1,1,1,0,0,1,1, -1,0,0,0,0,0,1,2,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0, -2,3,3,3,3,0,0,0,0,1,0,0,0,0,3,0,1,2,1,0,0,0,0,0,0,0,1,1,0,0,1,1, -1,0,1,0,1,2,0,0,1,1,2,1,0,1,1,1,1,0,1,1,1,1,0,1,0,0,1,0,0,1,1,0, -2,2,3,2,2,2,3,1,2,2,2,2,2,2,2,2,1,1,1,1,1,1,1,0,1,0,1,1,1,0,2,1, -1,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,1,1,1,0,1,0,1,1,0,1,1,1,0,1,1,0, -3,3,3,2,2,2,2,3,2,2,1,1,2,2,2,2,1,1,3,1,2,1,2,0,0,1,1,0,1,0,2,1, -1,1,1,1,1,2,1,0,1,1,1,1,0,1,0,0,1,1,0,0,1,0,1,0,0,1,0,0,0,1,1,0, -2,0,0,1,0,3,2,2,2,2,1,2,1,2,1,2,0,0,0,2,1,2,2,1,1,2,2,0,1,1,0,2, -1,1,1,1,1,0,1,1,1,2,1,1,1,2,1,0,1,2,1,1,1,1,0,1,1,1,0,0,1,0,0,1, -1,3,2,2,2,1,1,1,2,3,0,0,0,0,2,0,2,2,1,0,0,0,0,0,0,1,0,0,0,0,1,1, -1,0,1,1,0,1,0,1,1,0,1,1,0,2,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0, -2,3,2,3,2,1,2,2,2,2,1,0,0,0,2,0,0,1,1,0,0,0,0,0,0,0,1,1,0,0,2,1, -1,1,2,1,0,2,0,0,1,0,1,0,0,1,0,0,1,1,0,1,1,0,0,0,0,0,1,0,0,0,0,0, -3,0,0,1,0,2,2,2,3,2,2,2,2,2,2,2,0,0,0,2,1,2,1,1,1,2,2,0,0,0,1,2, -1,1,1,1,1,0,1,2,1,1,1,1,1,1,1,0,1,1,1,1,1,1,0,1,1,1,1,1,1,0,0,1, -2,3,2,3,3,2,0,1,1,1,0,0,1,0,2,0,1,1,3,1,0,0,0,0,0,0,0,1,0,0,2,1, -1,1,1,1,1,1,1,0,1,0,1,1,1,1,0,1,1,1,0,0,1,1,0,1,0,0,0,0,0,0,1,0, -2,3,3,3,3,1,2,2,2,2,0,1,1,0,2,1,1,1,2,1,0,1,1,0,0,1,0,1,0,0,2,0, -0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,3,3,3,2,0,0,1,1,2,2,1,0,0,2,0,1,1,3,0,0,1,0,0,0,0,0,1,0,1,2,1, -1,1,2,0,1,1,1,0,1,0,1,1,0,1,0,1,1,1,1,0,1,0,0,0,0,0,0,1,0,1,1,0, -1,3,2,3,2,1,0,0,2,2,2,0,1,0,2,0,1,1,1,0,1,0,0,0,3,0,1,1,0,0,2,1, -1,1,1,0,1,1,0,0,0,0,1,1,0,1,0,0,2,1,1,0,1,0,0,0,1,0,1,0,0,1,1,0, -3,1,2,1,1,2,2,2,2,2,2,1,2,2,1,1,0,0,0,2,2,2,0,0,0,1,2,1,0,1,0,1, -2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,2,1,1,1,0,1,0,1,1,0,1,1,1,0,0,1, -3,0,0,0,0,2,0,1,1,1,1,1,1,1,0,1,0,0,0,1,1,1,0,1,0,1,1,0,0,1,0,1, -1,1,0,0,1,0,0,0,1,0,1,1,0,0,1,0,1,0,1,0,0,0,0,1,0,0,0,1,0,0,0,1, -1,3,3,2,2,0,0,0,2,2,0,0,0,1,2,0,1,1,2,0,0,0,0,0,0,0,0,1,0,0,2,1, -0,1,1,0,0,1,1,0,0,0,1,1,0,1,1,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,1,0, -2,3,2,3,2,0,0,0,0,1,1,0,0,0,2,0,2,0,2,0,0,0,0,0,1,0,0,1,0,0,1,1, -1,1,2,0,1,2,1,0,1,1,2,1,1,1,1,1,2,1,1,0,1,0,0,1,1,1,1,1,0,1,1,0, -1,3,2,2,2,1,0,0,2,2,1,0,1,2,2,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,1,1, -0,0,1,1,0,1,1,0,0,1,1,0,1,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0, -1,0,0,1,0,2,3,1,2,2,2,2,2,2,1,1,0,0,0,1,0,1,0,2,1,1,1,0,0,0,0,1, -1,1,0,1,1,0,1,1,1,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0, -2,0,2,0,0,1,0,3,2,1,2,1,2,2,0,1,0,0,0,2,1,0,0,2,1,1,1,1,0,2,0,2, -2,1,1,1,1,1,1,1,1,1,1,1,1,2,1,0,1,1,1,1,0,0,0,1,1,1,1,0,1,0,0,1, -1,2,2,2,2,1,0,0,1,0,0,0,0,0,2,0,1,1,1,1,0,0,0,0,1,0,1,2,0,0,2,0, -1,0,1,1,1,2,1,0,1,0,1,1,0,0,1,0,1,1,1,0,1,0,0,0,1,0,0,1,0,1,1,0, -2,1,2,2,2,0,3,0,1,1,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1, -0,0,0,1,1,1,0,0,1,0,1,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0, -1,2,2,3,2,2,0,0,1,1,2,0,1,2,1,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,1, 
-0,1,1,0,0,1,1,0,0,1,1,0,0,1,1,0,1,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0, -2,2,1,1,2,1,2,2,2,2,2,1,2,2,0,1,0,0,0,1,2,2,2,1,2,1,1,1,1,1,2,1, -1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,0,1,1,1,0,0,0,0,1,1,1,0,1,1,0,0,1, -1,2,2,2,2,0,1,0,2,2,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0, -0,0,1,0,0,1,0,0,0,0,1,0,1,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0, -0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,2,2,2,2,0,0,0,2,2,2,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,1, -0,1,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,2,2,2,2,0,0,0,0,1,0,0,1,1,2,0,0,0,0,1,0,1,0,0,1,0,0,2,0,0,0,1, -0,0,1,0,0,1,0,0,0,1,1,0,0,0,0,0,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0, -1,2,2,2,1,1,2,0,2,1,1,1,1,0,2,2,0,0,0,0,0,0,0,0,0,1,1,0,0,0,1,1, -0,0,1,0,1,1,0,0,0,0,1,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0, -1,0,2,1,2,0,0,0,0,0,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0, -0,0,1,0,1,1,0,0,0,0,1,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0, -1,0,0,0,0,2,0,1,2,1,0,1,1,1,0,1,0,0,0,1,0,1,0,0,1,0,1,0,0,0,0,1, -0,0,0,0,0,1,0,0,1,1,0,0,1,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1, -2,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, -1,0,0,0,1,0,0,0,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,0,0, -2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, -1,1,1,0,1,0,1,0,0,1,1,1,1,0,0,0,1,0,0,0,0,1,0,0,0,1,0,1,0,0,0,0, -1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, -1,1,0,1,1,0,1,0,1,0,0,0,0,1,1,0,1,1,0,0,0,0,0,1,0,1,1,0,1,0,0,0, -0,1,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0, -) - -Koi8rModel = { - 'char_to_order_map': KOI8R_char_to_order_map, - 'precedence_matrix': RussianLangModel, - 'typical_positive_ratio': 0.976601, - 'keep_english_letter': False, - 'charset_name': "KOI8-R", - 'language': 'Russian', -} - -Win1251CyrillicModel = { - 'char_to_order_map': win1251_char_to_order_map, - 'precedence_matrix': RussianLangModel, - 'typical_positive_ratio': 0.976601, - 'keep_english_letter': False, - 'charset_name': "windows-1251", - 'language': 'Russian', -} - -Latin5CyrillicModel = { - 'char_to_order_map': latin5_char_to_order_map, - 'precedence_matrix': RussianLangModel, - 'typical_positive_ratio': 0.976601, - 'keep_english_letter': False, - 'charset_name': "ISO-8859-5", - 'language': 'Russian', -} - -MacCyrillicModel = { - 'char_to_order_map': macCyrillic_char_to_order_map, - 'precedence_matrix': RussianLangModel, - 'typical_positive_ratio': 0.976601, - 'keep_english_letter': False, - 'charset_name': "MacCyrillic", - 'language': 'Russian', -} - -Ibm866Model = { - 'char_to_order_map': IBM866_char_to_order_map, - 'precedence_matrix': RussianLangModel, - 'typical_positive_ratio': 0.976601, - 'keep_english_letter': False, - 'charset_name': "IBM866", - 'language': 'Russian', -} - -Ibm855Model = { - 'char_to_order_map': IBM855_char_to_order_map, - 'precedence_matrix': RussianLangModel, - 'typical_positive_ratio': 0.976601, - 'keep_english_letter': False, - 'charset_name': "IBM855", - 'language': 'Russian', -} diff --git a/venv/lib/python3.6/site-packages/chardet/langgreekmodel.py b/venv/lib/python3.6/site-packages/chardet/langgreekmodel.py deleted file mode 100644 index 5332221..0000000 --- a/venv/lib/python3.6/site-packages/chardet/langgreekmodel.py +++ /dev/null @@ -1,225 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Communicator client code. 
-# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -# 255: Control characters that usually does not exist in any text -# 254: Carriage/Return -# 253: symbol (punctuation) that does not belong to word -# 252: 0 - 9 - -# Character Mapping Table: -Latin7_char_to_order_map = ( -255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 -253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 -252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 -253, 82,100,104, 94, 98,101,116,102,111,187,117, 92, 88,113, 85, # 40 - 79,118,105, 83, 67,114,119, 95, 99,109,188,253,253,253,253,253, # 50 -253, 72, 70, 80, 81, 60, 96, 93, 89, 68,120, 97, 77, 86, 69, 55, # 60 - 78,115, 65, 66, 58, 76,106,103, 87,107,112,253,253,253,253,253, # 70 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 80 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 90 -253,233, 90,253,253,253,253,253,253,253,253,253,253, 74,253,253, # a0 -253,253,253,253,247,248, 61, 36, 46, 71, 73,253, 54,253,108,123, # b0 -110, 31, 51, 43, 41, 34, 91, 40, 52, 47, 44, 53, 38, 49, 59, 39, # c0 - 35, 48,250, 37, 33, 45, 56, 50, 84, 57,120,121, 17, 18, 22, 15, # d0 -124, 1, 29, 20, 21, 3, 32, 13, 25, 5, 11, 16, 10, 6, 30, 4, # e0 - 9, 8, 14, 7, 2, 12, 28, 23, 42, 24, 64, 75, 19, 26, 27,253, # f0 -) - -win1253_char_to_order_map = ( -255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 -253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 -252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 -253, 82,100,104, 94, 98,101,116,102,111,187,117, 92, 88,113, 85, # 40 - 79,118,105, 83, 67,114,119, 95, 99,109,188,253,253,253,253,253, # 50 -253, 72, 70, 80, 81, 60, 96, 93, 89, 68,120, 97, 77, 86, 69, 55, # 60 - 78,115, 65, 66, 58, 76,106,103, 87,107,112,253,253,253,253,253, # 70 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 80 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 90 -253,233, 61,253,253,253,253,253,253,253,253,253,253, 74,253,253, # a0 -253,253,253,253,247,253,253, 36, 46, 71, 73,253, 54,253,108,123, # b0 -110, 31, 51, 43, 41, 34, 91, 40, 52, 47, 44, 53, 38, 49, 59, 39, # c0 - 35, 48,250, 37, 33, 45, 56, 50, 84, 57,120,121, 17, 18, 22, 15, # d0 -124, 1, 29, 20, 21, 3, 32, 13, 25, 5, 11, 16, 10, 6, 30, 4, # e0 - 9, 8, 14, 7, 2, 12, 28, 23, 42, 
24, 64, 75, 19, 26, 27,253, # f0 -) - -# Model Table: -# total sequences: 100% -# first 512 sequences: 98.2851% -# first 1024 sequences:1.7001% -# rest sequences: 0.0359% -# negative sequences: 0.0148% -GreekLangModel = ( -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,3,2,2,3,3,3,3,3,3,3,3,1,3,3,3,0,2,2,3,3,0,3,0,3,2,0,3,3,3,0, -3,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,3,3,3,3,0,3,3,0,3,2,3,3,0,3,2,3,3,3,0,0,3,0,3,0,3,3,2,0,0,0, -2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0, -0,2,3,2,2,3,3,3,3,3,3,3,3,0,3,3,3,3,0,2,3,3,0,3,3,3,3,2,3,3,3,0, -2,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,2,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,0,2,1,3,3,3,3,2,3,3,2,3,3,2,0, -0,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,3,3,3,0,3,3,3,3,3,3,0,3,3,0,3,3,3,3,3,3,3,3,3,3,0,3,2,3,3,0, -2,0,1,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, -0,3,3,3,3,3,2,3,0,0,0,0,3,3,0,3,1,3,3,3,0,3,3,0,3,3,3,3,0,0,0,0, -2,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,3,3,3,3,0,3,0,3,3,3,3,3,0,3,2,2,2,3,0,2,3,3,3,3,3,2,3,3,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,3,3,3,3,3,2,2,2,3,3,3,3,0,3,1,3,3,3,3,2,3,3,3,3,3,3,3,2,2,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,3,3,3,3,2,0,3,0,0,0,3,3,2,3,3,3,3,3,0,0,3,2,3,0,2,3,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,0,3,3,3,3,0,0,3,3,0,2,3,0,3,0,3,3,3,0,0,3,0,3,0,2,2,3,3,0,0, -0,0,1,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,3,3,3,3,2,0,3,2,3,3,3,3,0,3,3,3,3,3,0,3,3,2,3,2,3,3,2,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,3,2,3,2,3,3,3,3,3,3,0,2,3,2,3,2,2,2,3,2,3,3,2,3,0,2,2,2,3,0, -2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,3,0,0,0,3,3,3,2,3,3,0,0,3,0,3,0,0,0,3,2,0,3,0,3,0,0,2,0,2,0, -0,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,3,3,3,0,3,3,3,3,3,3,0,3,3,0,3,0,0,0,3,3,0,3,3,3,0,0,1,2,3,0, -3,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,3,3,3,3,2,0,0,3,2,2,3,3,0,3,3,3,3,3,2,1,3,0,3,2,3,3,2,1,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,3,3,0,2,3,3,3,3,3,3,0,0,3,0,3,0,0,0,3,3,0,3,2,3,0,0,3,3,3,0, -3,0,0,0,2,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,3,3,3,0,3,3,3,3,3,3,0,0,3,0,3,0,0,0,3,2,0,3,2,3,0,0,3,2,3,0, -2,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,3,1,2,2,3,3,3,3,3,3,0,2,3,0,3,0,0,0,3,3,0,3,0,2,0,0,2,3,1,0, -2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,0,3,3,3,3,0,3,0,3,3,2,3,0,3,3,3,3,3,3,0,3,3,3,0,2,3,0,0,3,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,0,3,3,3,0,0,3,0,0,0,3,3,0,3,0,2,3,3,0,0,3,0,3,0,3,3,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,3,0,0,0,3,3,3,3,3,3,0,0,3,0,2,0,0,0,3,3,0,3,0,3,0,0,2,0,2,0, -0,0,0,0,1,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,3,3,3,3,3,0,3,0,2,0,3,2,0,3,2,3,2,3,0,0,3,2,3,2,3,3,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,3,0,0,2,3,3,3,3,3,0,0,0,3,0,2,1,0,0,3,2,2,2,0,3,0,0,2,2,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
-0,3,0,3,3,3,2,0,3,0,3,0,3,3,0,2,1,2,3,3,0,0,3,0,3,0,3,3,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,2,3,3,3,0,3,3,3,3,3,3,0,2,3,0,3,0,0,0,2,1,0,2,2,3,0,0,2,2,2,0, -0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,3,0,0,2,3,3,3,2,3,0,0,1,3,0,2,0,0,0,0,3,0,1,0,2,0,0,1,1,1,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,3,3,3,3,1,0,3,0,0,0,3,2,0,3,2,3,3,3,0,0,3,0,3,2,2,2,1,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,0,3,3,3,0,0,3,0,0,0,0,2,0,2,3,3,2,2,2,2,3,0,2,0,2,2,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,3,3,3,2,0,0,0,0,0,0,2,3,0,2,0,2,3,2,0,0,3,0,3,0,3,1,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,3,2,3,3,2,2,3,0,2,0,3,0,0,0,2,0,0,0,0,1,2,0,2,0,2,0, -0,2,0,2,0,2,2,0,0,1,0,2,2,2,0,2,2,2,0,2,2,2,0,0,2,0,0,1,0,0,0,0, -0,2,0,3,3,2,0,0,0,0,0,0,1,3,0,2,0,2,2,2,0,0,2,0,3,0,0,2,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,0,2,3,2,0,2,2,0,2,0,2,2,0,2,0,2,2,2,0,0,0,0,0,0,2,3,0,0,0,2, -0,1,2,0,0,0,0,2,2,0,0,0,2,1,0,2,2,0,0,0,0,0,0,1,0,2,0,0,0,0,0,0, -0,0,2,1,0,2,3,2,2,3,2,3,2,0,0,3,3,3,0,0,3,2,0,0,0,1,1,0,2,0,2,2, -0,2,0,2,0,2,2,0,0,2,0,2,2,2,0,2,2,2,2,0,0,2,0,0,0,2,0,1,0,0,0,0, -0,3,0,3,3,2,2,0,3,0,0,0,2,2,0,2,2,2,1,2,0,0,1,2,2,0,0,3,0,0,0,2, -0,1,2,0,0,0,1,2,0,0,0,0,0,0,0,2,2,0,1,0,0,2,0,0,0,2,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,2,3,3,2,2,0,0,0,2,0,2,3,3,0,2,0,0,0,0,0,0,2,2,2,0,2,2,0,2,0,2, -0,2,2,0,0,2,2,2,2,1,0,0,2,2,0,2,0,0,2,0,0,0,0,0,0,2,0,0,0,0,0,0, -0,2,0,3,2,3,0,0,0,3,0,0,2,2,0,2,0,2,2,2,0,0,2,0,0,0,0,0,0,0,0,2, -0,0,2,2,0,0,2,2,2,0,0,0,0,0,0,2,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,2,0,0,3,2,0,2,2,2,2,2,0,0,0,2,0,0,0,0,2,0,1,0,0,2,0,1,0,0,0, -0,2,2,2,0,2,2,0,1,2,0,2,2,2,0,2,2,2,2,1,2,2,0,0,2,0,0,0,0,0,0,0, -0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, -0,2,0,2,0,2,2,0,0,0,0,1,2,1,0,0,2,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,3,2,3,0,0,2,0,0,0,2,2,0,2,0,0,0,1,0,0,2,0,2,0,2,2,0,0,0,0, -0,0,2,0,0,0,0,2,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0, -0,2,2,3,2,2,0,0,0,0,0,0,1,3,0,2,0,2,2,0,0,0,1,0,2,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,2,0,2,0,3,2,0,2,0,0,0,0,0,0,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, -0,0,2,0,0,0,0,1,1,0,0,2,1,2,0,2,2,0,1,0,0,1,0,0,0,2,0,0,0,0,0,0, -0,3,0,2,2,2,0,0,2,0,0,0,2,0,0,0,2,3,0,2,0,0,0,0,0,0,2,2,0,0,0,2, -0,1,2,0,0,0,1,2,2,1,0,0,0,2,0,0,2,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,2,1,2,0,2,2,0,2,0,0,2,0,0,0,0,1,2,1,0,2,1,0,0,0,0,0,0,0,0,0,0, -0,0,2,0,0,0,3,1,2,2,0,2,0,0,0,0,2,0,0,0,2,0,0,3,0,0,0,0,2,2,2,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,2,1,0,2,0,1,2,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,1,0,0,0,0,0,0,2, -0,2,2,0,0,2,2,2,2,2,0,1,2,0,0,0,2,2,0,1,0,2,0,0,2,2,0,0,0,0,0,0, -0,0,0,0,1,0,0,0,0,0,0,0,3,0,0,2,0,0,0,0,0,0,0,0,2,0,2,0,0,0,0,2, -0,1,2,0,0,0,0,2,2,1,0,1,0,1,0,2,2,2,1,0,0,0,0,0,0,1,0,0,0,0,0,0, -0,2,0,1,2,0,0,0,0,0,0,0,0,0,0,2,0,0,2,2,0,0,0,0,1,0,0,0,0,0,0,2, -0,2,2,0,0,0,0,2,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,2,0,0,0, -0,2,2,2,2,0,0,0,3,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,2,0,0,0,0,0,0,1, -0,0,2,0,0,0,0,1,2,0,0,0,0,0,0,2,2,1,1,0,0,0,0,0,0,1,0,0,0,0,0,0, -0,2,0,2,2,2,0,0,2,0,0,0,0,0,0,0,2,2,2,0,0,0,2,0,0,0,0,0,0,0,0,2, 
-0,0,1,0,0,0,0,2,1,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0, -0,3,0,2,0,0,0,0,0,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,2,0,0,0,0,2, -0,0,2,0,0,0,0,2,2,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,2,0,2,2,1,0,0,0,0,0,0,2,0,0,2,0,2,2,2,0,0,0,0,0,0,2,0,0,0,0,2, -0,0,2,0,0,2,0,2,2,0,0,0,0,2,0,2,0,0,0,0,0,2,0,0,0,2,0,0,0,0,0,0, -0,0,3,0,0,0,2,2,0,2,2,0,0,0,0,0,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,2,0,0,0,0,0, -0,2,2,2,2,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,1, -0,0,0,0,0,0,0,2,1,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,2,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, -0,2,0,0,0,2,0,0,0,0,0,1,0,0,0,0,2,2,0,0,0,1,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,2,0,0,0, -0,2,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,2,0,2,0,0,0, -0,0,0,0,0,0,0,0,2,1,0,0,0,0,0,0,2,0,0,0,1,2,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -) - -Latin7GreekModel = { - 'char_to_order_map': Latin7_char_to_order_map, - 'precedence_matrix': GreekLangModel, - 'typical_positive_ratio': 0.982851, - 'keep_english_letter': False, - 'charset_name': "ISO-8859-7", - 'language': 'Greek', -} - -Win1253GreekModel = { - 'char_to_order_map': win1253_char_to_order_map, - 'precedence_matrix': GreekLangModel, - 'typical_positive_ratio': 0.982851, - 'keep_english_letter': False, - 'charset_name': "windows-1253", - 'language': 'Greek', -} diff --git a/venv/lib/python3.6/site-packages/chardet/langhebrewmodel.py b/venv/lib/python3.6/site-packages/chardet/langhebrewmodel.py deleted file mode 100644 index 58f4c87..0000000 --- a/venv/lib/python3.6/site-packages/chardet/langhebrewmodel.py +++ /dev/null @@ -1,200 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Universal charset detector code. -# -# The Initial Developer of the Original Code is -# Simon Montagu -# Portions created by the Initial Developer are Copyright (C) 2005 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# Shy Shalom - original C code -# Shoshannah Forbes - original C code (?) -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -# 255: Control characters that usually does not exist in any text -# 254: Carriage/Return -# 253: symbol (punctuation) that does not belong to word -# 252: 0 - 9 - -# Windows-1255 language model -# Character Mapping Table: -WIN1255_CHAR_TO_ORDER_MAP = ( -255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 -253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 -252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 -253, 69, 91, 79, 80, 92, 89, 97, 90, 68,111,112, 82, 73, 95, 85, # 40 - 78,121, 86, 71, 67,102,107, 84,114,103,115,253,253,253,253,253, # 50 -253, 50, 74, 60, 61, 42, 76, 70, 64, 53,105, 93, 56, 65, 54, 49, # 60 - 66,110, 51, 43, 44, 63, 81, 77, 98, 75,108,253,253,253,253,253, # 70 -124,202,203,204,205, 40, 58,206,207,208,209,210,211,212,213,214, -215, 83, 52, 47, 46, 72, 32, 94,216,113,217,109,218,219,220,221, - 34,116,222,118,100,223,224,117,119,104,125,225,226, 87, 99,227, -106,122,123,228, 55,229,230,101,231,232,120,233, 48, 39, 57,234, - 30, 59, 41, 88, 33, 37, 36, 31, 29, 35,235, 62, 28,236,126,237, -238, 38, 45,239,240,241,242,243,127,244,245,246,247,248,249,250, - 9, 8, 20, 16, 3, 2, 24, 14, 22, 1, 25, 15, 4, 11, 6, 23, - 12, 19, 13, 26, 18, 27, 21, 17, 7, 10, 5,251,252,128, 96,253, -) - -# Model Table: -# total sequences: 100% -# first 512 sequences: 98.4004% -# first 1024 sequences: 1.5981% -# rest sequences: 0.087% -# negative sequences: 0.0015% -HEBREW_LANG_MODEL = ( -0,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,2,3,2,1,2,0,1,0,0, -3,0,3,1,0,0,1,3,2,0,1,1,2,0,2,2,2,1,1,1,1,2,1,1,1,2,0,0,2,2,0,1, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,2, -1,2,1,2,1,2,0,0,2,0,0,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2, -1,2,1,3,1,1,0,0,2,0,0,0,1,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,0,1,2,2,1,3, -1,2,1,1,2,2,0,0,2,2,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,1,0,1,1,0, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,2,2,2,2,3,2, -1,2,1,2,2,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,2,3,2,2,3,2,2,2,1,2,2,2,2, -1,2,1,1,2,2,0,1,2,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,0,2,2,2,2,2, -0,2,0,2,2,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,0,2,2,2, -0,2,1,2,2,2,0,0,2,1,0,0,0,0,1,0,1,0,0,0,0,0,0,2,0,0,0,0,0,0,1,0, -3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,2,1,2,3,2,2,2, -1,2,1,2,2,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,1,0, -3,3,3,3,3,3,3,3,3,2,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,1,0,2,0,2, -0,2,1,2,2,2,0,0,1,2,0,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,2,0,0,1,0, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,2,3,2,2,3,2,1,2,1,1,1, -0,1,1,1,1,1,3,0,1,0,0,0,0,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, -3,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,1,1,0,0,1,0,0,1,0,0,0,0, -0,0,1,0,0,0,0,0,2,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,2,2,2,2, -0,2,0,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, 
-3,3,3,3,3,3,3,3,3,2,3,3,3,2,1,2,3,3,2,3,3,3,3,2,3,2,1,2,0,2,1,2, -0,2,0,2,2,2,0,0,1,2,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,1,0, -3,3,3,3,3,3,3,3,3,2,3,3,3,1,2,2,3,3,2,3,2,3,2,2,3,1,2,2,0,2,2,2, -0,2,1,2,2,2,0,0,1,2,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,1,0,0,1,0, -3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,2,3,3,2,2,2,3,3,3,3,1,3,2,2,2, -0,2,0,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,3,3,3,2,3,2,2,2,1,2,2,0,2,2,2,2, -0,2,0,2,2,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, -3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,1,3,2,3,3,2,3,3,2,2,1,2,2,2,2,2,2, -0,2,1,2,1,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,1,0, -3,3,3,3,3,3,2,3,2,3,3,2,3,3,3,3,2,3,2,3,3,3,3,3,2,2,2,2,2,2,2,1, -0,2,0,1,2,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0, -3,3,3,3,3,3,3,3,3,2,1,2,3,3,3,3,3,3,3,2,3,2,3,2,1,2,3,0,2,1,2,2, -0,2,1,1,2,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,2,0, -3,3,3,3,3,3,3,3,3,2,3,3,3,3,2,1,3,1,2,2,2,1,2,3,3,1,2,1,2,2,2,2, -0,1,1,1,1,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,3,3,3,3,0,2,3,3,3,1,3,3,3,1,2,2,2,2,1,1,2,2,2,2,2,2, -0,2,0,1,1,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0, -3,3,3,3,3,3,2,3,3,3,2,2,3,3,3,2,1,2,3,2,3,2,2,2,2,1,2,1,1,1,2,2, -0,2,1,1,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, -3,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,0,0,0,1,0,0,0,0,0, -1,0,1,0,0,0,0,0,2,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,2,3,3,2,3,1,2,2,2,2,3,2,3,1,1,2,2,1,2,2,1,1,0,2,2,2,2, -0,1,0,1,2,2,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0, -3,0,0,1,1,0,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,2,2,0, -0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,0,1,0,1,0,1,1,0,1,1,0,0,0,1,1,0,1,1,1,0,0,0,0,0,0,1,0,0,0,0,0, -0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,0,0,0,1,1,0,1,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, -3,2,2,1,2,2,2,2,2,2,2,1,2,2,1,2,2,1,1,1,1,1,1,1,1,2,1,1,0,3,3,3, -0,3,0,2,2,2,2,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, -2,2,2,3,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,1,2,2,1,2,2,2,1,1,1,2,0,1, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,2,2,2,2,2,2,2,2,2,2,1,2,2,2,2,2,2,2,2,2,2,2,0,2,2,0,0,0,0,0,0, -0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,3,1,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,1,2,1,0,2,1,0, -0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0, -0,3,1,1,2,2,2,2,2,1,2,2,2,1,1,2,2,2,2,2,2,2,1,2,2,1,0,1,1,1,1,0, -0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,2,1,1,1,1,2,1,1,2,1,0,1,1,1,1,1,1,1,1,1,1,1,0,1,0,0,0,0,0,0,0, -0,0,2,0,0,0,0,0,0,0,0,1,1,0,0,0,0,1,1,0,0,1,1,0,0,0,0,0,0,1,0,0, -2,1,1,2,2,2,2,2,2,2,2,2,2,2,1,2,2,2,2,2,1,2,1,2,1,1,1,1,0,0,0,0, -0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,2,1,2,2,2,2,2,2,2,2,2,2,1,2,1,2,1,1,2,1,1,1,2,1,2,1,2,0,1,0,1, -0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,1,2,2,2,1,2,2,2,2,2,2,2,2,1,2,1,1,1,1,1,1,2,1,2,1,1,0,1,0,1, -0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,1,2,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,2,2, -0,2,0,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, -3,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0, 
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,1,1,1,1,1,1,1,0,1,1,0,1,0,0,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,2,0,1,1,1,0,1,0,0,0,1,1,0,1,1,0,0,0,0,0,1,1,0,0, -0,1,1,1,2,1,2,2,2,0,2,0,2,0,1,1,2,1,1,1,1,2,1,0,1,1,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,0,1,0,0,0,0,0,1,0,1,2,2,0,1,0,0,1,1,2,2,1,2,0,2,0,0,0,1,2,0,1, -2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,2,0,2,1,2,0,2,0,0,1,1,1,1,1,1,0,1,0,0,0,1,0,0,1, -2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,1,0,0,0,0,0,1,0,2,1,1,0,1,0,0,1,1,1,2,2,0,0,1,0,0,0,1,0,0,1, -1,1,2,1,0,1,1,1,0,1,0,1,1,1,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,2,2,1, -0,2,0,1,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,1,0,0,1,0,1,1,1,1,0,0,0,0,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,1,1,1,1,1,1,1,1,2,1,0,1,1,1,1,1,1,1,1,1,1,1,0,1,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,1,1,1,0,0,0,0,1,1,1,0,1,1,0,1,0,0,0,1,1,0,1, -2,0,1,0,1,0,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,1,0,1,1,1,0,1,0,0,1,1,2,1,1,2,0,1,0,0,0,1,1,0,1, -1,0,0,1,0,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,1,0,1,1,2,0,1,0,0,0,0,2,1,1,2,0,2,0,0,0,1,1,0,1, -1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,1,0,2,1,1,0,1,0,0,2,2,1,2,1,1,0,1,0,0,0,1,1,0,1, -2,0,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,1,2,2,0,0,0,0,0,1,1,0,1,0,0,1,0,0,0,0,1,0,1, -1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,1,2,2,0,0,0,0,2,1,1,1,0,2,1,1,0,0,0,2,1,0,1, -1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,1,0,1,1,2,0,1,0,0,1,1,0,2,1,1,0,1,0,0,0,1,1,0,1, -2,2,1,1,1,0,1,1,0,1,1,0,1,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,1,0,2,1,1,0,1,0,0,1,1,0,1,2,1,0,2,0,0,0,1,1,0,1, -2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0, -0,1,0,0,2,0,2,1,1,0,1,0,1,0,0,1,0,0,0,0,1,0,0,0,1,0,0,0,0,0,1,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,1,0,1,1,2,0,1,0,0,1,1,1,0,1,0,0,1,0,0,0,1,0,0,1, -1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,0,0,0,0,0,0,0,1,0,1,1,0,0,1,0,0,2,1,1,1,1,1,0,1,0,0,0,0,1,0,1, -0,1,1,1,2,1,1,1,1,0,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,1,2,1,0,0,0,0,0,1,1,1,1,1,0,1,0,0,0,1,1,0,0, -) - -Win1255HebrewModel = { - 'char_to_order_map': WIN1255_CHAR_TO_ORDER_MAP, - 'precedence_matrix': HEBREW_LANG_MODEL, - 'typical_positive_ratio': 0.984004, - 'keep_english_letter': False, - 'charset_name': "windows-1255", - 'language': 'Hebrew', -} diff --git 
a/venv/lib/python3.6/site-packages/chardet/langhungarianmodel.py b/venv/lib/python3.6/site-packages/chardet/langhungarianmodel.py deleted file mode 100644 index bb7c095..0000000 --- a/venv/lib/python3.6/site-packages/chardet/langhungarianmodel.py +++ /dev/null @@ -1,225 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Communicator client code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -# 255: Control characters that usually does not exist in any text -# 254: Carriage/Return -# 253: symbol (punctuation) that does not belong to word -# 252: 0 - 9 - -# Character Mapping Table: -Latin2_HungarianCharToOrderMap = ( -255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 -253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 -252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 -253, 28, 40, 54, 45, 32, 50, 49, 38, 39, 53, 36, 41, 34, 35, 47, - 46, 71, 43, 33, 37, 57, 48, 64, 68, 55, 52,253,253,253,253,253, -253, 2, 18, 26, 17, 1, 27, 12, 20, 9, 22, 7, 6, 13, 4, 8, - 23, 67, 10, 5, 3, 21, 19, 65, 62, 16, 11,253,253,253,253,253, -159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174, -175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190, -191,192,193,194,195,196,197, 75,198,199,200,201,202,203,204,205, - 79,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220, -221, 51, 81,222, 78,223,224,225,226, 44,227,228,229, 61,230,231, -232,233,234, 58,235, 66, 59,236,237,238, 60, 69, 63,239,240,241, - 82, 14, 74,242, 70, 80,243, 72,244, 15, 83, 77, 84, 30, 76, 85, -245,246,247, 25, 73, 42, 24,248,249,250, 31, 56, 29,251,252,253, -) - -win1250HungarianCharToOrderMap = ( -255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 -253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 -252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 -253, 28, 40, 54, 45, 32, 50, 49, 38, 39, 53, 36, 41, 34, 35, 47, - 46, 72, 43, 33, 37, 57, 48, 64, 68, 55, 52,253,253,253,253,253, -253, 2, 18, 26, 17, 1, 27, 12, 20, 9, 22, 7, 6, 13, 4, 8, - 23, 67, 10, 5, 3, 21, 19, 65, 62, 16, 11,253,253,253,253,253, -161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176, -177,178,179,180, 78,181, 69,182,183,184,185,186,187,188,189,190, -191,192,193,194,195,196,197, 
76,198,199,200,201,202,203,204,205, - 81,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220, -221, 51, 83,222, 80,223,224,225,226, 44,227,228,229, 61,230,231, -232,233,234, 58,235, 66, 59,236,237,238, 60, 70, 63,239,240,241, - 84, 14, 75,242, 71, 82,243, 73,244, 15, 85, 79, 86, 30, 77, 87, -245,246,247, 25, 74, 42, 24,248,249,250, 31, 56, 29,251,252,253, -) - -# Model Table: -# total sequences: 100% -# first 512 sequences: 94.7368% -# first 1024 sequences:5.2623% -# rest sequences: 0.8894% -# negative sequences: 0.0009% -HungarianLangModel = ( -0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3, -3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,2,2,3,3,1,1,2,2,2,2,2,1,2, -3,2,2,3,3,3,3,3,2,3,3,3,3,3,3,1,2,3,3,3,3,2,3,3,1,1,3,3,0,1,1,1, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0, -3,2,1,3,3,3,3,3,2,3,3,3,3,3,1,1,2,3,3,3,3,3,3,3,1,1,3,2,0,1,1,1, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, -3,3,3,3,3,3,3,3,3,3,3,1,1,2,3,3,3,1,3,3,3,3,3,1,3,3,2,2,0,3,2,3, -0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0, -3,3,3,3,3,3,2,3,3,3,2,3,3,2,3,3,3,3,3,2,3,3,2,2,3,2,3,2,0,3,2,2, -0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0, -3,3,3,3,3,3,2,3,3,3,3,3,2,3,3,3,1,2,3,2,2,3,1,2,3,3,2,2,0,3,3,3, -0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, -3,3,3,3,3,3,3,3,3,3,2,2,3,3,3,3,3,3,2,3,3,3,3,2,3,3,3,3,0,2,3,2, -0,0,0,1,1,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, -3,3,3,3,3,3,3,3,3,3,3,1,1,1,3,3,2,1,3,2,2,3,2,1,3,2,2,1,0,3,3,1, -0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, -3,2,2,3,3,3,3,3,1,2,3,3,3,3,1,2,1,3,3,3,3,2,2,3,1,1,3,2,0,1,1,1, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, -3,3,3,3,3,3,3,3,2,2,3,3,3,3,3,2,1,3,3,3,3,3,2,2,1,3,3,3,0,1,1,2, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,2,3,3,2,3,3,3,2,0,3,2,3, -0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,1,0, -3,3,3,3,3,3,2,3,3,3,2,3,2,3,3,3,1,3,2,2,2,3,1,1,3,3,1,1,0,3,3,2, -0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, -3,3,3,3,3,3,3,2,3,3,3,2,3,2,3,3,3,2,3,3,3,3,3,1,2,3,2,2,0,2,2,2, -0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, -3,3,3,2,2,2,3,1,3,3,2,2,1,3,3,3,1,1,3,1,2,3,2,3,2,2,2,1,0,2,2,2, -0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0, -3,1,1,3,3,3,3,3,1,2,3,3,3,3,1,2,1,3,3,3,2,2,3,2,1,0,3,2,0,1,1,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,1,1,3,3,3,3,3,1,2,3,3,3,3,1,1,0,3,3,3,3,0,2,3,0,0,2,1,0,1,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,2,2,3,3,2,2,2,2,3,3,0,1,2,3,2,3,2,2,3,2,1,2,0,2,2,2, -0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0, -3,3,3,3,3,3,1,2,3,3,3,2,1,2,3,3,2,2,2,3,2,3,3,1,3,3,1,1,0,2,3,2, -0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, -3,3,3,1,2,2,2,2,3,3,3,1,1,1,3,3,1,1,3,1,1,3,2,1,2,3,1,1,0,2,2,2, -0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, -3,3,3,2,1,2,1,1,3,3,1,1,1,1,3,3,1,1,2,2,1,2,1,1,2,2,1,1,0,2,2,1, -0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, -3,3,3,1,1,2,1,1,3,3,1,0,1,1,3,3,2,0,1,1,2,3,1,0,2,2,1,0,0,1,3,2, -0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, -3,2,1,3,3,3,3,3,1,2,3,2,3,3,2,1,1,3,2,3,2,1,2,2,0,1,2,1,0,0,1,1, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, -3,3,3,3,2,2,2,2,3,1,2,2,1,1,3,3,0,3,2,1,2,3,2,1,3,3,1,1,0,2,1,3, 
-0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, -3,3,3,2,2,2,3,2,3,3,3,2,1,1,3,3,1,1,1,2,2,3,2,3,2,2,2,1,0,2,2,1, -0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, -1,0,0,3,3,3,3,3,0,0,3,3,2,3,0,0,0,2,3,3,1,0,1,2,0,0,1,1,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,1,2,3,3,3,3,3,1,2,3,3,2,2,1,1,0,3,3,2,2,1,2,2,1,0,2,2,0,1,1,1, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,2,2,1,3,1,2,3,3,2,2,1,1,2,2,1,1,1,1,3,2,1,1,1,1,2,1,0,1,2,1, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0, -2,3,3,1,1,1,1,1,3,3,3,0,1,1,3,3,1,1,1,1,1,2,2,0,3,1,1,2,0,2,1,1, -0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, -3,1,0,1,2,1,2,2,0,1,2,3,1,2,0,0,0,2,1,1,1,1,1,2,0,0,1,1,0,0,0,0, -1,2,1,2,2,2,1,2,1,2,0,2,0,2,2,1,1,2,1,1,2,1,1,1,0,1,0,0,0,1,1,0, -1,1,1,2,3,2,3,3,0,1,2,2,3,1,0,1,0,2,1,2,2,0,1,1,0,0,1,1,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,0,0,3,3,2,2,1,0,0,3,2,3,2,0,0,0,1,1,3,0,0,1,1,0,0,2,1,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,1,1,2,2,3,3,1,0,1,3,2,3,1,1,1,0,1,1,1,1,1,3,1,0,0,2,2,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,1,1,1,2,2,2,1,0,1,2,3,3,2,0,0,0,2,1,1,1,2,1,1,1,0,1,1,1,0,0,0, -1,2,2,2,2,2,1,1,1,2,0,2,1,1,1,1,1,2,1,1,1,1,1,1,0,1,1,1,0,0,1,1, -3,2,2,1,0,0,1,1,2,2,0,3,0,1,2,1,1,0,0,1,1,1,0,1,1,1,1,0,2,1,1,1, -2,2,1,1,1,2,1,2,1,1,1,1,1,1,1,2,1,1,1,2,3,1,1,1,1,1,1,1,1,1,0,1, -2,3,3,0,1,0,0,0,3,3,1,0,0,1,2,2,1,0,0,0,0,2,0,0,1,1,1,0,2,1,1,1, -2,1,1,1,1,1,1,2,1,1,0,1,1,0,1,1,1,0,1,2,1,1,0,1,1,1,1,1,1,1,0,1, -2,3,3,0,1,0,0,0,2,2,0,0,0,0,1,2,2,0,0,0,0,1,0,0,1,1,0,0,2,0,1,0, -2,1,1,1,1,2,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,1,1,2,0,1,1,1,1,1,0,1, -3,2,2,0,1,0,1,0,2,3,2,0,0,1,2,2,1,0,0,1,1,1,0,0,2,1,0,1,2,2,1,1, -2,1,1,1,1,1,1,2,1,1,1,1,1,1,0,2,1,0,1,1,0,1,1,1,0,1,1,2,1,1,0,1, -2,2,2,0,0,1,0,0,2,2,1,1,0,0,2,1,1,0,0,0,1,2,0,0,2,1,0,0,2,1,1,1, -2,1,1,1,1,2,1,2,1,1,1,2,2,1,1,2,1,1,1,2,1,1,1,1,1,1,1,1,1,1,0,1, -1,2,3,0,0,0,1,0,3,2,1,0,0,1,2,1,1,0,0,0,0,2,1,0,1,1,0,0,2,1,2,1, -1,1,0,0,0,1,0,1,1,1,1,1,2,0,0,1,0,0,0,2,0,0,1,1,1,1,1,1,1,1,0,1, -3,0,0,2,1,2,2,1,0,0,2,1,2,2,0,0,0,2,1,1,1,0,1,1,0,0,1,1,2,0,0,0, -1,2,1,2,2,1,1,2,1,2,0,1,1,1,1,1,1,1,1,1,2,1,1,0,0,1,1,1,1,0,0,1, -1,3,2,0,0,0,1,0,2,2,2,0,0,0,2,2,1,0,0,0,0,3,1,1,1,1,0,0,2,1,1,1, -2,1,0,1,1,1,0,1,1,1,1,1,1,1,0,2,1,0,0,1,0,1,1,0,1,1,1,1,1,1,0,1, -2,3,2,0,0,0,1,0,2,2,0,0,0,0,2,1,1,0,0,0,0,2,1,0,1,1,0,0,2,1,1,0, -2,1,1,1,1,2,1,2,1,2,0,1,1,1,0,2,1,1,1,2,1,1,1,1,0,1,1,1,1,1,0,1, -3,1,1,2,2,2,3,2,1,1,2,2,1,1,0,1,0,2,2,1,1,1,1,1,0,0,1,1,0,1,1,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,2,2,0,0,0,0,0,2,2,0,0,0,0,2,2,1,0,0,0,1,1,0,0,1,2,0,0,2,1,1,1, -2,2,1,1,1,2,1,2,1,1,0,1,1,1,1,2,1,1,1,2,1,1,1,1,0,1,2,1,1,1,0,1, -1,0,0,1,2,3,2,1,0,0,2,0,1,1,0,0,0,1,1,1,1,0,1,1,0,0,1,0,0,0,0,0, -1,2,1,2,1,2,1,1,1,2,0,2,1,1,1,0,1,2,0,0,1,1,1,0,0,0,0,0,0,0,0,0, -2,3,2,0,0,0,0,0,1,1,2,1,0,0,1,1,1,0,0,0,0,2,0,0,1,1,0,0,2,1,1,1, -2,1,1,1,1,1,1,2,1,0,1,1,1,1,0,2,1,1,1,1,1,1,0,1,0,1,1,1,1,1,0,1, -1,2,2,0,1,1,1,0,2,2,2,0,0,0,3,2,1,0,0,0,1,1,0,0,1,1,0,1,1,1,0,0, -1,1,0,1,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,2,1,1,1,0,0,1,1,1,0,1,0,1, -2,1,0,2,1,1,2,2,1,1,2,1,1,1,0,0,0,1,1,0,1,1,1,1,0,0,1,1,1,0,0,0, -1,2,2,2,2,2,1,1,1,2,0,2,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,0,1,0, -1,2,3,0,0,0,1,0,2,2,0,0,0,0,2,2,0,0,0,0,0,1,0,0,1,0,0,0,2,0,1,0, -2,1,1,1,1,1,0,2,0,0,0,1,2,1,1,1,1,0,1,2,0,1,0,1,0,1,1,1,0,1,0,1, 
-2,2,2,0,0,0,1,0,2,1,2,0,0,0,1,1,2,0,0,0,0,1,0,0,1,1,0,0,2,1,0,1, -2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,0,1,1,1,1,1,0,1, -1,2,2,0,0,0,1,0,2,2,2,0,0,0,1,1,0,0,0,0,0,1,1,0,2,0,0,1,1,1,0,1, -1,0,1,1,1,1,1,1,0,1,1,1,1,0,0,1,0,0,1,1,0,1,0,1,1,1,1,1,0,0,0,1, -1,0,0,1,0,1,2,1,0,0,1,1,1,2,0,0,0,1,1,0,1,0,1,1,0,0,1,0,0,0,0,0, -0,2,1,2,1,1,1,1,1,2,0,2,0,1,1,0,1,2,1,0,1,1,1,0,0,0,0,0,0,1,0,0, -2,1,1,0,1,2,0,0,1,1,1,0,0,0,1,1,0,0,0,0,0,1,0,0,1,0,0,0,2,1,0,1, -2,2,1,1,1,1,1,2,1,1,0,1,1,1,1,2,1,1,1,2,1,1,0,1,0,1,1,1,1,1,0,1, -1,2,2,0,0,0,0,0,1,1,0,0,0,0,2,1,0,0,0,0,0,2,0,0,2,2,0,0,2,0,0,1, -2,1,1,1,1,1,1,1,0,1,1,0,1,1,0,1,0,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1, -1,1,2,0,0,3,1,0,2,1,1,1,0,0,1,1,1,0,0,0,1,1,0,0,0,1,0,0,1,0,1,0, -1,2,1,0,1,1,1,2,1,1,0,1,1,1,1,1,0,0,0,1,1,1,1,1,0,1,0,0,0,1,0,0, -2,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,2,0,0,0, -2,1,1,1,1,1,1,1,1,1,0,1,1,1,1,1,1,1,1,1,2,1,1,0,0,1,1,1,1,1,0,1, -2,1,1,1,2,1,1,1,0,1,1,2,1,0,0,0,0,1,1,1,1,0,1,0,0,0,0,1,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,1,0,1,1,1,1,1,0,0,1,1,2,1,0,0,0,1,1,0,0,0,1,1,0,0,1,0,1,0,0,0, -1,2,1,1,1,1,1,1,1,1,0,1,0,1,1,1,1,1,1,0,1,1,1,0,0,0,0,0,0,1,0,0, -2,0,0,0,1,1,1,1,0,0,1,1,0,0,0,0,0,1,1,1,2,0,0,1,0,0,1,0,1,0,0,0, -0,1,1,1,1,1,1,1,1,2,0,1,1,1,1,0,1,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0, -1,0,0,1,1,1,1,1,0,0,2,1,0,1,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0, -0,1,1,1,1,1,1,0,1,1,0,1,0,1,1,0,1,1,0,0,1,1,1,0,0,0,0,0,0,0,0,0, -1,0,0,1,1,1,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, -0,1,1,1,1,1,0,0,1,1,0,1,0,1,0,0,1,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0, -0,0,0,1,0,0,0,0,0,0,1,1,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,1,1,1,0,1,0,0,1,1,0,1,0,1,1,0,1,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0, -2,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,0,0,1,0,0,1,0,1,0,1,1,1,0,0,1,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,0,0,1,1,1,1,0,0,0,1,1,1,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0, -0,1,1,1,1,1,1,0,1,1,0,1,0,1,0,0,1,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0, -) - -Latin2HungarianModel = { - 'char_to_order_map': Latin2_HungarianCharToOrderMap, - 'precedence_matrix': HungarianLangModel, - 'typical_positive_ratio': 0.947368, - 'keep_english_letter': True, - 'charset_name': "ISO-8859-2", - 'language': 'Hungarian', -} - -Win1250HungarianModel = { - 'char_to_order_map': win1250HungarianCharToOrderMap, - 'precedence_matrix': HungarianLangModel, - 'typical_positive_ratio': 0.947368, - 'keep_english_letter': True, - 'charset_name': "windows-1250", - 'language': 'Hungarian', -} diff --git a/venv/lib/python3.6/site-packages/chardet/langthaimodel.py b/venv/lib/python3.6/site-packages/chardet/langthaimodel.py deleted file mode 100644 index 15f94c2..0000000 --- a/venv/lib/python3.6/site-packages/chardet/langthaimodel.py +++ /dev/null @@ -1,199 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Communicator client code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. 
-# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -# 255: Control characters that usually does not exist in any text -# 254: Carriage/Return -# 253: symbol (punctuation) that does not belong to word -# 252: 0 - 9 - -# The following result for thai was collected from a limited sample (1M). - -# Character Mapping Table: -TIS620CharToOrderMap = ( -255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 -253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 -252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 -253,182,106,107,100,183,184,185,101, 94,186,187,108,109,110,111, # 40 -188,189,190, 89, 95,112,113,191,192,193,194,253,253,253,253,253, # 50 -253, 64, 72, 73,114, 74,115,116,102, 81,201,117, 90,103, 78, 82, # 60 - 96,202, 91, 79, 84,104,105, 97, 98, 92,203,253,253,253,253,253, # 70 -209,210,211,212,213, 88,214,215,216,217,218,219,220,118,221,222, -223,224, 99, 85, 83,225,226,227,228,229,230,231,232,233,234,235, -236, 5, 30,237, 24,238, 75, 8, 26, 52, 34, 51,119, 47, 58, 57, - 49, 53, 55, 43, 20, 19, 44, 14, 48, 3, 17, 25, 39, 62, 31, 54, - 45, 9, 16, 2, 61, 15,239, 12, 42, 46, 18, 21, 76, 4, 66, 63, - 22, 10, 1, 36, 23, 13, 40, 27, 32, 35, 86,240,241,242,243,244, - 11, 28, 41, 29, 33,245, 50, 37, 6, 7, 67, 77, 38, 93,246,247, - 68, 56, 59, 65, 69, 60, 70, 80, 71, 87,248,249,250,251,252,253, -) - -# Model Table: -# total sequences: 100% -# first 512 sequences: 92.6386% -# first 1024 sequences:7.3177% -# rest sequences: 1.0230% -# negative sequences: 0.0436% -ThaiLangModel = ( -0,1,3,3,3,3,0,0,3,3,0,3,3,0,3,3,3,3,3,3,3,3,0,0,3,3,3,0,3,3,3,3, -0,3,3,0,0,0,1,3,0,3,3,2,3,3,0,1,2,3,3,3,3,0,2,0,2,0,0,3,2,1,2,2, -3,0,3,3,2,3,0,0,3,3,0,3,3,0,3,3,3,3,3,3,3,3,3,0,3,2,3,0,2,2,2,3, -0,2,3,0,0,0,0,1,0,1,2,3,1,1,3,2,2,0,1,1,0,0,1,0,0,0,0,0,0,0,1,1, -3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,2,2,2,2,2,2,2,3,3,2,3,2,3,3,2,2,2, -3,1,2,3,0,3,3,2,2,1,2,3,3,1,2,0,1,3,0,1,0,0,1,0,0,0,0,0,0,0,1,1, -3,3,2,2,3,3,3,3,1,2,3,3,3,3,3,2,2,2,2,3,3,2,2,3,3,2,2,3,2,3,2,2, -3,3,1,2,3,1,2,2,3,3,1,0,2,1,0,0,3,1,2,1,0,0,1,0,0,0,0,0,0,1,0,1, -3,3,3,3,3,3,2,2,3,3,3,3,2,3,2,2,3,3,2,2,3,2,2,2,2,1,1,3,1,2,1,1, -3,2,1,0,2,1,0,1,0,1,1,0,1,1,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0, -3,3,3,2,3,2,3,3,2,2,3,2,3,3,2,3,1,1,2,3,2,2,2,3,2,2,2,2,2,1,2,1, -2,2,1,1,3,3,2,1,0,1,2,2,0,1,3,0,0,0,1,1,0,0,0,0,0,2,3,0,0,2,1,1, -3,3,2,3,3,2,0,0,3,3,0,3,3,0,2,2,3,1,2,2,1,1,1,0,2,2,2,0,2,2,1,1, -0,2,1,0,2,0,0,2,0,1,0,0,1,0,0,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,1,0, -3,3,2,3,3,2,0,0,3,3,0,2,3,0,2,1,2,2,2,2,1,2,0,0,2,2,2,0,2,2,1,1, -0,2,1,0,2,0,0,2,0,1,1,0,1,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0, -3,3,2,3,2,3,2,0,2,2,1,3,2,1,3,2,1,2,3,2,2,3,0,2,3,2,2,1,2,2,2,2, -1,2,2,0,0,0,0,2,0,1,2,0,1,1,1,0,1,0,3,1,1,0,0,0,0,0,0,0,0,0,1,0, -3,3,2,3,3,2,3,2,2,2,3,2,2,3,2,2,1,2,3,2,2,3,1,3,2,2,2,3,2,2,2,3, -3,2,1,3,0,1,1,1,0,2,1,1,1,1,1,0,1,0,1,1,0,0,0,0,0,0,0,0,0,2,0,0, -1,0,0,3,0,3,3,3,3,3,0,0,3,0,2,2,3,3,3,3,3,0,0,0,1,1,3,0,0,0,0,2, 
-0,0,1,0,0,0,0,0,0,0,2,3,0,0,0,3,0,2,0,0,0,0,0,3,0,0,0,0,0,0,0,0, -2,0,3,3,3,3,0,0,2,3,0,0,3,0,3,3,2,3,3,3,3,3,0,0,3,3,3,0,0,0,3,3, -0,0,3,0,0,0,0,2,0,0,2,1,1,3,0,0,1,0,0,2,3,0,1,0,0,0,0,0,0,0,1,0, -3,3,3,3,2,3,3,3,3,3,3,3,1,2,1,3,3,2,2,1,2,2,2,3,1,1,2,0,2,1,2,1, -2,2,1,0,0,0,1,1,0,1,0,1,1,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0, -3,0,2,1,2,3,3,3,0,2,0,2,2,0,2,1,3,2,2,1,2,1,0,0,2,2,1,0,2,1,2,2, -0,1,1,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,2,1,3,3,1,1,3,0,2,3,1,1,3,2,1,1,2,0,2,2,3,2,1,1,1,1,1,2, -3,0,0,1,3,1,2,1,2,0,3,0,0,0,1,0,3,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0, -3,3,1,1,3,2,3,3,3,1,3,2,1,3,2,1,3,2,2,2,2,1,3,3,1,2,1,3,1,2,3,0, -2,1,1,3,2,2,2,1,2,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2, -3,3,2,3,2,3,3,2,3,2,3,2,3,3,2,1,0,3,2,2,2,1,2,2,2,1,2,2,1,2,1,1, -2,2,2,3,0,1,3,1,1,1,1,0,1,1,0,2,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,2,3,2,2,1,1,3,2,3,2,3,2,0,3,2,2,1,2,0,2,2,2,1,2,2,2,2,1, -3,2,1,2,2,1,0,2,0,1,0,0,1,1,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,1, -3,3,3,3,3,2,3,1,2,3,3,2,2,3,0,1,1,2,0,3,3,2,2,3,0,1,1,3,0,0,0,0, -3,1,0,3,3,0,2,0,2,1,0,0,3,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,2,3,2,3,3,0,1,3,1,1,2,1,2,1,1,3,1,1,0,2,3,1,1,1,1,1,1,1,1, -3,1,1,2,2,2,2,1,1,1,0,0,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, -3,2,2,1,1,2,1,3,3,2,3,2,2,3,2,2,3,1,2,2,1,2,0,3,2,1,2,2,2,2,2,1, -3,2,1,2,2,2,1,1,1,1,0,0,1,1,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,3,3,1,3,3,0,2,1,0,3,2,0,0,3,1,0,1,1,0,1,0,0,0,0,0,1, -1,0,0,1,0,3,2,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,0,2,2,2,3,0,0,1,3,0,3,2,0,3,2,2,3,3,3,3,3,1,0,2,2,2,0,2,2,1,2, -0,2,3,0,0,0,0,1,0,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, -3,0,2,3,1,3,3,2,3,3,0,3,3,0,3,2,2,3,2,3,3,3,0,0,2,2,3,0,1,1,1,3, -0,0,3,0,0,0,2,2,0,1,3,0,1,2,2,2,3,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1, -3,2,3,3,2,0,3,3,2,2,3,1,3,2,1,3,2,0,1,2,2,0,2,3,2,1,0,3,0,0,0,0, -3,0,0,2,3,1,3,0,0,3,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,1,3,2,2,2,1,2,0,1,3,1,1,3,1,3,0,0,2,1,1,1,1,2,1,1,1,0,2,1,0,1, -1,2,0,0,0,3,1,1,0,0,0,0,1,0,1,0,0,1,0,1,0,0,0,0,0,3,1,0,0,0,1,0, -3,3,3,3,2,2,2,2,2,1,3,1,1,1,2,0,1,1,2,1,2,1,3,2,0,0,3,1,1,1,1,1, -3,1,0,2,3,0,0,0,3,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,2,3,0,3,3,0,2,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,0,0,0,0,0, -0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,2,3,1,3,0,0,1,2,0,0,2,0,3,3,2,3,3,3,2,3,0,0,2,2,2,0,0,0,2,2, -0,0,1,0,0,0,0,3,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, -0,0,0,3,0,2,0,0,0,0,0,0,0,0,0,0,1,2,3,1,3,3,0,0,1,0,3,0,0,0,0,0, -0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,1,2,3,1,2,3,1,0,3,0,2,2,1,0,2,1,1,2,0,1,0,0,1,1,1,1,0,1,0,0, -1,0,0,0,0,1,1,0,3,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,2,1,0,1,1,1,3,1,2,2,2,2,2,2,1,1,1,1,0,3,1,0,1,3,1,1,1,1, -1,1,0,2,0,1,3,1,1,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,1, -3,0,2,2,1,3,3,2,3,3,0,1,1,0,2,2,1,2,1,3,3,1,0,0,3,2,0,0,0,0,2,1, -0,1,0,0,0,0,1,2,0,1,1,3,1,1,2,2,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, -0,0,3,0,0,1,0,0,0,3,0,0,3,0,3,1,0,1,1,1,3,2,0,0,0,3,0,0,0,0,2,0, -0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0, -3,3,1,3,2,1,3,3,1,2,2,0,1,2,1,0,1,2,0,0,0,0,0,3,0,0,0,3,0,0,0,0, -3,0,0,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,0,1,2,0,3,3,3,2,2,0,1,1,0,1,3,0,0,0,2,2,0,0,0,0,3,1,0,1,0,0,0, -0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,0,2,3,1,2,0,0,2,1,0,3,1,0,1,2,0,1,1,1,1,3,0,0,3,1,1,0,2,2,1,1, -0,2,0,0,0,0,0,1,0,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
-3,0,0,3,1,2,0,0,2,2,0,1,2,0,1,0,1,3,1,2,1,0,0,0,2,0,3,0,0,0,1,0, -0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,0,1,1,2,2,0,0,0,2,0,2,1,0,1,1,0,1,1,1,2,1,0,0,1,1,1,0,2,1,1,1, -0,1,1,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,1, -0,0,0,2,0,1,3,1,1,1,1,0,0,0,0,3,2,0,1,0,0,0,1,2,0,0,0,1,0,0,0,0, -0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,3,3,3,3,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,0,2,3,2,2,0,0,0,1,0,0,0,0,2,3,2,1,2,2,3,0,0,0,2,3,1,0,0,0,1,1, -0,0,1,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,0, -3,3,2,2,0,1,0,0,0,0,2,0,2,0,1,0,0,0,1,1,0,0,0,2,1,0,1,0,1,1,0,0, -0,1,0,2,0,0,1,0,3,0,1,0,0,0,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,1,0,0,1,0,0,0,0,0,1,1,2,0,0,0,0,1,0,0,1,3,1,0,0,0,0,1,1,0,0, -0,1,0,0,0,0,3,0,0,0,0,0,0,3,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0, -3,3,1,1,1,1,2,3,0,0,2,1,1,1,1,1,0,2,1,1,0,0,0,2,1,0,1,2,1,1,0,1, -2,1,0,3,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,3,1,0,0,0,0,0,0,0,3,0,0,0,3,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,1, -0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,2,0,0,0,0,0,0,1,2,1,0,1,1,0,2,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,2,0,0,0,1,3,0,1,0,0,0,2,0,0,0,0,0,0,0,1,2,0,0,0,0,0, -3,3,0,0,1,1,2,0,0,1,2,1,0,1,1,1,0,1,1,0,0,2,1,1,0,1,0,0,1,1,1,0, -0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,2,2,1,0,0,0,0,1,0,0,0,0,3,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0, -2,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,3,0,0,1,1,0,0,0,2,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,1,0,1,2,0,1,2,0,0,1,1,0,2,0,1,0,0,1,0,0,0,0,1,0,0,0,2,0,0,0,0, -1,0,0,1,0,1,1,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,1,0,0,0,0,0,0,0,1,1,0,1,1,0,2,1,3,0,0,0,0,1,1,0,0,0,0,0,0,0,3, -1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,0,1,0,1,0,0,2,0,0,2,0,0,1,1,2,0,0,1,1,0,0,0,1,0,0,0,1,1,0,0,0, -1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, -1,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,1,1,0,0,0, -2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,0,0,0,0,2,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,3,0,0,0, -2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,1,0,0,0,0, -1,0,0,0,0,0,0,0,0,1,0,0,0,0,2,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,1,1,0,0,2,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -) - -TIS620ThaiModel = { - 'char_to_order_map': TIS620CharToOrderMap, - 'precedence_matrix': ThaiLangModel, - 'typical_positive_ratio': 0.926386, - 'keep_english_letter': False, - 'charset_name': "TIS-620", - 'language': 'Thai', -} diff --git a/venv/lib/python3.6/site-packages/chardet/langturkishmodel.py b/venv/lib/python3.6/site-packages/chardet/langturkishmodel.py deleted file mode 100644 index a427a45..0000000 --- a/venv/lib/python3.6/site-packages/chardet/langturkishmodel.py +++ /dev/null @@ -1,193 +0,0 @@ -# -*- coding: utf-8 -*- -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Communicator client code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# Özgür Baskın - Turkish Language Model -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -# 255: Control characters that usually does not exist in any text -# 254: Carriage/Return -# 253: symbol (punctuation) that does not belong to word -# 252: 0 - 9 - -# Character Mapping Table: -Latin5_TurkishCharToOrderMap = ( -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, -255, 23, 37, 47, 39, 29, 52, 36, 45, 53, 60, 16, 49, 20, 46, 42, - 48, 69, 44, 35, 31, 51, 38, 62, 65, 43, 56,255,255,255,255,255, -255, 1, 21, 28, 12, 2, 18, 27, 25, 3, 24, 10, 5, 13, 4, 15, - 26, 64, 7, 8, 9, 14, 32, 57, 58, 11, 22,255,255,255,255,255, -180,179,178,177,176,175,174,173,172,171,170,169,168,167,166,165, -164,163,162,161,160,159,101,158,157,156,155,154,153,152,151,106, -150,149,148,147,146,145,144,100,143,142,141,140,139,138,137,136, - 94, 80, 93,135,105,134,133, 63,132,131,130,129,128,127,126,125, -124,104, 73, 99, 79, 85,123, 54,122, 98, 92,121,120, 91,103,119, - 68,118,117, 97,116,115, 50, 90,114,113,112,111, 55, 41, 40, 86, - 89, 70, 59, 78, 71, 82, 88, 33, 77, 66, 84, 83,110, 75, 61, 96, - 30, 67,109, 74, 87,102, 34, 95, 81,108, 76, 72, 17, 6, 19,107, -) - -TurkishLangModel = ( -3,2,3,3,3,1,3,3,3,3,3,3,3,3,2,1,1,3,3,1,3,3,0,3,3,3,3,3,0,3,1,3, -3,2,1,0,0,1,1,0,0,0,1,0,0,1,1,1,1,0,0,0,0,0,0,0,2,2,0,0,1,0,0,1, -3,2,2,3,3,0,3,3,3,3,3,3,3,2,3,1,0,3,3,1,3,3,0,3,3,3,3,3,0,3,0,3, -3,1,1,0,1,0,1,0,0,0,0,0,0,1,1,1,1,0,0,0,0,0,0,0,2,2,0,0,0,1,0,1, -3,3,2,3,3,0,3,3,3,3,3,3,3,2,3,1,1,3,3,0,3,3,1,2,3,3,3,3,0,3,0,3, -3,1,1,0,0,0,1,0,0,0,0,1,1,0,1,2,1,0,0,0,1,0,0,0,0,2,0,0,0,0,0,1, 
-3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,1,3,3,2,0,3,2,1,2,2,1,3,3,0,0,0,2, -2,2,0,1,0,0,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,1,0,1,0,0,1, -3,3,3,2,3,3,1,2,3,3,3,3,3,3,3,1,3,2,1,0,3,2,0,1,2,3,3,2,1,0,0,2, -2,1,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,2,0,0,0, -1,0,1,3,3,1,3,3,3,3,3,3,3,1,2,0,0,2,3,0,2,3,0,0,2,2,2,3,0,3,0,1, -2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,0,3,3,3,0,3,2,0,2,3,2,3,3,1,0,0,2, -3,2,0,0,1,0,0,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,1,1,1,0,2,0,0,1, -3,3,3,2,3,3,2,3,3,3,3,2,3,3,3,0,3,3,0,0,2,1,0,0,2,3,2,2,0,0,0,2, -2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,1,0,1,0,2,0,0,1, -3,3,3,2,3,3,3,3,3,3,3,2,3,3,3,0,3,2,0,1,3,2,1,1,3,2,3,2,1,0,0,2, -2,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0, -3,3,3,2,3,3,3,3,3,3,3,2,3,3,3,0,3,2,2,0,2,3,0,0,2,2,2,2,0,0,0,2, -3,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,2,0,1,0,0,0, -3,3,3,3,3,3,3,2,2,2,2,3,2,3,3,0,3,3,1,1,2,2,0,0,2,2,3,2,0,0,1,3, -0,3,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,1, -3,3,3,2,3,3,3,2,1,2,2,3,2,3,3,0,3,2,0,0,1,1,0,1,1,2,1,2,0,0,0,1, -0,3,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,1,0,0,0, -3,3,3,2,3,3,2,3,2,2,2,3,3,3,3,1,3,1,1,0,3,2,1,1,3,3,2,3,1,0,0,1, -1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,2,0,0,1, -3,2,2,3,3,0,3,3,3,3,3,3,3,2,2,1,0,3,3,1,3,3,0,1,3,3,2,3,0,3,0,3, -2,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0, -2,2,2,3,3,0,3,3,3,3,3,3,3,3,3,0,0,3,2,0,3,3,0,3,2,3,3,3,0,3,1,3, -2,0,0,0,0,0,0,0,0,0,0,1,0,1,2,0,1,0,0,0,0,0,0,0,2,2,0,0,1,0,0,1, -3,3,3,1,2,3,3,1,0,0,1,0,0,3,3,2,3,0,0,2,0,0,2,0,2,0,0,0,2,0,2,0, -0,3,1,0,1,0,0,0,2,2,1,0,1,1,2,1,2,2,2,0,2,1,1,0,0,0,2,0,0,0,0,0, -1,2,1,3,3,0,3,3,3,3,3,2,3,0,0,0,0,2,3,0,2,3,1,0,2,3,1,3,0,3,0,2, -3,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,1,3,3,2,2,3,2,2,0,1,2,3,0,1,2,1,0,1,0,0,0,1,0,2,2,0,0,0,1, -1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,1,0,0,1,0,0,0, -3,3,3,1,3,3,1,1,3,3,1,1,3,3,1,0,2,1,2,0,2,1,0,0,1,1,2,1,0,0,0,2, -2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,1,0,2,1,3,0,0,2,0,0,3,3,0,3,0,0,1,0,1,2,0,0,1,1,2,2,0,1,0, -0,1,2,1,1,0,1,0,1,1,1,1,1,0,1,1,1,2,2,1,2,0,1,0,0,0,0,0,0,1,0,0, -3,3,3,2,3,2,3,3,0,2,2,2,3,3,3,0,3,0,0,0,2,2,0,1,2,1,1,1,0,0,0,1, -0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0, -3,3,3,3,3,3,2,1,2,2,3,3,3,3,2,0,2,0,0,0,2,2,0,0,2,1,3,3,0,0,1,1, -1,1,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0, -1,1,2,3,3,0,3,3,3,3,3,3,2,2,0,2,0,2,3,2,3,2,2,2,2,2,2,2,1,3,2,3, -2,0,2,1,2,2,2,2,1,1,2,2,1,2,2,1,2,0,0,2,1,1,0,2,1,0,0,1,0,0,0,1, -2,3,3,1,1,1,0,1,1,1,2,3,2,1,1,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0, -0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,2,2,2,3,2,3,2,2,1,3,3,3,0,2,1,2,0,2,1,0,0,1,1,1,1,1,0,0,1, -2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,2,0,1,0,0,0, -3,3,3,2,3,3,3,3,3,2,3,1,2,3,3,1,2,0,0,0,0,0,0,0,3,2,1,1,0,0,0,0, -2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0, -3,3,3,2,2,3,3,2,1,1,1,1,1,3,3,0,3,1,0,0,1,1,0,0,3,1,2,1,0,0,0,0, -0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0, -3,3,3,2,2,3,2,2,2,3,2,1,1,3,3,0,3,0,0,0,0,1,0,0,3,1,1,2,0,0,0,1, -1,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, -1,1,1,3,3,0,3,3,3,3,3,2,2,2,1,2,0,2,1,2,2,1,1,0,1,2,2,2,2,2,2,2, -0,0,2,1,2,1,2,1,0,1,1,3,1,2,1,1,2,0,0,2,0,1,0,1,0,1,0,0,0,1,0,1, -3,3,3,1,3,3,3,0,1,1,0,2,2,3,1,0,3,0,0,0,1,0,0,0,1,0,0,1,0,1,0,0, 
-1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,2,0,0,2,2,1,0,0,1,0,0,3,3,1,3,0,0,1,1,0,2,0,3,0,0,0,2,0,1,1, -0,1,2,0,1,2,2,0,2,2,2,2,1,0,2,1,1,0,2,0,2,1,2,0,0,0,0,0,0,0,0,0, -3,3,3,1,3,2,3,2,0,2,2,2,1,3,2,0,2,1,2,0,1,2,0,0,1,0,2,2,0,0,0,2, -1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,1,0,0,0, -3,3,3,0,3,3,1,1,2,3,1,0,3,2,3,0,3,0,0,0,1,0,0,0,1,0,1,0,0,0,0,0, -1,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,3,3,0,3,3,2,3,3,2,2,0,0,0,0,1,2,0,1,3,0,0,0,3,1,1,0,3,0,2, -2,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,1,2,2,1,0,3,1,1,1,1,3,3,2,3,0,0,1,0,1,2,0,2,2,0,2,2,0,2,1, -0,2,2,1,1,1,1,0,2,1,1,0,1,1,1,1,2,1,2,1,2,0,1,0,1,0,0,0,0,0,0,0, -3,3,3,0,1,1,3,0,0,1,1,0,0,2,2,0,3,0,0,1,1,0,1,0,0,0,0,0,2,0,0,0, -0,3,1,0,1,0,1,0,2,0,0,1,0,1,0,1,1,1,2,1,1,0,2,0,0,0,0,0,0,0,0,0, -3,3,3,0,2,0,2,0,1,1,1,0,0,3,3,0,2,0,0,1,0,0,2,1,1,0,1,0,1,0,1,0, -0,2,0,1,2,0,2,0,2,1,1,0,1,0,2,1,1,0,2,1,1,0,1,0,0,0,1,1,0,0,0,0, -3,2,3,0,1,0,0,0,0,0,0,0,0,1,2,0,1,0,0,1,0,0,1,0,0,0,0,0,2,0,0,0, -0,0,1,1,0,0,1,0,1,0,0,1,0,0,0,2,1,0,1,0,2,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,0,0,2,3,0,0,1,0,1,0,2,3,2,3,0,0,1,3,0,2,1,0,0,0,0,2,0,1,0, -0,2,1,0,0,1,1,0,2,1,0,0,1,0,0,1,1,0,1,1,2,0,1,0,0,0,0,1,0,0,0,0, -3,2,2,0,0,1,1,0,0,0,0,0,0,3,1,1,1,0,0,0,0,0,1,0,0,0,0,0,2,0,1,0, -0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0, -0,0,0,3,3,0,2,3,2,2,1,2,2,1,1,2,0,1,3,2,2,2,0,0,2,2,0,0,0,1,2,1, -3,0,2,1,1,0,1,1,1,0,1,2,2,2,1,1,2,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0, -0,1,1,2,3,0,3,3,3,2,2,2,2,1,0,1,0,1,0,1,2,2,0,0,2,2,1,3,1,1,2,1, -0,0,1,1,2,0,1,1,0,0,1,2,0,2,1,1,2,0,0,1,0,0,0,1,0,1,0,1,0,0,0,0, -3,3,2,0,0,3,1,0,0,0,0,0,0,3,2,1,2,0,0,1,0,0,2,0,0,0,0,0,2,0,1,0, -0,2,1,1,0,0,1,0,1,2,0,0,1,1,0,0,2,1,1,1,1,0,2,0,0,0,0,0,0,0,0,0, -3,3,2,0,0,1,0,0,0,0,1,0,0,3,3,2,2,0,0,1,0,0,2,0,1,0,0,0,2,0,1,0, -0,0,1,1,0,0,2,0,2,1,0,0,1,1,2,1,2,0,2,1,2,1,1,1,0,0,1,1,0,0,0,0, -3,3,2,0,0,2,2,0,0,0,1,1,0,2,2,1,3,1,0,1,0,1,2,0,0,0,0,0,1,0,1,0, -0,1,1,0,0,0,0,0,1,0,0,1,0,0,0,1,1,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,2,0,0,0,1,0,0,1,0,0,2,3,1,2,0,0,1,0,0,2,0,0,0,1,0,2,0,2,0, -0,1,1,2,2,1,2,0,2,1,1,0,0,1,1,0,1,1,1,1,2,1,1,0,0,0,0,0,0,0,0,0, -3,3,3,0,2,1,2,1,0,0,1,1,0,3,3,1,2,0,0,1,0,0,2,0,2,0,1,1,2,0,0,0, -0,0,1,1,1,1,2,0,1,1,0,1,1,1,1,0,0,0,1,1,1,0,1,0,0,0,1,0,0,0,0,0, -3,3,3,0,2,2,3,2,0,0,1,0,0,2,3,1,0,0,0,0,0,0,2,0,2,0,0,0,2,0,0,0, -0,1,1,0,0,0,1,0,0,1,0,1,1,0,1,0,1,1,1,0,1,0,0,0,0,0,0,0,0,0,0,0, -3,2,3,0,0,0,0,0,0,0,1,0,0,2,2,2,2,0,0,1,0,0,2,0,0,0,0,0,2,0,1,0, -0,0,2,1,1,0,1,0,2,1,1,0,0,1,1,2,1,0,2,0,2,0,1,0,0,0,2,0,0,0,0,0, -0,0,0,2,2,0,2,1,1,1,1,2,2,0,0,1,0,1,0,0,1,3,0,0,0,0,1,0,0,2,1,0, -0,0,1,0,1,0,0,0,0,0,2,1,0,1,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0, -2,0,0,2,3,0,2,3,1,2,2,0,2,0,0,2,0,2,1,1,1,2,1,0,0,1,2,1,1,2,1,0, -1,0,2,0,1,0,1,1,0,0,2,2,1,2,1,1,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,0,2,1,2,0,0,0,1,0,0,3,2,0,1,0,0,1,0,0,2,0,0,0,1,2,1,0,1,0, -0,0,0,0,1,0,1,0,0,1,0,0,0,0,1,0,1,0,1,1,1,0,1,0,0,0,0,0,0,0,0,0, -0,0,0,2,2,0,2,2,1,1,0,1,1,1,1,1,0,0,1,2,1,1,1,0,1,0,0,0,1,1,1,1, -0,0,2,1,0,1,1,1,0,1,1,2,1,2,1,1,2,0,1,1,2,1,0,2,0,0,0,0,0,0,0,0, -3,2,2,0,0,2,0,0,0,0,0,0,0,2,2,0,2,0,0,1,0,0,2,0,0,0,0,0,2,0,0,0, -0,2,1,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0, -0,0,0,3,2,0,2,2,0,1,1,0,1,0,0,1,0,0,0,1,0,1,0,0,0,0,0,1,0,0,0,0, -2,0,1,0,1,0,1,1,0,0,1,2,0,1,0,1,1,0,0,1,0,1,0,2,0,0,0,0,0,0,0,0, -2,2,2,0,1,1,0,0,0,1,0,0,0,1,2,0,1,0,0,1,0,0,1,0,0,0,0,1,2,0,1,0, -0,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,1,0,1,0,2,0,0,0,0,0,0,0,0,0,0,0, 
-2,2,2,2,1,0,1,1,1,0,0,0,0,1,2,0,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0, -1,1,2,0,1,0,0,0,1,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,2,0,0,0,0,0,1, -0,0,1,2,2,0,2,1,2,1,1,2,2,0,0,0,0,1,0,0,1,1,0,0,2,0,0,0,0,1,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0, -2,2,2,0,0,0,1,0,0,0,0,0,0,2,2,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0, -0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,1,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,2,2,0,1,0,1,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,1,0,0,0,0,0,0,0,0,0,0,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -) - -Latin5TurkishModel = { - 'char_to_order_map': Latin5_TurkishCharToOrderMap, - 'precedence_matrix': TurkishLangModel, - 'typical_positive_ratio': 0.970290, - 'keep_english_letter': True, - 'charset_name': "ISO-8859-9", - 'language': 'Turkish', -} diff --git a/venv/lib/python3.6/site-packages/chardet/latin1prober.py b/venv/lib/python3.6/site-packages/chardet/latin1prober.py deleted file mode 100644 index 7d1e8c2..0000000 --- a/venv/lib/python3.6/site-packages/chardet/latin1prober.py +++ /dev/null @@ -1,145 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Universal charset detector code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 2001 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# Shy Shalom - original C code -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .charsetprober import CharSetProber -from .enums import ProbingState - -FREQ_CAT_NUM = 4 - -UDF = 0 # undefined -OTH = 1 # other -ASC = 2 # ascii capital letter -ASS = 3 # ascii small letter -ACV = 4 # accent capital vowel -ACO = 5 # accent capital other -ASV = 6 # accent small vowel -ASO = 7 # accent small other -CLASS_NUM = 8 # total classes - -Latin1_CharToClass = ( - OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 00 - 07 - OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 08 - 0F - OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 10 - 17 - OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 18 - 1F - OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 20 - 27 - OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 28 - 2F - OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 30 - 37 - OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 38 - 3F - OTH, ASC, ASC, ASC, ASC, ASC, ASC, ASC, # 40 - 47 - ASC, ASC, ASC, ASC, ASC, ASC, ASC, ASC, # 48 - 4F - ASC, ASC, ASC, ASC, ASC, ASC, ASC, ASC, # 50 - 57 - ASC, ASC, ASC, OTH, OTH, OTH, OTH, OTH, # 58 - 5F - OTH, ASS, ASS, ASS, ASS, ASS, ASS, ASS, # 60 - 67 - ASS, ASS, ASS, ASS, ASS, ASS, ASS, ASS, # 68 - 6F - ASS, ASS, ASS, ASS, ASS, ASS, ASS, ASS, # 70 - 77 - ASS, ASS, ASS, OTH, OTH, OTH, OTH, OTH, # 78 - 7F - OTH, UDF, OTH, ASO, OTH, OTH, OTH, OTH, # 80 - 87 - OTH, OTH, ACO, OTH, ACO, UDF, ACO, UDF, # 88 - 8F - UDF, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 90 - 97 - OTH, OTH, ASO, OTH, ASO, UDF, ASO, ACO, # 98 - 9F - OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # A0 - A7 - OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # A8 - AF - OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # B0 - B7 - OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # B8 - BF - ACV, ACV, ACV, ACV, ACV, ACV, ACO, ACO, # C0 - C7 - ACV, ACV, ACV, ACV, ACV, ACV, ACV, ACV, # C8 - CF - ACO, ACO, ACV, ACV, ACV, ACV, ACV, OTH, # D0 - D7 - ACV, ACV, ACV, ACV, ACV, ACO, ACO, ACO, # D8 - DF - ASV, ASV, ASV, ASV, ASV, ASV, ASO, ASO, # E0 - E7 - ASV, ASV, ASV, ASV, ASV, ASV, ASV, ASV, # E8 - EF - ASO, ASO, ASV, ASV, ASV, ASV, ASV, OTH, # F0 - F7 - ASV, ASV, ASV, ASV, ASV, ASO, ASO, ASO, # F8 - FF -) - -# 0 : illegal -# 1 : very unlikely -# 2 : normal -# 3 : very likely -Latin1ClassModel = ( -# UDF OTH ASC ASS ACV ACO ASV ASO - 0, 0, 0, 0, 0, 0, 0, 0, # UDF - 0, 3, 3, 3, 3, 3, 3, 3, # OTH - 0, 3, 3, 3, 3, 3, 3, 3, # ASC - 0, 3, 3, 3, 1, 1, 3, 3, # ASS - 0, 3, 3, 3, 1, 2, 1, 2, # ACV - 0, 3, 3, 3, 3, 3, 3, 3, # ACO - 0, 3, 1, 3, 1, 1, 1, 3, # ASV - 0, 3, 1, 3, 1, 1, 3, 3, # ASO -) - - -class Latin1Prober(CharSetProber): - def __init__(self): - super(Latin1Prober, self).__init__() - self._last_char_class = None - self._freq_counter = None - self.reset() - - def reset(self): - self._last_char_class = OTH - self._freq_counter = [0] * FREQ_CAT_NUM - CharSetProber.reset(self) - - @property - def charset_name(self): - return "ISO-8859-1" - - @property - def language(self): - return "" - - def feed(self, byte_str): - byte_str = self.filter_with_english_letters(byte_str) - for c in byte_str: - char_class = Latin1_CharToClass[c] - freq = Latin1ClassModel[(self._last_char_class * CLASS_NUM) - + char_class] - if freq == 0: - self._state = ProbingState.NOT_ME - break - self._freq_counter[freq] += 1 - self._last_char_class = char_class - - return self.state - - def get_confidence(self): - if self.state == 
ProbingState.NOT_ME: - return 0.01 - - total = sum(self._freq_counter) - if total < 0.01: - confidence = 0.0 - else: - confidence = ((self._freq_counter[3] - self._freq_counter[1] * 20.0) - / total) - if confidence < 0.0: - confidence = 0.0 - # lower the confidence of latin1 so that other more accurate - # detector can take priority. - confidence = confidence * 0.73 - return confidence diff --git a/venv/lib/python3.6/site-packages/chardet/mbcharsetprober.py b/venv/lib/python3.6/site-packages/chardet/mbcharsetprober.py deleted file mode 100644 index 6256ecf..0000000 --- a/venv/lib/python3.6/site-packages/chardet/mbcharsetprober.py +++ /dev/null @@ -1,91 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Universal charset detector code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 2001 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# Shy Shalom - original C code -# Proofpoint, Inc. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .charsetprober import CharSetProber -from .enums import ProbingState, MachineState - - -class MultiByteCharSetProber(CharSetProber): - """ - MultiByteCharSetProber - """ - - def __init__(self, lang_filter=None): - super(MultiByteCharSetProber, self).__init__(lang_filter=lang_filter) - self.distribution_analyzer = None - self.coding_sm = None - self._last_char = [0, 0] - - def reset(self): - super(MultiByteCharSetProber, self).reset() - if self.coding_sm: - self.coding_sm.reset() - if self.distribution_analyzer: - self.distribution_analyzer.reset() - self._last_char = [0, 0] - - @property - def charset_name(self): - raise NotImplementedError - - @property - def language(self): - raise NotImplementedError - - def feed(self, byte_str): - for i in range(len(byte_str)): - coding_state = self.coding_sm.next_state(byte_str[i]) - if coding_state == MachineState.ERROR: - self.logger.debug('%s %s prober hit error at byte %s', - self.charset_name, self.language, i) - self._state = ProbingState.NOT_ME - break - elif coding_state == MachineState.ITS_ME: - self._state = ProbingState.FOUND_IT - break - elif coding_state == MachineState.START: - char_len = self.coding_sm.get_current_charlen() - if i == 0: - self._last_char[1] = byte_str[0] - self.distribution_analyzer.feed(self._last_char, char_len) - else: - self.distribution_analyzer.feed(byte_str[i - 1:i + 1], - char_len) - - self._last_char[0] = byte_str[-1] - - if self.state == ProbingState.DETECTING: - if (self.distribution_analyzer.got_enough_data() and - (self.get_confidence() > self.SHORTCUT_THRESHOLD)): - self._state = ProbingState.FOUND_IT - - 
return self.state - - def get_confidence(self): - return self.distribution_analyzer.get_confidence() diff --git a/venv/lib/python3.6/site-packages/chardet/mbcsgroupprober.py b/venv/lib/python3.6/site-packages/chardet/mbcsgroupprober.py deleted file mode 100644 index 530abe7..0000000 --- a/venv/lib/python3.6/site-packages/chardet/mbcsgroupprober.py +++ /dev/null @@ -1,54 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Universal charset detector code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 2001 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# Shy Shalom - original C code -# Proofpoint, Inc. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .charsetgroupprober import CharSetGroupProber -from .utf8prober import UTF8Prober -from .sjisprober import SJISProber -from .eucjpprober import EUCJPProber -from .gb2312prober import GB2312Prober -from .euckrprober import EUCKRProber -from .cp949prober import CP949Prober -from .big5prober import Big5Prober -from .euctwprober import EUCTWProber - - -class MBCSGroupProber(CharSetGroupProber): - def __init__(self, lang_filter=None): - super(MBCSGroupProber, self).__init__(lang_filter=lang_filter) - self.probers = [ - UTF8Prober(), - SJISProber(), - EUCJPProber(), - GB2312Prober(), - EUCKRProber(), - CP949Prober(), - Big5Prober(), - EUCTWProber() - ] - self.reset() diff --git a/venv/lib/python3.6/site-packages/chardet/mbcssm.py b/venv/lib/python3.6/site-packages/chardet/mbcssm.py deleted file mode 100644 index 8360d0f..0000000 --- a/venv/lib/python3.6/site-packages/chardet/mbcssm.py +++ /dev/null @@ -1,572 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is mozilla.org code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .enums import MachineState - -# BIG5 - -BIG5_CLS = ( - 1,1,1,1,1,1,1,1, # 00 - 07 #allow 0x00 as legal value - 1,1,1,1,1,1,0,0, # 08 - 0f - 1,1,1,1,1,1,1,1, # 10 - 17 - 1,1,1,0,1,1,1,1, # 18 - 1f - 1,1,1,1,1,1,1,1, # 20 - 27 - 1,1,1,1,1,1,1,1, # 28 - 2f - 1,1,1,1,1,1,1,1, # 30 - 37 - 1,1,1,1,1,1,1,1, # 38 - 3f - 2,2,2,2,2,2,2,2, # 40 - 47 - 2,2,2,2,2,2,2,2, # 48 - 4f - 2,2,2,2,2,2,2,2, # 50 - 57 - 2,2,2,2,2,2,2,2, # 58 - 5f - 2,2,2,2,2,2,2,2, # 60 - 67 - 2,2,2,2,2,2,2,2, # 68 - 6f - 2,2,2,2,2,2,2,2, # 70 - 77 - 2,2,2,2,2,2,2,1, # 78 - 7f - 4,4,4,4,4,4,4,4, # 80 - 87 - 4,4,4,4,4,4,4,4, # 88 - 8f - 4,4,4,4,4,4,4,4, # 90 - 97 - 4,4,4,4,4,4,4,4, # 98 - 9f - 4,3,3,3,3,3,3,3, # a0 - a7 - 3,3,3,3,3,3,3,3, # a8 - af - 3,3,3,3,3,3,3,3, # b0 - b7 - 3,3,3,3,3,3,3,3, # b8 - bf - 3,3,3,3,3,3,3,3, # c0 - c7 - 3,3,3,3,3,3,3,3, # c8 - cf - 3,3,3,3,3,3,3,3, # d0 - d7 - 3,3,3,3,3,3,3,3, # d8 - df - 3,3,3,3,3,3,3,3, # e0 - e7 - 3,3,3,3,3,3,3,3, # e8 - ef - 3,3,3,3,3,3,3,3, # f0 - f7 - 3,3,3,3,3,3,3,0 # f8 - ff -) - -BIG5_ST = ( - MachineState.ERROR,MachineState.START,MachineState.START, 3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07 - MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,#08-0f - MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START#10-17 -) - -BIG5_CHAR_LEN_TABLE = (0, 1, 1, 2, 0) - -BIG5_SM_MODEL = {'class_table': BIG5_CLS, - 'class_factor': 5, - 'state_table': BIG5_ST, - 'char_len_table': BIG5_CHAR_LEN_TABLE, - 'name': 'Big5'} - -# CP949 - -CP949_CLS = ( - 1,1,1,1,1,1,1,1, 1,1,1,1,1,1,0,0, # 00 - 0f - 1,1,1,1,1,1,1,1, 1,1,1,0,1,1,1,1, # 10 - 1f - 1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1, # 20 - 2f - 1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1, # 30 - 3f - 1,4,4,4,4,4,4,4, 4,4,4,4,4,4,4,4, # 40 - 4f - 4,4,5,5,5,5,5,5, 5,5,5,1,1,1,1,1, # 50 - 5f - 1,5,5,5,5,5,5,5, 5,5,5,5,5,5,5,5, # 60 - 6f - 5,5,5,5,5,5,5,5, 5,5,5,1,1,1,1,1, # 70 - 7f - 0,6,6,6,6,6,6,6, 6,6,6,6,6,6,6,6, # 80 - 8f - 6,6,6,6,6,6,6,6, 6,6,6,6,6,6,6,6, # 90 - 9f - 6,7,7,7,7,7,7,7, 7,7,7,7,7,8,8,8, # a0 - af - 7,7,7,7,7,7,7,7, 7,7,7,7,7,7,7,7, # b0 - bf - 7,7,7,7,7,7,9,2, 2,3,2,2,2,2,2,2, # c0 - cf - 2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,2, # d0 - df - 2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,2, # e0 - ef - 2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,0, # f0 - ff -) - -CP949_ST = ( -#cls= 0 1 2 3 4 5 6 7 8 9 # previous state = - MachineState.ERROR,MachineState.START, 3,MachineState.ERROR,MachineState.START,MachineState.START, 4, 5,MachineState.ERROR, 6, # MachineState.START - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, # MachineState.ERROR - MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME, # MachineState.ITS_ME - 
MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START, # 3 - MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START, # 4 - MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START, # 5 - MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START, # 6 -) - -CP949_CHAR_LEN_TABLE = (0, 1, 2, 0, 1, 1, 2, 2, 0, 2) - -CP949_SM_MODEL = {'class_table': CP949_CLS, - 'class_factor': 10, - 'state_table': CP949_ST, - 'char_len_table': CP949_CHAR_LEN_TABLE, - 'name': 'CP949'} - -# EUC-JP - -EUCJP_CLS = ( - 4,4,4,4,4,4,4,4, # 00 - 07 - 4,4,4,4,4,4,5,5, # 08 - 0f - 4,4,4,4,4,4,4,4, # 10 - 17 - 4,4,4,5,4,4,4,4, # 18 - 1f - 4,4,4,4,4,4,4,4, # 20 - 27 - 4,4,4,4,4,4,4,4, # 28 - 2f - 4,4,4,4,4,4,4,4, # 30 - 37 - 4,4,4,4,4,4,4,4, # 38 - 3f - 4,4,4,4,4,4,4,4, # 40 - 47 - 4,4,4,4,4,4,4,4, # 48 - 4f - 4,4,4,4,4,4,4,4, # 50 - 57 - 4,4,4,4,4,4,4,4, # 58 - 5f - 4,4,4,4,4,4,4,4, # 60 - 67 - 4,4,4,4,4,4,4,4, # 68 - 6f - 4,4,4,4,4,4,4,4, # 70 - 77 - 4,4,4,4,4,4,4,4, # 78 - 7f - 5,5,5,5,5,5,5,5, # 80 - 87 - 5,5,5,5,5,5,1,3, # 88 - 8f - 5,5,5,5,5,5,5,5, # 90 - 97 - 5,5,5,5,5,5,5,5, # 98 - 9f - 5,2,2,2,2,2,2,2, # a0 - a7 - 2,2,2,2,2,2,2,2, # a8 - af - 2,2,2,2,2,2,2,2, # b0 - b7 - 2,2,2,2,2,2,2,2, # b8 - bf - 2,2,2,2,2,2,2,2, # c0 - c7 - 2,2,2,2,2,2,2,2, # c8 - cf - 2,2,2,2,2,2,2,2, # d0 - d7 - 2,2,2,2,2,2,2,2, # d8 - df - 0,0,0,0,0,0,0,0, # e0 - e7 - 0,0,0,0,0,0,0,0, # e8 - ef - 0,0,0,0,0,0,0,0, # f0 - f7 - 0,0,0,0,0,0,0,5 # f8 - ff -) - -EUCJP_ST = ( - 3, 4, 3, 5,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07 - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f - MachineState.ITS_ME,MachineState.ITS_ME,MachineState.START,MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#10-17 - MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 3,MachineState.ERROR,#18-1f - 3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START#20-27 -) - -EUCJP_CHAR_LEN_TABLE = (2, 2, 2, 3, 1, 0) - -EUCJP_SM_MODEL = {'class_table': EUCJP_CLS, - 'class_factor': 6, - 'state_table': EUCJP_ST, - 'char_len_table': EUCJP_CHAR_LEN_TABLE, - 'name': 'EUC-JP'} - -# EUC-KR - -EUCKR_CLS = ( - 1,1,1,1,1,1,1,1, # 00 - 07 - 1,1,1,1,1,1,0,0, # 08 - 0f - 1,1,1,1,1,1,1,1, # 10 - 17 - 1,1,1,0,1,1,1,1, # 18 - 1f - 1,1,1,1,1,1,1,1, # 20 - 27 - 1,1,1,1,1,1,1,1, # 28 - 2f - 1,1,1,1,1,1,1,1, # 30 - 37 - 1,1,1,1,1,1,1,1, # 38 - 3f - 1,1,1,1,1,1,1,1, # 40 - 47 - 1,1,1,1,1,1,1,1, # 48 - 4f - 1,1,1,1,1,1,1,1, # 50 - 57 - 1,1,1,1,1,1,1,1, # 58 - 5f - 1,1,1,1,1,1,1,1, # 60 - 67 - 1,1,1,1,1,1,1,1, # 68 - 6f - 1,1,1,1,1,1,1,1, # 70 - 77 - 1,1,1,1,1,1,1,1, # 78 - 7f - 0,0,0,0,0,0,0,0, # 80 - 87 - 0,0,0,0,0,0,0,0, # 88 - 8f - 0,0,0,0,0,0,0,0, # 90 - 97 - 0,0,0,0,0,0,0,0, # 98 - 9f - 0,2,2,2,2,2,2,2, # a0 - a7 - 2,2,2,2,2,3,3,3, # 
a8 - af - 2,2,2,2,2,2,2,2, # b0 - b7 - 2,2,2,2,2,2,2,2, # b8 - bf - 2,2,2,2,2,2,2,2, # c0 - c7 - 2,3,2,2,2,2,2,2, # c8 - cf - 2,2,2,2,2,2,2,2, # d0 - d7 - 2,2,2,2,2,2,2,2, # d8 - df - 2,2,2,2,2,2,2,2, # e0 - e7 - 2,2,2,2,2,2,2,2, # e8 - ef - 2,2,2,2,2,2,2,2, # f0 - f7 - 2,2,2,2,2,2,2,0 # f8 - ff -) - -EUCKR_ST = ( - MachineState.ERROR,MachineState.START, 3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07 - MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START #08-0f -) - -EUCKR_CHAR_LEN_TABLE = (0, 1, 2, 0) - -EUCKR_SM_MODEL = {'class_table': EUCKR_CLS, - 'class_factor': 4, - 'state_table': EUCKR_ST, - 'char_len_table': EUCKR_CHAR_LEN_TABLE, - 'name': 'EUC-KR'} - -# EUC-TW - -EUCTW_CLS = ( - 2,2,2,2,2,2,2,2, # 00 - 07 - 2,2,2,2,2,2,0,0, # 08 - 0f - 2,2,2,2,2,2,2,2, # 10 - 17 - 2,2,2,0,2,2,2,2, # 18 - 1f - 2,2,2,2,2,2,2,2, # 20 - 27 - 2,2,2,2,2,2,2,2, # 28 - 2f - 2,2,2,2,2,2,2,2, # 30 - 37 - 2,2,2,2,2,2,2,2, # 38 - 3f - 2,2,2,2,2,2,2,2, # 40 - 47 - 2,2,2,2,2,2,2,2, # 48 - 4f - 2,2,2,2,2,2,2,2, # 50 - 57 - 2,2,2,2,2,2,2,2, # 58 - 5f - 2,2,2,2,2,2,2,2, # 60 - 67 - 2,2,2,2,2,2,2,2, # 68 - 6f - 2,2,2,2,2,2,2,2, # 70 - 77 - 2,2,2,2,2,2,2,2, # 78 - 7f - 0,0,0,0,0,0,0,0, # 80 - 87 - 0,0,0,0,0,0,6,0, # 88 - 8f - 0,0,0,0,0,0,0,0, # 90 - 97 - 0,0,0,0,0,0,0,0, # 98 - 9f - 0,3,4,4,4,4,4,4, # a0 - a7 - 5,5,1,1,1,1,1,1, # a8 - af - 1,1,1,1,1,1,1,1, # b0 - b7 - 1,1,1,1,1,1,1,1, # b8 - bf - 1,1,3,1,3,3,3,3, # c0 - c7 - 3,3,3,3,3,3,3,3, # c8 - cf - 3,3,3,3,3,3,3,3, # d0 - d7 - 3,3,3,3,3,3,3,3, # d8 - df - 3,3,3,3,3,3,3,3, # e0 - e7 - 3,3,3,3,3,3,3,3, # e8 - ef - 3,3,3,3,3,3,3,3, # f0 - f7 - 3,3,3,3,3,3,3,0 # f8 - ff -) - -EUCTW_ST = ( - MachineState.ERROR,MachineState.ERROR,MachineState.START, 3, 3, 3, 4,MachineState.ERROR,#00-07 - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f - MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.START,MachineState.ERROR,#10-17 - MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#18-1f - 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.START,MachineState.START,#20-27 - MachineState.START,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START #28-2f -) - -EUCTW_CHAR_LEN_TABLE = (0, 0, 1, 2, 2, 2, 3) - -EUCTW_SM_MODEL = {'class_table': EUCTW_CLS, - 'class_factor': 7, - 'state_table': EUCTW_ST, - 'char_len_table': EUCTW_CHAR_LEN_TABLE, - 'name': 'x-euc-tw'} - -# GB2312 - -GB2312_CLS = ( - 1,1,1,1,1,1,1,1, # 00 - 07 - 1,1,1,1,1,1,0,0, # 08 - 0f - 1,1,1,1,1,1,1,1, # 10 - 17 - 1,1,1,0,1,1,1,1, # 18 - 1f - 1,1,1,1,1,1,1,1, # 20 - 27 - 1,1,1,1,1,1,1,1, # 28 - 2f - 3,3,3,3,3,3,3,3, # 30 - 37 - 3,3,1,1,1,1,1,1, # 38 - 3f - 2,2,2,2,2,2,2,2, # 40 - 47 - 2,2,2,2,2,2,2,2, # 48 - 4f - 2,2,2,2,2,2,2,2, # 50 - 57 - 2,2,2,2,2,2,2,2, # 58 - 5f - 2,2,2,2,2,2,2,2, # 60 - 67 - 2,2,2,2,2,2,2,2, # 68 - 6f - 2,2,2,2,2,2,2,2, # 70 - 77 - 2,2,2,2,2,2,2,4, # 78 - 7f - 5,6,6,6,6,6,6,6, # 80 - 87 - 6,6,6,6,6,6,6,6, # 88 - 8f - 6,6,6,6,6,6,6,6, # 90 - 97 - 6,6,6,6,6,6,6,6, # 98 - 9f - 6,6,6,6,6,6,6,6, # a0 - a7 - 
6,6,6,6,6,6,6,6, # a8 - af - 6,6,6,6,6,6,6,6, # b0 - b7 - 6,6,6,6,6,6,6,6, # b8 - bf - 6,6,6,6,6,6,6,6, # c0 - c7 - 6,6,6,6,6,6,6,6, # c8 - cf - 6,6,6,6,6,6,6,6, # d0 - d7 - 6,6,6,6,6,6,6,6, # d8 - df - 6,6,6,6,6,6,6,6, # e0 - e7 - 6,6,6,6,6,6,6,6, # e8 - ef - 6,6,6,6,6,6,6,6, # f0 - f7 - 6,6,6,6,6,6,6,0 # f8 - ff -) - -GB2312_ST = ( - MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START, 3,MachineState.ERROR,#00-07 - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f - MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,#10-17 - 4,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#18-1f - MachineState.ERROR,MachineState.ERROR, 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,#20-27 - MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START #28-2f -) - -# To be accurate, the length of class 6 can be either 2 or 4. -# But it is not necessary to discriminate between the two since -# it is used for frequency analysis only, and we are validating -# each code range there as well. So it is safe to set it to be -# 2 here. -GB2312_CHAR_LEN_TABLE = (0, 1, 1, 1, 1, 1, 2) - -GB2312_SM_MODEL = {'class_table': GB2312_CLS, - 'class_factor': 7, - 'state_table': GB2312_ST, - 'char_len_table': GB2312_CHAR_LEN_TABLE, - 'name': 'GB2312'} - -# Shift_JIS - -SJIS_CLS = ( - 1,1,1,1,1,1,1,1, # 00 - 07 - 1,1,1,1,1,1,0,0, # 08 - 0f - 1,1,1,1,1,1,1,1, # 10 - 17 - 1,1,1,0,1,1,1,1, # 18 - 1f - 1,1,1,1,1,1,1,1, # 20 - 27 - 1,1,1,1,1,1,1,1, # 28 - 2f - 1,1,1,1,1,1,1,1, # 30 - 37 - 1,1,1,1,1,1,1,1, # 38 - 3f - 2,2,2,2,2,2,2,2, # 40 - 47 - 2,2,2,2,2,2,2,2, # 48 - 4f - 2,2,2,2,2,2,2,2, # 50 - 57 - 2,2,2,2,2,2,2,2, # 58 - 5f - 2,2,2,2,2,2,2,2, # 60 - 67 - 2,2,2,2,2,2,2,2, # 68 - 6f - 2,2,2,2,2,2,2,2, # 70 - 77 - 2,2,2,2,2,2,2,1, # 78 - 7f - 3,3,3,3,3,2,2,3, # 80 - 87 - 3,3,3,3,3,3,3,3, # 88 - 8f - 3,3,3,3,3,3,3,3, # 90 - 97 - 3,3,3,3,3,3,3,3, # 98 - 9f - #0xa0 is illegal in sjis encoding, but some pages does - #contain such byte. We need to be more error forgiven. 
- 2,2,2,2,2,2,2,2, # a0 - a7 - 2,2,2,2,2,2,2,2, # a8 - af - 2,2,2,2,2,2,2,2, # b0 - b7 - 2,2,2,2,2,2,2,2, # b8 - bf - 2,2,2,2,2,2,2,2, # c0 - c7 - 2,2,2,2,2,2,2,2, # c8 - cf - 2,2,2,2,2,2,2,2, # d0 - d7 - 2,2,2,2,2,2,2,2, # d8 - df - 3,3,3,3,3,3,3,3, # e0 - e7 - 3,3,3,3,3,4,4,4, # e8 - ef - 3,3,3,3,3,3,3,3, # f0 - f7 - 3,3,3,3,3,0,0,0) # f8 - ff - - -SJIS_ST = ( - MachineState.ERROR,MachineState.START,MachineState.START, 3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07 - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f - MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START #10-17 -) - -SJIS_CHAR_LEN_TABLE = (0, 1, 1, 2, 0, 0) - -SJIS_SM_MODEL = {'class_table': SJIS_CLS, - 'class_factor': 6, - 'state_table': SJIS_ST, - 'char_len_table': SJIS_CHAR_LEN_TABLE, - 'name': 'Shift_JIS'} - -# UCS2-BE - -UCS2BE_CLS = ( - 0,0,0,0,0,0,0,0, # 00 - 07 - 0,0,1,0,0,2,0,0, # 08 - 0f - 0,0,0,0,0,0,0,0, # 10 - 17 - 0,0,0,3,0,0,0,0, # 18 - 1f - 0,0,0,0,0,0,0,0, # 20 - 27 - 0,3,3,3,3,3,0,0, # 28 - 2f - 0,0,0,0,0,0,0,0, # 30 - 37 - 0,0,0,0,0,0,0,0, # 38 - 3f - 0,0,0,0,0,0,0,0, # 40 - 47 - 0,0,0,0,0,0,0,0, # 48 - 4f - 0,0,0,0,0,0,0,0, # 50 - 57 - 0,0,0,0,0,0,0,0, # 58 - 5f - 0,0,0,0,0,0,0,0, # 60 - 67 - 0,0,0,0,0,0,0,0, # 68 - 6f - 0,0,0,0,0,0,0,0, # 70 - 77 - 0,0,0,0,0,0,0,0, # 78 - 7f - 0,0,0,0,0,0,0,0, # 80 - 87 - 0,0,0,0,0,0,0,0, # 88 - 8f - 0,0,0,0,0,0,0,0, # 90 - 97 - 0,0,0,0,0,0,0,0, # 98 - 9f - 0,0,0,0,0,0,0,0, # a0 - a7 - 0,0,0,0,0,0,0,0, # a8 - af - 0,0,0,0,0,0,0,0, # b0 - b7 - 0,0,0,0,0,0,0,0, # b8 - bf - 0,0,0,0,0,0,0,0, # c0 - c7 - 0,0,0,0,0,0,0,0, # c8 - cf - 0,0,0,0,0,0,0,0, # d0 - d7 - 0,0,0,0,0,0,0,0, # d8 - df - 0,0,0,0,0,0,0,0, # e0 - e7 - 0,0,0,0,0,0,0,0, # e8 - ef - 0,0,0,0,0,0,0,0, # f0 - f7 - 0,0,0,0,0,0,4,5 # f8 - ff -) - -UCS2BE_ST = ( - 5, 7, 7,MachineState.ERROR, 4, 3,MachineState.ERROR,MachineState.ERROR,#00-07 - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f - MachineState.ITS_ME,MachineState.ITS_ME, 6, 6, 6, 6,MachineState.ERROR,MachineState.ERROR,#10-17 - 6, 6, 6, 6, 6,MachineState.ITS_ME, 6, 6,#18-1f - 6, 6, 6, 6, 5, 7, 7,MachineState.ERROR,#20-27 - 5, 8, 6, 6,MachineState.ERROR, 6, 6, 6,#28-2f - 6, 6, 6, 6,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START #30-37 -) - -UCS2BE_CHAR_LEN_TABLE = (2, 2, 2, 0, 2, 2) - -UCS2BE_SM_MODEL = {'class_table': UCS2BE_CLS, - 'class_factor': 6, - 'state_table': UCS2BE_ST, - 'char_len_table': UCS2BE_CHAR_LEN_TABLE, - 'name': 'UTF-16BE'} - -# UCS2-LE - -UCS2LE_CLS = ( - 0,0,0,0,0,0,0,0, # 00 - 07 - 0,0,1,0,0,2,0,0, # 08 - 0f - 0,0,0,0,0,0,0,0, # 10 - 17 - 0,0,0,3,0,0,0,0, # 18 - 1f - 0,0,0,0,0,0,0,0, # 20 - 27 - 0,3,3,3,3,3,0,0, # 28 - 2f - 0,0,0,0,0,0,0,0, # 30 - 37 - 0,0,0,0,0,0,0,0, # 38 - 3f - 0,0,0,0,0,0,0,0, # 40 - 47 - 0,0,0,0,0,0,0,0, # 48 - 4f - 0,0,0,0,0,0,0,0, # 50 - 57 - 0,0,0,0,0,0,0,0, # 58 - 5f - 0,0,0,0,0,0,0,0, # 60 - 67 - 0,0,0,0,0,0,0,0, # 68 - 6f - 0,0,0,0,0,0,0,0, # 70 - 77 - 0,0,0,0,0,0,0,0, # 78 - 7f - 0,0,0,0,0,0,0,0, # 80 - 87 - 0,0,0,0,0,0,0,0, # 88 - 8f - 0,0,0,0,0,0,0,0, # 90 - 97 - 0,0,0,0,0,0,0,0, # 98 - 9f - 0,0,0,0,0,0,0,0, # a0 - a7 - 0,0,0,0,0,0,0,0, # a8 - af - 0,0,0,0,0,0,0,0, # b0 - b7 - 0,0,0,0,0,0,0,0, # b8 - bf - 
0,0,0,0,0,0,0,0, # c0 - c7 - 0,0,0,0,0,0,0,0, # c8 - cf - 0,0,0,0,0,0,0,0, # d0 - d7 - 0,0,0,0,0,0,0,0, # d8 - df - 0,0,0,0,0,0,0,0, # e0 - e7 - 0,0,0,0,0,0,0,0, # e8 - ef - 0,0,0,0,0,0,0,0, # f0 - f7 - 0,0,0,0,0,0,4,5 # f8 - ff -) - -UCS2LE_ST = ( - 6, 6, 7, 6, 4, 3,MachineState.ERROR,MachineState.ERROR,#00-07 - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f - MachineState.ITS_ME,MachineState.ITS_ME, 5, 5, 5,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,#10-17 - 5, 5, 5,MachineState.ERROR, 5,MachineState.ERROR, 6, 6,#18-1f - 7, 6, 8, 8, 5, 5, 5,MachineState.ERROR,#20-27 - 5, 5, 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 5, 5,#28-2f - 5, 5, 5,MachineState.ERROR, 5,MachineState.ERROR,MachineState.START,MachineState.START #30-37 -) - -UCS2LE_CHAR_LEN_TABLE = (2, 2, 2, 2, 2, 2) - -UCS2LE_SM_MODEL = {'class_table': UCS2LE_CLS, - 'class_factor': 6, - 'state_table': UCS2LE_ST, - 'char_len_table': UCS2LE_CHAR_LEN_TABLE, - 'name': 'UTF-16LE'} - -# UTF-8 - -UTF8_CLS = ( - 1,1,1,1,1,1,1,1, # 00 - 07 #allow 0x00 as a legal value - 1,1,1,1,1,1,0,0, # 08 - 0f - 1,1,1,1,1,1,1,1, # 10 - 17 - 1,1,1,0,1,1,1,1, # 18 - 1f - 1,1,1,1,1,1,1,1, # 20 - 27 - 1,1,1,1,1,1,1,1, # 28 - 2f - 1,1,1,1,1,1,1,1, # 30 - 37 - 1,1,1,1,1,1,1,1, # 38 - 3f - 1,1,1,1,1,1,1,1, # 40 - 47 - 1,1,1,1,1,1,1,1, # 48 - 4f - 1,1,1,1,1,1,1,1, # 50 - 57 - 1,1,1,1,1,1,1,1, # 58 - 5f - 1,1,1,1,1,1,1,1, # 60 - 67 - 1,1,1,1,1,1,1,1, # 68 - 6f - 1,1,1,1,1,1,1,1, # 70 - 77 - 1,1,1,1,1,1,1,1, # 78 - 7f - 2,2,2,2,3,3,3,3, # 80 - 87 - 4,4,4,4,4,4,4,4, # 88 - 8f - 4,4,4,4,4,4,4,4, # 90 - 97 - 4,4,4,4,4,4,4,4, # 98 - 9f - 5,5,5,5,5,5,5,5, # a0 - a7 - 5,5,5,5,5,5,5,5, # a8 - af - 5,5,5,5,5,5,5,5, # b0 - b7 - 5,5,5,5,5,5,5,5, # b8 - bf - 0,0,6,6,6,6,6,6, # c0 - c7 - 6,6,6,6,6,6,6,6, # c8 - cf - 6,6,6,6,6,6,6,6, # d0 - d7 - 6,6,6,6,6,6,6,6, # d8 - df - 7,8,8,8,8,8,8,8, # e0 - e7 - 8,8,8,8,8,9,8,8, # e8 - ef - 10,11,11,11,11,11,11,11, # f0 - f7 - 12,13,13,13,14,15,0,0 # f8 - ff -) - -UTF8_ST = ( - MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 12, 10,#00-07 - 9, 11, 8, 7, 6, 5, 4, 3,#08-0f - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#10-17 - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#18-1f - MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#20-27 - MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#28-2f - MachineState.ERROR,MachineState.ERROR, 5, 5, 5, 5,MachineState.ERROR,MachineState.ERROR,#30-37 - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#38-3f - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 5, 5, 5,MachineState.ERROR,MachineState.ERROR,#40-47 - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#48-4f - MachineState.ERROR,MachineState.ERROR, 7, 7, 7, 
7,MachineState.ERROR,MachineState.ERROR,#50-57 - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#58-5f - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 7, 7,MachineState.ERROR,MachineState.ERROR,#60-67 - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#68-6f - MachineState.ERROR,MachineState.ERROR, 9, 9, 9, 9,MachineState.ERROR,MachineState.ERROR,#70-77 - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#78-7f - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 9,MachineState.ERROR,MachineState.ERROR,#80-87 - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#88-8f - MachineState.ERROR,MachineState.ERROR, 12, 12, 12, 12,MachineState.ERROR,MachineState.ERROR,#90-97 - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#98-9f - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 12,MachineState.ERROR,MachineState.ERROR,#a0-a7 - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#a8-af - MachineState.ERROR,MachineState.ERROR, 12, 12, 12,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#b0-b7 - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#b8-bf - MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,#c0-c7 - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR #c8-cf -) - -UTF8_CHAR_LEN_TABLE = (0, 1, 0, 0, 0, 0, 2, 3, 3, 3, 4, 4, 5, 5, 6, 6) - -UTF8_SM_MODEL = {'class_table': UTF8_CLS, - 'class_factor': 16, - 'state_table': UTF8_ST, - 'char_len_table': UTF8_CHAR_LEN_TABLE, - 'name': 'UTF-8'} diff --git a/venv/lib/python3.6/site-packages/chardet/sbcharsetprober.py b/venv/lib/python3.6/site-packages/chardet/sbcharsetprober.py deleted file mode 100644 index 0adb51d..0000000 --- a/venv/lib/python3.6/site-packages/chardet/sbcharsetprober.py +++ /dev/null @@ -1,132 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Universal charset detector code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 2001 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# Shy Shalom - original C code -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. 
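The UTF8_SM_MODEL above bundles the byte-class table, the state-transition table and the per-class character-length table that chardet's CodingStateMachine walks one byte at a time. A minimal sketch of driving that machine directly, assuming chardet 3.0.4 is installed from PyPI rather than vendored (class and module names are taken from the files shown in this patch; the sample inputs are made up):

    from chardet.codingstatemachine import CodingStateMachine
    from chardet.enums import MachineState
    from chardet.mbcssm import UTF8_SM_MODEL

    def looks_like_utf8(data):
        # Feed raw bytes through the UTF-8 state machine; ERROR means the
        # byte sequence can no longer be valid UTF-8.
        sm = CodingStateMachine(UTF8_SM_MODEL)
        for byte in bytearray(data):
            if sm.next_state(byte) == MachineState.ERROR:
                return False
        return True

    print(looks_like_utf8(u"héllo".encode("utf-8")))   # True
    print(looks_like_utf8(b"\xff\xfe\x00A"))           # False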
-# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .charsetprober import CharSetProber -from .enums import CharacterCategory, ProbingState, SequenceLikelihood - - -class SingleByteCharSetProber(CharSetProber): - SAMPLE_SIZE = 64 - SB_ENOUGH_REL_THRESHOLD = 1024 # 0.25 * SAMPLE_SIZE^2 - POSITIVE_SHORTCUT_THRESHOLD = 0.95 - NEGATIVE_SHORTCUT_THRESHOLD = 0.05 - - def __init__(self, model, reversed=False, name_prober=None): - super(SingleByteCharSetProber, self).__init__() - self._model = model - # TRUE if we need to reverse every pair in the model lookup - self._reversed = reversed - # Optional auxiliary prober for name decision - self._name_prober = name_prober - self._last_order = None - self._seq_counters = None - self._total_seqs = None - self._total_char = None - self._freq_char = None - self.reset() - - def reset(self): - super(SingleByteCharSetProber, self).reset() - # char order of last character - self._last_order = 255 - self._seq_counters = [0] * SequenceLikelihood.get_num_categories() - self._total_seqs = 0 - self._total_char = 0 - # characters that fall in our sampling range - self._freq_char = 0 - - @property - def charset_name(self): - if self._name_prober: - return self._name_prober.charset_name - else: - return self._model['charset_name'] - - @property - def language(self): - if self._name_prober: - return self._name_prober.language - else: - return self._model.get('language') - - def feed(self, byte_str): - if not self._model['keep_english_letter']: - byte_str = self.filter_international_words(byte_str) - if not byte_str: - return self.state - char_to_order_map = self._model['char_to_order_map'] - for i, c in enumerate(byte_str): - # XXX: Order is in range 1-64, so one would think we want 0-63 here, - # but that leads to 27 more test failures than before. - order = char_to_order_map[c] - # XXX: This was SYMBOL_CAT_ORDER before, with a value of 250, but - # CharacterCategory.SYMBOL is actually 253, so we use CONTROL - # to make it closer to the original intent. The only difference - # is whether or not we count digits and control characters for - # _total_char purposes. 
- if order < CharacterCategory.CONTROL: - self._total_char += 1 - if order < self.SAMPLE_SIZE: - self._freq_char += 1 - if self._last_order < self.SAMPLE_SIZE: - self._total_seqs += 1 - if not self._reversed: - i = (self._last_order * self.SAMPLE_SIZE) + order - model = self._model['precedence_matrix'][i] - else: # reverse the order of the letters in the lookup - i = (order * self.SAMPLE_SIZE) + self._last_order - model = self._model['precedence_matrix'][i] - self._seq_counters[model] += 1 - self._last_order = order - - charset_name = self._model['charset_name'] - if self.state == ProbingState.DETECTING: - if self._total_seqs > self.SB_ENOUGH_REL_THRESHOLD: - confidence = self.get_confidence() - if confidence > self.POSITIVE_SHORTCUT_THRESHOLD: - self.logger.debug('%s confidence = %s, we have a winner', - charset_name, confidence) - self._state = ProbingState.FOUND_IT - elif confidence < self.NEGATIVE_SHORTCUT_THRESHOLD: - self.logger.debug('%s confidence = %s, below negative ' - 'shortcut threshhold %s', charset_name, - confidence, - self.NEGATIVE_SHORTCUT_THRESHOLD) - self._state = ProbingState.NOT_ME - - return self.state - - def get_confidence(self): - r = 0.01 - if self._total_seqs > 0: - r = ((1.0 * self._seq_counters[SequenceLikelihood.POSITIVE]) / - self._total_seqs / self._model['typical_positive_ratio']) - r = r * self._freq_char / self._total_char - if r >= 1.0: - r = 0.99 - return r diff --git a/venv/lib/python3.6/site-packages/chardet/sbcsgroupprober.py b/venv/lib/python3.6/site-packages/chardet/sbcsgroupprober.py deleted file mode 100644 index 98e95dc..0000000 --- a/venv/lib/python3.6/site-packages/chardet/sbcsgroupprober.py +++ /dev/null @@ -1,73 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Universal charset detector code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 2001 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# Shy Shalom - original C code -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .charsetgroupprober import CharSetGroupProber -from .sbcharsetprober import SingleByteCharSetProber -from .langcyrillicmodel import (Win1251CyrillicModel, Koi8rModel, - Latin5CyrillicModel, MacCyrillicModel, - Ibm866Model, Ibm855Model) -from .langgreekmodel import Latin7GreekModel, Win1253GreekModel -from .langbulgarianmodel import Latin5BulgarianModel, Win1251BulgarianModel -# from .langhungarianmodel import Latin2HungarianModel, Win1250HungarianModel -from .langthaimodel import TIS620ThaiModel -from .langhebrewmodel import Win1255HebrewModel -from .hebrewprober import HebrewProber -from .langturkishmodel import Latin5TurkishModel - - -class SBCSGroupProber(CharSetGroupProber): - def __init__(self): - super(SBCSGroupProber, self).__init__() - self.probers = [ - SingleByteCharSetProber(Win1251CyrillicModel), - SingleByteCharSetProber(Koi8rModel), - SingleByteCharSetProber(Latin5CyrillicModel), - SingleByteCharSetProber(MacCyrillicModel), - SingleByteCharSetProber(Ibm866Model), - SingleByteCharSetProber(Ibm855Model), - SingleByteCharSetProber(Latin7GreekModel), - SingleByteCharSetProber(Win1253GreekModel), - SingleByteCharSetProber(Latin5BulgarianModel), - SingleByteCharSetProber(Win1251BulgarianModel), - # TODO: Restore Hungarian encodings (iso-8859-2 and windows-1250) - # after we retrain model. - # SingleByteCharSetProber(Latin2HungarianModel), - # SingleByteCharSetProber(Win1250HungarianModel), - SingleByteCharSetProber(TIS620ThaiModel), - SingleByteCharSetProber(Latin5TurkishModel), - ] - hebrew_prober = HebrewProber() - logical_hebrew_prober = SingleByteCharSetProber(Win1255HebrewModel, - False, hebrew_prober) - visual_hebrew_prober = SingleByteCharSetProber(Win1255HebrewModel, True, - hebrew_prober) - hebrew_prober.set_model_probers(logical_hebrew_prober, visual_hebrew_prober) - self.probers.extend([hebrew_prober, logical_hebrew_prober, - visual_hebrew_prober]) - - self.reset() diff --git a/venv/lib/python3.6/site-packages/chardet/sjisprober.py b/venv/lib/python3.6/site-packages/chardet/sjisprober.py deleted file mode 100644 index 9e29623..0000000 --- a/venv/lib/python3.6/site-packages/chardet/sjisprober.py +++ /dev/null @@ -1,92 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is mozilla.org code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .mbcharsetprober import MultiByteCharSetProber -from .codingstatemachine import CodingStateMachine -from .chardistribution import SJISDistributionAnalysis -from .jpcntx import SJISContextAnalysis -from .mbcssm import SJIS_SM_MODEL -from .enums import ProbingState, MachineState - - -class SJISProber(MultiByteCharSetProber): - def __init__(self): - super(SJISProber, self).__init__() - self.coding_sm = CodingStateMachine(SJIS_SM_MODEL) - self.distribution_analyzer = SJISDistributionAnalysis() - self.context_analyzer = SJISContextAnalysis() - self.reset() - - def reset(self): - super(SJISProber, self).reset() - self.context_analyzer.reset() - - @property - def charset_name(self): - return self.context_analyzer.charset_name - - @property - def language(self): - return "Japanese" - - def feed(self, byte_str): - for i in range(len(byte_str)): - coding_state = self.coding_sm.next_state(byte_str[i]) - if coding_state == MachineState.ERROR: - self.logger.debug('%s %s prober hit error at byte %s', - self.charset_name, self.language, i) - self._state = ProbingState.NOT_ME - break - elif coding_state == MachineState.ITS_ME: - self._state = ProbingState.FOUND_IT - break - elif coding_state == MachineState.START: - char_len = self.coding_sm.get_current_charlen() - if i == 0: - self._last_char[1] = byte_str[0] - self.context_analyzer.feed(self._last_char[2 - char_len:], - char_len) - self.distribution_analyzer.feed(self._last_char, char_len) - else: - self.context_analyzer.feed(byte_str[i + 1 - char_len:i + 3 - - char_len], char_len) - self.distribution_analyzer.feed(byte_str[i - 1:i + 1], - char_len) - - self._last_char[0] = byte_str[-1] - - if self.state == ProbingState.DETECTING: - if (self.context_analyzer.got_enough_data() and - (self.get_confidence() > self.SHORTCUT_THRESHOLD)): - self._state = ProbingState.FOUND_IT - - return self.state - - def get_confidence(self): - context_conf = self.context_analyzer.get_confidence() - distrib_conf = self.distribution_analyzer.get_confidence() - return max(context_conf, distrib_conf) diff --git a/venv/lib/python3.6/site-packages/chardet/universaldetector.py b/venv/lib/python3.6/site-packages/chardet/universaldetector.py deleted file mode 100644 index 7b4e92d..0000000 --- a/venv/lib/python3.6/site-packages/chardet/universaldetector.py +++ /dev/null @@ -1,286 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Universal charset detector code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 2001 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# Shy Shalom - original C code -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### -""" -Module containing the UniversalDetector detector class, which is the primary -class a user of ``chardet`` should use. - -:author: Mark Pilgrim (initial port to Python) -:author: Shy Shalom (original C code) -:author: Dan Blanchard (major refactoring for 3.0) -:author: Ian Cordasco -""" - - -import codecs -import logging -import re - -from .charsetgroupprober import CharSetGroupProber -from .enums import InputState, LanguageFilter, ProbingState -from .escprober import EscCharSetProber -from .latin1prober import Latin1Prober -from .mbcsgroupprober import MBCSGroupProber -from .sbcsgroupprober import SBCSGroupProber - - -class UniversalDetector(object): - """ - The ``UniversalDetector`` class underlies the ``chardet.detect`` function - and coordinates all of the different charset probers. - - To get a ``dict`` containing an encoding and its confidence, you can simply - run: - - .. code:: - - u = UniversalDetector() - u.feed(some_bytes) - u.close() - detected = u.result - - """ - - MINIMUM_THRESHOLD = 0.20 - HIGH_BYTE_DETECTOR = re.compile(b'[\x80-\xFF]') - ESC_DETECTOR = re.compile(b'(\033|~{)') - WIN_BYTE_DETECTOR = re.compile(b'[\x80-\x9F]') - ISO_WIN_MAP = {'iso-8859-1': 'Windows-1252', - 'iso-8859-2': 'Windows-1250', - 'iso-8859-5': 'Windows-1251', - 'iso-8859-6': 'Windows-1256', - 'iso-8859-7': 'Windows-1253', - 'iso-8859-8': 'Windows-1255', - 'iso-8859-9': 'Windows-1254', - 'iso-8859-13': 'Windows-1257'} - - def __init__(self, lang_filter=LanguageFilter.ALL): - self._esc_charset_prober = None - self._charset_probers = [] - self.result = None - self.done = None - self._got_data = None - self._input_state = None - self._last_char = None - self.lang_filter = lang_filter - self.logger = logging.getLogger(__name__) - self._has_win_bytes = None - self.reset() - - def reset(self): - """ - Reset the UniversalDetector and all of its probers back to their - initial states. This is called by ``__init__``, so you only need to - call this directly in between analyses of different documents. - """ - self.result = {'encoding': None, 'confidence': 0.0, 'language': None} - self.done = False - self._got_data = False - self._has_win_bytes = False - self._input_state = InputState.PURE_ASCII - self._last_char = b'' - if self._esc_charset_prober: - self._esc_charset_prober.reset() - for prober in self._charset_probers: - prober.reset() - - def feed(self, byte_str): - """ - Takes a chunk of a document and feeds it through all of the relevant - charset probers. - - After calling ``feed``, you can check the value of the ``done`` - attribute to see if you need to continue feeding the - ``UniversalDetector`` more data, or if it has made a prediction - (in the ``result`` attribute). - - .. note:: - You should always call ``close`` when you're done feeding in your - document if ``done`` is not already ``True``. 
- """ - if self.done: - return - - if not len(byte_str): - return - - if not isinstance(byte_str, bytearray): - byte_str = bytearray(byte_str) - - # First check for known BOMs, since these are guaranteed to be correct - if not self._got_data: - # If the data starts with BOM, we know it is UTF - if byte_str.startswith(codecs.BOM_UTF8): - # EF BB BF UTF-8 with BOM - self.result = {'encoding': "UTF-8-SIG", - 'confidence': 1.0, - 'language': ''} - elif byte_str.startswith((codecs.BOM_UTF32_LE, - codecs.BOM_UTF32_BE)): - # FF FE 00 00 UTF-32, little-endian BOM - # 00 00 FE FF UTF-32, big-endian BOM - self.result = {'encoding': "UTF-32", - 'confidence': 1.0, - 'language': ''} - elif byte_str.startswith(b'\xFE\xFF\x00\x00'): - # FE FF 00 00 UCS-4, unusual octet order BOM (3412) - self.result = {'encoding': "X-ISO-10646-UCS-4-3412", - 'confidence': 1.0, - 'language': ''} - elif byte_str.startswith(b'\x00\x00\xFF\xFE'): - # 00 00 FF FE UCS-4, unusual octet order BOM (2143) - self.result = {'encoding': "X-ISO-10646-UCS-4-2143", - 'confidence': 1.0, - 'language': ''} - elif byte_str.startswith((codecs.BOM_LE, codecs.BOM_BE)): - # FF FE UTF-16, little endian BOM - # FE FF UTF-16, big endian BOM - self.result = {'encoding': "UTF-16", - 'confidence': 1.0, - 'language': ''} - - self._got_data = True - if self.result['encoding'] is not None: - self.done = True - return - - # If none of those matched and we've only see ASCII so far, check - # for high bytes and escape sequences - if self._input_state == InputState.PURE_ASCII: - if self.HIGH_BYTE_DETECTOR.search(byte_str): - self._input_state = InputState.HIGH_BYTE - elif self._input_state == InputState.PURE_ASCII and \ - self.ESC_DETECTOR.search(self._last_char + byte_str): - self._input_state = InputState.ESC_ASCII - - self._last_char = byte_str[-1:] - - # If we've seen escape sequences, use the EscCharSetProber, which - # uses a simple state machine to check for known escape sequences in - # HZ and ISO-2022 encodings, since those are the only encodings that - # use such sequences. - if self._input_state == InputState.ESC_ASCII: - if not self._esc_charset_prober: - self._esc_charset_prober = EscCharSetProber(self.lang_filter) - if self._esc_charset_prober.feed(byte_str) == ProbingState.FOUND_IT: - self.result = {'encoding': - self._esc_charset_prober.charset_name, - 'confidence': - self._esc_charset_prober.get_confidence(), - 'language': - self._esc_charset_prober.language} - self.done = True - # If we've seen high bytes (i.e., those with values greater than 127), - # we need to do more complicated checks using all our multi-byte and - # single-byte probers that are left. The single-byte probers - # use character bigram distributions to determine the encoding, whereas - # the multi-byte probers use a combination of character unigram and - # bigram distributions. 
- elif self._input_state == InputState.HIGH_BYTE: - if not self._charset_probers: - self._charset_probers = [MBCSGroupProber(self.lang_filter)] - # If we're checking non-CJK encodings, use single-byte prober - if self.lang_filter & LanguageFilter.NON_CJK: - self._charset_probers.append(SBCSGroupProber()) - self._charset_probers.append(Latin1Prober()) - for prober in self._charset_probers: - if prober.feed(byte_str) == ProbingState.FOUND_IT: - self.result = {'encoding': prober.charset_name, - 'confidence': prober.get_confidence(), - 'language': prober.language} - self.done = True - break - if self.WIN_BYTE_DETECTOR.search(byte_str): - self._has_win_bytes = True - - def close(self): - """ - Stop analyzing the current document and come up with a final - prediction. - - :returns: The ``result`` attribute, a ``dict`` with the keys - `encoding`, `confidence`, and `language`. - """ - # Don't bother with checks if we're already done - if self.done: - return self.result - self.done = True - - if not self._got_data: - self.logger.debug('no data received!') - - # Default to ASCII if it is all we've seen so far - elif self._input_state == InputState.PURE_ASCII: - self.result = {'encoding': 'ascii', - 'confidence': 1.0, - 'language': ''} - - # If we have seen non-ASCII, return the best that met MINIMUM_THRESHOLD - elif self._input_state == InputState.HIGH_BYTE: - prober_confidence = None - max_prober_confidence = 0.0 - max_prober = None - for prober in self._charset_probers: - if not prober: - continue - prober_confidence = prober.get_confidence() - if prober_confidence > max_prober_confidence: - max_prober_confidence = prober_confidence - max_prober = prober - if max_prober and (max_prober_confidence > self.MINIMUM_THRESHOLD): - charset_name = max_prober.charset_name - lower_charset_name = max_prober.charset_name.lower() - confidence = max_prober.get_confidence() - # Use Windows encoding name instead of ISO-8859 if we saw any - # extra Windows-specific bytes - if lower_charset_name.startswith('iso-8859'): - if self._has_win_bytes: - charset_name = self.ISO_WIN_MAP.get(lower_charset_name, - charset_name) - self.result = {'encoding': charset_name, - 'confidence': confidence, - 'language': max_prober.language} - - # Log all prober confidences if none met MINIMUM_THRESHOLD - if self.logger.getEffectiveLevel() == logging.DEBUG: - if self.result['encoding'] is None: - self.logger.debug('no probers hit minimum threshold') - for group_prober in self._charset_probers: - if not group_prober: - continue - if isinstance(group_prober, CharSetGroupProber): - for prober in group_prober.probers: - self.logger.debug('%s %s confidence = %s', - prober.charset_name, - prober.language, - prober.get_confidence()) - else: - self.logger.debug('%s %s confidence = %s', - prober.charset_name, - prober.language, - prober.get_confidence()) - return self.result diff --git a/venv/lib/python3.6/site-packages/chardet/utf8prober.py b/venv/lib/python3.6/site-packages/chardet/utf8prober.py deleted file mode 100644 index 6c3196c..0000000 --- a/venv/lib/python3.6/site-packages/chardet/utf8prober.py +++ /dev/null @@ -1,82 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is mozilla.org code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. 
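The UniversalDetector removed above is the incremental front end behind chardet.detect: chunks are fed in, detection can stop early once done flips to True, and the verdict is read from result after close(). A minimal sketch of that workflow, assuming chardet is installed as an ordinary dependency and some_file.bin is a placeholder path:

    from chardet.universaldetector import UniversalDetector

    detector = UniversalDetector()
    with open("some_file.bin", "rb") as fh:          # placeholder input file
        for chunk in iter(lambda: fh.read(4096), b""):
            detector.feed(chunk)
            if detector.done:                        # a prober already hit FOUND_IT
                break
    detector.close()                                 # finalizes detector.result
    print(detector.result)   # {'encoding': ..., 'confidence': ..., 'language': ...}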
-# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .charsetprober import CharSetProber -from .enums import ProbingState, MachineState -from .codingstatemachine import CodingStateMachine -from .mbcssm import UTF8_SM_MODEL - - - -class UTF8Prober(CharSetProber): - ONE_CHAR_PROB = 0.5 - - def __init__(self): - super(UTF8Prober, self).__init__() - self.coding_sm = CodingStateMachine(UTF8_SM_MODEL) - self._num_mb_chars = None - self.reset() - - def reset(self): - super(UTF8Prober, self).reset() - self.coding_sm.reset() - self._num_mb_chars = 0 - - @property - def charset_name(self): - return "utf-8" - - @property - def language(self): - return "" - - def feed(self, byte_str): - for c in byte_str: - coding_state = self.coding_sm.next_state(c) - if coding_state == MachineState.ERROR: - self._state = ProbingState.NOT_ME - break - elif coding_state == MachineState.ITS_ME: - self._state = ProbingState.FOUND_IT - break - elif coding_state == MachineState.START: - if self.coding_sm.get_current_charlen() >= 2: - self._num_mb_chars += 1 - - if self.state == ProbingState.DETECTING: - if self.get_confidence() > self.SHORTCUT_THRESHOLD: - self._state = ProbingState.FOUND_IT - - return self.state - - def get_confidence(self): - unlike = 0.99 - if self._num_mb_chars < 6: - unlike *= self.ONE_CHAR_PROB ** self._num_mb_chars - return 1.0 - unlike - else: - return unlike diff --git a/venv/lib/python3.6/site-packages/chardet/version.py b/venv/lib/python3.6/site-packages/chardet/version.py deleted file mode 100644 index bb2a34a..0000000 --- a/venv/lib/python3.6/site-packages/chardet/version.py +++ /dev/null @@ -1,9 +0,0 @@ -""" -This module exists only to simplify retrieving the version number of chardet -from within setup.py and from chardet subpackages. - -:author: Dan Blanchard (dan.blanchard@gmail.com) -""" - -__version__ = "3.0.4" -VERSION = __version__.split('.') diff --git a/venv/lib/python3.6/site-packages/click/__init__.py b/venv/lib/python3.6/site-packages/click/__init__.py deleted file mode 100644 index d3c3366..0000000 --- a/venv/lib/python3.6/site-packages/click/__init__.py +++ /dev/null @@ -1,97 +0,0 @@ -# -*- coding: utf-8 -*- -""" -click -~~~~~ - -Click is a simple Python module inspired by the stdlib optparse to make -writing command line scripts fun. Unlike other modules, it's based -around a simple API that does not come with too much magic and is -composable. - -:copyright: © 2014 by the Pallets team. -:license: BSD, see LICENSE.rst for more details. 
-""" - -# Core classes -from .core import Context, BaseCommand, Command, MultiCommand, Group, \ - CommandCollection, Parameter, Option, Argument - -# Globals -from .globals import get_current_context - -# Decorators -from .decorators import pass_context, pass_obj, make_pass_decorator, \ - command, group, argument, option, confirmation_option, \ - password_option, version_option, help_option - -# Types -from .types import ParamType, File, Path, Choice, IntRange, Tuple, \ - DateTime, STRING, INT, FLOAT, BOOL, UUID, UNPROCESSED, FloatRange - -# Utilities -from .utils import echo, get_binary_stream, get_text_stream, open_file, \ - format_filename, get_app_dir, get_os_args - -# Terminal functions -from .termui import prompt, confirm, get_terminal_size, echo_via_pager, \ - progressbar, clear, style, unstyle, secho, edit, launch, getchar, \ - pause - -# Exceptions -from .exceptions import ClickException, UsageError, BadParameter, \ - FileError, Abort, NoSuchOption, BadOptionUsage, BadArgumentUsage, \ - MissingParameter - -# Formatting -from .formatting import HelpFormatter, wrap_text - -# Parsing -from .parser import OptionParser - - -__all__ = [ - # Core classes - 'Context', 'BaseCommand', 'Command', 'MultiCommand', 'Group', - 'CommandCollection', 'Parameter', 'Option', 'Argument', - - # Globals - 'get_current_context', - - # Decorators - 'pass_context', 'pass_obj', 'make_pass_decorator', 'command', 'group', - 'argument', 'option', 'confirmation_option', 'password_option', - 'version_option', 'help_option', - - # Types - 'ParamType', 'File', 'Path', 'Choice', 'IntRange', 'Tuple', - 'DateTime', 'STRING', 'INT', 'FLOAT', 'BOOL', 'UUID', 'UNPROCESSED', - 'FloatRange', - - # Utilities - 'echo', 'get_binary_stream', 'get_text_stream', 'open_file', - 'format_filename', 'get_app_dir', 'get_os_args', - - # Terminal functions - 'prompt', 'confirm', 'get_terminal_size', 'echo_via_pager', - 'progressbar', 'clear', 'style', 'unstyle', 'secho', 'edit', 'launch', - 'getchar', 'pause', - - # Exceptions - 'ClickException', 'UsageError', 'BadParameter', 'FileError', - 'Abort', 'NoSuchOption', 'BadOptionUsage', 'BadArgumentUsage', - 'MissingParameter', - - # Formatting - 'HelpFormatter', 'wrap_text', - - # Parsing - 'OptionParser', -] - - -# Controls if click should emit the warning about the use of unicode -# literals. -disable_unicode_literals_warning = False - - -__version__ = '7.0' diff --git a/venv/lib/python3.6/site-packages/click/_bashcomplete.py b/venv/lib/python3.6/site-packages/click/_bashcomplete.py deleted file mode 100644 index a5f1084..0000000 --- a/venv/lib/python3.6/site-packages/click/_bashcomplete.py +++ /dev/null @@ -1,293 +0,0 @@ -import copy -import os -import re - -from .utils import echo -from .parser import split_arg_string -from .core import MultiCommand, Option, Argument -from .types import Choice - -try: - from collections import abc -except ImportError: - import collections as abc - -WORDBREAK = '=' - -# Note, only BASH version 4.4 and later have the nosort option. -COMPLETION_SCRIPT_BASH = ''' -%(complete_func)s() { - local IFS=$'\n' - COMPREPLY=( $( env COMP_WORDS="${COMP_WORDS[*]}" \\ - COMP_CWORD=$COMP_CWORD \\ - %(autocomplete_var)s=complete $1 ) ) - return 0 -} - -%(complete_func)setup() { - local COMPLETION_OPTIONS="" - local BASH_VERSION_ARR=(${BASH_VERSION//./ }) - # Only BASH version 4.4 and later have the nosort option. 
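click/__init__.py above only re-exports the public surface (command, group, option, argument, echo and friends). For orientation, a minimal script using that API as published for Click 7.0; the script and option names are illustrative:

    import click

    @click.command()
    @click.option("--count", default=1, help="Number of greetings.")
    @click.argument("name")
    def hello(count, name):
        """Greet NAME the requested number of times."""
        for _ in range(count):
            click.echo("Hello, %s!" % name)

    if __name__ == "__main__":
        hello()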
- if [ ${BASH_VERSION_ARR[0]} -gt 4 ] || ([ ${BASH_VERSION_ARR[0]} -eq 4 ] && [ ${BASH_VERSION_ARR[1]} -ge 4 ]); then - COMPLETION_OPTIONS="-o nosort" - fi - - complete $COMPLETION_OPTIONS -F %(complete_func)s %(script_names)s -} - -%(complete_func)setup -''' - -COMPLETION_SCRIPT_ZSH = ''' -%(complete_func)s() { - local -a completions - local -a completions_with_descriptions - local -a response - response=("${(@f)$( env COMP_WORDS=\"${words[*]}\" \\ - COMP_CWORD=$((CURRENT-1)) \\ - %(autocomplete_var)s=\"complete_zsh\" \\ - %(script_names)s )}") - - for key descr in ${(kv)response}; do - if [[ "$descr" == "_" ]]; then - completions+=("$key") - else - completions_with_descriptions+=("$key":"$descr") - fi - done - - if [ -n "$completions_with_descriptions" ]; then - _describe -V unsorted completions_with_descriptions -U -Q - fi - - if [ -n "$completions" ]; then - compadd -U -V unsorted -Q -a completions - fi - compstate[insert]="automenu" -} - -compdef %(complete_func)s %(script_names)s -''' - -_invalid_ident_char_re = re.compile(r'[^a-zA-Z0-9_]') - - -def get_completion_script(prog_name, complete_var, shell): - cf_name = _invalid_ident_char_re.sub('', prog_name.replace('-', '_')) - script = COMPLETION_SCRIPT_ZSH if shell == 'zsh' else COMPLETION_SCRIPT_BASH - return (script % { - 'complete_func': '_%s_completion' % cf_name, - 'script_names': prog_name, - 'autocomplete_var': complete_var, - }).strip() + ';' - - -def resolve_ctx(cli, prog_name, args): - """ - Parse into a hierarchy of contexts. Contexts are connected through the parent variable. - :param cli: command definition - :param prog_name: the program that is running - :param args: full list of args - :return: the final context/command parsed - """ - ctx = cli.make_context(prog_name, args, resilient_parsing=True) - args = ctx.protected_args + ctx.args - while args: - if isinstance(ctx.command, MultiCommand): - if not ctx.command.chain: - cmd_name, cmd, args = ctx.command.resolve_command(ctx, args) - if cmd is None: - return ctx - ctx = cmd.make_context(cmd_name, args, parent=ctx, - resilient_parsing=True) - args = ctx.protected_args + ctx.args - else: - # Walk chained subcommand contexts saving the last one. - while args: - cmd_name, cmd, args = ctx.command.resolve_command(ctx, args) - if cmd is None: - return ctx - sub_ctx = cmd.make_context(cmd_name, args, parent=ctx, - allow_extra_args=True, - allow_interspersed_args=False, - resilient_parsing=True) - args = sub_ctx.args - ctx = sub_ctx - args = sub_ctx.protected_args + sub_ctx.args - else: - break - return ctx - - -def start_of_option(param_str): - """ - :param param_str: param_str to check - :return: whether or not this is the start of an option declaration (i.e. starts "-" or "--") - """ - return param_str and param_str[:1] == '-' - - -def is_incomplete_option(all_args, cmd_param): - """ - :param all_args: the full original list of args supplied - :param cmd_param: the current command paramter - :return: whether or not the last option declaration (i.e. starts "-" or "--") is incomplete and - corresponds to this cmd_param. 
In other words whether this cmd_param option can still accept - values - """ - if not isinstance(cmd_param, Option): - return False - if cmd_param.is_flag: - return False - last_option = None - for index, arg_str in enumerate(reversed([arg for arg in all_args if arg != WORDBREAK])): - if index + 1 > cmd_param.nargs: - break - if start_of_option(arg_str): - last_option = arg_str - - return True if last_option and last_option in cmd_param.opts else False - - -def is_incomplete_argument(current_params, cmd_param): - """ - :param current_params: the current params and values for this argument as already entered - :param cmd_param: the current command parameter - :return: whether or not the last argument is incomplete and corresponds to this cmd_param. In - other words whether or not the this cmd_param argument can still accept values - """ - if not isinstance(cmd_param, Argument): - return False - current_param_values = current_params[cmd_param.name] - if current_param_values is None: - return True - if cmd_param.nargs == -1: - return True - if isinstance(current_param_values, abc.Iterable) \ - and cmd_param.nargs > 1 and len(current_param_values) < cmd_param.nargs: - return True - return False - - -def get_user_autocompletions(ctx, args, incomplete, cmd_param): - """ - :param ctx: context associated with the parsed command - :param args: full list of args - :param incomplete: the incomplete text to autocomplete - :param cmd_param: command definition - :return: all the possible user-specified completions for the param - """ - results = [] - if isinstance(cmd_param.type, Choice): - # Choices don't support descriptions. - results = [(c, None) - for c in cmd_param.type.choices if str(c).startswith(incomplete)] - elif cmd_param.autocompletion is not None: - dynamic_completions = cmd_param.autocompletion(ctx=ctx, - args=args, - incomplete=incomplete) - results = [c if isinstance(c, tuple) else (c, None) - for c in dynamic_completions] - return results - - -def get_visible_commands_starting_with(ctx, starts_with): - """ - :param ctx: context associated with the parsed command - :starts_with: string that visible commands must start with. - :return: all visible (not hidden) commands that start with starts_with. - """ - for c in ctx.command.list_commands(ctx): - if c.startswith(starts_with): - command = ctx.command.get_command(ctx, c) - if not command.hidden: - yield command - - -def add_subcommand_completions(ctx, incomplete, completions_out): - # Add subcommand completions. 
- if isinstance(ctx.command, MultiCommand): - completions_out.extend( - [(c.name, c.get_short_help_str()) for c in get_visible_commands_starting_with(ctx, incomplete)]) - - # Walk up the context list and add any other completion possibilities from chained commands - while ctx.parent is not None: - ctx = ctx.parent - if isinstance(ctx.command, MultiCommand) and ctx.command.chain: - remaining_commands = [c for c in get_visible_commands_starting_with(ctx, incomplete) - if c.name not in ctx.protected_args] - completions_out.extend([(c.name, c.get_short_help_str()) for c in remaining_commands]) - - -def get_choices(cli, prog_name, args, incomplete): - """ - :param cli: command definition - :param prog_name: the program that is running - :param args: full list of args - :param incomplete: the incomplete text to autocomplete - :return: all the possible completions for the incomplete - """ - all_args = copy.deepcopy(args) - - ctx = resolve_ctx(cli, prog_name, args) - if ctx is None: - return [] - - # In newer versions of bash long opts with '='s are partitioned, but it's easier to parse - # without the '=' - if start_of_option(incomplete) and WORDBREAK in incomplete: - partition_incomplete = incomplete.partition(WORDBREAK) - all_args.append(partition_incomplete[0]) - incomplete = partition_incomplete[2] - elif incomplete == WORDBREAK: - incomplete = '' - - completions = [] - if start_of_option(incomplete): - # completions for partial options - for param in ctx.command.params: - if isinstance(param, Option) and not param.hidden: - param_opts = [param_opt for param_opt in param.opts + - param.secondary_opts if param_opt not in all_args or param.multiple] - completions.extend([(o, param.help) for o in param_opts if o.startswith(incomplete)]) - return completions - # completion for option values from user supplied values - for param in ctx.command.params: - if is_incomplete_option(all_args, param): - return get_user_autocompletions(ctx, all_args, incomplete, param) - # completion for argument values from user supplied values - for param in ctx.command.params: - if is_incomplete_argument(ctx.params, param): - return get_user_autocompletions(ctx, all_args, incomplete, param) - - add_subcommand_completions(ctx, incomplete, completions) - # Sort before returning so that proper ordering can be enforced in custom types. - return sorted(completions) - - -def do_complete(cli, prog_name, include_descriptions): - cwords = split_arg_string(os.environ['COMP_WORDS']) - cword = int(os.environ['COMP_CWORD']) - args = cwords[1:cword] - try: - incomplete = cwords[cword] - except IndexError: - incomplete = '' - - for item in get_choices(cli, prog_name, args, incomplete): - echo(item[0]) - if include_descriptions: - # ZSH has trouble dealing with empty array parameters when returned from commands, so use a well defined character '_' to indicate no description is present. 
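get_user_autocompletions and get_choices above are what turn a user-supplied autocompletion callback (or a Choice type) into shell suggestions. A sketch of the callback shape this code expects, using the Click 7.x autocompletion= parameter; the option name and candidate values are invented for illustration:

    import click

    def complete_env(ctx, args, incomplete):
        # Receives the parsed context, the raw args so far and the partial word;
        # returns plain strings (or (value, description) tuples).
        return [e for e in ("dev", "staging", "prod") if e.startswith(incomplete)]

    @click.command()
    @click.option("--env", autocompletion=complete_env)
    def deploy(env):
        click.echo("deploying to %s" % env)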
- echo(item[1] if item[1] else '_') - - return True - - -def bashcomplete(cli, prog_name, complete_var, complete_instr): - if complete_instr.startswith('source'): - shell = 'zsh' if complete_instr == 'source_zsh' else 'bash' - echo(get_completion_script(prog_name, complete_var, shell)) - return True - elif complete_instr == 'complete' or complete_instr == 'complete_zsh': - return do_complete(cli, prog_name, complete_instr == 'complete_zsh') - return False diff --git a/venv/lib/python3.6/site-packages/click/_compat.py b/venv/lib/python3.6/site-packages/click/_compat.py deleted file mode 100644 index 937e230..0000000 --- a/venv/lib/python3.6/site-packages/click/_compat.py +++ /dev/null @@ -1,703 +0,0 @@ -import re -import io -import os -import sys -import codecs -from weakref import WeakKeyDictionary - - -PY2 = sys.version_info[0] == 2 -CYGWIN = sys.platform.startswith('cygwin') -# Determine local App Engine environment, per Google's own suggestion -APP_ENGINE = ('APPENGINE_RUNTIME' in os.environ and - 'Development/' in os.environ['SERVER_SOFTWARE']) -WIN = sys.platform.startswith('win') and not APP_ENGINE -DEFAULT_COLUMNS = 80 - - -_ansi_re = re.compile(r'\033\[((?:\d|;)*)([a-zA-Z])') - - -def get_filesystem_encoding(): - return sys.getfilesystemencoding() or sys.getdefaultencoding() - - -def _make_text_stream(stream, encoding, errors, - force_readable=False, force_writable=False): - if encoding is None: - encoding = get_best_encoding(stream) - if errors is None: - errors = 'replace' - return _NonClosingTextIOWrapper(stream, encoding, errors, - line_buffering=True, - force_readable=force_readable, - force_writable=force_writable) - - -def is_ascii_encoding(encoding): - """Checks if a given encoding is ascii.""" - try: - return codecs.lookup(encoding).name == 'ascii' - except LookupError: - return False - - -def get_best_encoding(stream): - """Returns the default stream encoding if not found.""" - rv = getattr(stream, 'encoding', None) or sys.getdefaultencoding() - if is_ascii_encoding(rv): - return 'utf-8' - return rv - - -class _NonClosingTextIOWrapper(io.TextIOWrapper): - - def __init__(self, stream, encoding, errors, - force_readable=False, force_writable=False, **extra): - self._stream = stream = _FixupStream(stream, force_readable, - force_writable) - io.TextIOWrapper.__init__(self, stream, encoding, errors, **extra) - - # The io module is a place where the Python 3 text behavior - # was forced upon Python 2, so we need to unbreak - # it to look like Python 2. - if PY2: - def write(self, x): - if isinstance(x, str) or is_bytes(x): - try: - self.flush() - except Exception: - pass - return self.buffer.write(str(x)) - return io.TextIOWrapper.write(self, x) - - def writelines(self, lines): - for line in lines: - self.write(line) - - def __del__(self): - try: - self.detach() - except Exception: - pass - - def isatty(self): - # https://bitbucket.org/pypy/pypy/issue/1803 - return self._stream.isatty() - - -class _FixupStream(object): - """The new io interface needs more from streams than streams - traditionally implement. As such, this fix-up code is necessary in - some circumstances. - - The forcing of readable and writable flags are there because some tools - put badly patched objects on sys (one such offender are certain version - of jupyter notebook). 
- """ - - def __init__(self, stream, force_readable=False, force_writable=False): - self._stream = stream - self._force_readable = force_readable - self._force_writable = force_writable - - def __getattr__(self, name): - return getattr(self._stream, name) - - def read1(self, size): - f = getattr(self._stream, 'read1', None) - if f is not None: - return f(size) - # We only dispatch to readline instead of read in Python 2 as we - # do not want cause problems with the different implementation - # of line buffering. - if PY2: - return self._stream.readline(size) - return self._stream.read(size) - - def readable(self): - if self._force_readable: - return True - x = getattr(self._stream, 'readable', None) - if x is not None: - return x() - try: - self._stream.read(0) - except Exception: - return False - return True - - def writable(self): - if self._force_writable: - return True - x = getattr(self._stream, 'writable', None) - if x is not None: - return x() - try: - self._stream.write('') - except Exception: - try: - self._stream.write(b'') - except Exception: - return False - return True - - def seekable(self): - x = getattr(self._stream, 'seekable', None) - if x is not None: - return x() - try: - self._stream.seek(self._stream.tell()) - except Exception: - return False - return True - - -if PY2: - text_type = unicode - bytes = str - raw_input = raw_input - string_types = (str, unicode) - int_types = (int, long) - iteritems = lambda x: x.iteritems() - range_type = xrange - - def is_bytes(x): - return isinstance(x, (buffer, bytearray)) - - _identifier_re = re.compile(r'^[a-zA-Z_][a-zA-Z0-9_]*$') - - # For Windows, we need to force stdout/stdin/stderr to binary if it's - # fetched for that. This obviously is not the most correct way to do - # it as it changes global state. Unfortunately, there does not seem to - # be a clear better way to do it as just reopening the file in binary - # mode does not change anything. - # - # An option would be to do what Python 3 does and to open the file as - # binary only, patch it back to the system, and then use a wrapper - # stream that converts newlines. It's not quite clear what's the - # correct option here. - # - # This code also lives in _winconsole for the fallback to the console - # emulation stream. - # - # There are also Windows environments where the `msvcrt` module is not - # available (which is why we use try-catch instead of the WIN variable - # here), such as the Google App Engine development server on Windows. In - # those cases there is just nothing we can do. 
- def set_binary_mode(f): - return f - - try: - import msvcrt - except ImportError: - pass - else: - def set_binary_mode(f): - try: - fileno = f.fileno() - except Exception: - pass - else: - msvcrt.setmode(fileno, os.O_BINARY) - return f - - try: - import fcntl - except ImportError: - pass - else: - def set_binary_mode(f): - try: - fileno = f.fileno() - except Exception: - pass - else: - flags = fcntl.fcntl(fileno, fcntl.F_GETFL) - fcntl.fcntl(fileno, fcntl.F_SETFL, flags & ~os.O_NONBLOCK) - return f - - def isidentifier(x): - return _identifier_re.search(x) is not None - - def get_binary_stdin(): - return set_binary_mode(sys.stdin) - - def get_binary_stdout(): - _wrap_std_stream('stdout') - return set_binary_mode(sys.stdout) - - def get_binary_stderr(): - _wrap_std_stream('stderr') - return set_binary_mode(sys.stderr) - - def get_text_stdin(encoding=None, errors=None): - rv = _get_windows_console_stream(sys.stdin, encoding, errors) - if rv is not None: - return rv - return _make_text_stream(sys.stdin, encoding, errors, - force_readable=True) - - def get_text_stdout(encoding=None, errors=None): - _wrap_std_stream('stdout') - rv = _get_windows_console_stream(sys.stdout, encoding, errors) - if rv is not None: - return rv - return _make_text_stream(sys.stdout, encoding, errors, - force_writable=True) - - def get_text_stderr(encoding=None, errors=None): - _wrap_std_stream('stderr') - rv = _get_windows_console_stream(sys.stderr, encoding, errors) - if rv is not None: - return rv - return _make_text_stream(sys.stderr, encoding, errors, - force_writable=True) - - def filename_to_ui(value): - if isinstance(value, bytes): - value = value.decode(get_filesystem_encoding(), 'replace') - return value -else: - import io - text_type = str - raw_input = input - string_types = (str,) - int_types = (int,) - range_type = range - isidentifier = lambda x: x.isidentifier() - iteritems = lambda x: iter(x.items()) - - def is_bytes(x): - return isinstance(x, (bytes, memoryview, bytearray)) - - def _is_binary_reader(stream, default=False): - try: - return isinstance(stream.read(0), bytes) - except Exception: - return default - # This happens in some cases where the stream was already - # closed. In this case, we assume the default. - - def _is_binary_writer(stream, default=False): - try: - stream.write(b'') - except Exception: - try: - stream.write('') - return False - except Exception: - pass - return default - return True - - def _find_binary_reader(stream): - # We need to figure out if the given stream is already binary. - # This can happen because the official docs recommend detaching - # the streams to get binary streams. Some code might do this, so - # we need to deal with this case explicitly. - if _is_binary_reader(stream, False): - return stream - - buf = getattr(stream, 'buffer', None) - - # Same situation here; this time we assume that the buffer is - # actually binary in case it's closed. - if buf is not None and _is_binary_reader(buf, True): - return buf - - def _find_binary_writer(stream): - # We need to figure out if the given stream is already binary. - # This can happen because the official docs recommend detatching - # the streams to get binary streams. Some code might do this, so - # we need to deal with this case explicitly. - if _is_binary_writer(stream, False): - return stream - - buf = getattr(stream, 'buffer', None) - - # Same situation here; this time we assume that the buffer is - # actually binary in case it's closed. 
- if buf is not None and _is_binary_writer(buf, True): - return buf - - def _stream_is_misconfigured(stream): - """A stream is misconfigured if its encoding is ASCII.""" - # If the stream does not have an encoding set, we assume it's set - # to ASCII. This appears to happen in certain unittest - # environments. It's not quite clear what the correct behavior is - # but this at least will force Click to recover somehow. - return is_ascii_encoding(getattr(stream, 'encoding', None) or 'ascii') - - def _is_compatible_text_stream(stream, encoding, errors): - stream_encoding = getattr(stream, 'encoding', None) - stream_errors = getattr(stream, 'errors', None) - - # Perfect match. - if stream_encoding == encoding and stream_errors == errors: - return True - - # Otherwise, it's only a compatible stream if we did not ask for - # an encoding. - if encoding is None: - return stream_encoding is not None - - return False - - def _force_correct_text_reader(text_reader, encoding, errors, - force_readable=False): - if _is_binary_reader(text_reader, False): - binary_reader = text_reader - else: - # If there is no target encoding set, we need to verify that the - # reader is not actually misconfigured. - if encoding is None and not _stream_is_misconfigured(text_reader): - return text_reader - - if _is_compatible_text_stream(text_reader, encoding, errors): - return text_reader - - # If the reader has no encoding, we try to find the underlying - # binary reader for it. If that fails because the environment is - # misconfigured, we silently go with the same reader because this - # is too common to happen. In that case, mojibake is better than - # exceptions. - binary_reader = _find_binary_reader(text_reader) - if binary_reader is None: - return text_reader - - # At this point, we default the errors to replace instead of strict - # because nobody handles those errors anyways and at this point - # we're so fundamentally fucked that nothing can repair it. - if errors is None: - errors = 'replace' - return _make_text_stream(binary_reader, encoding, errors, - force_readable=force_readable) - - def _force_correct_text_writer(text_writer, encoding, errors, - force_writable=False): - if _is_binary_writer(text_writer, False): - binary_writer = text_writer - else: - # If there is no target encoding set, we need to verify that the - # writer is not actually misconfigured. - if encoding is None and not _stream_is_misconfigured(text_writer): - return text_writer - - if _is_compatible_text_stream(text_writer, encoding, errors): - return text_writer - - # If the writer has no encoding, we try to find the underlying - # binary writer for it. If that fails because the environment is - # misconfigured, we silently go with the same writer because this - # is too common to happen. In that case, mojibake is better than - # exceptions. - binary_writer = _find_binary_writer(text_writer) - if binary_writer is None: - return text_writer - - # At this point, we default the errors to replace instead of strict - # because nobody handles those errors anyways and at this point - # we're so fundamentally fucked that nothing can repair it. 
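The _find_binary_* and _force_correct_text_* helpers above are the machinery behind click's public get_binary_stream and get_text_stream utilities, which hand back sanely configured standard streams on both Python 2 and 3. A small sketch of the intended use, assuming click is installed normally:

    import click

    raw = click.get_binary_stream("stdin").read()      # raw bytes, never decoded
    out = click.get_text_stream("stdout")              # text stream with a usable encoding
    out.write(u"read %d bytes\n" % len(raw))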
- if errors is None: - errors = 'replace' - return _make_text_stream(binary_writer, encoding, errors, - force_writable=force_writable) - - def get_binary_stdin(): - reader = _find_binary_reader(sys.stdin) - if reader is None: - raise RuntimeError('Was not able to determine binary ' - 'stream for sys.stdin.') - return reader - - def get_binary_stdout(): - writer = _find_binary_writer(sys.stdout) - if writer is None: - raise RuntimeError('Was not able to determine binary ' - 'stream for sys.stdout.') - return writer - - def get_binary_stderr(): - writer = _find_binary_writer(sys.stderr) - if writer is None: - raise RuntimeError('Was not able to determine binary ' - 'stream for sys.stderr.') - return writer - - def get_text_stdin(encoding=None, errors=None): - rv = _get_windows_console_stream(sys.stdin, encoding, errors) - if rv is not None: - return rv - return _force_correct_text_reader(sys.stdin, encoding, errors, - force_readable=True) - - def get_text_stdout(encoding=None, errors=None): - rv = _get_windows_console_stream(sys.stdout, encoding, errors) - if rv is not None: - return rv - return _force_correct_text_writer(sys.stdout, encoding, errors, - force_writable=True) - - def get_text_stderr(encoding=None, errors=None): - rv = _get_windows_console_stream(sys.stderr, encoding, errors) - if rv is not None: - return rv - return _force_correct_text_writer(sys.stderr, encoding, errors, - force_writable=True) - - def filename_to_ui(value): - if isinstance(value, bytes): - value = value.decode(get_filesystem_encoding(), 'replace') - else: - value = value.encode('utf-8', 'surrogateescape') \ - .decode('utf-8', 'replace') - return value - - -def get_streerror(e, default=None): - if hasattr(e, 'strerror'): - msg = e.strerror - else: - if default is not None: - msg = default - else: - msg = str(e) - if isinstance(msg, bytes): - msg = msg.decode('utf-8', 'replace') - return msg - - -def open_stream(filename, mode='r', encoding=None, errors='strict', - atomic=False): - # Standard streams first. These are simple because they don't need - # special handling for the atomic flag. It's entirely ignored. - if filename == '-': - if any(m in mode for m in ['w', 'a', 'x']): - if 'b' in mode: - return get_binary_stdout(), False - return get_text_stdout(encoding=encoding, errors=errors), False - if 'b' in mode: - return get_binary_stdin(), False - return get_text_stdin(encoding=encoding, errors=errors), False - - # Non-atomic writes directly go out through the regular open functions. - if not atomic: - if encoding is None: - return open(filename, mode), True - return io.open(filename, mode, encoding=encoding, errors=errors), True - - # Some usability stuff for atomic writes - if 'a' in mode: - raise ValueError( - 'Appending to an existing file is not supported, because that ' - 'would involve an expensive `copy`-operation to a temporary ' - 'file. Open the file in normal `w`-mode and copy explicitly ' - 'if that\'s what you\'re after.' - ) - if 'x' in mode: - raise ValueError('Use the `overwrite`-parameter instead.') - if 'w' not in mode: - raise ValueError('Atomic writes only make sense with `w`-mode.') - - # Atomic writes are more complicated. They work by opening a file - # as a proxy in the same folder and then using the fdopen - # functionality to wrap it in a Python file. Then we wrap it in an - # atomic file that moves the file over on close. 
- import tempfile - fd, tmp_filename = tempfile.mkstemp(dir=os.path.dirname(filename), - prefix='.__atomic-write') - - if encoding is not None: - f = io.open(fd, mode, encoding=encoding, errors=errors) - else: - f = os.fdopen(fd, mode) - - return _AtomicFile(f, tmp_filename, os.path.realpath(filename)), True - - -# Used in a destructor call, needs extra protection from interpreter cleanup. -if hasattr(os, 'replace'): - _replace = os.replace - _can_replace = True -else: - _replace = os.rename - _can_replace = not WIN - - -class _AtomicFile(object): - - def __init__(self, f, tmp_filename, real_filename): - self._f = f - self._tmp_filename = tmp_filename - self._real_filename = real_filename - self.closed = False - - @property - def name(self): - return self._real_filename - - def close(self, delete=False): - if self.closed: - return - self._f.close() - if not _can_replace: - try: - os.remove(self._real_filename) - except OSError: - pass - _replace(self._tmp_filename, self._real_filename) - self.closed = True - - def __getattr__(self, name): - return getattr(self._f, name) - - def __enter__(self): - return self - - def __exit__(self, exc_type, exc_value, tb): - self.close(delete=exc_type is not None) - - def __repr__(self): - return repr(self._f) - - -auto_wrap_for_ansi = None -colorama = None -get_winterm_size = None - - -def strip_ansi(value): - return _ansi_re.sub('', value) - - -def should_strip_ansi(stream=None, color=None): - if color is None: - if stream is None: - stream = sys.stdin - return not isatty(stream) - return not color - - -# If we're on Windows, we provide transparent integration through -# colorama. This will make ANSI colors through the echo function -# work automatically. -if WIN: - # Windows has a smaller terminal - DEFAULT_COLUMNS = 79 - - from ._winconsole import _get_windows_console_stream, _wrap_std_stream - - def _get_argv_encoding(): - import locale - return locale.getpreferredencoding() - - if PY2: - def raw_input(prompt=''): - sys.stderr.flush() - if prompt: - stdout = _default_text_stdout() - stdout.write(prompt) - stdin = _default_text_stdin() - return stdin.readline().rstrip('\r\n') - - try: - import colorama - except ImportError: - pass - else: - _ansi_stream_wrappers = WeakKeyDictionary() - - def auto_wrap_for_ansi(stream, color=None): - """This function wraps a stream so that calls through colorama - are issued to the win32 console API to recolor on demand. It - also ensures to reset the colors if a write call is interrupted - to not destroy the console afterwards. 
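open_stream and _AtomicFile above implement click's atomic writes: data goes to a hidden temporary sibling file that is renamed over the target on close. The public wrapper for this is click.open_file; a short sketch, with settings.json standing in for a real path:

    import click

    # With atomic=True the bytes land in a temp file next to the target and are
    # moved into place on close, so readers never observe a half-written file.
    with click.open_file("settings.json", "w", atomic=True) as fh:
        fh.write('{"debug": false}\n')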
- """ - try: - cached = _ansi_stream_wrappers.get(stream) - except Exception: - cached = None - if cached is not None: - return cached - strip = should_strip_ansi(stream, color) - ansi_wrapper = colorama.AnsiToWin32(stream, strip=strip) - rv = ansi_wrapper.stream - _write = rv.write - - def _safe_write(s): - try: - return _write(s) - except: - ansi_wrapper.reset_all() - raise - - rv.write = _safe_write - try: - _ansi_stream_wrappers[stream] = rv - except Exception: - pass - return rv - - def get_winterm_size(): - win = colorama.win32.GetConsoleScreenBufferInfo( - colorama.win32.STDOUT).srWindow - return win.Right - win.Left, win.Bottom - win.Top -else: - def _get_argv_encoding(): - return getattr(sys.stdin, 'encoding', None) or get_filesystem_encoding() - - _get_windows_console_stream = lambda *x: None - _wrap_std_stream = lambda *x: None - - -def term_len(x): - return len(strip_ansi(x)) - - -def isatty(stream): - try: - return stream.isatty() - except Exception: - return False - - -def _make_cached_stream_func(src_func, wrapper_func): - cache = WeakKeyDictionary() - def func(): - stream = src_func() - try: - rv = cache.get(stream) - except Exception: - rv = None - if rv is not None: - return rv - rv = wrapper_func() - try: - stream = src_func() # In case wrapper_func() modified the stream - cache[stream] = rv - except Exception: - pass - return rv - return func - - -_default_text_stdin = _make_cached_stream_func( - lambda: sys.stdin, get_text_stdin) -_default_text_stdout = _make_cached_stream_func( - lambda: sys.stdout, get_text_stdout) -_default_text_stderr = _make_cached_stream_func( - lambda: sys.stderr, get_text_stderr) - - -binary_streams = { - 'stdin': get_binary_stdin, - 'stdout': get_binary_stdout, - 'stderr': get_binary_stderr, -} - -text_streams = { - 'stdin': get_text_stdin, - 'stdout': get_text_stdout, - 'stderr': get_text_stderr, -} diff --git a/venv/lib/python3.6/site-packages/click/_termui_impl.py b/venv/lib/python3.6/site-packages/click/_termui_impl.py deleted file mode 100644 index 00a8e5e..0000000 --- a/venv/lib/python3.6/site-packages/click/_termui_impl.py +++ /dev/null @@ -1,621 +0,0 @@ -# -*- coding: utf-8 -*- -""" -click._termui_impl -~~~~~~~~~~~~~~~~~~ - -This module contains implementations for the termui module. To keep the -import time of Click down, some infrequently used functionality is -placed in this module and only imported as needed. - -:copyright: © 2014 by the Pallets team. -:license: BSD, see LICENSE.rst for more details. 
-""" - -import os -import sys -import time -import math -import contextlib -from ._compat import _default_text_stdout, range_type, PY2, isatty, \ - open_stream, strip_ansi, term_len, get_best_encoding, WIN, int_types, \ - CYGWIN -from .utils import echo -from .exceptions import ClickException - - -if os.name == 'nt': - BEFORE_BAR = '\r' - AFTER_BAR = '\n' -else: - BEFORE_BAR = '\r\033[?25l' - AFTER_BAR = '\033[?25h\n' - - -def _length_hint(obj): - """Returns the length hint of an object.""" - try: - return len(obj) - except (AttributeError, TypeError): - try: - get_hint = type(obj).__length_hint__ - except AttributeError: - return None - try: - hint = get_hint(obj) - except TypeError: - return None - if hint is NotImplemented or \ - not isinstance(hint, int_types) or \ - hint < 0: - return None - return hint - - -class ProgressBar(object): - - def __init__(self, iterable, length=None, fill_char='#', empty_char=' ', - bar_template='%(bar)s', info_sep=' ', show_eta=True, - show_percent=None, show_pos=False, item_show_func=None, - label=None, file=None, color=None, width=30): - self.fill_char = fill_char - self.empty_char = empty_char - self.bar_template = bar_template - self.info_sep = info_sep - self.show_eta = show_eta - self.show_percent = show_percent - self.show_pos = show_pos - self.item_show_func = item_show_func - self.label = label or '' - if file is None: - file = _default_text_stdout() - self.file = file - self.color = color - self.width = width - self.autowidth = width == 0 - - if length is None: - length = _length_hint(iterable) - if iterable is None: - if length is None: - raise TypeError('iterable or length is required') - iterable = range_type(length) - self.iter = iter(iterable) - self.length = length - self.length_known = length is not None - self.pos = 0 - self.avg = [] - self.start = self.last_eta = time.time() - self.eta_known = False - self.finished = False - self.max_width = None - self.entered = False - self.current_item = None - self.is_hidden = not isatty(self.file) - self._last_line = None - self.short_limit = 0.5 - - def __enter__(self): - self.entered = True - self.render_progress() - return self - - def __exit__(self, exc_type, exc_value, tb): - self.render_finish() - - def __iter__(self): - if not self.entered: - raise RuntimeError('You need to use progress bars in a with block.') - self.render_progress() - return self.generator() - - def is_fast(self): - return time.time() - self.start <= self.short_limit - - def render_finish(self): - if self.is_hidden or self.is_fast(): - return - self.file.write(AFTER_BAR) - self.file.flush() - - @property - def pct(self): - if self.finished: - return 1.0 - return min(self.pos / (float(self.length) or 1), 1.0) - - @property - def time_per_iteration(self): - if not self.avg: - return 0.0 - return sum(self.avg) / float(len(self.avg)) - - @property - def eta(self): - if self.length_known and not self.finished: - return self.time_per_iteration * (self.length - self.pos) - return 0.0 - - def format_eta(self): - if self.eta_known: - t = int(self.eta) - seconds = t % 60 - t //= 60 - minutes = t % 60 - t //= 60 - hours = t % 24 - t //= 24 - if t > 0: - days = t - return '%dd %02d:%02d:%02d' % (days, hours, minutes, seconds) - else: - return '%02d:%02d:%02d' % (hours, minutes, seconds) - return '' - - def format_pos(self): - pos = str(self.pos) - if self.length_known: - pos += '/%s' % self.length - return pos - - def format_pct(self): - return ('% 4d%%' % int(self.pct * 100))[1:] - - def format_bar(self): - if self.length_known: - 
bar_length = int(self.pct * self.width) - bar = self.fill_char * bar_length - bar += self.empty_char * (self.width - bar_length) - elif self.finished: - bar = self.fill_char * self.width - else: - bar = list(self.empty_char * (self.width or 1)) - if self.time_per_iteration != 0: - bar[int((math.cos(self.pos * self.time_per_iteration) - / 2.0 + 0.5) * self.width)] = self.fill_char - bar = ''.join(bar) - return bar - - def format_progress_line(self): - show_percent = self.show_percent - - info_bits = [] - if self.length_known and show_percent is None: - show_percent = not self.show_pos - - if self.show_pos: - info_bits.append(self.format_pos()) - if show_percent: - info_bits.append(self.format_pct()) - if self.show_eta and self.eta_known and not self.finished: - info_bits.append(self.format_eta()) - if self.item_show_func is not None: - item_info = self.item_show_func(self.current_item) - if item_info is not None: - info_bits.append(item_info) - - return (self.bar_template % { - 'label': self.label, - 'bar': self.format_bar(), - 'info': self.info_sep.join(info_bits) - }).rstrip() - - def render_progress(self): - from .termui import get_terminal_size - - if self.is_hidden: - return - - buf = [] - # Update width in case the terminal has been resized - if self.autowidth: - old_width = self.width - self.width = 0 - clutter_length = term_len(self.format_progress_line()) - new_width = max(0, get_terminal_size()[0] - clutter_length) - if new_width < old_width: - buf.append(BEFORE_BAR) - buf.append(' ' * self.max_width) - self.max_width = new_width - self.width = new_width - - clear_width = self.width - if self.max_width is not None: - clear_width = self.max_width - - buf.append(BEFORE_BAR) - line = self.format_progress_line() - line_len = term_len(line) - if self.max_width is None or self.max_width < line_len: - self.max_width = line_len - - buf.append(line) - buf.append(' ' * (clear_width - line_len)) - line = ''.join(buf) - # Render the line only if it changed. - - if line != self._last_line and not self.is_fast(): - self._last_line = line - echo(line, file=self.file, color=self.color, nl=False) - self.file.flush() - - def make_step(self, n_steps): - self.pos += n_steps - if self.length_known and self.pos >= self.length: - self.finished = True - - if (time.time() - self.last_eta) < 1.0: - return - - self.last_eta = time.time() - - # self.avg is a rolling list of length <= 7 of steps where steps are - # defined as time elapsed divided by the total progress through - # self.length. - if self.pos: - step = (time.time() - self.start) / self.pos - else: - step = time.time() - self.start - - self.avg = self.avg[-6:] + [step] - - self.eta_known = self.length_known - - def update(self, n_steps): - self.make_step(n_steps) - self.render_progress() - - def finish(self): - self.eta_known = 0 - self.current_item = None - self.finished = True - - def generator(self): - """ - Returns a generator which yields the items added to the bar during - construction, and updates the progress bar *after* the yielded block - returns. 
- """ - if not self.entered: - raise RuntimeError('You need to use progress bars in a with block.') - - if self.is_hidden: - for rv in self.iter: - yield rv - else: - for rv in self.iter: - self.current_item = rv - yield rv - self.update(1) - self.finish() - self.render_progress() - - -def pager(generator, color=None): - """Decide what method to use for paging through text.""" - stdout = _default_text_stdout() - if not isatty(sys.stdin) or not isatty(stdout): - return _nullpager(stdout, generator, color) - pager_cmd = (os.environ.get('PAGER', None) or '').strip() - if pager_cmd: - if WIN: - return _tempfilepager(generator, pager_cmd, color) - return _pipepager(generator, pager_cmd, color) - if os.environ.get('TERM') in ('dumb', 'emacs'): - return _nullpager(stdout, generator, color) - if WIN or sys.platform.startswith('os2'): - return _tempfilepager(generator, 'more <', color) - if hasattr(os, 'system') and os.system('(less) 2>/dev/null') == 0: - return _pipepager(generator, 'less', color) - - import tempfile - fd, filename = tempfile.mkstemp() - os.close(fd) - try: - if hasattr(os, 'system') and os.system('more "%s"' % filename) == 0: - return _pipepager(generator, 'more', color) - return _nullpager(stdout, generator, color) - finally: - os.unlink(filename) - - -def _pipepager(generator, cmd, color): - """Page through text by feeding it to another program. Invoking a - pager through this might support colors. - """ - import subprocess - env = dict(os.environ) - - # If we're piping to less we might support colors under the - # condition that - cmd_detail = cmd.rsplit('/', 1)[-1].split() - if color is None and cmd_detail[0] == 'less': - less_flags = os.environ.get('LESS', '') + ' '.join(cmd_detail[1:]) - if not less_flags: - env['LESS'] = '-R' - color = True - elif 'r' in less_flags or 'R' in less_flags: - color = True - - c = subprocess.Popen(cmd, shell=True, stdin=subprocess.PIPE, - env=env) - encoding = get_best_encoding(c.stdin) - try: - for text in generator: - if not color: - text = strip_ansi(text) - - c.stdin.write(text.encode(encoding, 'replace')) - except (IOError, KeyboardInterrupt): - pass - else: - c.stdin.close() - - # Less doesn't respect ^C, but catches it for its own UI purposes (aborting - # search or other commands inside less). - # - # That means when the user hits ^C, the parent process (click) terminates, - # but less is still alive, paging the output and messing up the terminal. - # - # If the user wants to make the pager exit on ^C, they should set - # `LESS='-K'`. It's not our decision to make. - while True: - try: - c.wait() - except KeyboardInterrupt: - pass - else: - break - - -def _tempfilepager(generator, cmd, color): - """Page through text by invoking a program on a temporary file.""" - import tempfile - filename = tempfile.mktemp() - # TODO: This never terminates if the passed generator never terminates. - text = "".join(generator) - if not color: - text = strip_ansi(text) - encoding = get_best_encoding(sys.stdout) - with open_stream(filename, 'wb')[0] as f: - f.write(text.encode(encoding)) - try: - os.system(cmd + ' "' + filename + '"') - finally: - os.unlink(filename) - - -def _nullpager(stream, generator, color): - """Simply print unformatted text. 
This is the ultimate fallback.""" - for text in generator: - if not color: - text = strip_ansi(text) - stream.write(text) - - -class Editor(object): - - def __init__(self, editor=None, env=None, require_save=True, - extension='.txt'): - self.editor = editor - self.env = env - self.require_save = require_save - self.extension = extension - - def get_editor(self): - if self.editor is not None: - return self.editor - for key in 'VISUAL', 'EDITOR': - rv = os.environ.get(key) - if rv: - return rv - if WIN: - return 'notepad' - for editor in 'vim', 'nano': - if os.system('which %s >/dev/null 2>&1' % editor) == 0: - return editor - return 'vi' - - def edit_file(self, filename): - import subprocess - editor = self.get_editor() - if self.env: - environ = os.environ.copy() - environ.update(self.env) - else: - environ = None - try: - c = subprocess.Popen('%s "%s"' % (editor, filename), - env=environ, shell=True) - exit_code = c.wait() - if exit_code != 0: - raise ClickException('%s: Editing failed!' % editor) - except OSError as e: - raise ClickException('%s: Editing failed: %s' % (editor, e)) - - def edit(self, text): - import tempfile - - text = text or '' - if text and not text.endswith('\n'): - text += '\n' - - fd, name = tempfile.mkstemp(prefix='editor-', suffix=self.extension) - try: - if WIN: - encoding = 'utf-8-sig' - text = text.replace('\n', '\r\n') - else: - encoding = 'utf-8' - text = text.encode(encoding) - - f = os.fdopen(fd, 'wb') - f.write(text) - f.close() - timestamp = os.path.getmtime(name) - - self.edit_file(name) - - if self.require_save \ - and os.path.getmtime(name) == timestamp: - return None - - f = open(name, 'rb') - try: - rv = f.read() - finally: - f.close() - return rv.decode('utf-8-sig').replace('\r\n', '\n') - finally: - os.unlink(name) - - -def open_url(url, wait=False, locate=False): - import subprocess - - def _unquote_file(url): - try: - import urllib - except ImportError: - import urllib - if url.startswith('file://'): - url = urllib.unquote(url[7:]) - return url - - if sys.platform == 'darwin': - args = ['open'] - if wait: - args.append('-W') - if locate: - args.append('-R') - args.append(_unquote_file(url)) - null = open('/dev/null', 'w') - try: - return subprocess.Popen(args, stderr=null).wait() - finally: - null.close() - elif WIN: - if locate: - url = _unquote_file(url) - args = 'explorer /select,"%s"' % _unquote_file( - url.replace('"', '')) - else: - args = 'start %s "" "%s"' % ( - wait and '/WAIT' or '', url.replace('"', '')) - return os.system(args) - elif CYGWIN: - if locate: - url = _unquote_file(url) - args = 'cygstart "%s"' % (os.path.dirname(url).replace('"', '')) - else: - args = 'cygstart %s "%s"' % ( - wait and '-w' or '', url.replace('"', '')) - return os.system(args) - - try: - if locate: - url = os.path.dirname(_unquote_file(url)) or '.' - else: - url = _unquote_file(url) - c = subprocess.Popen(['xdg-open', url]) - if wait: - return c.wait() - return 0 - except OSError: - if url.startswith(('http://', 'https://')) and not locate and not wait: - import webbrowser - webbrowser.open(url) - return 0 - return 1 - - -def _translate_ch_to_exc(ch): - if ch == u'\x03': - raise KeyboardInterrupt() - if ch == u'\x04' and not WIN: # Unix-like, Ctrl+D - raise EOFError() - if ch == u'\x1a' and WIN: # Windows, Ctrl+Z - raise EOFError() - - -if WIN: - import msvcrt - - @contextlib.contextmanager - def raw_terminal(): - yield - - def getchar(echo): - # The function `getch` will return a bytes object corresponding to - # the pressed character. 
Since Windows 10 build 1803, it will also - # return \x00 when called a second time after pressing a regular key. - # - # `getwch` does not share this probably-bugged behavior. Moreover, it - # returns a Unicode object by default, which is what we want. - # - # Either of these functions will return \x00 or \xe0 to indicate - # a special key, and you need to call the same function again to get - # the "rest" of the code. The fun part is that \u00e0 is - # "latin small letter a with grave", so if you type that on a French - # keyboard, you _also_ get a \xe0. - # E.g., consider the Up arrow. This returns \xe0 and then \x48. The - # resulting Unicode string reads as "a with grave" + "capital H". - # This is indistinguishable from when the user actually types - # "a with grave" and then "capital H". - # - # When \xe0 is returned, we assume it's part of a special-key sequence - # and call `getwch` again, but that means that when the user types - # the \u00e0 character, `getchar` doesn't return until a second - # character is typed. - # The alternative is returning immediately, but that would mess up - # cross-platform handling of arrow keys and others that start with - # \xe0. Another option is using `getch`, but then we can't reliably - # read non-ASCII characters, because return values of `getch` are - # limited to the current 8-bit codepage. - # - # Anyway, Click doesn't claim to do this Right(tm), and using `getwch` - # is doing the right thing in more situations than with `getch`. - if echo: - func = msvcrt.getwche - else: - func = msvcrt.getwch - - rv = func() - if rv in (u'\x00', u'\xe0'): - # \x00 and \xe0 are control characters that indicate special key, - # see above. - rv += func() - _translate_ch_to_exc(rv) - return rv -else: - import tty - import termios - - @contextlib.contextmanager - def raw_terminal(): - if not isatty(sys.stdin): - f = open('/dev/tty') - fd = f.fileno() - else: - fd = sys.stdin.fileno() - f = None - try: - old_settings = termios.tcgetattr(fd) - try: - tty.setraw(fd) - yield fd - finally: - termios.tcsetattr(fd, termios.TCSADRAIN, old_settings) - sys.stdout.flush() - if f is not None: - f.close() - except termios.error: - pass - - def getchar(echo): - with raw_terminal() as fd: - ch = os.read(fd, 32) - ch = ch.decode(get_best_encoding(sys.stdin), 'replace') - if echo and isatty(sys.stdout): - sys.stdout.write(ch) - _translate_ch_to_exc(ch) - return ch diff --git a/venv/lib/python3.6/site-packages/click/_textwrap.py b/venv/lib/python3.6/site-packages/click/_textwrap.py deleted file mode 100644 index 7e77603..0000000 --- a/venv/lib/python3.6/site-packages/click/_textwrap.py +++ /dev/null @@ -1,38 +0,0 @@ -import textwrap -from contextlib import contextmanager - - -class TextWrapper(textwrap.TextWrapper): - - def _handle_long_word(self, reversed_chunks, cur_line, cur_len, width): - space_left = max(width - cur_len, 1) - - if self.break_long_words: - last = reversed_chunks[-1] - cut = last[:space_left] - res = last[space_left:] - cur_line.append(cut) - reversed_chunks[-1] = res - elif not cur_line: - cur_line.append(reversed_chunks.pop()) - - @contextmanager - def extra_indent(self, indent): - old_initial_indent = self.initial_indent - old_subsequent_indent = self.subsequent_indent - self.initial_indent += indent - self.subsequent_indent += indent - try: - yield - finally: - self.initial_indent = old_initial_indent - self.subsequent_indent = old_subsequent_indent - - def indent_only(self, text): - rv = [] - for idx, line in enumerate(text.splitlines()): - indent = 
self.initial_indent - if idx > 0: - indent = self.subsequent_indent - rv.append(indent + line) - return '\n'.join(rv) diff --git a/venv/lib/python3.6/site-packages/click/_unicodefun.py b/venv/lib/python3.6/site-packages/click/_unicodefun.py deleted file mode 100644 index 620edff..0000000 --- a/venv/lib/python3.6/site-packages/click/_unicodefun.py +++ /dev/null @@ -1,125 +0,0 @@ -import os -import sys -import codecs - -from ._compat import PY2 - - -# If someone wants to vendor click, we want to ensure the -# correct package is discovered. Ideally we could use a -# relative import here but unfortunately Python does not -# support that. -click = sys.modules[__name__.rsplit('.', 1)[0]] - - -def _find_unicode_literals_frame(): - import __future__ - if not hasattr(sys, '_getframe'): # not all Python implementations have it - return 0 - frm = sys._getframe(1) - idx = 1 - while frm is not None: - if frm.f_globals.get('__name__', '').startswith('click.'): - frm = frm.f_back - idx += 1 - elif frm.f_code.co_flags & __future__.unicode_literals.compiler_flag: - return idx - else: - break - return 0 - - -def _check_for_unicode_literals(): - if not __debug__: - return - if not PY2 or click.disable_unicode_literals_warning: - return - bad_frame = _find_unicode_literals_frame() - if bad_frame <= 0: - return - from warnings import warn - warn(Warning('Click detected the use of the unicode_literals ' - '__future__ import. This is heavily discouraged ' - 'because it can introduce subtle bugs in your ' - 'code. You should instead use explicit u"" literals ' - 'for your unicode strings. For more information see ' - 'https://click.palletsprojects.com/python3/'), - stacklevel=bad_frame) - - -def _verify_python3_env(): - """Ensures that the environment is good for unicode on Python 3.""" - if PY2: - return - try: - import locale - fs_enc = codecs.lookup(locale.getpreferredencoding()).name - except Exception: - fs_enc = 'ascii' - if fs_enc != 'ascii': - return - - extra = '' - if os.name == 'posix': - import subprocess - try: - rv = subprocess.Popen(['locale', '-a'], stdout=subprocess.PIPE, - stderr=subprocess.PIPE).communicate()[0] - except OSError: - rv = b'' - good_locales = set() - has_c_utf8 = False - - # Make sure we're operating on text here. - if isinstance(rv, bytes): - rv = rv.decode('ascii', 'replace') - - for line in rv.splitlines(): - locale = line.strip() - if locale.lower().endswith(('.utf-8', '.utf8')): - good_locales.add(locale) - if locale.lower() in ('c.utf8', 'c.utf-8'): - has_c_utf8 = True - - extra += '\n\n' - if not good_locales: - extra += ( - 'Additional information: on this system no suitable UTF-8\n' - 'locales were discovered. This most likely requires resolving\n' - 'by reconfiguring the locale system.' - ) - elif has_c_utf8: - extra += ( - 'This system supports the C.UTF-8 locale which is recommended.\n' - 'You might be able to resolve your issue by exporting the\n' - 'following environment variables:\n\n' - ' export LC_ALL=C.UTF-8\n' - ' export LANG=C.UTF-8' - ) - else: - extra += ( - 'This system lists a couple of UTF-8 supporting locales that\n' - 'you can pick from. 
The following suitable locales were\n' - 'discovered: %s' - ) % ', '.join(sorted(good_locales)) - - bad_locale = None - for locale in os.environ.get('LC_ALL'), os.environ.get('LANG'): - if locale and locale.lower().endswith(('.utf-8', '.utf8')): - bad_locale = locale - if locale is not None: - break - if bad_locale is not None: - extra += ( - '\n\nClick discovered that you exported a UTF-8 locale\n' - 'but the locale system could not pick up from it because\n' - 'it does not exist. The exported locale is "%s" but it\n' - 'is not supported' - ) % bad_locale - - raise RuntimeError( - 'Click will abort further execution because Python 3 was' - ' configured to use ASCII as encoding for the environment.' - ' Consult https://click.palletsprojects.com/en/7.x/python3/ for' - ' mitigation steps.' + extra - ) diff --git a/venv/lib/python3.6/site-packages/click/_winconsole.py b/venv/lib/python3.6/site-packages/click/_winconsole.py deleted file mode 100644 index bbb080d..0000000 --- a/venv/lib/python3.6/site-packages/click/_winconsole.py +++ /dev/null @@ -1,307 +0,0 @@ -# -*- coding: utf-8 -*- -# This module is based on the excellent work by Adam Bartoš who -# provided a lot of what went into the implementation here in -# the discussion to issue1602 in the Python bug tracker. -# -# There are some general differences in regards to how this works -# compared to the original patches as we do not need to patch -# the entire interpreter but just work in our little world of -# echo and prmopt. - -import io -import os -import sys -import zlib -import time -import ctypes -import msvcrt -from ._compat import _NonClosingTextIOWrapper, text_type, PY2 -from ctypes import byref, POINTER, c_int, c_char, c_char_p, \ - c_void_p, py_object, c_ssize_t, c_ulong, windll, WINFUNCTYPE -try: - from ctypes import pythonapi - PyObject_GetBuffer = pythonapi.PyObject_GetBuffer - PyBuffer_Release = pythonapi.PyBuffer_Release -except ImportError: - pythonapi = None -from ctypes.wintypes import LPWSTR, LPCWSTR - - -c_ssize_p = POINTER(c_ssize_t) - -kernel32 = windll.kernel32 -GetStdHandle = kernel32.GetStdHandle -ReadConsoleW = kernel32.ReadConsoleW -WriteConsoleW = kernel32.WriteConsoleW -GetLastError = kernel32.GetLastError -GetCommandLineW = WINFUNCTYPE(LPWSTR)( - ('GetCommandLineW', windll.kernel32)) -CommandLineToArgvW = WINFUNCTYPE( - POINTER(LPWSTR), LPCWSTR, POINTER(c_int))( - ('CommandLineToArgvW', windll.shell32)) - - -STDIN_HANDLE = GetStdHandle(-10) -STDOUT_HANDLE = GetStdHandle(-11) -STDERR_HANDLE = GetStdHandle(-12) - - -PyBUF_SIMPLE = 0 -PyBUF_WRITABLE = 1 - -ERROR_SUCCESS = 0 -ERROR_NOT_ENOUGH_MEMORY = 8 -ERROR_OPERATION_ABORTED = 995 - -STDIN_FILENO = 0 -STDOUT_FILENO = 1 -STDERR_FILENO = 2 - -EOF = b'\x1a' -MAX_BYTES_WRITTEN = 32767 - - -class Py_buffer(ctypes.Structure): - _fields_ = [ - ('buf', c_void_p), - ('obj', py_object), - ('len', c_ssize_t), - ('itemsize', c_ssize_t), - ('readonly', c_int), - ('ndim', c_int), - ('format', c_char_p), - ('shape', c_ssize_p), - ('strides', c_ssize_p), - ('suboffsets', c_ssize_p), - ('internal', c_void_p) - ] - - if PY2: - _fields_.insert(-1, ('smalltable', c_ssize_t * 2)) - - -# On PyPy we cannot get buffers so our ability to operate here is -# serverly limited. 
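# A small sketch of the graceful-degradation pattern used in this module:
# probe once at import time for an optional low-level capability and publish
# None when it is missing, so callers branch on availability instead of
# failing at call time. `console_buffers_available` is an illustrative
# helper name, not part of Click; the real module checks `get_buffer is None`.
try:
    from ctypes import pythonapi
except ImportError:  # e.g. PyPy does not expose the CPython C-API this way
    pythonapi = None

def console_buffers_available():
    return pythonapi is not None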
-if pythonapi is None: - get_buffer = None -else: - def get_buffer(obj, writable=False): - buf = Py_buffer() - flags = PyBUF_WRITABLE if writable else PyBUF_SIMPLE - PyObject_GetBuffer(py_object(obj), byref(buf), flags) - try: - buffer_type = c_char * buf.len - return buffer_type.from_address(buf.buf) - finally: - PyBuffer_Release(byref(buf)) - - -class _WindowsConsoleRawIOBase(io.RawIOBase): - - def __init__(self, handle): - self.handle = handle - - def isatty(self): - io.RawIOBase.isatty(self) - return True - - -class _WindowsConsoleReader(_WindowsConsoleRawIOBase): - - def readable(self): - return True - - def readinto(self, b): - bytes_to_be_read = len(b) - if not bytes_to_be_read: - return 0 - elif bytes_to_be_read % 2: - raise ValueError('cannot read odd number of bytes from ' - 'UTF-16-LE encoded console') - - buffer = get_buffer(b, writable=True) - code_units_to_be_read = bytes_to_be_read // 2 - code_units_read = c_ulong() - - rv = ReadConsoleW(self.handle, buffer, code_units_to_be_read, - byref(code_units_read), None) - if GetLastError() == ERROR_OPERATION_ABORTED: - # wait for KeyboardInterrupt - time.sleep(0.1) - if not rv: - raise OSError('Windows error: %s' % GetLastError()) - - if buffer[0] == EOF: - return 0 - return 2 * code_units_read.value - - -class _WindowsConsoleWriter(_WindowsConsoleRawIOBase): - - def writable(self): - return True - - @staticmethod - def _get_error_message(errno): - if errno == ERROR_SUCCESS: - return 'ERROR_SUCCESS' - elif errno == ERROR_NOT_ENOUGH_MEMORY: - return 'ERROR_NOT_ENOUGH_MEMORY' - return 'Windows error %s' % errno - - def write(self, b): - bytes_to_be_written = len(b) - buf = get_buffer(b) - code_units_to_be_written = min(bytes_to_be_written, - MAX_BYTES_WRITTEN) // 2 - code_units_written = c_ulong() - - WriteConsoleW(self.handle, buf, code_units_to_be_written, - byref(code_units_written), None) - bytes_written = 2 * code_units_written.value - - if bytes_written == 0 and bytes_to_be_written > 0: - raise OSError(self._get_error_message(GetLastError())) - return bytes_written - - -class ConsoleStream(object): - - def __init__(self, text_stream, byte_stream): - self._text_stream = text_stream - self.buffer = byte_stream - - @property - def name(self): - return self.buffer.name - - def write(self, x): - if isinstance(x, text_type): - return self._text_stream.write(x) - try: - self.flush() - except Exception: - pass - return self.buffer.write(x) - - def writelines(self, lines): - for line in lines: - self.write(line) - - def __getattr__(self, name): - return getattr(self._text_stream, name) - - def isatty(self): - return self.buffer.isatty() - - def __repr__(self): - return '' % ( - self.name, - self.encoding, - ) - - -class WindowsChunkedWriter(object): - """ - Wraps a stream (such as stdout), acting as a transparent proxy for all - attribute access apart from method 'write()' which we wrap to write in - limited chunks due to a Windows limitation on binary console streams. - """ - def __init__(self, wrapped): - # double-underscore everything to prevent clashes with names of - # attributes on the wrapped stream object. 
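# A minimal sketch of the transparent proxy-with-chunked-writes idea that
# WindowsChunkedWriter implements: delegate every attribute lookup to the
# wrapped stream via __getattr__ and only override write() so large payloads
# are split into bounded chunks. The class name and default chunk size here
# are illustrative; the real module uses MAX_BYTES_WRITTEN.
class ChunkedWriterSketch(object):
    def __init__(self, wrapped, chunk_size=32767):
        self.__wrapped = wrapped
        self.__chunk_size = chunk_size

    def __getattr__(self, name):
        # Only called for attributes not found on the proxy itself.
        return getattr(self.__wrapped, name)

    def write(self, text):
        written = 0
        while written < len(text):
            step = min(len(text) - written, self.__chunk_size)
            self.__wrapped.write(text[written:written + step])
            written += step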
- self.__wrapped = wrapped - - def __getattr__(self, name): - return getattr(self.__wrapped, name) - - def write(self, text): - total_to_write = len(text) - written = 0 - - while written < total_to_write: - to_write = min(total_to_write - written, MAX_BYTES_WRITTEN) - self.__wrapped.write(text[written:written+to_write]) - written += to_write - - -_wrapped_std_streams = set() - - -def _wrap_std_stream(name): - # Python 2 & Windows 7 and below - if PY2 and sys.getwindowsversion()[:2] <= (6, 1) and name not in _wrapped_std_streams: - setattr(sys, name, WindowsChunkedWriter(getattr(sys, name))) - _wrapped_std_streams.add(name) - - -def _get_text_stdin(buffer_stream): - text_stream = _NonClosingTextIOWrapper( - io.BufferedReader(_WindowsConsoleReader(STDIN_HANDLE)), - 'utf-16-le', 'strict', line_buffering=True) - return ConsoleStream(text_stream, buffer_stream) - - -def _get_text_stdout(buffer_stream): - text_stream = _NonClosingTextIOWrapper( - io.BufferedWriter(_WindowsConsoleWriter(STDOUT_HANDLE)), - 'utf-16-le', 'strict', line_buffering=True) - return ConsoleStream(text_stream, buffer_stream) - - -def _get_text_stderr(buffer_stream): - text_stream = _NonClosingTextIOWrapper( - io.BufferedWriter(_WindowsConsoleWriter(STDERR_HANDLE)), - 'utf-16-le', 'strict', line_buffering=True) - return ConsoleStream(text_stream, buffer_stream) - - -if PY2: - def _hash_py_argv(): - return zlib.crc32('\x00'.join(sys.argv[1:])) - - _initial_argv_hash = _hash_py_argv() - - def _get_windows_argv(): - argc = c_int(0) - argv_unicode = CommandLineToArgvW(GetCommandLineW(), byref(argc)) - argv = [argv_unicode[i] for i in range(0, argc.value)] - - if not hasattr(sys, 'frozen'): - argv = argv[1:] - while len(argv) > 0: - arg = argv[0] - if not arg.startswith('-') or arg == '-': - break - argv = argv[1:] - if arg.startswith(('-c', '-m')): - break - - return argv[1:] - - -_stream_factories = { - 0: _get_text_stdin, - 1: _get_text_stdout, - 2: _get_text_stderr, -} - - -def _get_windows_console_stream(f, encoding, errors): - if get_buffer is not None and \ - encoding in ('utf-16-le', None) \ - and errors in ('strict', None) and \ - hasattr(f, 'isatty') and f.isatty(): - func = _stream_factories.get(f.fileno()) - if func is not None: - if not PY2: - f = getattr(f, 'buffer', None) - if f is None: - return None - else: - # If we are on Python 2 we need to set the stream that we - # deal with to binary mode as otherwise the exercise if a - # bit moot. The same problems apply as for - # get_binary_stdin and friends from _compat. 
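# A short sketch of the dispatch idea in _get_windows_console_stream: map
# well-known file descriptors to stream factories and only special-case a
# stream when it really is an interactive console. `pick_stream_factory`
# and the example mapping are illustrative names, not Click's API.
import sys

def pick_stream_factory(f, factories):
    if not (hasattr(f, 'isatty') and f.isatty()):
        return None  # not an interactive console; leave the stream alone
    try:
        return factories.get(f.fileno())
    except (AttributeError, OSError):
        return None

# Example: returns the factory registered for fd 1 only when stdout is a tty.
factory = pick_stream_factory(sys.stdout, {1: lambda s: s})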
- msvcrt.setmode(f.fileno(), os.O_BINARY) - return func(f) diff --git a/venv/lib/python3.6/site-packages/click/core.py b/venv/lib/python3.6/site-packages/click/core.py deleted file mode 100644 index 7a1e342..0000000 --- a/venv/lib/python3.6/site-packages/click/core.py +++ /dev/null @@ -1,1856 +0,0 @@ -import errno -import inspect -import os -import sys -from contextlib import contextmanager -from itertools import repeat -from functools import update_wrapper - -from .types import convert_type, IntRange, BOOL -from .utils import PacifyFlushWrapper, make_str, make_default_short_help, \ - echo, get_os_args -from .exceptions import ClickException, UsageError, BadParameter, Abort, \ - MissingParameter, Exit -from .termui import prompt, confirm, style -from .formatting import HelpFormatter, join_options -from .parser import OptionParser, split_opt -from .globals import push_context, pop_context - -from ._compat import PY2, isidentifier, iteritems, string_types -from ._unicodefun import _check_for_unicode_literals, _verify_python3_env - - -_missing = object() - - -SUBCOMMAND_METAVAR = 'COMMAND [ARGS]...' -SUBCOMMANDS_METAVAR = 'COMMAND1 [ARGS]... [COMMAND2 [ARGS]...]...' - -DEPRECATED_HELP_NOTICE = ' (DEPRECATED)' -DEPRECATED_INVOKE_NOTICE = 'DeprecationWarning: ' + \ - 'The command %(name)s is deprecated.' - - -def _maybe_show_deprecated_notice(cmd): - if cmd.deprecated: - echo(style(DEPRECATED_INVOKE_NOTICE % {'name': cmd.name}, fg='red'), err=True) - - -def fast_exit(code): - """Exit without garbage collection, this speeds up exit by about 10ms for - things like bash completion. - """ - sys.stdout.flush() - sys.stderr.flush() - os._exit(code) - - -def _bashcomplete(cmd, prog_name, complete_var=None): - """Internal handler for the bash completion support.""" - if complete_var is None: - complete_var = '_%s_COMPLETE' % (prog_name.replace('-', '_')).upper() - complete_instr = os.environ.get(complete_var) - if not complete_instr: - return - - from ._bashcomplete import bashcomplete - if bashcomplete(cmd, prog_name, complete_var, complete_instr): - fast_exit(1) - - -def _check_multicommand(base_command, cmd_name, cmd, register=False): - if not base_command.chain or not isinstance(cmd, MultiCommand): - return - if register: - hint = 'It is not possible to add multi commands as children to ' \ - 'another multi command that is in chain mode' - else: - hint = 'Found a multi command as subcommand to a multi command ' \ - 'that is in chain mode. This is not supported' - raise RuntimeError('%s. Command "%s" is set to chain and "%s" was ' - 'added as subcommand but it in itself is a ' - 'multi command. ("%s" is a %s within a chained ' - '%s named "%s").' % ( - hint, base_command.name, cmd_name, - cmd_name, cmd.__class__.__name__, - base_command.__class__.__name__, - base_command.name)) - - -def batch(iterable, batch_size): - return list(zip(*repeat(iter(iterable), batch_size))) - - -def invoke_param_callback(callback, ctx, param, value): - code = getattr(callback, '__code__', None) - args = getattr(code, 'co_argcount', 3) - - if args < 3: - # This will become a warning in Click 3.0: - from warnings import warn - warn(Warning('Invoked legacy parameter callback "%s". The new ' - 'signature for such callbacks starting with ' - 'click 2.0 is (ctx, param, value).' - % callback), stacklevel=3) - return callback(ctx, value) - return callback(ctx, param, value) - - -@contextmanager -def augment_usage_errors(ctx, param=None): - """Context manager that attaches extra information to exceptions that - fly. 
- """ - try: - yield - except BadParameter as e: - if e.ctx is None: - e.ctx = ctx - if param is not None and e.param is None: - e.param = param - raise - except UsageError as e: - if e.ctx is None: - e.ctx = ctx - raise - - -def iter_params_for_processing(invocation_order, declaration_order): - """Given a sequence of parameters in the order as should be considered - for processing and an iterable of parameters that exist, this returns - a list in the correct order as they should be processed. - """ - def sort_key(item): - try: - idx = invocation_order.index(item) - except ValueError: - idx = float('inf') - return (not item.is_eager, idx) - - return sorted(declaration_order, key=sort_key) - - -class Context(object): - """The context is a special internal object that holds state relevant - for the script execution at every single level. It's normally invisible - to commands unless they opt-in to getting access to it. - - The context is useful as it can pass internal objects around and can - control special execution features such as reading data from - environment variables. - - A context can be used as context manager in which case it will call - :meth:`close` on teardown. - - .. versionadded:: 2.0 - Added the `resilient_parsing`, `help_option_names`, - `token_normalize_func` parameters. - - .. versionadded:: 3.0 - Added the `allow_extra_args` and `allow_interspersed_args` - parameters. - - .. versionadded:: 4.0 - Added the `color`, `ignore_unknown_options`, and - `max_content_width` parameters. - - :param command: the command class for this context. - :param parent: the parent context. - :param info_name: the info name for this invocation. Generally this - is the most descriptive name for the script or - command. For the toplevel script it is usually - the name of the script, for commands below it it's - the name of the script. - :param obj: an arbitrary object of user data. - :param auto_envvar_prefix: the prefix to use for automatic environment - variables. If this is `None` then reading - from environment variables is disabled. This - does not affect manually set environment - variables which are always read. - :param default_map: a dictionary (like object) with default values - for parameters. - :param terminal_width: the width of the terminal. The default is - inherit from parent context. If no context - defines the terminal width then auto - detection will be applied. - :param max_content_width: the maximum width for content rendered by - Click (this currently only affects help - pages). This defaults to 80 characters if - not overridden. In other words: even if the - terminal is larger than that, Click will not - format things wider than 80 characters by - default. In addition to that, formatters might - add some safety mapping on the right. - :param resilient_parsing: if this flag is enabled then Click will - parse without any interactivity or callback - invocation. Default values will also be - ignored. This is useful for implementing - things such as completion support. - :param allow_extra_args: if this is set to `True` then extra arguments - at the end will not raise an error and will be - kept on the context. The default is to inherit - from the command. - :param allow_interspersed_args: if this is set to `False` then options - and arguments cannot be mixed. The - default is to inherit from the command. - :param ignore_unknown_options: instructs click to ignore options it does - not know and keeps them for later - processing. 
- :param help_option_names: optionally a list of strings that define how - the default help parameter is named. The - default is ``['--help']``. - :param token_normalize_func: an optional function that is used to - normalize tokens (options, choices, - etc.). This for instance can be used to - implement case insensitive behavior. - :param color: controls if the terminal supports ANSI colors or not. The - default is autodetection. This is only needed if ANSI - codes are used in texts that Click prints which is by - default not the case. This for instance would affect - help output. - """ - - def __init__(self, command, parent=None, info_name=None, obj=None, - auto_envvar_prefix=None, default_map=None, - terminal_width=None, max_content_width=None, - resilient_parsing=False, allow_extra_args=None, - allow_interspersed_args=None, - ignore_unknown_options=None, help_option_names=None, - token_normalize_func=None, color=None): - #: the parent context or `None` if none exists. - self.parent = parent - #: the :class:`Command` for this context. - self.command = command - #: the descriptive information name - self.info_name = info_name - #: the parsed parameters except if the value is hidden in which - #: case it's not remembered. - self.params = {} - #: the leftover arguments. - self.args = [] - #: protected arguments. These are arguments that are prepended - #: to `args` when certain parsing scenarios are encountered but - #: must be never propagated to another arguments. This is used - #: to implement nested parsing. - self.protected_args = [] - if obj is None and parent is not None: - obj = parent.obj - #: the user object stored. - self.obj = obj - self._meta = getattr(parent, 'meta', {}) - - #: A dictionary (-like object) with defaults for parameters. - if default_map is None \ - and parent is not None \ - and parent.default_map is not None: - default_map = parent.default_map.get(info_name) - self.default_map = default_map - - #: This flag indicates if a subcommand is going to be executed. A - #: group callback can use this information to figure out if it's - #: being executed directly or because the execution flow passes - #: onwards to a subcommand. By default it's None, but it can be - #: the name of the subcommand to execute. - #: - #: If chaining is enabled this will be set to ``'*'`` in case - #: any commands are executed. It is however not possible to - #: figure out which ones. If you require this knowledge you - #: should use a :func:`resultcallback`. - self.invoked_subcommand = None - - if terminal_width is None and parent is not None: - terminal_width = parent.terminal_width - #: The width of the terminal (None is autodetection). - self.terminal_width = terminal_width - - if max_content_width is None and parent is not None: - max_content_width = parent.max_content_width - #: The maximum width of formatted content (None implies a sensible - #: default which is 80 for most things). - self.max_content_width = max_content_width - - if allow_extra_args is None: - allow_extra_args = command.allow_extra_args - #: Indicates if the context allows extra args or if it should - #: fail on parsing. - #: - #: .. versionadded:: 3.0 - self.allow_extra_args = allow_extra_args - - if allow_interspersed_args is None: - allow_interspersed_args = command.allow_interspersed_args - #: Indicates if the context allows mixing of arguments and - #: options or not. - #: - #: .. 
versionadded:: 3.0 - self.allow_interspersed_args = allow_interspersed_args - - if ignore_unknown_options is None: - ignore_unknown_options = command.ignore_unknown_options - #: Instructs click to ignore options that a command does not - #: understand and will store it on the context for later - #: processing. This is primarily useful for situations where you - #: want to call into external programs. Generally this pattern is - #: strongly discouraged because it's not possibly to losslessly - #: forward all arguments. - #: - #: .. versionadded:: 4.0 - self.ignore_unknown_options = ignore_unknown_options - - if help_option_names is None: - if parent is not None: - help_option_names = parent.help_option_names - else: - help_option_names = ['--help'] - - #: The names for the help options. - self.help_option_names = help_option_names - - if token_normalize_func is None and parent is not None: - token_normalize_func = parent.token_normalize_func - - #: An optional normalization function for tokens. This is - #: options, choices, commands etc. - self.token_normalize_func = token_normalize_func - - #: Indicates if resilient parsing is enabled. In that case Click - #: will do its best to not cause any failures and default values - #: will be ignored. Useful for completion. - self.resilient_parsing = resilient_parsing - - # If there is no envvar prefix yet, but the parent has one and - # the command on this level has a name, we can expand the envvar - # prefix automatically. - if auto_envvar_prefix is None: - if parent is not None \ - and parent.auto_envvar_prefix is not None and \ - self.info_name is not None: - auto_envvar_prefix = '%s_%s' % (parent.auto_envvar_prefix, - self.info_name.upper()) - else: - auto_envvar_prefix = auto_envvar_prefix.upper() - self.auto_envvar_prefix = auto_envvar_prefix - - if color is None and parent is not None: - color = parent.color - - #: Controls if styling output is wanted or not. - self.color = color - - self._close_callbacks = [] - self._depth = 0 - - def __enter__(self): - self._depth += 1 - push_context(self) - return self - - def __exit__(self, exc_type, exc_value, tb): - self._depth -= 1 - if self._depth == 0: - self.close() - pop_context() - - @contextmanager - def scope(self, cleanup=True): - """This helper method can be used with the context object to promote - it to the current thread local (see :func:`get_current_context`). - The default behavior of this is to invoke the cleanup functions which - can be disabled by setting `cleanup` to `False`. The cleanup - functions are typically used for things such as closing file handles. - - If the cleanup is intended the context object can also be directly - used as a context manager. - - Example usage:: - - with ctx.scope(): - assert get_current_context() is ctx - - This is equivalent:: - - with ctx: - assert get_current_context() is ctx - - .. versionadded:: 5.0 - - :param cleanup: controls if the cleanup functions should be run or - not. The default is to run these functions. In - some situations the context only wants to be - temporarily pushed in which case this can be disabled. - Nested pushes automatically defer the cleanup. - """ - if not cleanup: - self._depth += 1 - try: - with self as rv: - yield rv - finally: - if not cleanup: - self._depth -= 1 - - @property - def meta(self): - """This is a dictionary which is shared with all the contexts - that are nested. It exists so that click utilities can store some - state here if they need to. 
It is however the responsibility of - that code to manage this dictionary well. - - The keys are supposed to be unique dotted strings. For instance - module paths are a good choice for it. What is stored in there is - irrelevant for the operation of click. However what is important is - that code that places data here adheres to the general semantics of - the system. - - Example usage:: - - LANG_KEY = __name__ + '.lang' - - def set_language(value): - ctx = get_current_context() - ctx.meta[LANG_KEY] = value - - def get_language(): - return get_current_context().meta.get(LANG_KEY, 'en_US') - - .. versionadded:: 5.0 - """ - return self._meta - - def make_formatter(self): - """Creates the formatter for the help and usage output.""" - return HelpFormatter(width=self.terminal_width, - max_width=self.max_content_width) - - def call_on_close(self, f): - """This decorator remembers a function as callback that should be - executed when the context tears down. This is most useful to bind - resource handling to the script execution. For instance, file objects - opened by the :class:`File` type will register their close callbacks - here. - - :param f: the function to execute on teardown. - """ - self._close_callbacks.append(f) - return f - - def close(self): - """Invokes all close callbacks.""" - for cb in self._close_callbacks: - cb() - self._close_callbacks = [] - - @property - def command_path(self): - """The computed command path. This is used for the ``usage`` - information on the help page. It's automatically created by - combining the info names of the chain of contexts to the root. - """ - rv = '' - if self.info_name is not None: - rv = self.info_name - if self.parent is not None: - rv = self.parent.command_path + ' ' + rv - return rv.lstrip() - - def find_root(self): - """Finds the outermost context.""" - node = self - while node.parent is not None: - node = node.parent - return node - - def find_object(self, object_type): - """Finds the closest object of a given type.""" - node = self - while node is not None: - if isinstance(node.obj, object_type): - return node.obj - node = node.parent - - def ensure_object(self, object_type): - """Like :meth:`find_object` but sets the innermost object to a - new instance of `object_type` if it does not exist. - """ - rv = self.find_object(object_type) - if rv is None: - self.obj = rv = object_type() - return rv - - def lookup_default(self, name): - """Looks up the default for a parameter name. This by default - looks into the :attr:`default_map` if available. - """ - if self.default_map is not None: - rv = self.default_map.get(name) - if callable(rv): - rv = rv() - return rv - - def fail(self, message): - """Aborts the execution of the program with a specific error - message. - - :param message: the error message to fail with. - """ - raise UsageError(message, self) - - def abort(self): - """Aborts the script.""" - raise Abort() - - def exit(self, code=0): - """Exits the application with a given exit code.""" - raise Exit(code) - - def get_usage(self): - """Helper method to get formatted usage string for the current - context and command. - """ - return self.command.get_usage(self) - - def get_help(self): - """Helper method to get formatted help page for the current - context and command. - """ - return self.command.get_help(self) - - def invoke(*args, **kwargs): - """Invokes a command callback in exactly the way it expects. There - are two ways to invoke this method: - - 1. 
the first argument can be a callback and all other arguments and - keyword arguments are forwarded directly to the function. - 2. the first argument is a click command object. In that case all - arguments are forwarded as well but proper click parameters - (options and click arguments) must be keyword arguments and Click - will fill in defaults. - - Note that before Click 3.2 keyword arguments were not properly filled - in against the intention of this code and no context was created. For - more information about this change and why it was done in a bugfix - release see :ref:`upgrade-to-3.2`. - """ - self, callback = args[:2] - ctx = self - - # It's also possible to invoke another command which might or - # might not have a callback. In that case we also fill - # in defaults and make a new context for this command. - if isinstance(callback, Command): - other_cmd = callback - callback = other_cmd.callback - ctx = Context(other_cmd, info_name=other_cmd.name, parent=self) - if callback is None: - raise TypeError('The given command does not have a ' - 'callback that can be invoked.') - - for param in other_cmd.params: - if param.name not in kwargs and param.expose_value: - kwargs[param.name] = param.get_default(ctx) - - args = args[2:] - with augment_usage_errors(self): - with ctx: - return callback(*args, **kwargs) - - def forward(*args, **kwargs): - """Similar to :meth:`invoke` but fills in default keyword - arguments from the current context if the other command expects - it. This cannot invoke callbacks directly, only other commands. - """ - self, cmd = args[:2] - - # It's also possible to invoke another command which might or - # might not have a callback. - if not isinstance(cmd, Command): - raise TypeError('Callback is not a command.') - - for param in self.params: - if param not in kwargs: - kwargs[param] = self.params[param] - - return self.invoke(cmd, **kwargs) - - -class BaseCommand(object): - """The base command implements the minimal API contract of commands. - Most code will never use this as it does not implement a lot of useful - functionality but it can act as the direct subclass of alternative - parsing methods that do not depend on the Click parser. - - For instance, this can be used to bridge Click and other systems like - argparse or docopt. - - Because base commands do not implement a lot of the API that other - parts of Click take for granted, they are not supported for all - operations. For instance, they cannot be used with the decorators - usually and they have no built-in callback system. - - .. versionchanged:: 2.0 - Added the `context_settings` parameter. - - :param name: the name of the command to use unless a group overrides it. - :param context_settings: an optional dictionary with defaults that are - passed to the context object. - """ - #: the default for the :attr:`Context.allow_extra_args` flag. - allow_extra_args = False - #: the default for the :attr:`Context.allow_interspersed_args` flag. - allow_interspersed_args = True - #: the default for the :attr:`Context.ignore_unknown_options` flag. - ignore_unknown_options = False - - def __init__(self, name, context_settings=None): - #: the name the command thinks it has. Upon registering a command - #: on a :class:`Group` the group will default the command name - #: with this information. You should instead use the - #: :class:`Context`\'s :attr:`~Context.info_name` attribute. 
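# A standalone usage sketch (not part of the deleted file) of the
# invoke/forward distinction documented above: forward() re-passes the
# current command's own parameter values, while invoke() fills in the other
# command's defaults for anything not given explicitly.
import click

@click.command()
@click.option('--count', default=1)
def repeat(count):
    click.echo('count=%d' % count)

@click.command()
@click.option('--count', default=1)
@click.pass_context
def twice(ctx, count):
    ctx.forward(repeat)              # reuses this command's --count value
    ctx.invoke(repeat, count=count)  # explicit keyword arguments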
- self.name = name - if context_settings is None: - context_settings = {} - #: an optional dictionary with defaults passed to the context. - self.context_settings = context_settings - - def get_usage(self, ctx): - raise NotImplementedError('Base commands cannot get usage') - - def get_help(self, ctx): - raise NotImplementedError('Base commands cannot get help') - - def make_context(self, info_name, args, parent=None, **extra): - """This function when given an info name and arguments will kick - off the parsing and create a new :class:`Context`. It does not - invoke the actual command callback though. - - :param info_name: the info name for this invokation. Generally this - is the most descriptive name for the script or - command. For the toplevel script it's usually - the name of the script, for commands below it it's - the name of the script. - :param args: the arguments to parse as list of strings. - :param parent: the parent context if available. - :param extra: extra keyword arguments forwarded to the context - constructor. - """ - for key, value in iteritems(self.context_settings): - if key not in extra: - extra[key] = value - ctx = Context(self, info_name=info_name, parent=parent, **extra) - with ctx.scope(cleanup=False): - self.parse_args(ctx, args) - return ctx - - def parse_args(self, ctx, args): - """Given a context and a list of arguments this creates the parser - and parses the arguments, then modifies the context as necessary. - This is automatically invoked by :meth:`make_context`. - """ - raise NotImplementedError('Base commands do not know how to parse ' - 'arguments.') - - def invoke(self, ctx): - """Given a context, this invokes the command. The default - implementation is raising a not implemented error. - """ - raise NotImplementedError('Base commands are not invokable by default') - - def main(self, args=None, prog_name=None, complete_var=None, - standalone_mode=True, **extra): - """This is the way to invoke a script with all the bells and - whistles as a command line application. This will always terminate - the application after a call. If this is not wanted, ``SystemExit`` - needs to be caught. - - This method is also available by directly calling the instance of - a :class:`Command`. - - .. versionadded:: 3.0 - Added the `standalone_mode` flag to control the standalone mode. - - :param args: the arguments that should be used for parsing. If not - provided, ``sys.argv[1:]`` is used. - :param prog_name: the program name that should be used. By default - the program name is constructed by taking the file - name from ``sys.argv[0]``. - :param complete_var: the environment variable that controls the - bash completion support. The default is - ``"__COMPLETE"`` with prog_name in - uppercase. - :param standalone_mode: the default behavior is to invoke the script - in standalone mode. Click will then - handle exceptions and convert them into - error messages and the function will never - return but shut down the interpreter. If - this is set to `False` they will be - propagated to the caller and the return - value of this function is the return value - of :meth:`invoke`. - :param extra: extra keyword arguments are forwarded to the context - constructor. See :class:`Context` for more information. - """ - # If we are in Python 3, we will verify that the environment is - # sane at this point or reject further execution to avoid a - # broken script. 
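# A standalone usage sketch (not part of the deleted file) of the
# `standalone_mode` behaviour described in main()'s docstring above: with
# standalone_mode=False Click propagates exceptions to the caller and
# returns the callback's return value instead of calling sys.exit().
import click

@click.command()
@click.option('--name', default='world')
def hello(name):
    click.echo('Hello %s!' % name)
    return 0

if __name__ == '__main__':
    rv = hello.main(args=['--name', 'click'], standalone_mode=False)
    print('callback returned:', rv)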
- if not PY2: - _verify_python3_env() - else: - _check_for_unicode_literals() - - if args is None: - args = get_os_args() - else: - args = list(args) - - if prog_name is None: - prog_name = make_str(os.path.basename( - sys.argv and sys.argv[0] or __file__)) - - # Hook for the Bash completion. This only activates if the Bash - # completion is actually enabled, otherwise this is quite a fast - # noop. - _bashcomplete(self, prog_name, complete_var) - - try: - try: - with self.make_context(prog_name, args, **extra) as ctx: - rv = self.invoke(ctx) - if not standalone_mode: - return rv - # it's not safe to `ctx.exit(rv)` here! - # note that `rv` may actually contain data like "1" which - # has obvious effects - # more subtle case: `rv=[None, None]` can come out of - # chained commands which all returned `None` -- so it's not - # even always obvious that `rv` indicates success/failure - # by its truthiness/falsiness - ctx.exit() - except (EOFError, KeyboardInterrupt): - echo(file=sys.stderr) - raise Abort() - except ClickException as e: - if not standalone_mode: - raise - e.show() - sys.exit(e.exit_code) - except IOError as e: - if e.errno == errno.EPIPE: - sys.stdout = PacifyFlushWrapper(sys.stdout) - sys.stderr = PacifyFlushWrapper(sys.stderr) - sys.exit(1) - else: - raise - except Exit as e: - if standalone_mode: - sys.exit(e.exit_code) - else: - # in non-standalone mode, return the exit code - # note that this is only reached if `self.invoke` above raises - # an Exit explicitly -- thus bypassing the check there which - # would return its result - # the results of non-standalone execution may therefore be - # somewhat ambiguous: if there are codepaths which lead to - # `ctx.exit(1)` and to `return 1`, the caller won't be able to - # tell the difference between the two - return e.exit_code - except Abort: - if not standalone_mode: - raise - echo('Aborted!', file=sys.stderr) - sys.exit(1) - - def __call__(self, *args, **kwargs): - """Alias for :meth:`main`.""" - return self.main(*args, **kwargs) - - -class Command(BaseCommand): - """Commands are the basic building block of command line interfaces in - Click. A basic command handles command line parsing and might dispatch - more parsing to commands nested below it. - - .. versionchanged:: 2.0 - Added the `context_settings` parameter. - - :param name: the name of the command to use unless a group overrides it. - :param context_settings: an optional dictionary with defaults that are - passed to the context object. - :param callback: the callback to invoke. This is optional. - :param params: the parameters to register with this command. This can - be either :class:`Option` or :class:`Argument` objects. - :param help: the help string to use for this command. - :param epilog: like the help string but it's printed at the end of the - help page after everything else. - :param short_help: the short help to use for this command. This is - shown on the command listing of the parent command. - :param add_help_option: by default each command registers a ``--help`` - option. This can be disabled by this parameter. - :param hidden: hide this command from help outputs. - - :param deprecated: issues a message indicating that - the command is deprecated. 
- """ - - def __init__(self, name, context_settings=None, callback=None, - params=None, help=None, epilog=None, short_help=None, - options_metavar='[OPTIONS]', add_help_option=True, - hidden=False, deprecated=False): - BaseCommand.__init__(self, name, context_settings) - #: the callback to execute when the command fires. This might be - #: `None` in which case nothing happens. - self.callback = callback - #: the list of parameters for this command in the order they - #: should show up in the help page and execute. Eager parameters - #: will automatically be handled before non eager ones. - self.params = params or [] - # if a form feed (page break) is found in the help text, truncate help - # text to the content preceding the first form feed - if help and '\f' in help: - help = help.split('\f', 1)[0] - self.help = help - self.epilog = epilog - self.options_metavar = options_metavar - self.short_help = short_help - self.add_help_option = add_help_option - self.hidden = hidden - self.deprecated = deprecated - - def get_usage(self, ctx): - formatter = ctx.make_formatter() - self.format_usage(ctx, formatter) - return formatter.getvalue().rstrip('\n') - - def get_params(self, ctx): - rv = self.params - help_option = self.get_help_option(ctx) - if help_option is not None: - rv = rv + [help_option] - return rv - - def format_usage(self, ctx, formatter): - """Writes the usage line into the formatter.""" - pieces = self.collect_usage_pieces(ctx) - formatter.write_usage(ctx.command_path, ' '.join(pieces)) - - def collect_usage_pieces(self, ctx): - """Returns all the pieces that go into the usage line and returns - it as a list of strings. - """ - rv = [self.options_metavar] - for param in self.get_params(ctx): - rv.extend(param.get_usage_pieces(ctx)) - return rv - - def get_help_option_names(self, ctx): - """Returns the names for the help option.""" - all_names = set(ctx.help_option_names) - for param in self.params: - all_names.difference_update(param.opts) - all_names.difference_update(param.secondary_opts) - return all_names - - def get_help_option(self, ctx): - """Returns the help option object.""" - help_options = self.get_help_option_names(ctx) - if not help_options or not self.add_help_option: - return - - def show_help(ctx, param, value): - if value and not ctx.resilient_parsing: - echo(ctx.get_help(), color=ctx.color) - ctx.exit() - return Option(help_options, is_flag=True, - is_eager=True, expose_value=False, - callback=show_help, - help='Show this message and exit.') - - def make_parser(self, ctx): - """Creates the underlying option parser for this command.""" - parser = OptionParser(ctx) - for param in self.get_params(ctx): - param.add_to_parser(parser, ctx) - return parser - - def get_help(self, ctx): - """Formats the help into a string and returns it. This creates a - formatter and will call into the following formatting methods: - """ - formatter = ctx.make_formatter() - self.format_help(ctx, formatter) - return formatter.getvalue().rstrip('\n') - - def get_short_help_str(self, limit=45): - """Gets short help for the command or makes it by shortening the long help string.""" - return self.short_help or self.help and make_default_short_help(self.help, limit) or '' - - def format_help(self, ctx, formatter): - """Writes the help into the formatter if it exists. 
- - This calls into the following methods: - - - :meth:`format_usage` - - :meth:`format_help_text` - - :meth:`format_options` - - :meth:`format_epilog` - """ - self.format_usage(ctx, formatter) - self.format_help_text(ctx, formatter) - self.format_options(ctx, formatter) - self.format_epilog(ctx, formatter) - - def format_help_text(self, ctx, formatter): - """Writes the help text to the formatter if it exists.""" - if self.help: - formatter.write_paragraph() - with formatter.indentation(): - help_text = self.help - if self.deprecated: - help_text += DEPRECATED_HELP_NOTICE - formatter.write_text(help_text) - elif self.deprecated: - formatter.write_paragraph() - with formatter.indentation(): - formatter.write_text(DEPRECATED_HELP_NOTICE) - - def format_options(self, ctx, formatter): - """Writes all the options into the formatter if they exist.""" - opts = [] - for param in self.get_params(ctx): - rv = param.get_help_record(ctx) - if rv is not None: - opts.append(rv) - - if opts: - with formatter.section('Options'): - formatter.write_dl(opts) - - def format_epilog(self, ctx, formatter): - """Writes the epilog into the formatter if it exists.""" - if self.epilog: - formatter.write_paragraph() - with formatter.indentation(): - formatter.write_text(self.epilog) - - def parse_args(self, ctx, args): - parser = self.make_parser(ctx) - opts, args, param_order = parser.parse_args(args=args) - - for param in iter_params_for_processing( - param_order, self.get_params(ctx)): - value, args = param.handle_parse_result(ctx, opts, args) - - if args and not ctx.allow_extra_args and not ctx.resilient_parsing: - ctx.fail('Got unexpected extra argument%s (%s)' - % (len(args) != 1 and 's' or '', - ' '.join(map(make_str, args)))) - - ctx.args = args - return args - - def invoke(self, ctx): - """Given a context, this invokes the attached callback (if it exists) - in the right way. - """ - _maybe_show_deprecated_notice(self) - if self.callback is not None: - return ctx.invoke(self.callback, **ctx.params) - - -class MultiCommand(Command): - """A multi command is the basic implementation of a command that - dispatches to subcommands. The most common version is the - :class:`Group`. - - :param invoke_without_command: this controls how the multi command itself - is invoked. By default it's only invoked - if a subcommand is provided. - :param no_args_is_help: this controls what happens if no arguments are - provided. This option is enabled by default if - `invoke_without_command` is disabled or disabled - if it's enabled. If enabled this will add - ``--help`` as argument if no arguments are - passed. - :param subcommand_metavar: the string that is used in the documentation - to indicate the subcommand place. - :param chain: if this is set to `True` chaining of multiple subcommands - is enabled. This restricts the form of commands in that - they cannot have optional arguments but it allows - multiple commands to be chained together. - :param result_callback: the result callback to attach to this multi - command. 
- """ - allow_extra_args = True - allow_interspersed_args = False - - def __init__(self, name=None, invoke_without_command=False, - no_args_is_help=None, subcommand_metavar=None, - chain=False, result_callback=None, **attrs): - Command.__init__(self, name, **attrs) - if no_args_is_help is None: - no_args_is_help = not invoke_without_command - self.no_args_is_help = no_args_is_help - self.invoke_without_command = invoke_without_command - if subcommand_metavar is None: - if chain: - subcommand_metavar = SUBCOMMANDS_METAVAR - else: - subcommand_metavar = SUBCOMMAND_METAVAR - self.subcommand_metavar = subcommand_metavar - self.chain = chain - #: The result callback that is stored. This can be set or - #: overridden with the :func:`resultcallback` decorator. - self.result_callback = result_callback - - if self.chain: - for param in self.params: - if isinstance(param, Argument) and not param.required: - raise RuntimeError('Multi commands in chain mode cannot ' - 'have optional arguments.') - - def collect_usage_pieces(self, ctx): - rv = Command.collect_usage_pieces(self, ctx) - rv.append(self.subcommand_metavar) - return rv - - def format_options(self, ctx, formatter): - Command.format_options(self, ctx, formatter) - self.format_commands(ctx, formatter) - - def resultcallback(self, replace=False): - """Adds a result callback to the chain command. By default if a - result callback is already registered this will chain them but - this can be disabled with the `replace` parameter. The result - callback is invoked with the return value of the subcommand - (or the list of return values from all subcommands if chaining - is enabled) as well as the parameters as they would be passed - to the main callback. - - Example:: - - @click.group() - @click.option('-i', '--input', default=23) - def cli(input): - return 42 - - @cli.resultcallback() - def process_result(result, input): - return result + input - - .. versionadded:: 3.0 - - :param replace: if set to `True` an already existing result - callback will be removed. - """ - def decorator(f): - old_callback = self.result_callback - if old_callback is None or replace: - self.result_callback = f - return f - def function(__value, *args, **kwargs): - return f(old_callback(__value, *args, **kwargs), - *args, **kwargs) - self.result_callback = rv = update_wrapper(function, f) - return rv - return decorator - - def format_commands(self, ctx, formatter): - """Extra format methods for multi methods that adds all the commands - after the options. - """ - commands = [] - for subcommand in self.list_commands(ctx): - cmd = self.get_command(ctx, subcommand) - # What is this, the tool lied about a command. 
Ignore it - if cmd is None: - continue - if cmd.hidden: - continue - - commands.append((subcommand, cmd)) - - # allow for 3 times the default spacing - if len(commands): - limit = formatter.width - 6 - max(len(cmd[0]) for cmd in commands) - - rows = [] - for subcommand, cmd in commands: - help = cmd.get_short_help_str(limit) - rows.append((subcommand, help)) - - if rows: - with formatter.section('Commands'): - formatter.write_dl(rows) - - def parse_args(self, ctx, args): - if not args and self.no_args_is_help and not ctx.resilient_parsing: - echo(ctx.get_help(), color=ctx.color) - ctx.exit() - - rest = Command.parse_args(self, ctx, args) - if self.chain: - ctx.protected_args = rest - ctx.args = [] - elif rest: - ctx.protected_args, ctx.args = rest[:1], rest[1:] - - return ctx.args - - def invoke(self, ctx): - def _process_result(value): - if self.result_callback is not None: - value = ctx.invoke(self.result_callback, value, - **ctx.params) - return value - - if not ctx.protected_args: - # If we are invoked without command the chain flag controls - # how this happens. If we are not in chain mode, the return - # value here is the return value of the command. - # If however we are in chain mode, the return value is the - # return value of the result processor invoked with an empty - # list (which means that no subcommand actually was executed). - if self.invoke_without_command: - if not self.chain: - return Command.invoke(self, ctx) - with ctx: - Command.invoke(self, ctx) - return _process_result([]) - ctx.fail('Missing command.') - - # Fetch args back out - args = ctx.protected_args + ctx.args - ctx.args = [] - ctx.protected_args = [] - - # If we're not in chain mode, we only allow the invocation of a - # single command but we also inform the current context about the - # name of the command to invoke. - if not self.chain: - # Make sure the context is entered so we do not clean up - # resources until the result processor has worked. - with ctx: - cmd_name, cmd, args = self.resolve_command(ctx, args) - ctx.invoked_subcommand = cmd_name - Command.invoke(self, ctx) - sub_ctx = cmd.make_context(cmd_name, args, parent=ctx) - with sub_ctx: - return _process_result(sub_ctx.command.invoke(sub_ctx)) - - # In chain mode we create the contexts step by step, but after the - # base command has been invoked. Because at that point we do not - # know the subcommands yet, the invoked subcommand attribute is - # set to ``*`` to inform the command that subcommands are executed - # but nothing else. - with ctx: - ctx.invoked_subcommand = args and '*' or None - Command.invoke(self, ctx) - - # Otherwise we make every single context and invoke them in a - # chain. In that case the return value to the result processor - # is the list of all invoked subcommand's results. - contexts = [] - while args: - cmd_name, cmd, args = self.resolve_command(ctx, args) - sub_ctx = cmd.make_context(cmd_name, args, parent=ctx, - allow_extra_args=True, - allow_interspersed_args=False) - contexts.append(sub_ctx) - args, sub_ctx.args = sub_ctx.args, [] - - rv = [] - for sub_ctx in contexts: - with sub_ctx: - rv.append(sub_ctx.command.invoke(sub_ctx)) - return _process_result(rv) - - def resolve_command(self, ctx, args): - cmd_name = make_str(args[0]) - original_cmd_name = cmd_name - - # Get the command - cmd = self.get_command(ctx, cmd_name) - - # If we can't find the command but there is a normalization - # function available, we try with that one. 
- if cmd is None and ctx.token_normalize_func is not None: - cmd_name = ctx.token_normalize_func(cmd_name) - cmd = self.get_command(ctx, cmd_name) - - # If we don't find the command we want to show an error message - # to the user that it was not provided. However, there is - # something else we should do: if the first argument looks like - # an option we want to kick off parsing again for arguments to - # resolve things like --help which now should go to the main - # place. - if cmd is None and not ctx.resilient_parsing: - if split_opt(cmd_name)[0]: - self.parse_args(ctx, ctx.args) - ctx.fail('No such command "%s".' % original_cmd_name) - - return cmd_name, cmd, args[1:] - - def get_command(self, ctx, cmd_name): - """Given a context and a command name, this returns a - :class:`Command` object if it exists or returns `None`. - """ - raise NotImplementedError() - - def list_commands(self, ctx): - """Returns a list of subcommand names in the order they should - appear. - """ - return [] - - -class Group(MultiCommand): - """A group allows a command to have subcommands attached. This is the - most common way to implement nesting in Click. - - :param commands: a dictionary of commands. - """ - - def __init__(self, name=None, commands=None, **attrs): - MultiCommand.__init__(self, name, **attrs) - #: the registered subcommands by their exported names. - self.commands = commands or {} - - def add_command(self, cmd, name=None): - """Registers another :class:`Command` with this group. If the name - is not provided, the name of the command is used. - """ - name = name or cmd.name - if name is None: - raise TypeError('Command has no name.') - _check_multicommand(self, name, cmd, register=True) - self.commands[name] = cmd - - def command(self, *args, **kwargs): - """A shortcut decorator for declaring and attaching a command to - the group. This takes the same arguments as :func:`command` but - immediately registers the created command with this instance by - calling into :meth:`add_command`. - """ - def decorator(f): - cmd = command(*args, **kwargs)(f) - self.add_command(cmd) - return cmd - return decorator - - def group(self, *args, **kwargs): - """A shortcut decorator for declaring and attaching a group to - the group. This takes the same arguments as :func:`group` but - immediately registers the created command with this instance by - calling into :meth:`add_command`. - """ - def decorator(f): - cmd = group(*args, **kwargs)(f) - self.add_command(cmd) - return cmd - return decorator - - def get_command(self, ctx, cmd_name): - return self.commands.get(cmd_name) - - def list_commands(self, ctx): - return sorted(self.commands) - - -class CommandCollection(MultiCommand): - """A command collection is a multi command that merges multiple multi - commands together into one. This is a straightforward implementation - that accepts a list of different multi commands as sources and - provides all the commands for each of them. - """ - - def __init__(self, name=None, sources=None, **attrs): - MultiCommand.__init__(self, name, **attrs) - #: The list of registered multi commands. 
- self.sources = sources or [] - - def add_source(self, multi_cmd): - """Adds a new multi command to the chain dispatcher.""" - self.sources.append(multi_cmd) - - def get_command(self, ctx, cmd_name): - for source in self.sources: - rv = source.get_command(ctx, cmd_name) - if rv is not None: - if self.chain: - _check_multicommand(self, cmd_name, rv) - return rv - - def list_commands(self, ctx): - rv = set() - for source in self.sources: - rv.update(source.list_commands(ctx)) - return sorted(rv) - - -class Parameter(object): - r"""A parameter to a command comes in two versions: they are either - :class:`Option`\s or :class:`Argument`\s. Other subclasses are currently - not supported by design as some of the internals for parsing are - intentionally not finalized. - - Some settings are supported by both options and arguments. - - .. versionchanged:: 2.0 - Changed signature for parameter callback to also be passed the - parameter. In Click 2.0, the old callback format will still work, - but it will raise a warning to give you change to migrate the - code easier. - - :param param_decls: the parameter declarations for this option or - argument. This is a list of flags or argument - names. - :param type: the type that should be used. Either a :class:`ParamType` - or a Python type. The later is converted into the former - automatically if supported. - :param required: controls if this is optional or not. - :param default: the default value if omitted. This can also be a callable, - in which case it's invoked when the default is needed - without any arguments. - :param callback: a callback that should be executed after the parameter - was matched. This is called as ``fn(ctx, param, - value)`` and needs to return the value. Before Click - 2.0, the signature was ``(ctx, value)``. - :param nargs: the number of arguments to match. If not ``1`` the return - value is a tuple instead of single value. The default for - nargs is ``1`` (except if the type is a tuple, then it's - the arity of the tuple). - :param metavar: how the value is represented in the help page. - :param expose_value: if this is `True` then the value is passed onwards - to the command callback and stored on the context, - otherwise it's skipped. - :param is_eager: eager values are processed before non eager ones. This - should not be set for arguments or it will inverse the - order of processing. - :param envvar: a string or list of strings that are environment variables - that should be checked. - """ - param_type_name = 'parameter' - - def __init__(self, param_decls=None, type=None, required=False, - default=None, callback=None, nargs=None, metavar=None, - expose_value=True, is_eager=False, envvar=None, - autocompletion=None): - self.name, self.opts, self.secondary_opts = \ - self._parse_decls(param_decls or (), expose_value) - - self.type = convert_type(type, default) - - # Default nargs to what the type tells us if we have that - # information available. - if nargs is None: - if self.type.is_composite: - nargs = self.type.arity - else: - nargs = 1 - - self.required = required - self.callback = callback - self.nargs = nargs - self.multiple = False - self.expose_value = expose_value - self.default = default - self.is_eager = is_eager - self.metavar = metavar - self.envvar = envvar - self.autocompletion = autocompletion - - @property - def human_readable_name(self): - """Returns the human readable name of this parameter. This is the - same as the name for options, but the metavar for arguments. 
- """ - return self.name - - def make_metavar(self): - if self.metavar is not None: - return self.metavar - metavar = self.type.get_metavar(self) - if metavar is None: - metavar = self.type.name.upper() - if self.nargs != 1: - metavar += '...' - return metavar - - def get_default(self, ctx): - """Given a context variable this calculates the default value.""" - # Otherwise go with the regular default. - if callable(self.default): - rv = self.default() - else: - rv = self.default - return self.type_cast_value(ctx, rv) - - def add_to_parser(self, parser, ctx): - pass - - def consume_value(self, ctx, opts): - value = opts.get(self.name) - if value is None: - value = self.value_from_envvar(ctx) - if value is None: - value = ctx.lookup_default(self.name) - return value - - def type_cast_value(self, ctx, value): - """Given a value this runs it properly through the type system. - This automatically handles things like `nargs` and `multiple` as - well as composite types. - """ - if self.type.is_composite: - if self.nargs <= 1: - raise TypeError('Attempted to invoke composite type ' - 'but nargs has been set to %s. This is ' - 'not supported; nargs needs to be set to ' - 'a fixed value > 1.' % self.nargs) - if self.multiple: - return tuple(self.type(x or (), self, ctx) for x in value or ()) - return self.type(value or (), self, ctx) - - def _convert(value, level): - if level == 0: - return self.type(value, self, ctx) - return tuple(_convert(x, level - 1) for x in value or ()) - return _convert(value, (self.nargs != 1) + bool(self.multiple)) - - def process_value(self, ctx, value): - """Given a value and context this runs the logic to convert the - value as necessary. - """ - # If the value we were given is None we do nothing. This way - # code that calls this can easily figure out if something was - # not provided. Otherwise it would be converted into an empty - # tuple for multiple invocations which is inconvenient. 
- if value is not None: - return self.type_cast_value(ctx, value) - - def value_is_missing(self, value): - if value is None: - return True - if (self.nargs != 1 or self.multiple) and value == (): - return True - return False - - def full_process_value(self, ctx, value): - value = self.process_value(ctx, value) - - if value is None and not ctx.resilient_parsing: - value = self.get_default(ctx) - - if self.required and self.value_is_missing(value): - raise MissingParameter(ctx=ctx, param=self) - - return value - - def resolve_envvar_value(self, ctx): - if self.envvar is None: - return - if isinstance(self.envvar, (tuple, list)): - for envvar in self.envvar: - rv = os.environ.get(envvar) - if rv is not None: - return rv - else: - return os.environ.get(self.envvar) - - def value_from_envvar(self, ctx): - rv = self.resolve_envvar_value(ctx) - if rv is not None and self.nargs != 1: - rv = self.type.split_envvar_value(rv) - return rv - - def handle_parse_result(self, ctx, opts, args): - with augment_usage_errors(ctx, param=self): - value = self.consume_value(ctx, opts) - try: - value = self.full_process_value(ctx, value) - except Exception: - if not ctx.resilient_parsing: - raise - value = None - if self.callback is not None: - try: - value = invoke_param_callback( - self.callback, ctx, self, value) - except Exception: - if not ctx.resilient_parsing: - raise - - if self.expose_value: - ctx.params[self.name] = value - return value, args - - def get_help_record(self, ctx): - pass - - def get_usage_pieces(self, ctx): - return [] - - def get_error_hint(self, ctx): - """Get a stringified version of the param for use in error messages to - indicate which param caused the error. - """ - hint_list = self.opts or [self.human_readable_name] - return ' / '.join('"%s"' % x for x in hint_list) - - -class Option(Parameter): - """Options are usually optional values on the command line and - have some extra features that arguments don't have. - - All other parameters are passed onwards to the parameter constructor. - - :param show_default: controls if the default value should be shown on the - help page. Normally, defaults are not shown. If this - value is a string, it shows the string instead of the - value. This is particularly useful for dynamic options. - :param show_envvar: controls if an environment variable should be shown on - the help page. Normally, environment variables - are not shown. - :param prompt: if set to `True` or a non empty string then the user will be - prompted for input. If set to `True` the prompt will be the - option name capitalized. - :param confirmation_prompt: if set then the value will need to be confirmed - if it was prompted for. - :param hide_input: if this is `True` then the input on the prompt will be - hidden from the user. This is useful for password - input. - :param is_flag: forces this option to act as a flag. The default is - auto detection. - :param flag_value: which value should be used for this flag if it's - enabled. This is set to a boolean automatically if - the option string contains a slash to mark two options. - :param multiple: if this is set to `True` then the argument is accepted - multiple times and recorded. This is similar to ``nargs`` - in how it works but supports arbitrary number of - arguments. - :param count: this flag makes an option increment an integer. - :param allow_from_autoenv: if this is enabled then the value of this - parameter will be pulled from an environment - variable in case a prefix is defined on the - context. 
- :param help: the help string. - :param hidden: hide this option from help outputs. - """ - param_type_name = 'option' - - def __init__(self, param_decls=None, show_default=False, - prompt=False, confirmation_prompt=False, - hide_input=False, is_flag=None, flag_value=None, - multiple=False, count=False, allow_from_autoenv=True, - type=None, help=None, hidden=False, show_choices=True, - show_envvar=False, **attrs): - default_is_missing = attrs.get('default', _missing) is _missing - Parameter.__init__(self, param_decls, type=type, **attrs) - - if prompt is True: - prompt_text = self.name.replace('_', ' ').capitalize() - elif prompt is False: - prompt_text = None - else: - prompt_text = prompt - self.prompt = prompt_text - self.confirmation_prompt = confirmation_prompt - self.hide_input = hide_input - self.hidden = hidden - - # Flags - if is_flag is None: - if flag_value is not None: - is_flag = True - else: - is_flag = bool(self.secondary_opts) - if is_flag and default_is_missing: - self.default = False - if flag_value is None: - flag_value = not self.default - self.is_flag = is_flag - self.flag_value = flag_value - if self.is_flag and isinstance(self.flag_value, bool) \ - and type is None: - self.type = BOOL - self.is_bool_flag = True - else: - self.is_bool_flag = False - - # Counting - self.count = count - if count: - if type is None: - self.type = IntRange(min=0) - if default_is_missing: - self.default = 0 - - self.multiple = multiple - self.allow_from_autoenv = allow_from_autoenv - self.help = help - self.show_default = show_default - self.show_choices = show_choices - self.show_envvar = show_envvar - - # Sanity check for stuff we don't support - if __debug__: - if self.nargs < 0: - raise TypeError('Options cannot have nargs < 0') - if self.prompt and self.is_flag and not self.is_bool_flag: - raise TypeError('Cannot prompt for flags that are not bools.') - if not self.is_bool_flag and self.secondary_opts: - raise TypeError('Got secondary option for non boolean flag.') - if self.is_bool_flag and self.hide_input \ - and self.prompt is not None: - raise TypeError('Hidden input does not work with boolean ' - 'flag prompts.') - if self.count: - if self.multiple: - raise TypeError('Options cannot be multiple and count ' - 'at the same time.') - elif self.is_flag: - raise TypeError('Options cannot be count and flags at ' - 'the same time.') - - def _parse_decls(self, decls, expose_value): - opts = [] - secondary_opts = [] - name = None - possible_names = [] - - for decl in decls: - if isidentifier(decl): - if name is not None: - raise TypeError('Name defined twice') - name = decl - else: - split_char = decl[:1] == '/' and ';' or '/' - if split_char in decl: - first, second = decl.split(split_char, 1) - first = first.rstrip() - if first: - possible_names.append(split_opt(first)) - opts.append(first) - second = second.lstrip() - if second: - secondary_opts.append(second.lstrip()) - else: - possible_names.append(split_opt(decl)) - opts.append(decl) - - if name is None and possible_names: - possible_names.sort(key=lambda x: -len(x[0])) # group long options first - name = possible_names[0][1].replace('-', '_').lower() - if not isidentifier(name): - name = None - - if name is None: - if not expose_value: - return None, opts, secondary_opts - raise TypeError('Could not determine name for option') - - if not opts and not secondary_opts: - raise TypeError('No options defined but a name was passed (%s). ' - 'Did you mean to declare an argument instead ' - 'of an option?' 
% name) - - return name, opts, secondary_opts - - def add_to_parser(self, parser, ctx): - kwargs = { - 'dest': self.name, - 'nargs': self.nargs, - 'obj': self, - } - - if self.multiple: - action = 'append' - elif self.count: - action = 'count' - else: - action = 'store' - - if self.is_flag: - kwargs.pop('nargs', None) - if self.is_bool_flag and self.secondary_opts: - parser.add_option(self.opts, action=action + '_const', - const=True, **kwargs) - parser.add_option(self.secondary_opts, action=action + - '_const', const=False, **kwargs) - else: - parser.add_option(self.opts, action=action + '_const', - const=self.flag_value, - **kwargs) - else: - kwargs['action'] = action - parser.add_option(self.opts, **kwargs) - - def get_help_record(self, ctx): - if self.hidden: - return - any_prefix_is_slash = [] - - def _write_opts(opts): - rv, any_slashes = join_options(opts) - if any_slashes: - any_prefix_is_slash[:] = [True] - if not self.is_flag and not self.count: - rv += ' ' + self.make_metavar() - return rv - - rv = [_write_opts(self.opts)] - if self.secondary_opts: - rv.append(_write_opts(self.secondary_opts)) - - help = self.help or '' - extra = [] - if self.show_envvar: - envvar = self.envvar - if envvar is None: - if self.allow_from_autoenv and \ - ctx.auto_envvar_prefix is not None: - envvar = '%s_%s' % (ctx.auto_envvar_prefix, self.name.upper()) - if envvar is not None: - extra.append('env var: %s' % ( - ', '.join('%s' % d for d in envvar) - if isinstance(envvar, (list, tuple)) - else envvar, )) - if self.default is not None and self.show_default: - if isinstance(self.show_default, string_types): - default_string = '({})'.format(self.show_default) - elif isinstance(self.default, (list, tuple)): - default_string = ', '.join('%s' % d for d in self.default) - elif inspect.isfunction(self.default): - default_string = "(dynamic)" - else: - default_string = self.default - extra.append('default: {}'.format(default_string)) - - if self.required: - extra.append('required') - if extra: - help = '%s[%s]' % (help and help + ' ' or '', '; '.join(extra)) - - return ((any_prefix_is_slash and '; ' or ' / ').join(rv), help) - - def get_default(self, ctx): - # If we're a non boolean flag out default is more complex because - # we need to look at all flags in the same group to figure out - # if we're the the default one in which case we return the flag - # value as default. - if self.is_flag and not self.is_bool_flag: - for param in ctx.command.params: - if param.name == self.name and param.default: - return param.flag_value - return None - return Parameter.get_default(self, ctx) - - def prompt_for_value(self, ctx): - """This is an alternative flow that can be activated in the full - value processing if a value does not exist. It will prompt the - user until a valid value exists and then returns the processed - value as result. - """ - # Calculate the default before prompting anything to be stable. - default = self.get_default(ctx) - - # If this is a prompt for a flag we need to handle this - # differently. 
- if self.is_bool_flag: - return confirm(self.prompt, default) - - return prompt(self.prompt, default=default, type=self.type, - hide_input=self.hide_input, show_choices=self.show_choices, - confirmation_prompt=self.confirmation_prompt, - value_proc=lambda x: self.process_value(ctx, x)) - - def resolve_envvar_value(self, ctx): - rv = Parameter.resolve_envvar_value(self, ctx) - if rv is not None: - return rv - if self.allow_from_autoenv and \ - ctx.auto_envvar_prefix is not None: - envvar = '%s_%s' % (ctx.auto_envvar_prefix, self.name.upper()) - return os.environ.get(envvar) - - def value_from_envvar(self, ctx): - rv = self.resolve_envvar_value(ctx) - if rv is None: - return None - value_depth = (self.nargs != 1) + bool(self.multiple) - if value_depth > 0 and rv is not None: - rv = self.type.split_envvar_value(rv) - if self.multiple and self.nargs != 1: - rv = batch(rv, self.nargs) - return rv - - def full_process_value(self, ctx, value): - if value is None and self.prompt is not None \ - and not ctx.resilient_parsing: - return self.prompt_for_value(ctx) - return Parameter.full_process_value(self, ctx, value) - - -class Argument(Parameter): - """Arguments are positional parameters to a command. They generally - provide fewer features than options but can have infinite ``nargs`` - and are required by default. - - All parameters are passed onwards to the parameter constructor. - """ - param_type_name = 'argument' - - def __init__(self, param_decls, required=None, **attrs): - if required is None: - if attrs.get('default') is not None: - required = False - else: - required = attrs.get('nargs', 1) > 0 - Parameter.__init__(self, param_decls, required=required, **attrs) - if self.default is not None and self.nargs < 0: - raise TypeError('nargs=-1 in combination with a default value ' - 'is not supported.') - - @property - def human_readable_name(self): - if self.metavar is not None: - return self.metavar - return self.name.upper() - - def make_metavar(self): - if self.metavar is not None: - return self.metavar - var = self.type.get_metavar(self) - if not var: - var = self.name.upper() - if not self.required: - var = '[%s]' % var - if self.nargs != 1: - var += '...' - return var - - def _parse_decls(self, decls, expose_value): - if not decls: - if not expose_value: - return None, [], [] - raise TypeError('Could not determine name for argument') - if len(decls) == 1: - name = arg = decls[0] - name = name.replace('-', '_').lower() - else: - raise TypeError('Arguments take exactly one ' - 'parameter declaration, got %d' % len(decls)) - return name, [arg], [] - - def get_usage_pieces(self, ctx): - return [self.make_metavar()] - - def get_error_hint(self, ctx): - return '"%s"' % self.make_metavar() - - def add_to_parser(self, parser, ctx): - parser.add_argument(dest=self.name, nargs=self.nargs, - obj=self) - - -# Circular dependency between decorators and core -from .decorators import command, group diff --git a/venv/lib/python3.6/site-packages/click/decorators.py b/venv/lib/python3.6/site-packages/click/decorators.py deleted file mode 100644 index c57c530..0000000 --- a/venv/lib/python3.6/site-packages/click/decorators.py +++ /dev/null @@ -1,311 +0,0 @@ -import sys -import inspect - -from functools import update_wrapper - -from ._compat import iteritems -from ._unicodefun import _check_for_unicode_literals -from .utils import echo -from .globals import get_current_context - - -def pass_context(f): - """Marks a callback as wanting to receive the current context - object as first argument. 
- """ - def new_func(*args, **kwargs): - return f(get_current_context(), *args, **kwargs) - return update_wrapper(new_func, f) - - -def pass_obj(f): - """Similar to :func:`pass_context`, but only pass the object on the - context onwards (:attr:`Context.obj`). This is useful if that object - represents the state of a nested system. - """ - def new_func(*args, **kwargs): - return f(get_current_context().obj, *args, **kwargs) - return update_wrapper(new_func, f) - - -def make_pass_decorator(object_type, ensure=False): - """Given an object type this creates a decorator that will work - similar to :func:`pass_obj` but instead of passing the object of the - current context, it will find the innermost context of type - :func:`object_type`. - - This generates a decorator that works roughly like this:: - - from functools import update_wrapper - - def decorator(f): - @pass_context - def new_func(ctx, *args, **kwargs): - obj = ctx.find_object(object_type) - return ctx.invoke(f, obj, *args, **kwargs) - return update_wrapper(new_func, f) - return decorator - - :param object_type: the type of the object to pass. - :param ensure: if set to `True`, a new object will be created and - remembered on the context if it's not there yet. - """ - def decorator(f): - def new_func(*args, **kwargs): - ctx = get_current_context() - if ensure: - obj = ctx.ensure_object(object_type) - else: - obj = ctx.find_object(object_type) - if obj is None: - raise RuntimeError('Managed to invoke callback without a ' - 'context object of type %r existing' - % object_type.__name__) - return ctx.invoke(f, obj, *args, **kwargs) - return update_wrapper(new_func, f) - return decorator - - -def _make_command(f, name, attrs, cls): - if isinstance(f, Command): - raise TypeError('Attempted to convert a callback into a ' - 'command twice.') - try: - params = f.__click_params__ - params.reverse() - del f.__click_params__ - except AttributeError: - params = [] - help = attrs.get('help') - if help is None: - help = inspect.getdoc(f) - if isinstance(help, bytes): - help = help.decode('utf-8') - else: - help = inspect.cleandoc(help) - attrs['help'] = help - _check_for_unicode_literals() - return cls(name=name or f.__name__.lower().replace('_', '-'), - callback=f, params=params, **attrs) - - -def command(name=None, cls=None, **attrs): - r"""Creates a new :class:`Command` and uses the decorated function as - callback. This will also automatically attach all decorated - :func:`option`\s and :func:`argument`\s as parameters to the command. - - The name of the command defaults to the name of the function. If you - want to change that, you can pass the intended name as the first - argument. - - All keyword arguments are forwarded to the underlying command class. - - Once decorated the function turns into a :class:`Command` instance - that can be invoked as a command line utility or be attached to a - command :class:`Group`. - - :param name: the name of the command. This defaults to the function - name with underscores replaced by dashes. - :param cls: the command class to instantiate. This defaults to - :class:`Command`. - """ - if cls is None: - cls = Command - def decorator(f): - cmd = _make_command(f, name, attrs, cls) - cmd.__doc__ = f.__doc__ - return cmd - return decorator - - -def group(name=None, **attrs): - """Creates a new :class:`Group` with a function as callback. This - works otherwise the same as :func:`command` just that the `cls` - parameter is set to :class:`Group`. 
- """ - attrs.setdefault('cls', Group) - return command(name, **attrs) - - -def _param_memo(f, param): - if isinstance(f, Command): - f.params.append(param) - else: - if not hasattr(f, '__click_params__'): - f.__click_params__ = [] - f.__click_params__.append(param) - - -def argument(*param_decls, **attrs): - """Attaches an argument to the command. All positional arguments are - passed as parameter declarations to :class:`Argument`; all keyword - arguments are forwarded unchanged (except ``cls``). - This is equivalent to creating an :class:`Argument` instance manually - and attaching it to the :attr:`Command.params` list. - - :param cls: the argument class to instantiate. This defaults to - :class:`Argument`. - """ - def decorator(f): - ArgumentClass = attrs.pop('cls', Argument) - _param_memo(f, ArgumentClass(param_decls, **attrs)) - return f - return decorator - - -def option(*param_decls, **attrs): - """Attaches an option to the command. All positional arguments are - passed as parameter declarations to :class:`Option`; all keyword - arguments are forwarded unchanged (except ``cls``). - This is equivalent to creating an :class:`Option` instance manually - and attaching it to the :attr:`Command.params` list. - - :param cls: the option class to instantiate. This defaults to - :class:`Option`. - """ - def decorator(f): - # Issue 926, copy attrs, so pre-defined options can re-use the same cls= - option_attrs = attrs.copy() - - if 'help' in option_attrs: - option_attrs['help'] = inspect.cleandoc(option_attrs['help']) - OptionClass = option_attrs.pop('cls', Option) - _param_memo(f, OptionClass(param_decls, **option_attrs)) - return f - return decorator - - -def confirmation_option(*param_decls, **attrs): - """Shortcut for confirmation prompts that can be ignored by passing - ``--yes`` as parameter. - - This is equivalent to decorating a function with :func:`option` with - the following parameters:: - - def callback(ctx, param, value): - if not value: - ctx.abort() - - @click.command() - @click.option('--yes', is_flag=True, callback=callback, - expose_value=False, prompt='Do you want to continue?') - def dropdb(): - pass - """ - def decorator(f): - def callback(ctx, param, value): - if not value: - ctx.abort() - attrs.setdefault('is_flag', True) - attrs.setdefault('callback', callback) - attrs.setdefault('expose_value', False) - attrs.setdefault('prompt', 'Do you want to continue?') - attrs.setdefault('help', 'Confirm the action without prompting.') - return option(*(param_decls or ('--yes',)), **attrs)(f) - return decorator - - -def password_option(*param_decls, **attrs): - """Shortcut for password prompts. - - This is equivalent to decorating a function with :func:`option` with - the following parameters:: - - @click.command() - @click.option('--password', prompt=True, confirmation_prompt=True, - hide_input=True) - def changeadmin(password): - pass - """ - def decorator(f): - attrs.setdefault('prompt', True) - attrs.setdefault('confirmation_prompt', True) - attrs.setdefault('hide_input', True) - return option(*(param_decls or ('--password',)), **attrs)(f) - return decorator - - -def version_option(version=None, *param_decls, **attrs): - """Adds a ``--version`` option which immediately ends the program - printing out the version number. This is implemented as an eager - option that prints the version and exits the program in the callback. - - :param version: the version number to show. If not provided Click - attempts an auto discovery via setuptools. 
- :param prog_name: the name of the program (defaults to autodetection) - :param message: custom message to show instead of the default - (``'%(prog)s, version %(version)s'``) - :param others: everything else is forwarded to :func:`option`. - """ - if version is None: - if hasattr(sys, '_getframe'): - module = sys._getframe(1).f_globals.get('__name__') - else: - module = '' - - def decorator(f): - prog_name = attrs.pop('prog_name', None) - message = attrs.pop('message', '%(prog)s, version %(version)s') - - def callback(ctx, param, value): - if not value or ctx.resilient_parsing: - return - prog = prog_name - if prog is None: - prog = ctx.find_root().info_name - ver = version - if ver is None: - try: - import pkg_resources - except ImportError: - pass - else: - for dist in pkg_resources.working_set: - scripts = dist.get_entry_map().get('console_scripts') or {} - for script_name, entry_point in iteritems(scripts): - if entry_point.module_name == module: - ver = dist.version - break - if ver is None: - raise RuntimeError('Could not determine version') - echo(message % { - 'prog': prog, - 'version': ver, - }, color=ctx.color) - ctx.exit() - - attrs.setdefault('is_flag', True) - attrs.setdefault('expose_value', False) - attrs.setdefault('is_eager', True) - attrs.setdefault('help', 'Show the version and exit.') - attrs['callback'] = callback - return option(*(param_decls or ('--version',)), **attrs)(f) - return decorator - - -def help_option(*param_decls, **attrs): - """Adds a ``--help`` option which immediately ends the program - printing out the help page. This is usually unnecessary to add as - this is added by default to all commands unless suppressed. - - Like :func:`version_option`, this is implemented as eager option that - prints in the callback and exits. - - All arguments are forwarded to :func:`option`. 
- """ - def decorator(f): - def callback(ctx, param, value): - if value and not ctx.resilient_parsing: - echo(ctx.get_help(), color=ctx.color) - ctx.exit() - attrs.setdefault('is_flag', True) - attrs.setdefault('expose_value', False) - attrs.setdefault('help', 'Show this message and exit.') - attrs.setdefault('is_eager', True) - attrs['callback'] = callback - return option(*(param_decls or ('--help',)), **attrs)(f) - return decorator - - -# Circular dependencies between core and decorators -from .core import Command, Group, Argument, Option diff --git a/venv/lib/python3.6/site-packages/click/exceptions.py b/venv/lib/python3.6/site-packages/click/exceptions.py deleted file mode 100644 index 6fa1765..0000000 --- a/venv/lib/python3.6/site-packages/click/exceptions.py +++ /dev/null @@ -1,235 +0,0 @@ -from ._compat import PY2, filename_to_ui, get_text_stderr -from .utils import echo - - -def _join_param_hints(param_hint): - if isinstance(param_hint, (tuple, list)): - return ' / '.join('"%s"' % x for x in param_hint) - return param_hint - - -class ClickException(Exception): - """An exception that Click can handle and show to the user.""" - - #: The exit code for this exception - exit_code = 1 - - def __init__(self, message): - ctor_msg = message - if PY2: - if ctor_msg is not None: - ctor_msg = ctor_msg.encode('utf-8') - Exception.__init__(self, ctor_msg) - self.message = message - - def format_message(self): - return self.message - - def __str__(self): - return self.message - - if PY2: - __unicode__ = __str__ - - def __str__(self): - return self.message.encode('utf-8') - - def show(self, file=None): - if file is None: - file = get_text_stderr() - echo('Error: %s' % self.format_message(), file=file) - - -class UsageError(ClickException): - """An internal exception that signals a usage error. This typically - aborts any further handling. - - :param message: the error message to display. - :param ctx: optionally the context that caused this error. Click will - fill in the context automatically in some situations. - """ - exit_code = 2 - - def __init__(self, message, ctx=None): - ClickException.__init__(self, message) - self.ctx = ctx - self.cmd = self.ctx and self.ctx.command or None - - def show(self, file=None): - if file is None: - file = get_text_stderr() - color = None - hint = '' - if (self.cmd is not None and - self.cmd.get_help_option(self.ctx) is not None): - hint = ('Try "%s %s" for help.\n' - % (self.ctx.command_path, self.ctx.help_option_names[0])) - if self.ctx is not None: - color = self.ctx.color - echo(self.ctx.get_usage() + '\n%s' % hint, file=file, color=color) - echo('Error: %s' % self.format_message(), file=file, color=color) - - -class BadParameter(UsageError): - """An exception that formats out a standardized error message for a - bad parameter. This is useful when thrown from a callback or type as - Click will attach contextual information to it (for instance, which - parameter it is). - - .. versionadded:: 2.0 - - :param param: the parameter object that caused this error. This can - be left out, and Click will attach this info itself - if possible. - :param param_hint: a string that shows up as parameter name. This - can be used as alternative to `param` in cases - where custom validation should happen. If it is - a string it's used as such, if it's a list then - each item is quoted and separated. 
- """ - - def __init__(self, message, ctx=None, param=None, - param_hint=None): - UsageError.__init__(self, message, ctx) - self.param = param - self.param_hint = param_hint - - def format_message(self): - if self.param_hint is not None: - param_hint = self.param_hint - elif self.param is not None: - param_hint = self.param.get_error_hint(self.ctx) - else: - return 'Invalid value: %s' % self.message - param_hint = _join_param_hints(param_hint) - - return 'Invalid value for %s: %s' % (param_hint, self.message) - - -class MissingParameter(BadParameter): - """Raised if click required an option or argument but it was not - provided when invoking the script. - - .. versionadded:: 4.0 - - :param param_type: a string that indicates the type of the parameter. - The default is to inherit the parameter type from - the given `param`. Valid values are ``'parameter'``, - ``'option'`` or ``'argument'``. - """ - - def __init__(self, message=None, ctx=None, param=None, - param_hint=None, param_type=None): - BadParameter.__init__(self, message, ctx, param, param_hint) - self.param_type = param_type - - def format_message(self): - if self.param_hint is not None: - param_hint = self.param_hint - elif self.param is not None: - param_hint = self.param.get_error_hint(self.ctx) - else: - param_hint = None - param_hint = _join_param_hints(param_hint) - - param_type = self.param_type - if param_type is None and self.param is not None: - param_type = self.param.param_type_name - - msg = self.message - if self.param is not None: - msg_extra = self.param.type.get_missing_message(self.param) - if msg_extra: - if msg: - msg += '. ' + msg_extra - else: - msg = msg_extra - - return 'Missing %s%s%s%s' % ( - param_type, - param_hint and ' %s' % param_hint or '', - msg and '. ' or '.', - msg or '', - ) - - -class NoSuchOption(UsageError): - """Raised if click attempted to handle an option that does not - exist. - - .. versionadded:: 4.0 - """ - - def __init__(self, option_name, message=None, possibilities=None, - ctx=None): - if message is None: - message = 'no such option: %s' % option_name - UsageError.__init__(self, message, ctx) - self.option_name = option_name - self.possibilities = possibilities - - def format_message(self): - bits = [self.message] - if self.possibilities: - if len(self.possibilities) == 1: - bits.append('Did you mean %s?' % self.possibilities[0]) - else: - possibilities = sorted(self.possibilities) - bits.append('(Possible options: %s)' % ', '.join(possibilities)) - return ' '.join(bits) - - -class BadOptionUsage(UsageError): - """Raised if an option is generally supplied but the use of the option - was incorrect. This is for instance raised if the number of arguments - for an option is not correct. - - .. versionadded:: 4.0 - - :param option_name: the name of the option being used incorrectly. - """ - - def __init__(self, option_name, message, ctx=None): - UsageError.__init__(self, message, ctx) - self.option_name = option_name - - -class BadArgumentUsage(UsageError): - """Raised if an argument is generally supplied but the use of the argument - was incorrect. This is for instance raised if the number of values - for an argument is not correct. - - .. 
versionadded:: 6.0 - """ - - def __init__(self, message, ctx=None): - UsageError.__init__(self, message, ctx) - - -class FileError(ClickException): - """Raised if a file cannot be opened.""" - - def __init__(self, filename, hint=None): - ui_filename = filename_to_ui(filename) - if hint is None: - hint = 'unknown error' - ClickException.__init__(self, hint) - self.ui_filename = ui_filename - self.filename = filename - - def format_message(self): - return 'Could not open file %s: %s' % (self.ui_filename, self.message) - - -class Abort(RuntimeError): - """An internal signalling exception that signals Click to abort.""" - - -class Exit(RuntimeError): - """An exception that indicates that the application should exit with some - status code. - - :param code: the status code to exit with. - """ - def __init__(self, code=0): - self.exit_code = code diff --git a/venv/lib/python3.6/site-packages/click/formatting.py b/venv/lib/python3.6/site-packages/click/formatting.py deleted file mode 100644 index a3d6a4d..0000000 --- a/venv/lib/python3.6/site-packages/click/formatting.py +++ /dev/null @@ -1,256 +0,0 @@ -from contextlib import contextmanager -from .termui import get_terminal_size -from .parser import split_opt -from ._compat import term_len - - -# Can force a width. This is used by the test system -FORCED_WIDTH = None - - -def measure_table(rows): - widths = {} - for row in rows: - for idx, col in enumerate(row): - widths[idx] = max(widths.get(idx, 0), term_len(col)) - return tuple(y for x, y in sorted(widths.items())) - - -def iter_rows(rows, col_count): - for row in rows: - row = tuple(row) - yield row + ('',) * (col_count - len(row)) - - -def wrap_text(text, width=78, initial_indent='', subsequent_indent='', - preserve_paragraphs=False): - """A helper function that intelligently wraps text. By default, it - assumes that it operates on a single paragraph of text but if the - `preserve_paragraphs` parameter is provided it will intelligently - handle paragraphs (defined by two empty lines). - - If paragraphs are handled, a paragraph can be prefixed with an empty - line containing the ``\\b`` character (``\\x08``) to indicate that - no rewrapping should happen in that block. - - :param text: the text that should be rewrapped. - :param width: the maximum width for the text. - :param initial_indent: the initial indent that should be placed on the - first line as a string. - :param subsequent_indent: the indent string that should be placed on - each consecutive line. - :param preserve_paragraphs: if this flag is set then the wrapping will - intelligently handle paragraphs. 
- """ - from ._textwrap import TextWrapper - text = text.expandtabs() - wrapper = TextWrapper(width, initial_indent=initial_indent, - subsequent_indent=subsequent_indent, - replace_whitespace=False) - if not preserve_paragraphs: - return wrapper.fill(text) - - p = [] - buf = [] - indent = None - - def _flush_par(): - if not buf: - return - if buf[0].strip() == '\b': - p.append((indent or 0, True, '\n'.join(buf[1:]))) - else: - p.append((indent or 0, False, ' '.join(buf))) - del buf[:] - - for line in text.splitlines(): - if not line: - _flush_par() - indent = None - else: - if indent is None: - orig_len = term_len(line) - line = line.lstrip() - indent = orig_len - term_len(line) - buf.append(line) - _flush_par() - - rv = [] - for indent, raw, text in p: - with wrapper.extra_indent(' ' * indent): - if raw: - rv.append(wrapper.indent_only(text)) - else: - rv.append(wrapper.fill(text)) - - return '\n\n'.join(rv) - - -class HelpFormatter(object): - """This class helps with formatting text-based help pages. It's - usually just needed for very special internal cases, but it's also - exposed so that developers can write their own fancy outputs. - - At present, it always writes into memory. - - :param indent_increment: the additional increment for each level. - :param width: the width for the text. This defaults to the terminal - width clamped to a maximum of 78. - """ - - def __init__(self, indent_increment=2, width=None, max_width=None): - self.indent_increment = indent_increment - if max_width is None: - max_width = 80 - if width is None: - width = FORCED_WIDTH - if width is None: - width = max(min(get_terminal_size()[0], max_width) - 2, 50) - self.width = width - self.current_indent = 0 - self.buffer = [] - - def write(self, string): - """Writes a unicode string into the internal buffer.""" - self.buffer.append(string) - - def indent(self): - """Increases the indentation.""" - self.current_indent += self.indent_increment - - def dedent(self): - """Decreases the indentation.""" - self.current_indent -= self.indent_increment - - def write_usage(self, prog, args='', prefix='Usage: '): - """Writes a usage line into the buffer. - - :param prog: the program name. - :param args: whitespace separated list of arguments. - :param prefix: the prefix for the first line. - """ - usage_prefix = '%*s%s ' % (self.current_indent, prefix, prog) - text_width = self.width - self.current_indent - - if text_width >= (term_len(usage_prefix) + 20): - # The arguments will fit to the right of the prefix. - indent = ' ' * term_len(usage_prefix) - self.write(wrap_text(args, text_width, - initial_indent=usage_prefix, - subsequent_indent=indent)) - else: - # The prefix is too long, put the arguments on the next line. - self.write(usage_prefix) - self.write('\n') - indent = ' ' * (max(self.current_indent, term_len(prefix)) + 4) - self.write(wrap_text(args, text_width, - initial_indent=indent, - subsequent_indent=indent)) - - self.write('\n') - - def write_heading(self, heading): - """Writes a heading into the buffer.""" - self.write('%*s%s:\n' % (self.current_indent, '', heading)) - - def write_paragraph(self): - """Writes a paragraph into the buffer.""" - if self.buffer: - self.write('\n') - - def write_text(self, text): - """Writes re-indented text into the buffer. This rewraps and - preserves paragraphs. 
- """ - text_width = max(self.width - self.current_indent, 11) - indent = ' ' * self.current_indent - self.write(wrap_text(text, text_width, - initial_indent=indent, - subsequent_indent=indent, - preserve_paragraphs=True)) - self.write('\n') - - def write_dl(self, rows, col_max=30, col_spacing=2): - """Writes a definition list into the buffer. This is how options - and commands are usually formatted. - - :param rows: a list of two item tuples for the terms and values. - :param col_max: the maximum width of the first column. - :param col_spacing: the number of spaces between the first and - second column. - """ - rows = list(rows) - widths = measure_table(rows) - if len(widths) != 2: - raise TypeError('Expected two columns for definition list') - - first_col = min(widths[0], col_max) + col_spacing - - for first, second in iter_rows(rows, len(widths)): - self.write('%*s%s' % (self.current_indent, '', first)) - if not second: - self.write('\n') - continue - if term_len(first) <= first_col - col_spacing: - self.write(' ' * (first_col - term_len(first))) - else: - self.write('\n') - self.write(' ' * (first_col + self.current_indent)) - - text_width = max(self.width - first_col - 2, 10) - lines = iter(wrap_text(second, text_width).splitlines()) - if lines: - self.write(next(lines) + '\n') - for line in lines: - self.write('%*s%s\n' % ( - first_col + self.current_indent, '', line)) - else: - self.write('\n') - - @contextmanager - def section(self, name): - """Helpful context manager that writes a paragraph, a heading, - and the indents. - - :param name: the section name that is written as heading. - """ - self.write_paragraph() - self.write_heading(name) - self.indent() - try: - yield - finally: - self.dedent() - - @contextmanager - def indentation(self): - """A context manager that increases the indentation.""" - self.indent() - try: - yield - finally: - self.dedent() - - def getvalue(self): - """Returns the buffer contents.""" - return ''.join(self.buffer) - - -def join_options(options): - """Given a list of option strings this joins them in the most appropriate - way and returns them in the form ``(formatted_string, - any_prefix_is_slash)`` where the second item in the tuple is a flag that - indicates if any of the option prefixes was a slash. - """ - rv = [] - any_prefix_is_slash = False - for opt in options: - prefix = split_opt(opt)[0] - if prefix == '/': - any_prefix_is_slash = True - rv.append((len(prefix), opt)) - - rv.sort(key=lambda x: x[0]) - - rv = ', '.join(x[1] for x in rv) - return rv, any_prefix_is_slash diff --git a/venv/lib/python3.6/site-packages/click/globals.py b/venv/lib/python3.6/site-packages/click/globals.py deleted file mode 100644 index 843b594..0000000 --- a/venv/lib/python3.6/site-packages/click/globals.py +++ /dev/null @@ -1,48 +0,0 @@ -from threading import local - - -_local = local() - - -def get_current_context(silent=False): - """Returns the current click context. This can be used as a way to - access the current context object from anywhere. This is a more implicit - alternative to the :func:`pass_context` decorator. This function is - primarily useful for helpers such as :func:`echo` which might be - interested in changing its behavior based on the current context. - - To push the current context, :meth:`Context.scope` can be used. - - .. versionadded:: 5.0 - - :param silent: is set to `True` the return value is `None` if no context - is available. The default behavior is to raise a - :exc:`RuntimeError`. 
- """ - try: - return getattr(_local, 'stack')[-1] - except (AttributeError, IndexError): - if not silent: - raise RuntimeError('There is no active click context.') - - -def push_context(ctx): - """Pushes a new context to the current stack.""" - _local.__dict__.setdefault('stack', []).append(ctx) - - -def pop_context(): - """Removes the top level from the stack.""" - _local.stack.pop() - - -def resolve_color_default(color=None): - """"Internal helper to get the default value of the color flag. If a - value is passed it's returned unchanged, otherwise it's looked up from - the current context. - """ - if color is not None: - return color - ctx = get_current_context(silent=True) - if ctx is not None: - return ctx.color diff --git a/venv/lib/python3.6/site-packages/click/parser.py b/venv/lib/python3.6/site-packages/click/parser.py deleted file mode 100644 index 1c3ae9c..0000000 --- a/venv/lib/python3.6/site-packages/click/parser.py +++ /dev/null @@ -1,427 +0,0 @@ -# -*- coding: utf-8 -*- -""" -click.parser -~~~~~~~~~~~~ - -This module started out as largely a copy paste from the stdlib's -optparse module with the features removed that we do not need from -optparse because we implement them in Click on a higher level (for -instance type handling, help formatting and a lot more). - -The plan is to remove more and more from here over time. - -The reason this is a different module and not optparse from the stdlib -is that there are differences in 2.x and 3.x about the error messages -generated and optparse in the stdlib uses gettext for no good reason -and might cause us issues. -""" - -import re -from collections import deque -from .exceptions import UsageError, NoSuchOption, BadOptionUsage, \ - BadArgumentUsage - - -def _unpack_args(args, nargs_spec): - """Given an iterable of arguments and an iterable of nargs specifications, - it returns a tuple with all the unpacked arguments at the first index - and all remaining arguments as the second. - - The nargs specification is the number of arguments that should be consumed - or `-1` to indicate that this position should eat up all the remainders. - - Missing items are filled with `None`. - """ - args = deque(args) - nargs_spec = deque(nargs_spec) - rv = [] - spos = None - - def _fetch(c): - try: - if spos is None: - return c.popleft() - else: - return c.pop() - except IndexError: - return None - - while nargs_spec: - nargs = _fetch(nargs_spec) - if nargs == 1: - rv.append(_fetch(args)) - elif nargs > 1: - x = [_fetch(args) for _ in range(nargs)] - # If we're reversed, we're pulling in the arguments in reverse, - # so we need to turn them around. - if spos is not None: - x.reverse() - rv.append(tuple(x)) - elif nargs < 0: - if spos is not None: - raise TypeError('Cannot have two nargs < 0') - spos = len(rv) - rv.append(None) - - # spos is the position of the wildcard (star). If it's not `None`, - # we fill it with the remainder. 
- if spos is not None: - rv[spos] = tuple(args) - args = [] - rv[spos + 1:] = reversed(rv[spos + 1:]) - - return tuple(rv), list(args) - - -def _error_opt_args(nargs, opt): - if nargs == 1: - raise BadOptionUsage(opt, '%s option requires an argument' % opt) - raise BadOptionUsage(opt, '%s option requires %d arguments' % (opt, nargs)) - - -def split_opt(opt): - first = opt[:1] - if first.isalnum(): - return '', opt - if opt[1:2] == first: - return opt[:2], opt[2:] - return first, opt[1:] - - -def normalize_opt(opt, ctx): - if ctx is None or ctx.token_normalize_func is None: - return opt - prefix, opt = split_opt(opt) - return prefix + ctx.token_normalize_func(opt) - - -def split_arg_string(string): - """Given an argument string this attempts to split it into small parts.""" - rv = [] - for match in re.finditer(r"('([^'\\]*(?:\\.[^'\\]*)*)'" - r'|"([^"\\]*(?:\\.[^"\\]*)*)"' - r'|\S+)\s*', string, re.S): - arg = match.group().strip() - if arg[:1] == arg[-1:] and arg[:1] in '"\'': - arg = arg[1:-1].encode('ascii', 'backslashreplace') \ - .decode('unicode-escape') - try: - arg = type(string)(arg) - except UnicodeError: - pass - rv.append(arg) - return rv - - -class Option(object): - - def __init__(self, opts, dest, action=None, nargs=1, const=None, obj=None): - self._short_opts = [] - self._long_opts = [] - self.prefixes = set() - - for opt in opts: - prefix, value = split_opt(opt) - if not prefix: - raise ValueError('Invalid start character for option (%s)' - % opt) - self.prefixes.add(prefix[0]) - if len(prefix) == 1 and len(value) == 1: - self._short_opts.append(opt) - else: - self._long_opts.append(opt) - self.prefixes.add(prefix) - - if action is None: - action = 'store' - - self.dest = dest - self.action = action - self.nargs = nargs - self.const = const - self.obj = obj - - @property - def takes_value(self): - return self.action in ('store', 'append') - - def process(self, value, state): - if self.action == 'store': - state.opts[self.dest] = value - elif self.action == 'store_const': - state.opts[self.dest] = self.const - elif self.action == 'append': - state.opts.setdefault(self.dest, []).append(value) - elif self.action == 'append_const': - state.opts.setdefault(self.dest, []).append(self.const) - elif self.action == 'count': - state.opts[self.dest] = state.opts.get(self.dest, 0) + 1 - else: - raise ValueError('unknown action %r' % self.action) - state.order.append(self.obj) - - -class Argument(object): - - def __init__(self, dest, nargs=1, obj=None): - self.dest = dest - self.nargs = nargs - self.obj = obj - - def process(self, value, state): - if self.nargs > 1: - holes = sum(1 for x in value if x is None) - if holes == len(value): - value = None - elif holes != 0: - raise BadArgumentUsage('argument %s takes %d values' - % (self.dest, self.nargs)) - state.opts[self.dest] = value - state.order.append(self.obj) - - -class ParsingState(object): - - def __init__(self, rargs): - self.opts = {} - self.largs = [] - self.rargs = rargs - self.order = [] - - -class OptionParser(object): - """The option parser is an internal class that is ultimately used to - parse options and arguments. It's modelled after optparse and brings - a similar but vastly simplified API. It should generally not be used - directly as the high level Click classes wrap it for you. - - It's not nearly as extensible as optparse or argparse as it does not - implement features that are implemented on a higher level (such as - types or defaults). 
- - :param ctx: optionally the :class:`~click.Context` where this parser - should go with. - """ - - def __init__(self, ctx=None): - #: The :class:`~click.Context` for this parser. This might be - #: `None` for some advanced use cases. - self.ctx = ctx - #: This controls how the parser deals with interspersed arguments. - #: If this is set to `False`, the parser will stop on the first - #: non-option. Click uses this to implement nested subcommands - #: safely. - self.allow_interspersed_args = True - #: This tells the parser how to deal with unknown options. By - #: default it will error out (which is sensible), but there is a - #: second mode where it will ignore it and continue processing - #: after shifting all the unknown options into the resulting args. - self.ignore_unknown_options = False - if ctx is not None: - self.allow_interspersed_args = ctx.allow_interspersed_args - self.ignore_unknown_options = ctx.ignore_unknown_options - self._short_opt = {} - self._long_opt = {} - self._opt_prefixes = set(['-', '--']) - self._args = [] - - def add_option(self, opts, dest, action=None, nargs=1, const=None, - obj=None): - """Adds a new option named `dest` to the parser. The destination - is not inferred (unlike with optparse) and needs to be explicitly - provided. Action can be any of ``store``, ``store_const``, - ``append``, ``appnd_const`` or ``count``. - - The `obj` can be used to identify the option in the order list - that is returned from the parser. - """ - if obj is None: - obj = dest - opts = [normalize_opt(opt, self.ctx) for opt in opts] - option = Option(opts, dest, action=action, nargs=nargs, - const=const, obj=obj) - self._opt_prefixes.update(option.prefixes) - for opt in option._short_opts: - self._short_opt[opt] = option - for opt in option._long_opts: - self._long_opt[opt] = option - - def add_argument(self, dest, nargs=1, obj=None): - """Adds a positional argument named `dest` to the parser. - - The `obj` can be used to identify the option in the order list - that is returned from the parser. - """ - if obj is None: - obj = dest - self._args.append(Argument(dest=dest, nargs=nargs, obj=obj)) - - def parse_args(self, args): - """Parses positional arguments and returns ``(values, args, order)`` - for the parsed options and arguments as well as the leftover - arguments if there are any. The order is a list of objects as they - appear on the command line. If arguments appear multiple times they - will be memorized multiple times as well. - """ - state = ParsingState(args) - try: - self._process_args_for_options(state) - self._process_args_for_args(state) - except UsageError: - if self.ctx is None or not self.ctx.resilient_parsing: - raise - return state.opts, state.largs, state.order - - def _process_args_for_args(self, state): - pargs, args = _unpack_args(state.largs + state.rargs, - [x.nargs for x in self._args]) - - for idx, arg in enumerate(self._args): - arg.process(pargs[idx], state) - - state.largs = args - state.rargs = [] - - def _process_args_for_options(self, state): - while state.rargs: - arg = state.rargs.pop(0) - arglen = len(arg) - # Double dashes always handled explicitly regardless of what - # prefixes are valid. 
- if arg == '--': - return - elif arg[:1] in self._opt_prefixes and arglen > 1: - self._process_opts(arg, state) - elif self.allow_interspersed_args: - state.largs.append(arg) - else: - state.rargs.insert(0, arg) - return - - # Say this is the original argument list: - # [arg0, arg1, ..., arg(i-1), arg(i), arg(i+1), ..., arg(N-1)] - # ^ - # (we are about to process arg(i)). - # - # Then rargs is [arg(i), ..., arg(N-1)] and largs is a *subset* of - # [arg0, ..., arg(i-1)] (any options and their arguments will have - # been removed from largs). - # - # The while loop will usually consume 1 or more arguments per pass. - # If it consumes 1 (eg. arg is an option that takes no arguments), - # then after _process_arg() is done the situation is: - # - # largs = subset of [arg0, ..., arg(i)] - # rargs = [arg(i+1), ..., arg(N-1)] - # - # If allow_interspersed_args is false, largs will always be - # *empty* -- still a subset of [arg0, ..., arg(i-1)], but - # not a very interesting subset! - - def _match_long_opt(self, opt, explicit_value, state): - if opt not in self._long_opt: - possibilities = [word for word in self._long_opt - if word.startswith(opt)] - raise NoSuchOption(opt, possibilities=possibilities, ctx=self.ctx) - - option = self._long_opt[opt] - if option.takes_value: - # At this point it's safe to modify rargs by injecting the - # explicit value, because no exception is raised in this - # branch. This means that the inserted value will be fully - # consumed. - if explicit_value is not None: - state.rargs.insert(0, explicit_value) - - nargs = option.nargs - if len(state.rargs) < nargs: - _error_opt_args(nargs, opt) - elif nargs == 1: - value = state.rargs.pop(0) - else: - value = tuple(state.rargs[:nargs]) - del state.rargs[:nargs] - - elif explicit_value is not None: - raise BadOptionUsage(opt, '%s option does not take a value' % opt) - - else: - value = None - - option.process(value, state) - - def _match_short_opt(self, arg, state): - stop = False - i = 1 - prefix = arg[0] - unknown_options = [] - - for ch in arg[1:]: - opt = normalize_opt(prefix + ch, self.ctx) - option = self._short_opt.get(opt) - i += 1 - - if not option: - if self.ignore_unknown_options: - unknown_options.append(ch) - continue - raise NoSuchOption(opt, ctx=self.ctx) - if option.takes_value: - # Any characters left in arg? Pretend they're the - # next arg, and stop consuming characters of arg. - if i < len(arg): - state.rargs.insert(0, arg[i:]) - stop = True - - nargs = option.nargs - if len(state.rargs) < nargs: - _error_opt_args(nargs, opt) - elif nargs == 1: - value = state.rargs.pop(0) - else: - value = tuple(state.rargs[:nargs]) - del state.rargs[:nargs] - - else: - value = None - - option.process(value, state) - - if stop: - break - - # If we got any unknown options we re-combinate the string of the - # remaining options and re-attach the prefix, then report that - # to the state as new larg. This way there is basic combinatorics - # that can be achieved while still ignoring unknown arguments. - if self.ignore_unknown_options and unknown_options: - state.largs.append(prefix + ''.join(unknown_options)) - - def _process_opts(self, arg, state): - explicit_value = None - # Long option handling happens in two parts. The first part is - # supporting explicitly attached values. In any case, we will try - # to long match the option first. 
- if '=' in arg: - long_opt, explicit_value = arg.split('=', 1) - else: - long_opt = arg - norm_long_opt = normalize_opt(long_opt, self.ctx) - - # At this point we will match the (assumed) long option through - # the long option matching code. Note that this allows options - # like "-foo" to be matched as long options. - try: - self._match_long_opt(norm_long_opt, explicit_value, state) - except NoSuchOption: - # At this point the long option matching failed, and we need - # to try with short options. However there is a special rule - # which says, that if we have a two character options prefix - # (applies to "--foo" for instance), we do not dispatch to the - # short option code and will instead raise the no option - # error. - if arg[:2] not in self._opt_prefixes: - return self._match_short_opt(arg, state) - if not self.ignore_unknown_options: - raise - state.largs.append(arg) diff --git a/venv/lib/python3.6/site-packages/click/termui.py b/venv/lib/python3.6/site-packages/click/termui.py deleted file mode 100644 index bf9a3aa..0000000 --- a/venv/lib/python3.6/site-packages/click/termui.py +++ /dev/null @@ -1,606 +0,0 @@ -import os -import sys -import struct -import inspect -import itertools - -from ._compat import raw_input, text_type, string_types, \ - isatty, strip_ansi, get_winterm_size, DEFAULT_COLUMNS, WIN -from .utils import echo -from .exceptions import Abort, UsageError -from .types import convert_type, Choice, Path -from .globals import resolve_color_default - - -# The prompt functions to use. The doc tools currently override these -# functions to customize how they work. -visible_prompt_func = raw_input - -_ansi_colors = { - 'black': 30, - 'red': 31, - 'green': 32, - 'yellow': 33, - 'blue': 34, - 'magenta': 35, - 'cyan': 36, - 'white': 37, - 'reset': 39, - 'bright_black': 90, - 'bright_red': 91, - 'bright_green': 92, - 'bright_yellow': 93, - 'bright_blue': 94, - 'bright_magenta': 95, - 'bright_cyan': 96, - 'bright_white': 97, -} -_ansi_reset_all = '\033[0m' - - -def hidden_prompt_func(prompt): - import getpass - return getpass.getpass(prompt) - - -def _build_prompt(text, suffix, show_default=False, default=None, show_choices=True, type=None): - prompt = text - if type is not None and show_choices and isinstance(type, Choice): - prompt += ' (' + ", ".join(map(str, type.choices)) + ')' - if default is not None and show_default: - prompt = '%s [%s]' % (prompt, default) - return prompt + suffix - - -def prompt(text, default=None, hide_input=False, confirmation_prompt=False, - type=None, value_proc=None, prompt_suffix=': ', show_default=True, - err=False, show_choices=True): - """Prompts a user for input. This is a convenience function that can - be used to prompt a user for input later. - - If the user aborts the input by sending a interrupt signal, this - function will catch it and raise a :exc:`Abort` exception. - - .. versionadded:: 7.0 - Added the show_choices parameter. - - .. versionadded:: 6.0 - Added unicode support for cmd.exe on Windows. - - .. versionadded:: 4.0 - Added the `err` parameter. - - :param text: the text to show for the prompt. - :param default: the default value to use if no input happens. If this - is not given it will prompt until it's aborted. - :param hide_input: if this is set to true then the input value will - be hidden. - :param confirmation_prompt: asks for confirmation for the value. - :param type: the type to use to check the value against. 
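The prompt machinery above is exposed as ``click.prompt``; a minimal sketch with made-up prompt texts and defaults::

    import click

    name = click.prompt('Project name', default='demo')
    port = click.prompt('Port', type=int, default=8080)
    token = click.prompt('API token', hide_input=True, confirmation_prompt=True)
    click.echo('%s will listen on port %d' % (name, port))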
- :param value_proc: if this parameter is provided it's a function that - is invoked instead of the type conversion to - convert a value. - :param prompt_suffix: a suffix that should be added to the prompt. - :param show_default: shows or hides the default value in the prompt. - :param err: if set to true the file defaults to ``stderr`` instead of - ``stdout``, the same as with echo. - :param show_choices: Show or hide choices if the passed type is a Choice. - For example if type is a Choice of either day or week, - show_choices is true and text is "Group by" then the - prompt will be "Group by (day, week): ". - """ - result = None - - def prompt_func(text): - f = hide_input and hidden_prompt_func or visible_prompt_func - try: - # Write the prompt separately so that we get nice - # coloring through colorama on Windows - echo(text, nl=False, err=err) - return f('') - except (KeyboardInterrupt, EOFError): - # getpass doesn't print a newline if the user aborts input with ^C. - # Allegedly this behavior is inherited from getpass(3). - # A doc bug has been filed at https://bugs.python.org/issue24711 - if hide_input: - echo(None, err=err) - raise Abort() - - if value_proc is None: - value_proc = convert_type(type, default) - - prompt = _build_prompt(text, prompt_suffix, show_default, default, show_choices, type) - - while 1: - while 1: - value = prompt_func(prompt) - if value: - break - elif default is not None: - if isinstance(value_proc, Path): - # validate Path default value(exists, dir_okay etc.) - value = default - break - return default - try: - result = value_proc(value) - except UsageError as e: - echo('Error: %s' % e.message, err=err) - continue - if not confirmation_prompt: - return result - while 1: - value2 = prompt_func('Repeat for confirmation: ') - if value2: - break - if value == value2: - return result - echo('Error: the two entered values do not match', err=err) - - -def confirm(text, default=False, abort=False, prompt_suffix=': ', - show_default=True, err=False): - """Prompts for confirmation (yes/no question). - - If the user aborts the input by sending a interrupt signal this - function will catch it and raise a :exc:`Abort` exception. - - .. versionadded:: 4.0 - Added the `err` parameter. - - :param text: the question to ask. - :param default: the default for the prompt. - :param abort: if this is set to `True` a negative answer aborts the - exception by raising :exc:`Abort`. - :param prompt_suffix: a suffix that should be added to the prompt. - :param show_default: shows or hides the default value in the prompt. - :param err: if set to true the file defaults to ``stderr`` instead of - ``stdout``, the same as with echo. - """ - prompt = _build_prompt(text, prompt_suffix, show_default, - default and 'Y/n' or 'y/N') - while 1: - try: - # Write the prompt separately so that we get nice - # coloring through colorama on Windows - echo(prompt, nl=False, err=err) - value = visible_prompt_func('').lower().strip() - except (KeyboardInterrupt, EOFError): - raise Abort() - if value in ('y', 'yes'): - rv = True - elif value in ('n', 'no'): - rv = False - elif value == '': - rv = default - else: - echo('Error: invalid input', err=err) - continue - break - if abort and not rv: - raise Abort() - return rv - - -def get_terminal_size(): - """Returns the current size of the terminal as tuple in the form - ``(width, height)`` in columns and rows. 
- """ - # If shutil has get_terminal_size() (Python 3.3 and later) use that - if sys.version_info >= (3, 3): - import shutil - shutil_get_terminal_size = getattr(shutil, 'get_terminal_size', None) - if shutil_get_terminal_size: - sz = shutil_get_terminal_size() - return sz.columns, sz.lines - - # We provide a sensible default for get_winterm_size() when being invoked - # inside a subprocess. Without this, it would not provide a useful input. - if get_winterm_size is not None: - size = get_winterm_size() - if size == (0, 0): - return (79, 24) - else: - return size - - def ioctl_gwinsz(fd): - try: - import fcntl - import termios - cr = struct.unpack( - 'hh', fcntl.ioctl(fd, termios.TIOCGWINSZ, '1234')) - except Exception: - return - return cr - - cr = ioctl_gwinsz(0) or ioctl_gwinsz(1) or ioctl_gwinsz(2) - if not cr: - try: - fd = os.open(os.ctermid(), os.O_RDONLY) - try: - cr = ioctl_gwinsz(fd) - finally: - os.close(fd) - except Exception: - pass - if not cr or not cr[0] or not cr[1]: - cr = (os.environ.get('LINES', 25), - os.environ.get('COLUMNS', DEFAULT_COLUMNS)) - return int(cr[1]), int(cr[0]) - - -def echo_via_pager(text_or_generator, color=None): - """This function takes a text and shows it via an environment specific - pager on stdout. - - .. versionchanged:: 3.0 - Added the `color` flag. - - :param text_or_generator: the text to page, or alternatively, a - generator emitting the text to page. - :param color: controls if the pager supports ANSI colors or not. The - default is autodetection. - """ - color = resolve_color_default(color) - - if inspect.isgeneratorfunction(text_or_generator): - i = text_or_generator() - elif isinstance(text_or_generator, string_types): - i = [text_or_generator] - else: - i = iter(text_or_generator) - - # convert every element of i to a text type if necessary - text_generator = (el if isinstance(el, string_types) else text_type(el) - for el in i) - - from ._termui_impl import pager - return pager(itertools.chain(text_generator, "\n"), color) - - -def progressbar(iterable=None, length=None, label=None, show_eta=True, - show_percent=None, show_pos=False, - item_show_func=None, fill_char='#', empty_char='-', - bar_template='%(label)s [%(bar)s] %(info)s', - info_sep=' ', width=36, file=None, color=None): - """This function creates an iterable context manager that can be used - to iterate over something while showing a progress bar. It will - either iterate over the `iterable` or `length` items (that are counted - up). While iteration happens, this function will print a rendered - progress bar to the given `file` (defaults to stdout) and will attempt - to calculate remaining time and more. By default, this progress bar - will not be rendered if the file is not a terminal. - - The context manager creates the progress bar. When the context - manager is entered the progress bar is already displayed. With every - iteration over the progress bar, the iterable passed to the bar is - advanced and the bar is updated. When the context manager exits, - a newline is printed and the progress bar is finalized on screen. - - No printing must happen or the progress bar will be unintentionally - destroyed. - - Example usage:: - - with progressbar(items) as bar: - for item in bar: - do_something_with(item) - - Alternatively, if no iterable is specified, one can manually update the - progress bar through the `update()` method instead of directly - iterating over the progress bar. 
The update method accepts the number - of steps to increment the bar with:: - - with progressbar(length=chunks.total_bytes) as bar: - for chunk in chunks: - process_chunk(chunk) - bar.update(chunks.bytes) - - .. versionadded:: 2.0 - - .. versionadded:: 4.0 - Added the `color` parameter. Added a `update` method to the - progressbar object. - - :param iterable: an iterable to iterate over. If not provided the length - is required. - :param length: the number of items to iterate over. By default the - progressbar will attempt to ask the iterator about its - length, which might or might not work. If an iterable is - also provided this parameter can be used to override the - length. If an iterable is not provided the progress bar - will iterate over a range of that length. - :param label: the label to show next to the progress bar. - :param show_eta: enables or disables the estimated time display. This is - automatically disabled if the length cannot be - determined. - :param show_percent: enables or disables the percentage display. The - default is `True` if the iterable has a length or - `False` if not. - :param show_pos: enables or disables the absolute position display. The - default is `False`. - :param item_show_func: a function called with the current item which - can return a string to show the current item - next to the progress bar. Note that the current - item can be `None`! - :param fill_char: the character to use to show the filled part of the - progress bar. - :param empty_char: the character to use to show the non-filled part of - the progress bar. - :param bar_template: the format string to use as template for the bar. - The parameters in it are ``label`` for the label, - ``bar`` for the progress bar and ``info`` for the - info section. - :param info_sep: the separator between multiple info items (eta etc.) - :param width: the width of the progress bar in characters, 0 means full - terminal width - :param file: the file to write to. If this is not a terminal then - only the label is printed. - :param color: controls if the terminal supports ANSI colors or not. The - default is autodetection. This is only needed if ANSI - codes are included anywhere in the progress bar output - which is not the case by default. - """ - from ._termui_impl import ProgressBar - color = resolve_color_default(color) - return ProgressBar(iterable=iterable, length=length, show_eta=show_eta, - show_percent=show_percent, show_pos=show_pos, - item_show_func=item_show_func, fill_char=fill_char, - empty_char=empty_char, bar_template=bar_template, - info_sep=info_sep, file=file, label=label, - width=width, color=color) - - -def clear(): - """Clears the terminal screen. This will have the effect of clearing - the whole visible space of the terminal and moving the cursor to the - top left. This does not do anything if not connected to a terminal. - - .. versionadded:: 2.0 - """ - if not isatty(sys.stdout): - return - # If we're on Windows and we don't have colorama available, then we - # clear the screen by shelling out. Otherwise we can use an escape - # sequence. - if WIN: - os.system('cls') - else: - sys.stdout.write('\033[2J\033[1;1H') - - -def style(text, fg=None, bg=None, bold=None, dim=None, underline=None, - blink=None, reverse=None, reset=True): - """Styles a text with ANSI styles and returns the new string. By - default the styling is self contained which means that at the end - of the string a reset code is issued. This can be prevented by - passing ``reset=False``. 
- - Examples:: - - click.echo(click.style('Hello World!', fg='green')) - click.echo(click.style('ATTENTION!', blink=True)) - click.echo(click.style('Some things', reverse=True, fg='cyan')) - - Supported color names: - - * ``black`` (might be a gray) - * ``red`` - * ``green`` - * ``yellow`` (might be an orange) - * ``blue`` - * ``magenta`` - * ``cyan`` - * ``white`` (might be light gray) - * ``bright_black`` - * ``bright_red`` - * ``bright_green`` - * ``bright_yellow`` - * ``bright_blue`` - * ``bright_magenta`` - * ``bright_cyan`` - * ``bright_white`` - * ``reset`` (reset the color code only) - - .. versionadded:: 2.0 - - .. versionadded:: 7.0 - Added support for bright colors. - - :param text: the string to style with ansi codes. - :param fg: if provided this will become the foreground color. - :param bg: if provided this will become the background color. - :param bold: if provided this will enable or disable bold mode. - :param dim: if provided this will enable or disable dim mode. This is - badly supported. - :param underline: if provided this will enable or disable underline. - :param blink: if provided this will enable or disable blinking. - :param reverse: if provided this will enable or disable inverse - rendering (foreground becomes background and the - other way round). - :param reset: by default a reset-all code is added at the end of the - string which means that styles do not carry over. This - can be disabled to compose styles. - """ - bits = [] - if fg: - try: - bits.append('\033[%dm' % (_ansi_colors[fg])) - except KeyError: - raise TypeError('Unknown color %r' % fg) - if bg: - try: - bits.append('\033[%dm' % (_ansi_colors[bg] + 10)) - except KeyError: - raise TypeError('Unknown color %r' % bg) - if bold is not None: - bits.append('\033[%dm' % (1 if bold else 22)) - if dim is not None: - bits.append('\033[%dm' % (2 if dim else 22)) - if underline is not None: - bits.append('\033[%dm' % (4 if underline else 24)) - if blink is not None: - bits.append('\033[%dm' % (5 if blink else 25)) - if reverse is not None: - bits.append('\033[%dm' % (7 if reverse else 27)) - bits.append(text) - if reset: - bits.append(_ansi_reset_all) - return ''.join(bits) - - -def unstyle(text): - """Removes ANSI styling information from a string. Usually it's not - necessary to use this function as Click's echo function will - automatically remove styling if necessary. - - .. versionadded:: 2.0 - - :param text: the text to remove style information from. - """ - return strip_ansi(text) - - -def secho(message=None, file=None, nl=True, err=False, color=None, **styles): - """This function combines :func:`echo` and :func:`style` into one - call. As such the following two calls are the same:: - - click.secho('Hello World!', fg='green') - click.echo(click.style('Hello World!', fg='green')) - - All keyword arguments are forwarded to the underlying functions - depending on which one they go with. - - .. versionadded:: 2.0 - """ - if message is not None: - message = style(message, **styles) - return echo(message, file=file, nl=nl, err=err, color=color) - - -def edit(text=None, editor=None, env=None, require_save=True, - extension='.txt', filename=None): - r"""Edits the given text in the defined editor. If an editor is given - (should be the full path to the executable but the regular operating - system search path is used for finding the executable) it overrides - the detected editor. Optionally, some environment variables can be - used. If the editor is closed without changes, `None` is returned. 
In - case a file is edited directly the return value is always `None` and - `require_save` and `extension` are ignored. - - If the editor cannot be opened a :exc:`UsageError` is raised. - - Note for Windows: to simplify cross-platform usage, the newlines are - automatically converted from POSIX to Windows and vice versa. As such, - the message here will have ``\n`` as newline markers. - - :param text: the text to edit. - :param editor: optionally the editor to use. Defaults to automatic - detection. - :param env: environment variables to forward to the editor. - :param require_save: if this is true, then not saving in the editor - will make the return value become `None`. - :param extension: the extension to tell the editor about. This defaults - to `.txt` but changing this might change syntax - highlighting. - :param filename: if provided it will edit this file instead of the - provided text contents. It will not use a temporary - file as an indirection in that case. - """ - from ._termui_impl import Editor - editor = Editor(editor=editor, env=env, require_save=require_save, - extension=extension) - if filename is None: - return editor.edit(text) - editor.edit_file(filename) - - -def launch(url, wait=False, locate=False): - """This function launches the given URL (or filename) in the default - viewer application for this file type. If this is an executable, it - might launch the executable in a new session. The return value is - the exit code of the launched application. Usually, ``0`` indicates - success. - - Examples:: - - click.launch('https://click.palletsprojects.com/') - click.launch('/my/downloaded/file', locate=True) - - .. versionadded:: 2.0 - - :param url: URL or filename of the thing to launch. - :param wait: waits for the program to stop. - :param locate: if this is set to `True` then instead of launching the - application associated with the URL it will attempt to - launch a file manager with the file located. This - might have weird effects if the URL does not point to - the filesystem. - """ - from ._termui_impl import open_url - return open_url(url, wait=wait, locate=locate) - - -# If this is provided, getchar() calls into this instead. This is used -# for unittesting purposes. -_getchar = None - - -def getchar(echo=False): - """Fetches a single character from the terminal and returns it. This - will always return a unicode character and under certain rare - circumstances this might return more than one character. The - situations which more than one character is returned is when for - whatever reason multiple characters end up in the terminal buffer or - standard input was not actually a terminal. - - Note that this will always read from the terminal, even if something - is piped into the standard input. - - Note for Windows: in rare cases when typing non-ASCII characters, this - function might wait for a second character and then return both at once. - This is because certain Unicode characters look like special-key markers. - - .. versionadded:: 2.0 - - :param echo: if set to `True`, the character read will also show up on - the terminal. The default is to not show it. - """ - f = _getchar - if f is None: - from ._termui_impl import getchar as f - return f(echo) - - -def raw_terminal(): - from ._termui_impl import raw_terminal as f - return f() - - -def pause(info='Press any key to continue ...', err=False): - """This command stops execution and waits for the user to press any - key to continue. This is similar to the Windows batch "pause" - command. 
If the program is not run through a terminal, this command - will instead do nothing. - - .. versionadded:: 2.0 - - .. versionadded:: 4.0 - Added the `err` parameter. - - :param info: the info string to print before pausing. - :param err: if set to message goes to ``stderr`` instead of - ``stdout``, the same as with echo. - """ - if not isatty(sys.stdin) or not isatty(sys.stdout): - return - try: - if info: - echo(info, nl=False, err=err) - try: - getchar() - except (KeyboardInterrupt, EOFError): - pass - finally: - if info: - echo(err=err) diff --git a/venv/lib/python3.6/site-packages/click/testing.py b/venv/lib/python3.6/site-packages/click/testing.py deleted file mode 100644 index 1b2924e..0000000 --- a/venv/lib/python3.6/site-packages/click/testing.py +++ /dev/null @@ -1,374 +0,0 @@ -import os -import sys -import shutil -import tempfile -import contextlib -import shlex - -from ._compat import iteritems, PY2, string_types - - -# If someone wants to vendor click, we want to ensure the -# correct package is discovered. Ideally we could use a -# relative import here but unfortunately Python does not -# support that. -clickpkg = sys.modules[__name__.rsplit('.', 1)[0]] - - -if PY2: - from cStringIO import StringIO -else: - import io - from ._compat import _find_binary_reader - - -class EchoingStdin(object): - - def __init__(self, input, output): - self._input = input - self._output = output - - def __getattr__(self, x): - return getattr(self._input, x) - - def _echo(self, rv): - self._output.write(rv) - return rv - - def read(self, n=-1): - return self._echo(self._input.read(n)) - - def readline(self, n=-1): - return self._echo(self._input.readline(n)) - - def readlines(self): - return [self._echo(x) for x in self._input.readlines()] - - def __iter__(self): - return iter(self._echo(x) for x in self._input) - - def __repr__(self): - return repr(self._input) - - -def make_input_stream(input, charset): - # Is already an input stream. - if hasattr(input, 'read'): - if PY2: - return input - rv = _find_binary_reader(input) - if rv is not None: - return rv - raise TypeError('Could not find binary reader for input stream.') - - if input is None: - input = b'' - elif not isinstance(input, bytes): - input = input.encode(charset) - if PY2: - return StringIO(input) - return io.BytesIO(input) - - -class Result(object): - """Holds the captured result of an invoked CLI script.""" - - def __init__(self, runner, stdout_bytes, stderr_bytes, exit_code, - exception, exc_info=None): - #: The runner that created the result - self.runner = runner - #: The standard output as bytes. - self.stdout_bytes = stdout_bytes - #: The standard error as bytes, or False(y) if not available - self.stderr_bytes = stderr_bytes - #: The exit code as integer. - self.exit_code = exit_code - #: The exception that happened if one did. 
- self.exception = exception - #: The traceback - self.exc_info = exc_info - - @property - def output(self): - """The (standard) output as unicode string.""" - return self.stdout - - @property - def stdout(self): - """The standard output as unicode string.""" - return self.stdout_bytes.decode(self.runner.charset, 'replace') \ - .replace('\r\n', '\n') - - @property - def stderr(self): - """The standard error as unicode string.""" - if not self.stderr_bytes: - raise ValueError("stderr not separately captured") - return self.stderr_bytes.decode(self.runner.charset, 'replace') \ - .replace('\r\n', '\n') - - - def __repr__(self): - return '<%s %s>' % ( - type(self).__name__, - self.exception and repr(self.exception) or 'okay', - ) - - -class CliRunner(object): - """The CLI runner provides functionality to invoke a Click command line - script for unittesting purposes in a isolated environment. This only - works in single-threaded systems without any concurrency as it changes the - global interpreter state. - - :param charset: the character set for the input and output data. This is - UTF-8 by default and should not be changed currently as - the reporting to Click only works in Python 2 properly. - :param env: a dictionary with environment variables for overriding. - :param echo_stdin: if this is set to `True`, then reading from stdin writes - to stdout. This is useful for showing examples in - some circumstances. Note that regular prompts - will automatically echo the input. - :param mix_stderr: if this is set to `False`, then stdout and stderr are - preserved as independent streams. This is useful for - Unix-philosophy apps that have predictable stdout and - noisy stderr, such that each may be measured - independently - """ - - def __init__(self, charset=None, env=None, echo_stdin=False, - mix_stderr=True): - if charset is None: - charset = 'utf-8' - self.charset = charset - self.env = env or {} - self.echo_stdin = echo_stdin - self.mix_stderr = mix_stderr - - def get_default_prog_name(self, cli): - """Given a command object it will return the default program name - for it. The default is the `name` attribute or ``"root"`` if not - set. - """ - return cli.name or 'root' - - def make_env(self, overrides=None): - """Returns the environment overrides for invoking a script.""" - rv = dict(self.env) - if overrides: - rv.update(overrides) - return rv - - @contextlib.contextmanager - def isolation(self, input=None, env=None, color=False): - """A context manager that sets up the isolation for invoking of a - command line tool. This sets up stdin with the given input data - and `os.environ` with the overrides from the given dictionary. - This also rebinds some internals in Click to be mocked (like the - prompt functionality). - - This is automatically done in the :meth:`invoke` method. - - .. versionadded:: 4.0 - The ``color`` parameter was added. - - :param input: the input stream to put into sys.stdin. - :param env: the environment overrides as dictionary. - :param color: whether the output should contain color codes. The - application can still override this explicitly. 
- """ - input = make_input_stream(input, self.charset) - - old_stdin = sys.stdin - old_stdout = sys.stdout - old_stderr = sys.stderr - old_forced_width = clickpkg.formatting.FORCED_WIDTH - clickpkg.formatting.FORCED_WIDTH = 80 - - env = self.make_env(env) - - if PY2: - bytes_output = StringIO() - if self.echo_stdin: - input = EchoingStdin(input, bytes_output) - sys.stdout = bytes_output - if not self.mix_stderr: - bytes_error = StringIO() - sys.stderr = bytes_error - else: - bytes_output = io.BytesIO() - if self.echo_stdin: - input = EchoingStdin(input, bytes_output) - input = io.TextIOWrapper(input, encoding=self.charset) - sys.stdout = io.TextIOWrapper( - bytes_output, encoding=self.charset) - if not self.mix_stderr: - bytes_error = io.BytesIO() - sys.stderr = io.TextIOWrapper( - bytes_error, encoding=self.charset) - - if self.mix_stderr: - sys.stderr = sys.stdout - - sys.stdin = input - - def visible_input(prompt=None): - sys.stdout.write(prompt or '') - val = input.readline().rstrip('\r\n') - sys.stdout.write(val + '\n') - sys.stdout.flush() - return val - - def hidden_input(prompt=None): - sys.stdout.write((prompt or '') + '\n') - sys.stdout.flush() - return input.readline().rstrip('\r\n') - - def _getchar(echo): - char = sys.stdin.read(1) - if echo: - sys.stdout.write(char) - sys.stdout.flush() - return char - - default_color = color - - def should_strip_ansi(stream=None, color=None): - if color is None: - return not default_color - return not color - - old_visible_prompt_func = clickpkg.termui.visible_prompt_func - old_hidden_prompt_func = clickpkg.termui.hidden_prompt_func - old__getchar_func = clickpkg.termui._getchar - old_should_strip_ansi = clickpkg.utils.should_strip_ansi - clickpkg.termui.visible_prompt_func = visible_input - clickpkg.termui.hidden_prompt_func = hidden_input - clickpkg.termui._getchar = _getchar - clickpkg.utils.should_strip_ansi = should_strip_ansi - - old_env = {} - try: - for key, value in iteritems(env): - old_env[key] = os.environ.get(key) - if value is None: - try: - del os.environ[key] - except Exception: - pass - else: - os.environ[key] = value - yield (bytes_output, not self.mix_stderr and bytes_error) - finally: - for key, value in iteritems(old_env): - if value is None: - try: - del os.environ[key] - except Exception: - pass - else: - os.environ[key] = value - sys.stdout = old_stdout - sys.stderr = old_stderr - sys.stdin = old_stdin - clickpkg.termui.visible_prompt_func = old_visible_prompt_func - clickpkg.termui.hidden_prompt_func = old_hidden_prompt_func - clickpkg.termui._getchar = old__getchar_func - clickpkg.utils.should_strip_ansi = old_should_strip_ansi - clickpkg.formatting.FORCED_WIDTH = old_forced_width - - def invoke(self, cli, args=None, input=None, env=None, - catch_exceptions=True, color=False, mix_stderr=False, **extra): - """Invokes a command in an isolated environment. The arguments are - forwarded directly to the command line script, the `extra` keyword - arguments are passed to the :meth:`~clickpkg.Command.main` function of - the command. - - This returns a :class:`Result` object. - - .. versionadded:: 3.0 - The ``catch_exceptions`` parameter was added. - - .. versionchanged:: 3.0 - The result object now has an `exc_info` attribute with the - traceback if available. - - .. versionadded:: 4.0 - The ``color`` parameter was added. - - :param cli: the command to invoke - :param args: the arguments to invoke. It may be given as an iterable - or a string. When given as string it will be interpreted - as a Unix shell command. 
More details at - :func:`shlex.split`. - :param input: the input data for `sys.stdin`. - :param env: the environment overrides. - :param catch_exceptions: Whether to catch any other exceptions than - ``SystemExit``. - :param extra: the keyword arguments to pass to :meth:`main`. - :param color: whether the output should contain color codes. The - application can still override this explicitly. - """ - exc_info = None - with self.isolation(input=input, env=env, color=color) as outstreams: - exception = None - exit_code = 0 - - if isinstance(args, string_types): - args = shlex.split(args) - - try: - prog_name = extra.pop("prog_name") - except KeyError: - prog_name = self.get_default_prog_name(cli) - - try: - cli.main(args=args or (), prog_name=prog_name, **extra) - except SystemExit as e: - exc_info = sys.exc_info() - exit_code = e.code - if exit_code is None: - exit_code = 0 - - if exit_code != 0: - exception = e - - if not isinstance(exit_code, int): - sys.stdout.write(str(exit_code)) - sys.stdout.write('\n') - exit_code = 1 - - except Exception as e: - if not catch_exceptions: - raise - exception = e - exit_code = 1 - exc_info = sys.exc_info() - finally: - sys.stdout.flush() - stdout = outstreams[0].getvalue() - stderr = outstreams[1] and outstreams[1].getvalue() - - return Result(runner=self, - stdout_bytes=stdout, - stderr_bytes=stderr, - exit_code=exit_code, - exception=exception, - exc_info=exc_info) - - @contextlib.contextmanager - def isolated_filesystem(self): - """A context manager that creates a temporary folder and changes - the current working directory to it for isolated filesystem tests. - """ - cwd = os.getcwd() - t = tempfile.mkdtemp() - os.chdir(t) - try: - yield t - finally: - os.chdir(cwd) - try: - shutil.rmtree(t) - except (OSError, IOError): - pass diff --git a/venv/lib/python3.6/site-packages/click/types.py b/venv/lib/python3.6/site-packages/click/types.py deleted file mode 100644 index 1f88032..0000000 --- a/venv/lib/python3.6/site-packages/click/types.py +++ /dev/null @@ -1,668 +0,0 @@ -import os -import stat -from datetime import datetime - -from ._compat import open_stream, text_type, filename_to_ui, \ - get_filesystem_encoding, get_streerror, _get_argv_encoding, PY2 -from .exceptions import BadParameter -from .utils import safecall, LazyFile - - -class ParamType(object): - """Helper for converting values through types. The following is - necessary for a valid type: - - * it needs a name - * it needs to pass through None unchanged - * it needs to convert from a string - * it needs to convert its result type through unchanged - (eg: needs to be idempotent) - * it needs to be able to deal with param and context being `None`. - This can be the case when the object is used with prompt - inputs. - """ - is_composite = False - - #: the descriptive name of this type - name = None - - #: if a list of this type is expected and the value is pulled from a - #: string environment variable, this is what splits it up. `None` - #: means any whitespace. For all parameters the general rule is that - #: whitespace splits them up. The exception are paths and files which - #: are split by ``os.path.pathsep`` by default (":" on Unix and ";" on - #: Windows). 
- envvar_list_splitter = None - - def __call__(self, value, param=None, ctx=None): - if value is not None: - return self.convert(value, param, ctx) - - def get_metavar(self, param): - """Returns the metavar default for this param if it provides one.""" - - def get_missing_message(self, param): - """Optionally might return extra information about a missing - parameter. - - .. versionadded:: 2.0 - """ - - def convert(self, value, param, ctx): - """Converts the value. This is not invoked for values that are - `None` (the missing value). - """ - return value - - def split_envvar_value(self, rv): - """Given a value from an environment variable this splits it up - into small chunks depending on the defined envvar list splitter. - - If the splitter is set to `None`, which means that whitespace splits, - then leading and trailing whitespace is ignored. Otherwise, leading - and trailing splitters usually lead to empty items being included. - """ - return (rv or '').split(self.envvar_list_splitter) - - def fail(self, message, param=None, ctx=None): - """Helper method to fail with an invalid value message.""" - raise BadParameter(message, ctx=ctx, param=param) - - -class CompositeParamType(ParamType): - is_composite = True - - @property - def arity(self): - raise NotImplementedError() - - -class FuncParamType(ParamType): - - def __init__(self, func): - self.name = func.__name__ - self.func = func - - def convert(self, value, param, ctx): - try: - return self.func(value) - except ValueError: - try: - value = text_type(value) - except UnicodeError: - value = str(value).decode('utf-8', 'replace') - self.fail(value, param, ctx) - - -class UnprocessedParamType(ParamType): - name = 'text' - - def convert(self, value, param, ctx): - return value - - def __repr__(self): - return 'UNPROCESSED' - - -class StringParamType(ParamType): - name = 'text' - - def convert(self, value, param, ctx): - if isinstance(value, bytes): - enc = _get_argv_encoding() - try: - value = value.decode(enc) - except UnicodeError: - fs_enc = get_filesystem_encoding() - if fs_enc != enc: - try: - value = value.decode(fs_enc) - except UnicodeError: - value = value.decode('utf-8', 'replace') - return value - return value - - def __repr__(self): - return 'STRING' - - -class Choice(ParamType): - """The choice type allows a value to be checked against a fixed set - of supported values. All of these values have to be strings. - - You should only pass a list or tuple of choices. Other iterables - (like generators) may lead to surprising results. - - See :ref:`choice-opts` for an example. - - :param case_sensitive: Set to false to make choices case - insensitive. Defaults to true. - """ - - name = 'choice' - - def __init__(self, choices, case_sensitive=True): - self.choices = choices - self.case_sensitive = case_sensitive - - def get_metavar(self, param): - return '[%s]' % '|'.join(self.choices) - - def get_missing_message(self, param): - return 'Choose from:\n\t%s.' 
% ',\n\t'.join(self.choices) - - def convert(self, value, param, ctx): - # Exact match - if value in self.choices: - return value - - # Match through normalization and case sensitivity - # first do token_normalize_func, then lowercase - # preserve original `value` to produce an accurate message in - # `self.fail` - normed_value = value - normed_choices = self.choices - - if ctx is not None and \ - ctx.token_normalize_func is not None: - normed_value = ctx.token_normalize_func(value) - normed_choices = [ctx.token_normalize_func(choice) for choice in - self.choices] - - if not self.case_sensitive: - normed_value = normed_value.lower() - normed_choices = [choice.lower() for choice in normed_choices] - - if normed_value in normed_choices: - return normed_value - - self.fail('invalid choice: %s. (choose from %s)' % - (value, ', '.join(self.choices)), param, ctx) - - def __repr__(self): - return 'Choice(%r)' % list(self.choices) - - -class DateTime(ParamType): - """The DateTime type converts date strings into `datetime` objects. - - The format strings which are checked are configurable, but default to some - common (non-timezone aware) ISO 8601 formats. - - When specifying *DateTime* formats, you should only pass a list or a tuple. - Other iterables, like generators, may lead to surprising results. - - The format strings are processed using ``datetime.strptime``, and this - consequently defines the format strings which are allowed. - - Parsing is tried using each format, in order, and the first format which - parses successfully is used. - - :param formats: A list or tuple of date format strings, in the order in - which they should be tried. Defaults to - ``'%Y-%m-%d'``, ``'%Y-%m-%dT%H:%M:%S'``, - ``'%Y-%m-%d %H:%M:%S'``. - """ - name = 'datetime' - - def __init__(self, formats=None): - self.formats = formats or [ - '%Y-%m-%d', - '%Y-%m-%dT%H:%M:%S', - '%Y-%m-%d %H:%M:%S' - ] - - def get_metavar(self, param): - return '[{}]'.format('|'.join(self.formats)) - - def _try_to_convert_date(self, value, format): - try: - return datetime.strptime(value, format) - except ValueError: - return None - - def convert(self, value, param, ctx): - # Exact match - for format in self.formats: - dtime = self._try_to_convert_date(value, format) - if dtime: - return dtime - - self.fail( - 'invalid datetime format: {}. (choose from {})'.format( - value, ', '.join(self.formats))) - - def __repr__(self): - return 'DateTime' - - -class IntParamType(ParamType): - name = 'integer' - - def convert(self, value, param, ctx): - try: - return int(value) - except (ValueError, UnicodeError): - self.fail('%s is not a valid integer' % value, param, ctx) - - def __repr__(self): - return 'INT' - - -class IntRange(IntParamType): - """A parameter that works similar to :data:`click.INT` but restricts - the value to fit into a range. The default behavior is to fail if the - value falls outside the range, but it can also be silently clamped - between the two edges. - - See :ref:`ranges` for an example. 
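The two concrete types above in use; the choices and the date format are illustrative::

    import click

    @click.command()
    @click.option('--period', type=click.Choice(['day', 'week'], case_sensitive=False))
    @click.option('--since', type=click.DateTime(formats=['%Y-%m-%d']))
    def report(period, since):
        click.echo('%s since %s' % (period, since))

    # e.g.  report --period WEEK --since 2019-08-01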
- """ - name = 'integer range' - - def __init__(self, min=None, max=None, clamp=False): - self.min = min - self.max = max - self.clamp = clamp - - def convert(self, value, param, ctx): - rv = IntParamType.convert(self, value, param, ctx) - if self.clamp: - if self.min is not None and rv < self.min: - return self.min - if self.max is not None and rv > self.max: - return self.max - if self.min is not None and rv < self.min or \ - self.max is not None and rv > self.max: - if self.min is None: - self.fail('%s is bigger than the maximum valid value ' - '%s.' % (rv, self.max), param, ctx) - elif self.max is None: - self.fail('%s is smaller than the minimum valid value ' - '%s.' % (rv, self.min), param, ctx) - else: - self.fail('%s is not in the valid range of %s to %s.' - % (rv, self.min, self.max), param, ctx) - return rv - - def __repr__(self): - return 'IntRange(%r, %r)' % (self.min, self.max) - - -class FloatParamType(ParamType): - name = 'float' - - def convert(self, value, param, ctx): - try: - return float(value) - except (UnicodeError, ValueError): - self.fail('%s is not a valid floating point value' % - value, param, ctx) - - def __repr__(self): - return 'FLOAT' - - -class FloatRange(FloatParamType): - """A parameter that works similar to :data:`click.FLOAT` but restricts - the value to fit into a range. The default behavior is to fail if the - value falls outside the range, but it can also be silently clamped - between the two edges. - - See :ref:`ranges` for an example. - """ - name = 'float range' - - def __init__(self, min=None, max=None, clamp=False): - self.min = min - self.max = max - self.clamp = clamp - - def convert(self, value, param, ctx): - rv = FloatParamType.convert(self, value, param, ctx) - if self.clamp: - if self.min is not None and rv < self.min: - return self.min - if self.max is not None and rv > self.max: - return self.max - if self.min is not None and rv < self.min or \ - self.max is not None and rv > self.max: - if self.min is None: - self.fail('%s is bigger than the maximum valid value ' - '%s.' % (rv, self.max), param, ctx) - elif self.max is None: - self.fail('%s is smaller than the minimum valid value ' - '%s.' % (rv, self.min), param, ctx) - else: - self.fail('%s is not in the valid range of %s to %s.' - % (rv, self.min, self.max), param, ctx) - return rv - - def __repr__(self): - return 'FloatRange(%r, %r)' % (self.min, self.max) - - -class BoolParamType(ParamType): - name = 'boolean' - - def convert(self, value, param, ctx): - if isinstance(value, bool): - return bool(value) - value = value.lower() - if value in ('true', 't', '1', 'yes', 'y'): - return True - elif value in ('false', 'f', '0', 'no', 'n'): - return False - self.fail('%s is not a valid boolean' % value, param, ctx) - - def __repr__(self): - return 'BOOL' - - -class UUIDParameterType(ParamType): - name = 'uuid' - - def convert(self, value, param, ctx): - import uuid - try: - if PY2 and isinstance(value, text_type): - value = value.encode('ascii') - return uuid.UUID(value) - except (UnicodeError, ValueError): - self.fail('%s is not a valid UUID value' % value, param, ctx) - - def __repr__(self): - return 'UUID' - - -class File(ParamType): - """Declares a parameter to be a file for reading or writing. The file - is automatically closed once the context tears down (after the command - finished working). - - Files can be opened for reading or writing. The special value ``-`` - indicates stdin or stdout depending on the mode. 
- - By default, the file is opened for reading text data, but it can also be - opened in binary mode or for writing. The encoding parameter can be used - to force a specific encoding. - - The `lazy` flag controls if the file should be opened immediately or upon - first IO. The default is to be non-lazy for standard input and output - streams as well as files opened for reading, `lazy` otherwise. When opening a - file lazily for reading, it is still opened temporarily for validation, but - will not be held open until first IO. lazy is mainly useful when opening - for writing to avoid creating the file until it is needed. - - Starting with Click 2.0, files can also be opened atomically in which - case all writes go into a separate file in the same folder and upon - completion the file will be moved over to the original location. This - is useful if a file regularly read by other users is modified. - - See :ref:`file-args` for more information. - """ - name = 'filename' - envvar_list_splitter = os.path.pathsep - - def __init__(self, mode='r', encoding=None, errors='strict', lazy=None, - atomic=False): - self.mode = mode - self.encoding = encoding - self.errors = errors - self.lazy = lazy - self.atomic = atomic - - def resolve_lazy_flag(self, value): - if self.lazy is not None: - return self.lazy - if value == '-': - return False - elif 'w' in self.mode: - return True - return False - - def convert(self, value, param, ctx): - try: - if hasattr(value, 'read') or hasattr(value, 'write'): - return value - - lazy = self.resolve_lazy_flag(value) - - if lazy: - f = LazyFile(value, self.mode, self.encoding, self.errors, - atomic=self.atomic) - if ctx is not None: - ctx.call_on_close(f.close_intelligently) - return f - - f, should_close = open_stream(value, self.mode, - self.encoding, self.errors, - atomic=self.atomic) - # If a context is provided, we automatically close the file - # at the end of the context execution (or flush out). If a - # context does not exist, it's the caller's responsibility to - # properly close the file. This for instance happens when the - # type is used with prompts. - if ctx is not None: - if should_close: - ctx.call_on_close(safecall(f.close)) - else: - ctx.call_on_close(safecall(f.flush)) - return f - except (IOError, OSError) as e: - self.fail('Could not open file: %s: %s' % ( - filename_to_ui(value), - get_streerror(e), - ), param, ctx) - - -class Path(ParamType): - """The path type is similar to the :class:`File` type but it performs - different checks. First of all, instead of returning an open file - handle it returns just the filename. Secondly, it can perform various - basic checks about what the file or directory should be. - - .. versionchanged:: 6.0 - `allow_dash` was added. - - :param exists: if set to true, the file or directory needs to exist for - this value to be valid. If this is not required and a - file does indeed not exist, then all further checks are - silently skipped. - :param file_okay: controls if a file is a possible value. - :param dir_okay: controls if a directory is a possible value. - :param writable: if true, a writable check is performed. - :param readable: if true, a readable check is performed. - :param resolve_path: if this is true, then the path is fully resolved - before the value is passed onwards. This means - that it's absolute and symlinks are resolved. It - will not expand a tilde-prefix, as this is - supposed to be done by the shell only. 
- :param allow_dash: If this is set to `True`, a single dash to indicate - standard streams is permitted. - :param path_type: optionally a string type that should be used to - represent the path. The default is `None` which - means the return value will be either bytes or - unicode depending on what makes most sense given the - input data Click deals with. - """ - envvar_list_splitter = os.path.pathsep - - def __init__(self, exists=False, file_okay=True, dir_okay=True, - writable=False, readable=True, resolve_path=False, - allow_dash=False, path_type=None): - self.exists = exists - self.file_okay = file_okay - self.dir_okay = dir_okay - self.writable = writable - self.readable = readable - self.resolve_path = resolve_path - self.allow_dash = allow_dash - self.type = path_type - - if self.file_okay and not self.dir_okay: - self.name = 'file' - self.path_type = 'File' - elif self.dir_okay and not self.file_okay: - self.name = 'directory' - self.path_type = 'Directory' - else: - self.name = 'path' - self.path_type = 'Path' - - def coerce_path_result(self, rv): - if self.type is not None and not isinstance(rv, self.type): - if self.type is text_type: - rv = rv.decode(get_filesystem_encoding()) - else: - rv = rv.encode(get_filesystem_encoding()) - return rv - - def convert(self, value, param, ctx): - rv = value - - is_dash = self.file_okay and self.allow_dash and rv in (b'-', '-') - - if not is_dash: - if self.resolve_path: - rv = os.path.realpath(rv) - - try: - st = os.stat(rv) - except OSError: - if not self.exists: - return self.coerce_path_result(rv) - self.fail('%s "%s" does not exist.' % ( - self.path_type, - filename_to_ui(value) - ), param, ctx) - - if not self.file_okay and stat.S_ISREG(st.st_mode): - self.fail('%s "%s" is a file.' % ( - self.path_type, - filename_to_ui(value) - ), param, ctx) - if not self.dir_okay and stat.S_ISDIR(st.st_mode): - self.fail('%s "%s" is a directory.' % ( - self.path_type, - filename_to_ui(value) - ), param, ctx) - if self.writable and not os.access(value, os.W_OK): - self.fail('%s "%s" is not writable.' % ( - self.path_type, - filename_to_ui(value) - ), param, ctx) - if self.readable and not os.access(value, os.R_OK): - self.fail('%s "%s" is not readable.' % ( - self.path_type, - filename_to_ui(value) - ), param, ctx) - - return self.coerce_path_result(rv) - - -class Tuple(CompositeParamType): - """The default behavior of Click is to apply a type on a value directly. - This works well in most cases, except for when `nargs` is set to a fixed - count and different types should be used for different items. In this - case the :class:`Tuple` type can be used. This type can only be used - if `nargs` is set to a fixed number. - - For more information see :ref:`tuple-type`. - - This can be selected by using a Python tuple literal as a type. - - :param types: a list of types that should be used for the tuple items. - """ - - def __init__(self, types): - self.types = [convert_type(ty) for ty in types] - - @property - def name(self): - return "<" + " ".join(ty.name for ty in self.types) + ">" - - @property - def arity(self): - return len(self.types) - - def convert(self, value, param, ctx): - if len(value) != len(self.types): - raise TypeError('It would appear that nargs is set to conflict ' - 'with the composite type arity.') - return tuple(ty(x, param, ctx) for ty, x in zip(self.types, value)) - - -def convert_type(ty, default=None): - """Converts a callable or python ty into the most appropriate param - ty. 
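The ``Path`` checks and the ``Tuple`` composite type above can be combined in one command; a hedged sketch with illustrative names, assuming Click is installed::

    import click

    @click.command()
    @click.argument('config', type=click.Path(exists=True, dir_okay=False,
                                              resolve_path=True))
    @click.option('--size', type=(int, int), default=(800, 600))
    def check(config, size):
        # A plain Python tuple as the type selects the Tuple composite type,
        # so --size takes exactly two integer values.
        click.echo('reading %s at %dx%d' % (config, size[0], size[1]))

    if __name__ == '__main__':
        check()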
- """ - guessed_type = False - if ty is None and default is not None: - if isinstance(default, tuple): - ty = tuple(map(type, default)) - else: - ty = type(default) - guessed_type = True - - if isinstance(ty, tuple): - return Tuple(ty) - if isinstance(ty, ParamType): - return ty - if ty is text_type or ty is str or ty is None: - return STRING - if ty is int: - return INT - # Booleans are only okay if not guessed. This is done because for - # flags the default value is actually a bit of a lie in that it - # indicates which of the flags is the one we want. See get_default() - # for more information. - if ty is bool and not guessed_type: - return BOOL - if ty is float: - return FLOAT - if guessed_type: - return STRING - - # Catch a common mistake - if __debug__: - try: - if issubclass(ty, ParamType): - raise AssertionError('Attempted to use an uninstantiated ' - 'parameter type (%s).' % ty) - except TypeError: - pass - return FuncParamType(ty) - - -#: A dummy parameter type that just does nothing. From a user's -#: perspective this appears to just be the same as `STRING` but internally -#: no string conversion takes place. This is necessary to achieve the -#: same bytes/unicode behavior on Python 2/3 in situations where you want -#: to not convert argument types. This is usually useful when working -#: with file paths as they can appear in bytes and unicode. -#: -#: For path related uses the :class:`Path` type is a better choice but -#: there are situations where an unprocessed type is useful which is why -#: it is is provided. -#: -#: .. versionadded:: 4.0 -UNPROCESSED = UnprocessedParamType() - -#: A unicode string parameter type which is the implicit default. This -#: can also be selected by using ``str`` as type. -STRING = StringParamType() - -#: An integer parameter. This can also be selected by using ``int`` as -#: type. -INT = IntParamType() - -#: A floating point value parameter. This can also be selected by using -#: ``float`` as type. -FLOAT = FloatParamType() - -#: A boolean parameter. This is the default for boolean flags. This can -#: also be selected by using ``bool`` as a type. -BOOL = BoolParamType() - -#: A UUID parameter. 
-UUID = UUIDParameterType() diff --git a/venv/lib/python3.6/site-packages/click/utils.py b/venv/lib/python3.6/site-packages/click/utils.py deleted file mode 100644 index fc84369..0000000 --- a/venv/lib/python3.6/site-packages/click/utils.py +++ /dev/null @@ -1,440 +0,0 @@ -import os -import sys - -from .globals import resolve_color_default - -from ._compat import text_type, open_stream, get_filesystem_encoding, \ - get_streerror, string_types, PY2, binary_streams, text_streams, \ - filename_to_ui, auto_wrap_for_ansi, strip_ansi, should_strip_ansi, \ - _default_text_stdout, _default_text_stderr, is_bytes, WIN - -if not PY2: - from ._compat import _find_binary_writer -elif WIN: - from ._winconsole import _get_windows_argv, \ - _hash_py_argv, _initial_argv_hash - - -echo_native_types = string_types + (bytes, bytearray) - - -def _posixify(name): - return '-'.join(name.split()).lower() - - -def safecall(func): - """Wraps a function so that it swallows exceptions.""" - def wrapper(*args, **kwargs): - try: - return func(*args, **kwargs) - except Exception: - pass - return wrapper - - -def make_str(value): - """Converts a value into a valid string.""" - if isinstance(value, bytes): - try: - return value.decode(get_filesystem_encoding()) - except UnicodeError: - return value.decode('utf-8', 'replace') - return text_type(value) - - -def make_default_short_help(help, max_length=45): - """Return a condensed version of help string.""" - words = help.split() - total_length = 0 - result = [] - done = False - - for word in words: - if word[-1:] == '.': - done = True - new_length = result and 1 + len(word) or len(word) - if total_length + new_length > max_length: - result.append('...') - done = True - else: - if result: - result.append(' ') - result.append(word) - if done: - break - total_length += new_length - - return ''.join(result) - - -class LazyFile(object): - """A lazy file works like a regular file but it does not fully open - the file but it does perform some basic checks early to see if the - filename parameter does make sense. This is useful for safely opening - files for writing. - """ - - def __init__(self, filename, mode='r', encoding=None, errors='strict', - atomic=False): - self.name = filename - self.mode = mode - self.encoding = encoding - self.errors = errors - self.atomic = atomic - - if filename == '-': - self._f, self.should_close = open_stream(filename, mode, - encoding, errors) - else: - if 'r' in mode: - # Open and close the file in case we're opening it for - # reading so that we can catch at least some errors in - # some cases early. - open(filename, mode).close() - self._f = None - self.should_close = True - - def __getattr__(self, name): - return getattr(self.open(), name) - - def __repr__(self): - if self._f is not None: - return repr(self._f) - return '' % (self.name, self.mode) - - def open(self): - """Opens the file if it's not yet open. This call might fail with - a :exc:`FileError`. Not handling this error will produce an error - that Click shows. 
- """ - if self._f is not None: - return self._f - try: - rv, self.should_close = open_stream(self.name, self.mode, - self.encoding, - self.errors, - atomic=self.atomic) - except (IOError, OSError) as e: - from .exceptions import FileError - raise FileError(self.name, hint=get_streerror(e)) - self._f = rv - return rv - - def close(self): - """Closes the underlying file, no matter what.""" - if self._f is not None: - self._f.close() - - def close_intelligently(self): - """This function only closes the file if it was opened by the lazy - file wrapper. For instance this will never close stdin. - """ - if self.should_close: - self.close() - - def __enter__(self): - return self - - def __exit__(self, exc_type, exc_value, tb): - self.close_intelligently() - - def __iter__(self): - self.open() - return iter(self._f) - - -class KeepOpenFile(object): - - def __init__(self, file): - self._file = file - - def __getattr__(self, name): - return getattr(self._file, name) - - def __enter__(self): - return self - - def __exit__(self, exc_type, exc_value, tb): - pass - - def __repr__(self): - return repr(self._file) - - def __iter__(self): - return iter(self._file) - - -def echo(message=None, file=None, nl=True, err=False, color=None): - """Prints a message plus a newline to the given file or stdout. On - first sight, this looks like the print function, but it has improved - support for handling Unicode and binary data that does not fail no - matter how badly configured the system is. - - Primarily it means that you can print binary data as well as Unicode - data on both 2.x and 3.x to the given file in the most appropriate way - possible. This is a very carefree function in that it will try its - best to not fail. As of Click 6.0 this includes support for unicode - output on the Windows console. - - In addition to that, if `colorama`_ is installed, the echo function will - also support clever handling of ANSI codes. Essentially it will then - do the following: - - - add transparent handling of ANSI color codes on Windows. - - hide ANSI codes automatically if the destination file is not a - terminal. - - .. _colorama: https://pypi.org/project/colorama/ - - .. versionchanged:: 6.0 - As of Click 6.0 the echo function will properly support unicode - output on the windows console. Not that click does not modify - the interpreter in any way which means that `sys.stdout` or the - print statement or function will still not provide unicode support. - - .. versionchanged:: 2.0 - Starting with version 2.0 of Click, the echo function will work - with colorama if it's installed. - - .. versionadded:: 3.0 - The `err` parameter was added. - - .. versionchanged:: 4.0 - Added the `color` flag. - - :param message: the message to print - :param file: the file to write to (defaults to ``stdout``) - :param err: if set to true the file defaults to ``stderr`` instead of - ``stdout``. This is faster and easier than calling - :func:`get_text_stderr` yourself. - :param nl: if set to `True` (the default) a newline is printed afterwards. - :param color: controls if the terminal supports ANSI colors or not. The - default is autodetection. - """ - if file is None: - if err: - file = _default_text_stderr() - else: - file = _default_text_stdout() - - # Convert non bytes/text into the native string type. 
- if message is not None and not isinstance(message, echo_native_types): - message = text_type(message) - - if nl: - message = message or u'' - if isinstance(message, text_type): - message += u'\n' - else: - message += b'\n' - - # If there is a message, and we're in Python 3, and the value looks - # like bytes, we manually need to find the binary stream and write the - # message in there. This is done separately so that most stream - # types will work as you would expect. Eg: you can write to StringIO - # for other cases. - if message and not PY2 and is_bytes(message): - binary_file = _find_binary_writer(file) - if binary_file is not None: - file.flush() - binary_file.write(message) - binary_file.flush() - return - - # ANSI-style support. If there is no message or we are dealing with - # bytes nothing is happening. If we are connected to a file we want - # to strip colors. If we are on windows we either wrap the stream - # to strip the color or we use the colorama support to translate the - # ansi codes to API calls. - if message and not is_bytes(message): - color = resolve_color_default(color) - if should_strip_ansi(file, color): - message = strip_ansi(message) - elif WIN: - if auto_wrap_for_ansi is not None: - file = auto_wrap_for_ansi(file) - elif not color: - message = strip_ansi(message) - - if message: - file.write(message) - file.flush() - - -def get_binary_stream(name): - """Returns a system stream for byte processing. This essentially - returns the stream from the sys module with the given name but it - solves some compatibility issues between different Python versions. - Primarily this function is necessary for getting binary streams on - Python 3. - - :param name: the name of the stream to open. Valid names are ``'stdin'``, - ``'stdout'`` and ``'stderr'`` - """ - opener = binary_streams.get(name) - if opener is None: - raise TypeError('Unknown standard stream %r' % name) - return opener() - - -def get_text_stream(name, encoding=None, errors='strict'): - """Returns a system stream for text processing. This usually returns - a wrapped stream around a binary stream returned from - :func:`get_binary_stream` but it also can take shortcuts on Python 3 - for already correctly configured streams. - - :param name: the name of the stream to open. Valid names are ``'stdin'``, - ``'stdout'`` and ``'stderr'`` - :param encoding: overrides the detected default encoding. - :param errors: overrides the default error mode. - """ - opener = text_streams.get(name) - if opener is None: - raise TypeError('Unknown standard stream %r' % name) - return opener(encoding, errors) - - -def open_file(filename, mode='r', encoding=None, errors='strict', - lazy=False, atomic=False): - """This is similar to how the :class:`File` works but for manual - usage. Files are opened non lazy by default. This can open regular - files as well as stdin/stdout if ``'-'`` is passed. - - If stdin/stdout is returned the stream is wrapped so that the context - manager will not close the stream accidentally. This makes it possible - to always use the function like this without having to worry to - accidentally close a standard stream:: - - with open_file(filename) as f: - ... - - .. versionadded:: 3.0 - - :param filename: the name of the file to open (or ``'-'`` for stdin/stdout). - :param mode: the mode in which to open the file. - :param encoding: the encoding to use. - :param errors: the error handling for this file. - :param lazy: can be flipped to true to open the file lazily. 
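``open_file`` behaves like the ``File`` type but for manual use: ``'-'`` is wrapped so a ``with`` block does not close the real standard stream, and ``lazy=True`` defers creation until the first write. A small sketch, with an illustrative filename::

    from click import open_file

    with open_file('-', 'w') as out:
        out.write('hello\n')            # goes to stdout, which stays open

    log = open_file('run.log', 'w', lazy=True)   # nothing on disk yet
    log.write('started\n')                        # the file is created here
    log.close()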
- :param atomic: in atomic mode writes go into a temporary file and it's - moved on close. - """ - if lazy: - return LazyFile(filename, mode, encoding, errors, atomic=atomic) - f, should_close = open_stream(filename, mode, encoding, errors, - atomic=atomic) - if not should_close: - f = KeepOpenFile(f) - return f - - -def get_os_args(): - """This returns the argument part of sys.argv in the most appropriate - form for processing. What this means is that this return value is in - a format that works for Click to process but does not necessarily - correspond well to what's actually standard for the interpreter. - - On most environments the return value is ``sys.argv[:1]`` unchanged. - However if you are on Windows and running Python 2 the return value - will actually be a list of unicode strings instead because the - default behavior on that platform otherwise will not be able to - carry all possible values that sys.argv can have. - - .. versionadded:: 6.0 - """ - # We can only extract the unicode argv if sys.argv has not been - # changed since the startup of the application. - if PY2 and WIN and _initial_argv_hash == _hash_py_argv(): - return _get_windows_argv() - return sys.argv[1:] - - -def format_filename(filename, shorten=False): - """Formats a filename for user display. The main purpose of this - function is to ensure that the filename can be displayed at all. This - will decode the filename to unicode if necessary in a way that it will - not fail. Optionally, it can shorten the filename to not include the - full path to the filename. - - :param filename: formats a filename for UI display. This will also convert - the filename into unicode without failing. - :param shorten: this optionally shortens the filename to strip of the - path that leads up to it. - """ - if shorten: - filename = os.path.basename(filename) - return filename_to_ui(filename) - - -def get_app_dir(app_name, roaming=True, force_posix=False): - r"""Returns the config folder for the application. The default behavior - is to return whatever is most appropriate for the operating system. - - To give you an idea, for an app called ``"Foo Bar"``, something like - the following folders could be returned: - - Mac OS X: - ``~/Library/Application Support/Foo Bar`` - Mac OS X (POSIX): - ``~/.foo-bar`` - Unix: - ``~/.config/foo-bar`` - Unix (POSIX): - ``~/.foo-bar`` - Win XP (roaming): - ``C:\Documents and Settings\\Local Settings\Application Data\Foo Bar`` - Win XP (not roaming): - ``C:\Documents and Settings\\Application Data\Foo Bar`` - Win 7 (roaming): - ``C:\Users\\AppData\Roaming\Foo Bar`` - Win 7 (not roaming): - ``C:\Users\\AppData\Local\Foo Bar`` - - .. versionadded:: 2.0 - - :param app_name: the application name. This should be properly capitalized - and can contain whitespace. - :param roaming: controls if the folder should be roaming or not on Windows. - Has no affect otherwise. - :param force_posix: if this is set to `True` then on any POSIX system the - folder will be stored in the home folder with a leading - dot instead of the XDG config home or darwin's - application support folder. - """ - if WIN: - key = roaming and 'APPDATA' or 'LOCALAPPDATA' - folder = os.environ.get(key) - if folder is None: - folder = os.path.expanduser('~') - return os.path.join(folder, app_name) - if force_posix: - return os.path.join(os.path.expanduser('~/.' 
+ _posixify(app_name))) - if sys.platform == 'darwin': - return os.path.join(os.path.expanduser( - '~/Library/Application Support'), app_name) - return os.path.join( - os.environ.get('XDG_CONFIG_HOME', os.path.expanduser('~/.config')), - _posixify(app_name)) - - -class PacifyFlushWrapper(object): - """This wrapper is used to catch and suppress BrokenPipeErrors resulting - from ``.flush()`` being called on broken pipe during the shutdown/final-GC - of the Python interpreter. Notably ``.flush()`` is always called on - ``sys.stdout`` and ``sys.stderr``. So as to have minimal impact on any - other cleanup code, and the case where the underlying file is not a broken - pipe, all calls and attributes are proxied. - """ - - def __init__(self, wrapped): - self.wrapped = wrapped - - def flush(self): - try: - self.wrapped.flush() - except IOError as e: - import errno - if e.errno != errno.EPIPE: - raise - - def __getattr__(self, attr): - return getattr(self.wrapped, attr) diff --git a/venv/lib/python3.6/site-packages/dateutil/__init__.py b/venv/lib/python3.6/site-packages/dateutil/__init__.py deleted file mode 100644 index 0defb82..0000000 --- a/venv/lib/python3.6/site-packages/dateutil/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -# -*- coding: utf-8 -*- -try: - from ._version import version as __version__ -except ImportError: - __version__ = 'unknown' - -__all__ = ['easter', 'parser', 'relativedelta', 'rrule', 'tz', - 'utils', 'zoneinfo'] diff --git a/venv/lib/python3.6/site-packages/dateutil/_common.py b/venv/lib/python3.6/site-packages/dateutil/_common.py deleted file mode 100644 index 4eb2659..0000000 --- a/venv/lib/python3.6/site-packages/dateutil/_common.py +++ /dev/null @@ -1,43 +0,0 @@ -""" -Common code used in multiple modules. -""" - - -class weekday(object): - __slots__ = ["weekday", "n"] - - def __init__(self, weekday, n=None): - self.weekday = weekday - self.n = n - - def __call__(self, n): - if n == self.n: - return self - else: - return self.__class__(self.weekday, n) - - def __eq__(self, other): - try: - if self.weekday != other.weekday or self.n != other.n: - return False - except AttributeError: - return False - return True - - def __hash__(self): - return hash(( - self.weekday, - self.n, - )) - - def __ne__(self, other): - return not (self == other) - - def __repr__(self): - s = ("MO", "TU", "WE", "TH", "FR", "SA", "SU")[self.weekday] - if not self.n: - return s - else: - return "%s(%+d)" % (s, self.n) - -# vim:ts=4:sw=4:et diff --git a/venv/lib/python3.6/site-packages/dateutil/_version.py b/venv/lib/python3.6/site-packages/dateutil/_version.py deleted file mode 100644 index 670d7ab..0000000 --- a/venv/lib/python3.6/site-packages/dateutil/_version.py +++ /dev/null @@ -1,4 +0,0 @@ -# coding: utf-8 -# file generated by setuptools_scm -# don't change, don't track in version control -version = '2.8.0' diff --git a/venv/lib/python3.6/site-packages/dateutil/easter.py b/venv/lib/python3.6/site-packages/dateutil/easter.py deleted file mode 100644 index 53b7c78..0000000 --- a/venv/lib/python3.6/site-packages/dateutil/easter.py +++ /dev/null @@ -1,89 +0,0 @@ -# -*- coding: utf-8 -*- -""" -This module offers a generic easter computing method for any given year, using -Western, Orthodox or Julian algorithms. 
-""" - -import datetime - -__all__ = ["easter", "EASTER_JULIAN", "EASTER_ORTHODOX", "EASTER_WESTERN"] - -EASTER_JULIAN = 1 -EASTER_ORTHODOX = 2 -EASTER_WESTERN = 3 - - -def easter(year, method=EASTER_WESTERN): - """ - This method was ported from the work done by GM Arts, - on top of the algorithm by Claus Tondering, which was - based in part on the algorithm of Ouding (1940), as - quoted in "Explanatory Supplement to the Astronomical - Almanac", P. Kenneth Seidelmann, editor. - - This algorithm implements three different easter - calculation methods: - - 1 - Original calculation in Julian calendar, valid in - dates after 326 AD - 2 - Original method, with date converted to Gregorian - calendar, valid in years 1583 to 4099 - 3 - Revised method, in Gregorian calendar, valid in - years 1583 to 4099 as well - - These methods are represented by the constants: - - * ``EASTER_JULIAN = 1`` - * ``EASTER_ORTHODOX = 2`` - * ``EASTER_WESTERN = 3`` - - The default method is method 3. - - More about the algorithm may be found at: - - `GM Arts: Easter Algorithms `_ - - and - - `The Calendar FAQ: Easter `_ - - """ - - if not (1 <= method <= 3): - raise ValueError("invalid method") - - # g - Golden year - 1 - # c - Century - # h - (23 - Epact) mod 30 - # i - Number of days from March 21 to Paschal Full Moon - # j - Weekday for PFM (0=Sunday, etc) - # p - Number of days from March 21 to Sunday on or before PFM - # (-6 to 28 methods 1 & 3, to 56 for method 2) - # e - Extra days to add for method 2 (converting Julian - # date to Gregorian date) - - y = year - g = y % 19 - e = 0 - if method < 3: - # Old method - i = (19*g + 15) % 30 - j = (y + y//4 + i) % 7 - if method == 2: - # Extra dates to convert Julian to Gregorian date - e = 10 - if y > 1600: - e = e + y//100 - 16 - (y//100 - 16)//4 - else: - # New method - c = y//100 - h = (c - c//4 - (8*c + 13)//25 + 19*g + 15) % 30 - i = h - (h//28)*(1 - (h//28)*(29//(h + 1))*((21 - g)//11)) - j = (y + y//4 + i + 2 - c + c//4) % 7 - - # p can be from -6 to 56 corresponding to dates 22 March to 23 May - # (later dates apply to method 2, although 23 May never actually occurs) - p = i - j + e - d = 1 + (p + 27 + (p + 6)//40) % 31 - m = 3 + (p + 26)//30 - return datetime.date(int(y), int(m), int(d)) diff --git a/venv/lib/python3.6/site-packages/dateutil/parser/__init__.py b/venv/lib/python3.6/site-packages/dateutil/parser/__init__.py deleted file mode 100644 index 216762c..0000000 --- a/venv/lib/python3.6/site-packages/dateutil/parser/__init__.py +++ /dev/null @@ -1,60 +0,0 @@ -# -*- coding: utf-8 -*- -from ._parser import parse, parser, parserinfo -from ._parser import DEFAULTPARSER, DEFAULTTZPARSER -from ._parser import UnknownTimezoneWarning - -from ._parser import __doc__ - -from .isoparser import isoparser, isoparse - -__all__ = ['parse', 'parser', 'parserinfo', - 'isoparse', 'isoparser', - 'UnknownTimezoneWarning'] - - -### -# Deprecate portions of the private interface so that downstream code that -# is improperly relying on it is given *some* notice. 
- - -def __deprecated_private_func(f): - from functools import wraps - import warnings - - msg = ('{name} is a private function and may break without warning, ' - 'it will be moved and or renamed in future versions.') - msg = msg.format(name=f.__name__) - - @wraps(f) - def deprecated_func(*args, **kwargs): - warnings.warn(msg, DeprecationWarning) - return f(*args, **kwargs) - - return deprecated_func - -def __deprecate_private_class(c): - import warnings - - msg = ('{name} is a private class and may break without warning, ' - 'it will be moved and or renamed in future versions.') - msg = msg.format(name=c.__name__) - - class private_class(c): - __doc__ = c.__doc__ - - def __init__(self, *args, **kwargs): - warnings.warn(msg, DeprecationWarning) - super(private_class, self).__init__(*args, **kwargs) - - private_class.__name__ = c.__name__ - - return private_class - - -from ._parser import _timelex, _resultbase -from ._parser import _tzparser, _parsetz - -_timelex = __deprecate_private_class(_timelex) -_tzparser = __deprecate_private_class(_tzparser) -_resultbase = __deprecate_private_class(_resultbase) -_parsetz = __deprecated_private_func(_parsetz) diff --git a/venv/lib/python3.6/site-packages/dateutil/parser/_parser.py b/venv/lib/python3.6/site-packages/dateutil/parser/_parser.py deleted file mode 100644 index 0da0f3e..0000000 --- a/venv/lib/python3.6/site-packages/dateutil/parser/_parser.py +++ /dev/null @@ -1,1580 +0,0 @@ -# -*- coding: utf-8 -*- -""" -This module offers a generic date/time string parser which is able to parse -most known formats to represent a date and/or time. - -This module attempts to be forgiving with regards to unlikely input formats, -returning a datetime object even for dates which are ambiguous. If an element -of a date/time stamp is omitted, the following rules are applied: - -- If AM or PM is left unspecified, a 24-hour clock is assumed, however, an hour - on a 12-hour clock (``0 <= hour <= 12``) *must* be specified if AM or PM is - specified. -- If a time zone is omitted, a timezone-naive datetime is returned. - -If any other elements are missing, they are taken from the -:class:`datetime.datetime` object passed to the parameter ``default``. If this -results in a day number exceeding the valid number of days per month, the -value falls back to the end of the month. - -Additional resources about date/time string formats can be found below: - -- `A summary of the international standard date and time notation - `_ -- `W3C Date and Time Formats `_ -- `Time Formats (Planetary Rings Node) `_ -- `CPAN ParseDate module - `_ -- `Java SimpleDateFormat Class - `_ -""" -from __future__ import unicode_literals - -import datetime -import re -import string -import time -import warnings - -from calendar import monthrange -from io import StringIO - -import six -from six import integer_types, text_type - -from decimal import Decimal - -from warnings import warn - -from .. import relativedelta -from .. import tz - -__all__ = ["parse", "parserinfo"] - - -# TODO: pandas.core.tools.datetimes imports this explicitly. Might be worth -# making public and/or figuring out if there is something we can -# take off their plate. 
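The parser module docstring above notes that elements missing from the input are taken from the ``default`` datetime, falling back to the end of the month when the day would overflow; for example::

    import datetime
    from dateutil import parser

    default = datetime.datetime(2003, 9, 25)
    print(parser.parse("10:36", default=default))   # 2003-09-25 10:36:00

    print(parser.parse("Feb", default=datetime.datetime(2003, 1, 31)))
    # 2003-02-28 00:00:00 -- day 31 falls back to the end of February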
-class _timelex(object): - # Fractional seconds are sometimes split by a comma - _split_decimal = re.compile("([.,])") - - def __init__(self, instream): - if six.PY2: - # In Python 2, we can't duck type properly because unicode has - # a 'decode' function, and we'd be double-decoding - if isinstance(instream, (bytes, bytearray)): - instream = instream.decode() - else: - if getattr(instream, 'decode', None) is not None: - instream = instream.decode() - - if isinstance(instream, text_type): - instream = StringIO(instream) - elif getattr(instream, 'read', None) is None: - raise TypeError('Parser must be a string or character stream, not ' - '{itype}'.format(itype=instream.__class__.__name__)) - - self.instream = instream - self.charstack = [] - self.tokenstack = [] - self.eof = False - - def get_token(self): - """ - This function breaks the time string into lexical units (tokens), which - can be parsed by the parser. Lexical units are demarcated by changes in - the character set, so any continuous string of letters is considered - one unit, any continuous string of numbers is considered one unit. - - The main complication arises from the fact that dots ('.') can be used - both as separators (e.g. "Sep.20.2009") or decimal points (e.g. - "4:30:21.447"). As such, it is necessary to read the full context of - any dot-separated strings before breaking it into tokens; as such, this - function maintains a "token stack", for when the ambiguous context - demands that multiple tokens be parsed at once. - """ - if self.tokenstack: - return self.tokenstack.pop(0) - - seenletters = False - token = None - state = None - - while not self.eof: - # We only realize that we've reached the end of a token when we - # find a character that's not part of the current token - since - # that character may be part of the next token, it's stored in the - # charstack. - if self.charstack: - nextchar = self.charstack.pop(0) - else: - nextchar = self.instream.read(1) - while nextchar == '\x00': - nextchar = self.instream.read(1) - - if not nextchar: - self.eof = True - break - elif not state: - # First character of the token - determines if we're starting - # to parse a word, a number or something else. - token = nextchar - if self.isword(nextchar): - state = 'a' - elif self.isnum(nextchar): - state = '0' - elif self.isspace(nextchar): - token = ' ' - break # emit token - else: - break # emit token - elif state == 'a': - # If we've already started reading a word, we keep reading - # letters until we find something that's not part of a word. - seenletters = True - if self.isword(nextchar): - token += nextchar - elif nextchar == '.': - token += nextchar - state = 'a.' - else: - self.charstack.append(nextchar) - break # emit token - elif state == '0': - # If we've already started reading a number, we keep reading - # numbers until we find something that doesn't fit. - if self.isnum(nextchar): - token += nextchar - elif nextchar == '.' or (nextchar == ',' and len(token) >= 2): - token += nextchar - state = '0.' - else: - self.charstack.append(nextchar) - break # emit token - elif state == 'a.': - # If we've seen some letters and a dot separator, continue - # parsing, and the tokens will be broken up later. - seenletters = True - if nextchar == '.' or self.isword(nextchar): - token += nextchar - elif self.isnum(nextchar) and token[-1] == '.': - token += nextchar - state = '0.' 
- else: - self.charstack.append(nextchar) - break # emit token - elif state == '0.': - # If we've seen at least one dot separator, keep going, we'll - # break up the tokens later. - if nextchar == '.' or self.isnum(nextchar): - token += nextchar - elif self.isword(nextchar) and token[-1] == '.': - token += nextchar - state = 'a.' - else: - self.charstack.append(nextchar) - break # emit token - - if (state in ('a.', '0.') and (seenletters or token.count('.') > 1 or - token[-1] in '.,')): - l = self._split_decimal.split(token) - token = l[0] - for tok in l[1:]: - if tok: - self.tokenstack.append(tok) - - if state == '0.' and token.count('.') == 0: - token = token.replace(',', '.') - - return token - - def __iter__(self): - return self - - def __next__(self): - token = self.get_token() - if token is None: - raise StopIteration - - return token - - def next(self): - return self.__next__() # Python 2.x support - - @classmethod - def split(cls, s): - return list(cls(s)) - - @classmethod - def isword(cls, nextchar): - """ Whether or not the next character is part of a word """ - return nextchar.isalpha() - - @classmethod - def isnum(cls, nextchar): - """ Whether the next character is part of a number """ - return nextchar.isdigit() - - @classmethod - def isspace(cls, nextchar): - """ Whether the next character is whitespace """ - return nextchar.isspace() - - -class _resultbase(object): - - def __init__(self): - for attr in self.__slots__: - setattr(self, attr, None) - - def _repr(self, classname): - l = [] - for attr in self.__slots__: - value = getattr(self, attr) - if value is not None: - l.append("%s=%s" % (attr, repr(value))) - return "%s(%s)" % (classname, ", ".join(l)) - - def __len__(self): - return (sum(getattr(self, attr) is not None - for attr in self.__slots__)) - - def __repr__(self): - return self._repr(self.__class__.__name__) - - -class parserinfo(object): - """ - Class which handles what inputs are accepted. Subclass this to customize - the language and acceptable values for each parameter. - - :param dayfirst: - Whether to interpret the first value in an ambiguous 3-integer date - (e.g. 01/05/09) as the day (``True``) or month (``False``). If - ``yearfirst`` is set to ``True``, this distinguishes between YDM - and YMD. Default is ``False``. - - :param yearfirst: - Whether to interpret the first value in an ambiguous 3-integer date - (e.g. 01/05/09) as the year. If ``True``, the first number is taken - to be the year, otherwise the last number is taken to be the year. - Default is ``False``. 
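``parserinfo``'s ``dayfirst`` and ``yearfirst`` switches govern how an ambiguous all-numeric date is read; both are also accepted directly by ``parse``::

    from dateutil import parser

    print(parser.parse("01/05/09"))                  # 2009-01-05 (month first)
    print(parser.parse("01/05/09", dayfirst=True))   # 2009-05-01
    print(parser.parse("01/05/09", yearfirst=True))  # 2001-05-09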
- """ - - # m from a.m/p.m, t from ISO T separator - JUMP = [" ", ".", ",", ";", "-", "/", "'", - "at", "on", "and", "ad", "m", "t", "of", - "st", "nd", "rd", "th"] - - WEEKDAYS = [("Mon", "Monday"), - ("Tue", "Tuesday"), # TODO: "Tues" - ("Wed", "Wednesday"), - ("Thu", "Thursday"), # TODO: "Thurs" - ("Fri", "Friday"), - ("Sat", "Saturday"), - ("Sun", "Sunday")] - MONTHS = [("Jan", "January"), - ("Feb", "February"), # TODO: "Febr" - ("Mar", "March"), - ("Apr", "April"), - ("May", "May"), - ("Jun", "June"), - ("Jul", "July"), - ("Aug", "August"), - ("Sep", "Sept", "September"), - ("Oct", "October"), - ("Nov", "November"), - ("Dec", "December")] - HMS = [("h", "hour", "hours"), - ("m", "minute", "minutes"), - ("s", "second", "seconds")] - AMPM = [("am", "a"), - ("pm", "p")] - UTCZONE = ["UTC", "GMT", "Z", "z"] - PERTAIN = ["of"] - TZOFFSET = {} - # TODO: ERA = ["AD", "BC", "CE", "BCE", "Stardate", - # "Anno Domini", "Year of Our Lord"] - - def __init__(self, dayfirst=False, yearfirst=False): - self._jump = self._convert(self.JUMP) - self._weekdays = self._convert(self.WEEKDAYS) - self._months = self._convert(self.MONTHS) - self._hms = self._convert(self.HMS) - self._ampm = self._convert(self.AMPM) - self._utczone = self._convert(self.UTCZONE) - self._pertain = self._convert(self.PERTAIN) - - self.dayfirst = dayfirst - self.yearfirst = yearfirst - - self._year = time.localtime().tm_year - self._century = self._year // 100 * 100 - - def _convert(self, lst): - dct = {} - for i, v in enumerate(lst): - if isinstance(v, tuple): - for v in v: - dct[v.lower()] = i - else: - dct[v.lower()] = i - return dct - - def jump(self, name): - return name.lower() in self._jump - - def weekday(self, name): - try: - return self._weekdays[name.lower()] - except KeyError: - pass - return None - - def month(self, name): - try: - return self._months[name.lower()] + 1 - except KeyError: - pass - return None - - def hms(self, name): - try: - return self._hms[name.lower()] - except KeyError: - return None - - def ampm(self, name): - try: - return self._ampm[name.lower()] - except KeyError: - return None - - def pertain(self, name): - return name.lower() in self._pertain - - def utczone(self, name): - return name.lower() in self._utczone - - def tzoffset(self, name): - if name in self._utczone: - return 0 - - return self.TZOFFSET.get(name) - - def convertyear(self, year, century_specified=False): - """ - Converts two-digit years to year within [-50, 49] - range of self._year (current local time) - """ - - # Function contract is that the year is always positive - assert year >= 0 - - if year < 100 and not century_specified: - # assume current century to start - year += self._century - - if year >= self._year + 50: # if too far in future - year -= 100 - elif year < self._year - 50: # if too far in past - year += 100 - - return year - - def validate(self, res): - # move to info - if res.year is not None: - res.year = self.convertyear(res.year, res.century_specified) - - if ((res.tzoffset == 0 and not res.tzname) or - (res.tzname == 'Z' or res.tzname == 'z')): - res.tzname = "UTC" - res.tzoffset = 0 - elif res.tzoffset != 0 and res.tzname and self.utczone(res.tzname): - res.tzoffset = 0 - return True - - -class _ymd(list): - def __init__(self, *args, **kwargs): - super(self.__class__, self).__init__(*args, **kwargs) - self.century_specified = False - self.dstridx = None - self.mstridx = None - self.ystridx = None - - @property - def has_year(self): - return self.ystridx is not None - - @property - def has_month(self): - 
return self.mstridx is not None - - @property - def has_day(self): - return self.dstridx is not None - - def could_be_day(self, value): - if self.has_day: - return False - elif not self.has_month: - return 1 <= value <= 31 - elif not self.has_year: - # Be permissive, assume leapyear - month = self[self.mstridx] - return 1 <= value <= monthrange(2000, month)[1] - else: - month = self[self.mstridx] - year = self[self.ystridx] - return 1 <= value <= monthrange(year, month)[1] - - def append(self, val, label=None): - if hasattr(val, '__len__'): - if val.isdigit() and len(val) > 2: - self.century_specified = True - if label not in [None, 'Y']: # pragma: no cover - raise ValueError(label) - label = 'Y' - elif val > 100: - self.century_specified = True - if label not in [None, 'Y']: # pragma: no cover - raise ValueError(label) - label = 'Y' - - super(self.__class__, self).append(int(val)) - - if label == 'M': - if self.has_month: - raise ValueError('Month is already set') - self.mstridx = len(self) - 1 - elif label == 'D': - if self.has_day: - raise ValueError('Day is already set') - self.dstridx = len(self) - 1 - elif label == 'Y': - if self.has_year: - raise ValueError('Year is already set') - self.ystridx = len(self) - 1 - - def _resolve_from_stridxs(self, strids): - """ - Try to resolve the identities of year/month/day elements using - ystridx, mstridx, and dstridx, if enough of these are specified. - """ - if len(self) == 3 and len(strids) == 2: - # we can back out the remaining stridx value - missing = [x for x in range(3) if x not in strids.values()] - key = [x for x in ['y', 'm', 'd'] if x not in strids] - assert len(missing) == len(key) == 1 - key = key[0] - val = missing[0] - strids[key] = val - - assert len(self) == len(strids) # otherwise this should not be called - out = {key: self[strids[key]] for key in strids} - return (out.get('y'), out.get('m'), out.get('d')) - - def resolve_ymd(self, yearfirst, dayfirst): - len_ymd = len(self) - year, month, day = (None, None, None) - - strids = (('y', self.ystridx), - ('m', self.mstridx), - ('d', self.dstridx)) - - strids = {key: val for key, val in strids if val is not None} - if (len(self) == len(strids) > 0 or - (len(self) == 3 and len(strids) == 2)): - return self._resolve_from_stridxs(strids) - - mstridx = self.mstridx - - if len_ymd > 3: - raise ValueError("More than three YMD values") - elif len_ymd == 1 or (mstridx is not None and len_ymd == 2): - # One member, or two members with a month string - if mstridx is not None: - month = self[mstridx] - # since mstridx is 0 or 1, self[mstridx-1] always - # looks up the other element - other = self[mstridx - 1] - else: - other = self[0] - - if len_ymd > 1 or mstridx is None: - if other > 31: - year = other - else: - day = other - - elif len_ymd == 2: - # Two members with numbers - if self[0] > 31: - # 99-01 - year, month = self - elif self[1] > 31: - # 01-99 - month, year = self - elif dayfirst and self[1] <= 12: - # 13-01 - day, month = self - else: - # 01-13 - month, day = self - - elif len_ymd == 3: - # Three members - if mstridx == 0: - if self[1] > 31: - # Apr-2003-25 - month, year, day = self - else: - month, day, year = self - elif mstridx == 1: - if self[0] > 31 or (yearfirst and self[2] <= 31): - # 99-Jan-01 - year, month, day = self - else: - # 01-Jan-01 - # Give precendence to day-first, since - # two-digit years is usually hand-written. - day, month, year = self - - elif mstridx == 2: - # WTF!? 
- if self[1] > 31: - # 01-99-Jan - day, year, month = self - else: - # 99-01-Jan - year, day, month = self - - else: - if (self[0] > 31 or - self.ystridx == 0 or - (yearfirst and self[1] <= 12 and self[2] <= 31)): - # 99-01-01 - if dayfirst and self[2] <= 12: - year, day, month = self - else: - year, month, day = self - elif self[0] > 12 or (dayfirst and self[1] <= 12): - # 13-01-01 - day, month, year = self - else: - # 01-13-01 - month, day, year = self - - return year, month, day - - -class parser(object): - def __init__(self, info=None): - self.info = info or parserinfo() - - def parse(self, timestr, default=None, - ignoretz=False, tzinfos=None, **kwargs): - """ - Parse the date/time string into a :class:`datetime.datetime` object. - - :param timestr: - Any date/time string using the supported formats. - - :param default: - The default datetime object, if this is a datetime object and not - ``None``, elements specified in ``timestr`` replace elements in the - default object. - - :param ignoretz: - If set ``True``, time zones in parsed strings are ignored and a - naive :class:`datetime.datetime` object is returned. - - :param tzinfos: - Additional time zone names / aliases which may be present in the - string. This argument maps time zone names (and optionally offsets - from those time zones) to time zones. This parameter can be a - dictionary with timezone aliases mapping time zone names to time - zones or a function taking two parameters (``tzname`` and - ``tzoffset``) and returning a time zone. - - The timezones to which the names are mapped can be an integer - offset from UTC in seconds or a :class:`tzinfo` object. - - .. doctest:: - :options: +NORMALIZE_WHITESPACE - - >>> from dateutil.parser import parse - >>> from dateutil.tz import gettz - >>> tzinfos = {"BRST": -7200, "CST": gettz("America/Chicago")} - >>> parse("2012-01-19 17:21:00 BRST", tzinfos=tzinfos) - datetime.datetime(2012, 1, 19, 17, 21, tzinfo=tzoffset(u'BRST', -7200)) - >>> parse("2012-01-19 17:21:00 CST", tzinfos=tzinfos) - datetime.datetime(2012, 1, 19, 17, 21, - tzinfo=tzfile('/usr/share/zoneinfo/America/Chicago')) - - This parameter is ignored if ``ignoretz`` is set. - - :param \\*\\*kwargs: - Keyword arguments as passed to ``_parse()``. - - :return: - Returns a :class:`datetime.datetime` object or, if the - ``fuzzy_with_tokens`` option is ``True``, returns a tuple, the - first element being a :class:`datetime.datetime` object, the second - a tuple containing the fuzzy tokens. - - :raises ValueError: - Raised for invalid or unknown string format, if the provided - :class:`tzinfo` is not in a valid format, or if an invalid date - would be created. - - :raises TypeError: - Raised for non-string or character stream input. - - :raises OverflowError: - Raised if the parsed date exceeds the largest valid C integer on - your system. 
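``ignoretz`` drops any timezone information found in the string and returns a naive datetime, which sidesteps the ``tzinfos`` handling described above::

    from dateutil import parser

    print(parser.parse("2012-01-19 17:21:00 BRST", ignoretz=True))
    # 2012-01-19 17:21:00  (naive: the BRST token is discarded)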
- """ - - if default is None: - default = datetime.datetime.now().replace(hour=0, minute=0, - second=0, microsecond=0) - - res, skipped_tokens = self._parse(timestr, **kwargs) - - if res is None: - raise ValueError("Unknown string format:", timestr) - - if len(res) == 0: - raise ValueError("String does not contain a date:", timestr) - - ret = self._build_naive(res, default) - - if not ignoretz: - ret = self._build_tzaware(ret, res, tzinfos) - - if kwargs.get('fuzzy_with_tokens', False): - return ret, skipped_tokens - else: - return ret - - class _result(_resultbase): - __slots__ = ["year", "month", "day", "weekday", - "hour", "minute", "second", "microsecond", - "tzname", "tzoffset", "ampm","any_unused_tokens"] - - def _parse(self, timestr, dayfirst=None, yearfirst=None, fuzzy=False, - fuzzy_with_tokens=False): - """ - Private method which performs the heavy lifting of parsing, called from - ``parse()``, which passes on its ``kwargs`` to this function. - - :param timestr: - The string to parse. - - :param dayfirst: - Whether to interpret the first value in an ambiguous 3-integer date - (e.g. 01/05/09) as the day (``True``) or month (``False``). If - ``yearfirst`` is set to ``True``, this distinguishes between YDM - and YMD. If set to ``None``, this value is retrieved from the - current :class:`parserinfo` object (which itself defaults to - ``False``). - - :param yearfirst: - Whether to interpret the first value in an ambiguous 3-integer date - (e.g. 01/05/09) as the year. If ``True``, the first number is taken - to be the year, otherwise the last number is taken to be the year. - If this is set to ``None``, the value is retrieved from the current - :class:`parserinfo` object (which itself defaults to ``False``). - - :param fuzzy: - Whether to allow fuzzy parsing, allowing for string like "Today is - January 1, 2047 at 8:21:00AM". - - :param fuzzy_with_tokens: - If ``True``, ``fuzzy`` is automatically set to True, and the parser - will return a tuple where the first element is the parsed - :class:`datetime.datetime` datetimestamp and the second element is - a tuple containing the portions of the string which were ignored: - - .. 
doctest:: - - >>> from dateutil.parser import parse - >>> parse("Today is January 1, 2047 at 8:21:00AM", fuzzy_with_tokens=True) - (datetime.datetime(2047, 1, 1, 8, 21), (u'Today is ', u' ', u'at ')) - - """ - if fuzzy_with_tokens: - fuzzy = True - - info = self.info - - if dayfirst is None: - dayfirst = info.dayfirst - - if yearfirst is None: - yearfirst = info.yearfirst - - res = self._result() - l = _timelex.split(timestr) # Splits the timestr into tokens - - skipped_idxs = [] - - # year/month/day list - ymd = _ymd() - - len_l = len(l) - i = 0 - try: - while i < len_l: - - # Check if it's a number - value_repr = l[i] - try: - value = float(value_repr) - except ValueError: - value = None - - if value is not None: - # Numeric token - i = self._parse_numeric_token(l, i, info, ymd, res, fuzzy) - - # Check weekday - elif info.weekday(l[i]) is not None: - value = info.weekday(l[i]) - res.weekday = value - - # Check month name - elif info.month(l[i]) is not None: - value = info.month(l[i]) - ymd.append(value, 'M') - - if i + 1 < len_l: - if l[i + 1] in ('-', '/'): - # Jan-01[-99] - sep = l[i + 1] - ymd.append(l[i + 2]) - - if i + 3 < len_l and l[i + 3] == sep: - # Jan-01-99 - ymd.append(l[i + 4]) - i += 2 - - i += 2 - - elif (i + 4 < len_l and l[i + 1] == l[i + 3] == ' ' and - info.pertain(l[i + 2])): - # Jan of 01 - # In this case, 01 is clearly year - if l[i + 4].isdigit(): - # Convert it here to become unambiguous - value = int(l[i + 4]) - year = str(info.convertyear(value)) - ymd.append(year, 'Y') - else: - # Wrong guess - pass - # TODO: not hit in tests - i += 4 - - # Check am/pm - elif info.ampm(l[i]) is not None: - value = info.ampm(l[i]) - val_is_ampm = self._ampm_valid(res.hour, res.ampm, fuzzy) - - if val_is_ampm: - res.hour = self._adjust_ampm(res.hour, value) - res.ampm = value - - elif fuzzy: - skipped_idxs.append(i) - - # Check for a timezone name - elif self._could_be_tzname(res.hour, res.tzname, res.tzoffset, l[i]): - res.tzname = l[i] - res.tzoffset = info.tzoffset(res.tzname) - - # Check for something like GMT+3, or BRST+3. Notice - # that it doesn't mean "I am 3 hours after GMT", but - # "my time +3 is GMT". If found, we reverse the - # logic so that timezone parsing code will get it - # right. - if i + 1 < len_l and l[i + 1] in ('+', '-'): - l[i + 1] = ('+', '-')[l[i + 1] == '+'] - res.tzoffset = None - if info.utczone(res.tzname): - # With something like GMT+3, the timezone - # is *not* GMT. - res.tzname = None - - # Check for a numbered timezone - elif res.hour is not None and l[i] in ('+', '-'): - signal = (-1, 1)[l[i] == '+'] - len_li = len(l[i + 1]) - - # TODO: check that l[i + 1] is integer? - if len_li == 4: - # -0300 - hour_offset = int(l[i + 1][:2]) - min_offset = int(l[i + 1][2:]) - elif i + 2 < len_l and l[i + 2] == ':': - # -03:00 - hour_offset = int(l[i + 1]) - min_offset = int(l[i + 3]) # TODO: Check that l[i+3] is minute-like? 
- i += 2 - elif len_li <= 2: - # -[0]3 - hour_offset = int(l[i + 1][:2]) - min_offset = 0 - else: - raise ValueError(timestr) - - res.tzoffset = signal * (hour_offset * 3600 + min_offset * 60) - - # Look for a timezone name between parenthesis - if (i + 5 < len_l and - info.jump(l[i + 2]) and l[i + 3] == '(' and - l[i + 5] == ')' and - 3 <= len(l[i + 4]) and - self._could_be_tzname(res.hour, res.tzname, - None, l[i + 4])): - # -0300 (BRST) - res.tzname = l[i + 4] - i += 4 - - i += 1 - - # Check jumps - elif not (info.jump(l[i]) or fuzzy): - raise ValueError(timestr) - - else: - skipped_idxs.append(i) - i += 1 - - # Process year/month/day - year, month, day = ymd.resolve_ymd(yearfirst, dayfirst) - - res.century_specified = ymd.century_specified - res.year = year - res.month = month - res.day = day - - except (IndexError, ValueError): - return None, None - - if not info.validate(res): - return None, None - - if fuzzy_with_tokens: - skipped_tokens = self._recombine_skipped(l, skipped_idxs) - return res, tuple(skipped_tokens) - else: - return res, None - - def _parse_numeric_token(self, tokens, idx, info, ymd, res, fuzzy): - # Token is a number - value_repr = tokens[idx] - try: - value = self._to_decimal(value_repr) - except Exception as e: - six.raise_from(ValueError('Unknown numeric token'), e) - - len_li = len(value_repr) - - len_l = len(tokens) - - if (len(ymd) == 3 and len_li in (2, 4) and - res.hour is None and - (idx + 1 >= len_l or - (tokens[idx + 1] != ':' and - info.hms(tokens[idx + 1]) is None))): - # 19990101T23[59] - s = tokens[idx] - res.hour = int(s[:2]) - - if len_li == 4: - res.minute = int(s[2:]) - - elif len_li == 6 or (len_li > 6 and tokens[idx].find('.') == 6): - # YYMMDD or HHMMSS[.ss] - s = tokens[idx] - - if not ymd and '.' not in tokens[idx]: - ymd.append(s[:2]) - ymd.append(s[2:4]) - ymd.append(s[4:]) - else: - # 19990101T235959[.59] - - # TODO: Check if res attributes already set. - res.hour = int(s[:2]) - res.minute = int(s[2:4]) - res.second, res.microsecond = self._parsems(s[4:]) - - elif len_li in (8, 12, 14): - # YYYYMMDD - s = tokens[idx] - ymd.append(s[:4], 'Y') - ymd.append(s[4:6]) - ymd.append(s[6:8]) - - if len_li > 8: - res.hour = int(s[8:10]) - res.minute = int(s[10:12]) - - if len_li > 12: - res.second = int(s[12:]) - - elif self._find_hms_idx(idx, tokens, info, allow_jump=True) is not None: - # HH[ ]h or MM[ ]m or SS[.ss][ ]s - hms_idx = self._find_hms_idx(idx, tokens, info, allow_jump=True) - (idx, hms) = self._parse_hms(idx, tokens, info, hms_idx) - if hms is not None: - # TODO: checking that hour/minute/second are not - # already set? - self._assign_hms(res, value_repr, hms) - - elif idx + 2 < len_l and tokens[idx + 1] == ':': - # HH:MM[:SS[.ss]] - res.hour = int(value) - value = self._to_decimal(tokens[idx + 2]) # TODO: try/except for this? 
- (res.minute, res.second) = self._parse_min_sec(value) - - if idx + 4 < len_l and tokens[idx + 3] == ':': - res.second, res.microsecond = self._parsems(tokens[idx + 4]) - - idx += 2 - - idx += 2 - - elif idx + 1 < len_l and tokens[idx + 1] in ('-', '/', '.'): - sep = tokens[idx + 1] - ymd.append(value_repr) - - if idx + 2 < len_l and not info.jump(tokens[idx + 2]): - if tokens[idx + 2].isdigit(): - # 01-01[-01] - ymd.append(tokens[idx + 2]) - else: - # 01-Jan[-01] - value = info.month(tokens[idx + 2]) - - if value is not None: - ymd.append(value, 'M') - else: - raise ValueError() - - if idx + 3 < len_l and tokens[idx + 3] == sep: - # We have three members - value = info.month(tokens[idx + 4]) - - if value is not None: - ymd.append(value, 'M') - else: - ymd.append(tokens[idx + 4]) - idx += 2 - - idx += 1 - idx += 1 - - elif idx + 1 >= len_l or info.jump(tokens[idx + 1]): - if idx + 2 < len_l and info.ampm(tokens[idx + 2]) is not None: - # 12 am - hour = int(value) - res.hour = self._adjust_ampm(hour, info.ampm(tokens[idx + 2])) - idx += 1 - else: - # Year, month or day - ymd.append(value) - idx += 1 - - elif info.ampm(tokens[idx + 1]) is not None and (0 <= value < 24): - # 12am - hour = int(value) - res.hour = self._adjust_ampm(hour, info.ampm(tokens[idx + 1])) - idx += 1 - - elif ymd.could_be_day(value): - ymd.append(value) - - elif not fuzzy: - raise ValueError() - - return idx - - def _find_hms_idx(self, idx, tokens, info, allow_jump): - len_l = len(tokens) - - if idx+1 < len_l and info.hms(tokens[idx+1]) is not None: - # There is an "h", "m", or "s" label following this token. We take - # assign the upcoming label to the current token. - # e.g. the "12" in 12h" - hms_idx = idx + 1 - - elif (allow_jump and idx+2 < len_l and tokens[idx+1] == ' ' and - info.hms(tokens[idx+2]) is not None): - # There is a space and then an "h", "m", or "s" label. - # e.g. the "12" in "12 h" - hms_idx = idx + 2 - - elif idx > 0 and info.hms(tokens[idx-1]) is not None: - # There is a "h", "m", or "s" preceeding this token. Since neither - # of the previous cases was hit, there is no label following this - # token, so we use the previous label. - # e.g. the "04" in "12h04" - hms_idx = idx-1 - - elif (1 < idx == len_l-1 and tokens[idx-1] == ' ' and - info.hms(tokens[idx-2]) is not None): - # If we are looking at the final token, we allow for a - # backward-looking check to skip over a space. - # TODO: Are we sure this is the right condition here? - hms_idx = idx - 2 - - else: - hms_idx = None - - return hms_idx - - def _assign_hms(self, res, value_repr, hms): - # See GH issue #427, fixing float rounding - value = self._to_decimal(value_repr) - - if hms == 0: - # Hour - res.hour = int(value) - if value % 1: - res.minute = int(60*(value % 1)) - - elif hms == 1: - (res.minute, res.second) = self._parse_min_sec(value) - - elif hms == 2: - (res.second, res.microsecond) = self._parsems(value_repr) - - def _could_be_tzname(self, hour, tzname, tzoffset, token): - return (hour is not None and - tzname is None and - tzoffset is None and - len(token) <= 5 and - (all(x in string.ascii_uppercase for x in token) - or token in self.info.UTCZONE)) - - def _ampm_valid(self, hour, ampm, fuzzy): - """ - For fuzzy parsing, 'a' or 'am' (both valid English words) - may erroneously trigger the AM/PM flag. Deal with that - here. - """ - val_is_ampm = True - - # If there's already an AM/PM flag, this one isn't one. 
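The numeric-token handling above also covers compact all-digit timestamps and trailing AM/PM markers; for instance::

    import datetime
    from dateutil import parser

    print(parser.parse("19990101T235959"))   # 1999-01-01 23:59:59

    print(parser.parse("12 am", default=datetime.datetime(2003, 9, 25)))
    # 2003-09-25 00:00:00 -- 12 AM is adjusted to hour 0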
- if fuzzy and ampm is not None: - val_is_ampm = False - - # If AM/PM is found and hour is not, raise a ValueError - if hour is None: - if fuzzy: - val_is_ampm = False - else: - raise ValueError('No hour specified with AM or PM flag.') - elif not 0 <= hour <= 12: - # If AM/PM is found, it's a 12 hour clock, so raise - # an error for invalid range - if fuzzy: - val_is_ampm = False - else: - raise ValueError('Invalid hour specified for 12-hour clock.') - - return val_is_ampm - - def _adjust_ampm(self, hour, ampm): - if hour < 12 and ampm == 1: - hour += 12 - elif hour == 12 and ampm == 0: - hour = 0 - return hour - - def _parse_min_sec(self, value): - # TODO: Every usage of this function sets res.second to the return - # value. Are there any cases where second will be returned as None and - # we *dont* want to set res.second = None? - minute = int(value) - second = None - - sec_remainder = value % 1 - if sec_remainder: - second = int(60 * sec_remainder) - return (minute, second) - - def _parsems(self, value): - """Parse a I[.F] seconds value into (seconds, microseconds).""" - if "." not in value: - return int(value), 0 - else: - i, f = value.split(".") - return int(i), int(f.ljust(6, "0")[:6]) - - def _parse_hms(self, idx, tokens, info, hms_idx): - # TODO: Is this going to admit a lot of false-positives for when we - # just happen to have digits and "h", "m" or "s" characters in non-date - # text? I guess hex hashes won't have that problem, but there's plenty - # of random junk out there. - if hms_idx is None: - hms = None - new_idx = idx - elif hms_idx > idx: - hms = info.hms(tokens[hms_idx]) - new_idx = hms_idx - else: - # Looking backwards, increment one. - hms = info.hms(tokens[hms_idx]) + 1 - new_idx = idx - - return (new_idx, hms) - - def _recombine_skipped(self, tokens, skipped_idxs): - """ - >>> tokens = ["foo", " ", "bar", " ", "19June2000", "baz"] - >>> skipped_idxs = [0, 1, 2, 5] - >>> _recombine_skipped(tokens, skipped_idxs) - ["foo bar", "baz"] - """ - skipped_tokens = [] - for i, idx in enumerate(sorted(skipped_idxs)): - if i > 0 and idx - 1 == skipped_idxs[i - 1]: - skipped_tokens[-1] = skipped_tokens[-1] + tokens[idx] - else: - skipped_tokens.append(tokens[idx]) - - return skipped_tokens - - def _build_tzinfo(self, tzinfos, tzname, tzoffset): - if callable(tzinfos): - tzdata = tzinfos(tzname, tzoffset) - else: - tzdata = tzinfos.get(tzname) - # handle case where tzinfo is paased an options that returns None - # eg tzinfos = {'BRST' : None} - if isinstance(tzdata, datetime.tzinfo) or tzdata is None: - tzinfo = tzdata - elif isinstance(tzdata, text_type): - tzinfo = tz.tzstr(tzdata) - elif isinstance(tzdata, integer_types): - tzinfo = tz.tzoffset(tzname, tzdata) - return tzinfo - - def _build_tzaware(self, naive, res, tzinfos): - if (callable(tzinfos) or (tzinfos and res.tzname in tzinfos)): - tzinfo = self._build_tzinfo(tzinfos, res.tzname, res.tzoffset) - aware = naive.replace(tzinfo=tzinfo) - aware = self._assign_tzname(aware, res.tzname) - - elif res.tzname and res.tzname in time.tzname: - aware = naive.replace(tzinfo=tz.tzlocal()) - - # Handle ambiguous local datetime - aware = self._assign_tzname(aware, res.tzname) - - # This is mostly relevant for winter GMT zones parsed in the UK - if (aware.tzname() != res.tzname and - res.tzname in self.info.UTCZONE): - aware = aware.replace(tzinfo=tz.tzutc()) - - elif res.tzoffset == 0: - aware = naive.replace(tzinfo=tz.tzutc()) - - elif res.tzoffset: - aware = naive.replace(tzinfo=tz.tzoffset(res.tzname, res.tzoffset)) - - elif not 
res.tzname and not res.tzoffset: - # i.e. no timezone information was found. - aware = naive - - elif res.tzname: - # tz-like string was parsed but we don't know what to do - # with it - warnings.warn("tzname {tzname} identified but not understood. " - "Pass `tzinfos` argument in order to correctly " - "return a timezone-aware datetime. In a future " - "version, this will raise an " - "exception.".format(tzname=res.tzname), - category=UnknownTimezoneWarning) - aware = naive - - return aware - - def _build_naive(self, res, default): - repl = {} - for attr in ("year", "month", "day", "hour", - "minute", "second", "microsecond"): - value = getattr(res, attr) - if value is not None: - repl[attr] = value - - if 'day' not in repl: - # If the default day exceeds the last day of the month, fall back - # to the end of the month. - cyear = default.year if res.year is None else res.year - cmonth = default.month if res.month is None else res.month - cday = default.day if res.day is None else res.day - - if cday > monthrange(cyear, cmonth)[1]: - repl['day'] = monthrange(cyear, cmonth)[1] - - naive = default.replace(**repl) - - if res.weekday is not None and not res.day: - naive = naive + relativedelta.relativedelta(weekday=res.weekday) - - return naive - - def _assign_tzname(self, dt, tzname): - if dt.tzname() != tzname: - new_dt = tz.enfold(dt, fold=1) - if new_dt.tzname() == tzname: - return new_dt - - return dt - - def _to_decimal(self, val): - try: - decimal_value = Decimal(val) - # See GH 662, edge case, infinite value should not be converted via `_to_decimal` - if not decimal_value.is_finite(): - raise ValueError("Converted decimal value is infinite or NaN") - except Exception as e: - msg = "Could not convert %s to decimal" % val - six.raise_from(ValueError(msg), e) - else: - return decimal_value - - -DEFAULTPARSER = parser() - - -def parse(timestr, parserinfo=None, **kwargs): - """ - - Parse a string in one of the supported formats, using the - ``parserinfo`` parameters. - - :param timestr: - A string containing a date/time stamp. - - :param parserinfo: - A :class:`parserinfo` object containing parameters for the parser. - If ``None``, the default arguments to the :class:`parserinfo` - constructor are used. - - The ``**kwargs`` parameter takes the following keyword arguments: - - :param default: - The default datetime object, if this is a datetime object and not - ``None``, elements specified in ``timestr`` replace elements in the - default object. - - :param ignoretz: - If set ``True``, time zones in parsed strings are ignored and a naive - :class:`datetime` object is returned. - - :param tzinfos: - Additional time zone names / aliases which may be present in the - string. This argument maps time zone names (and optionally offsets - from those time zones) to time zones. This parameter can be a - dictionary with timezone aliases mapping time zone names to time - zones or a function taking two parameters (``tzname`` and - ``tzoffset``) and returning a time zone. - - The timezones to which the names are mapped can be an integer - offset from UTC in seconds or a :class:`tzinfo` object. - - .. 
doctest:: - :options: +NORMALIZE_WHITESPACE - - >>> from dateutil.parser import parse - >>> from dateutil.tz import gettz - >>> tzinfos = {"BRST": -7200, "CST": gettz("America/Chicago")} - >>> parse("2012-01-19 17:21:00 BRST", tzinfos=tzinfos) - datetime.datetime(2012, 1, 19, 17, 21, tzinfo=tzoffset(u'BRST', -7200)) - >>> parse("2012-01-19 17:21:00 CST", tzinfos=tzinfos) - datetime.datetime(2012, 1, 19, 17, 21, - tzinfo=tzfile('/usr/share/zoneinfo/America/Chicago')) - - This parameter is ignored if ``ignoretz`` is set. - - :param dayfirst: - Whether to interpret the first value in an ambiguous 3-integer date - (e.g. 01/05/09) as the day (``True``) or month (``False``). If - ``yearfirst`` is set to ``True``, this distinguishes between YDM and - YMD. If set to ``None``, this value is retrieved from the current - :class:`parserinfo` object (which itself defaults to ``False``). - - :param yearfirst: - Whether to interpret the first value in an ambiguous 3-integer date - (e.g. 01/05/09) as the year. If ``True``, the first number is taken to - be the year, otherwise the last number is taken to be the year. If - this is set to ``None``, the value is retrieved from the current - :class:`parserinfo` object (which itself defaults to ``False``). - - :param fuzzy: - Whether to allow fuzzy parsing, allowing for string like "Today is - January 1, 2047 at 8:21:00AM". - - :param fuzzy_with_tokens: - If ``True``, ``fuzzy`` is automatically set to True, and the parser - will return a tuple where the first element is the parsed - :class:`datetime.datetime` datetimestamp and the second element is - a tuple containing the portions of the string which were ignored: - - .. doctest:: - - >>> from dateutil.parser import parse - >>> parse("Today is January 1, 2047 at 8:21:00AM", fuzzy_with_tokens=True) - (datetime.datetime(2047, 1, 1, 8, 21), (u'Today is ', u' ', u'at ')) - - :return: - Returns a :class:`datetime.datetime` object or, if the - ``fuzzy_with_tokens`` option is ``True``, returns a tuple, the - first element being a :class:`datetime.datetime` object, the second - a tuple containing the fuzzy tokens. - - :raises ValueError: - Raised for invalid or unknown string format, if the provided - :class:`tzinfo` is not in a valid format, or if an invalid date - would be created. - - :raises OverflowError: - Raised if the parsed date exceeds the largest valid C integer on - your system. 
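For reference, a minimal usage sketch of the parse() options documented above (tzinfos as a callable, and fuzzy_with_tokens); this is illustrative only and not part of the deleted vendored file, and lookup_brst is a hypothetical helper name.

    from dateutil import tz
    from dateutil.parser import parse

    # tzinfos may be a callable taking (tzname, tzoffset) and returning a
    # tzinfo object or an offset in seconds; lookup_brst is hypothetical.
    def lookup_brst(tzname, tzoffset):
        return {"BRST": tz.tzoffset("BRST", -7200)}.get(tzname)

    parse("2012-01-19 17:21:00 BRST", tzinfos=lookup_brst)
    # -> datetime.datetime(2012, 1, 19, 17, 21, tzinfo=tzoffset('BRST', -7200))

    # With fuzzy_with_tokens=True the result is (datetime, skipped_tokens).
    dt, skipped = parse("Today is January 1, 2047 at 8:21:00AM",
                        fuzzy_with_tokens=True)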
- """ - if parserinfo: - return parser(parserinfo).parse(timestr, **kwargs) - else: - return DEFAULTPARSER.parse(timestr, **kwargs) - - -class _tzparser(object): - - class _result(_resultbase): - - __slots__ = ["stdabbr", "stdoffset", "dstabbr", "dstoffset", - "start", "end"] - - class _attr(_resultbase): - __slots__ = ["month", "week", "weekday", - "yday", "jyday", "day", "time"] - - def __repr__(self): - return self._repr("") - - def __init__(self): - _resultbase.__init__(self) - self.start = self._attr() - self.end = self._attr() - - def parse(self, tzstr): - res = self._result() - l = [x for x in re.split(r'([,:.]|[a-zA-Z]+|[0-9]+)',tzstr) if x] - used_idxs = list() - try: - - len_l = len(l) - - i = 0 - while i < len_l: - # BRST+3[BRDT[+2]] - j = i - while j < len_l and not [x for x in l[j] - if x in "0123456789:,-+"]: - j += 1 - if j != i: - if not res.stdabbr: - offattr = "stdoffset" - res.stdabbr = "".join(l[i:j]) - else: - offattr = "dstoffset" - res.dstabbr = "".join(l[i:j]) - - for ii in range(j): - used_idxs.append(ii) - i = j - if (i < len_l and (l[i] in ('+', '-') or l[i][0] in - "0123456789")): - if l[i] in ('+', '-'): - # Yes, that's right. See the TZ variable - # documentation. - signal = (1, -1)[l[i] == '+'] - used_idxs.append(i) - i += 1 - else: - signal = -1 - len_li = len(l[i]) - if len_li == 4: - # -0300 - setattr(res, offattr, (int(l[i][:2]) * 3600 + - int(l[i][2:]) * 60) * signal) - elif i + 1 < len_l and l[i + 1] == ':': - # -03:00 - setattr(res, offattr, - (int(l[i]) * 3600 + - int(l[i + 2]) * 60) * signal) - used_idxs.append(i) - i += 2 - elif len_li <= 2: - # -[0]3 - setattr(res, offattr, - int(l[i][:2]) * 3600 * signal) - else: - return None - used_idxs.append(i) - i += 1 - if res.dstabbr: - break - else: - break - - - if i < len_l: - for j in range(i, len_l): - if l[j] == ';': - l[j] = ',' - - assert l[i] == ',' - - i += 1 - - if i >= len_l: - pass - elif (8 <= l.count(',') <= 9 and - not [y for x in l[i:] if x != ',' - for y in x if y not in "0123456789+-"]): - # GMT0BST,3,0,30,3600,10,0,26,7200[,3600] - for x in (res.start, res.end): - x.month = int(l[i]) - used_idxs.append(i) - i += 2 - if l[i] == '-': - value = int(l[i + 1]) * -1 - used_idxs.append(i) - i += 1 - else: - value = int(l[i]) - used_idxs.append(i) - i += 2 - if value: - x.week = value - x.weekday = (int(l[i]) - 1) % 7 - else: - x.day = int(l[i]) - used_idxs.append(i) - i += 2 - x.time = int(l[i]) - used_idxs.append(i) - i += 2 - if i < len_l: - if l[i] in ('-', '+'): - signal = (-1, 1)[l[i] == "+"] - used_idxs.append(i) - i += 1 - else: - signal = 1 - used_idxs.append(i) - res.dstoffset = (res.stdoffset + int(l[i]) * signal) - - # This was a made-up format that is not in normal use - warn(('Parsed time zone "%s"' % tzstr) + - 'is in a non-standard dateutil-specific format, which ' + - 'is now deprecated; support for parsing this format ' + - 'will be removed in future versions. 
It is recommended ' + - 'that you switch to a standard format like the GNU ' + - 'TZ variable format.', tz.DeprecatedTzFormatWarning) - elif (l.count(',') == 2 and l[i:].count('/') <= 2 and - not [y for x in l[i:] if x not in (',', '/', 'J', 'M', - '.', '-', ':') - for y in x if y not in "0123456789"]): - for x in (res.start, res.end): - if l[i] == 'J': - # non-leap year day (1 based) - used_idxs.append(i) - i += 1 - x.jyday = int(l[i]) - elif l[i] == 'M': - # month[-.]week[-.]weekday - used_idxs.append(i) - i += 1 - x.month = int(l[i]) - used_idxs.append(i) - i += 1 - assert l[i] in ('-', '.') - used_idxs.append(i) - i += 1 - x.week = int(l[i]) - if x.week == 5: - x.week = -1 - used_idxs.append(i) - i += 1 - assert l[i] in ('-', '.') - used_idxs.append(i) - i += 1 - x.weekday = (int(l[i]) - 1) % 7 - else: - # year day (zero based) - x.yday = int(l[i]) + 1 - - used_idxs.append(i) - i += 1 - - if i < len_l and l[i] == '/': - used_idxs.append(i) - i += 1 - # start time - len_li = len(l[i]) - if len_li == 4: - # -0300 - x.time = (int(l[i][:2]) * 3600 + - int(l[i][2:]) * 60) - elif i + 1 < len_l and l[i + 1] == ':': - # -03:00 - x.time = int(l[i]) * 3600 + int(l[i + 2]) * 60 - used_idxs.append(i) - i += 2 - if i + 1 < len_l and l[i + 1] == ':': - used_idxs.append(i) - i += 2 - x.time += int(l[i]) - elif len_li <= 2: - # -[0]3 - x.time = (int(l[i][:2]) * 3600) - else: - return None - used_idxs.append(i) - i += 1 - - assert i == len_l or l[i] == ',' - - i += 1 - - assert i >= len_l - - except (IndexError, ValueError, AssertionError): - return None - - unused_idxs = set(range(len_l)).difference(used_idxs) - res.any_unused_tokens = not {l[n] for n in unused_idxs}.issubset({",",":"}) - return res - - -DEFAULTTZPARSER = _tzparser() - - -def _parsetz(tzstr): - return DEFAULTTZPARSER.parse(tzstr) - -class UnknownTimezoneWarning(RuntimeWarning): - """Raised when the parser finds a timezone it cannot parse into a tzinfo""" -# vim:ts=4:sw=4:et diff --git a/venv/lib/python3.6/site-packages/dateutil/parser/isoparser.py b/venv/lib/python3.6/site-packages/dateutil/parser/isoparser.py deleted file mode 100644 index e3cf6d8..0000000 --- a/venv/lib/python3.6/site-packages/dateutil/parser/isoparser.py +++ /dev/null @@ -1,411 +0,0 @@ -# -*- coding: utf-8 -*- -""" -This module offers a parser for ISO-8601 strings - -It is intended to support all valid date, time and datetime formats per the -ISO-8601 specification. - -..versionadded:: 2.7.0 -""" -from datetime import datetime, timedelta, time, date -import calendar -from dateutil import tz - -from functools import wraps - -import re -import six - -__all__ = ["isoparse", "isoparser"] - - -def _takes_ascii(f): - @wraps(f) - def func(self, str_in, *args, **kwargs): - # If it's a stream, read the whole thing - str_in = getattr(str_in, 'read', lambda: str_in)() - - # If it's unicode, turn it into bytes, since ISO-8601 only covers ASCII - if isinstance(str_in, six.text_type): - # ASCII is the same in UTF-8 - try: - str_in = str_in.encode('ascii') - except UnicodeEncodeError as e: - msg = 'ISO-8601 strings should contain only ASCII characters' - six.raise_from(ValueError(msg), e) - - return f(self, str_in, *args, **kwargs) - - return func - - -class isoparser(object): - def __init__(self, sep=None): - """ - :param sep: - A single character that separates date and time portions. If - ``None``, the parser will accept any single character. - For strict ISO-8601 adherence, pass ``'T'``. 
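As an aside, a minimal sketch of the isoparse/isoparser API whose implementation is removed here; illustrative only, not part of the patch, and the input strings are arbitrary examples of the formats listed in the docstring.

    from dateutil.parser import isoparse, isoparser

    isoparse("2018-04-09T13:37:00+02:00")   # date + time + UTC offset
    isoparse("2018-W15-1")                   # ISO week date (YYYY-Www-D)

    # Passing sep='T' restricts the date/time separator to the standard 'T'.
    strict = isoparser(sep='T')
    strict.isoparse("2018-04-09T13:37:00")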
- """ - if sep is not None: - if (len(sep) != 1 or ord(sep) >= 128 or sep in '0123456789'): - raise ValueError('Separator must be a single, non-numeric ' + - 'ASCII character') - - sep = sep.encode('ascii') - - self._sep = sep - - @_takes_ascii - def isoparse(self, dt_str): - """ - Parse an ISO-8601 datetime string into a :class:`datetime.datetime`. - - An ISO-8601 datetime string consists of a date portion, followed - optionally by a time portion - the date and time portions are separated - by a single character separator, which is ``T`` in the official - standard. Incomplete date formats (such as ``YYYY-MM``) may *not* be - combined with a time portion. - - Supported date formats are: - - Common: - - - ``YYYY`` - - ``YYYY-MM`` or ``YYYYMM`` - - ``YYYY-MM-DD`` or ``YYYYMMDD`` - - Uncommon: - - - ``YYYY-Www`` or ``YYYYWww`` - ISO week (day defaults to 0) - - ``YYYY-Www-D`` or ``YYYYWwwD`` - ISO week and day - - The ISO week and day numbering follows the same logic as - :func:`datetime.date.isocalendar`. - - Supported time formats are: - - - ``hh`` - - ``hh:mm`` or ``hhmm`` - - ``hh:mm:ss`` or ``hhmmss`` - - ``hh:mm:ss.ssssss`` (Up to 6 sub-second digits) - - Midnight is a special case for `hh`, as the standard supports both - 00:00 and 24:00 as a representation. The decimal separator can be - either a dot or a comma. - - - .. caution:: - - Support for fractional components other than seconds is part of the - ISO-8601 standard, but is not currently implemented in this parser. - - Supported time zone offset formats are: - - - `Z` (UTC) - - `±HH:MM` - - `±HHMM` - - `±HH` - - Offsets will be represented as :class:`dateutil.tz.tzoffset` objects, - with the exception of UTC, which will be represented as - :class:`dateutil.tz.tzutc`. Time zone offsets equivalent to UTC (such - as `+00:00`) will also be represented as :class:`dateutil.tz.tzutc`. - - :param dt_str: - A string or stream containing only an ISO-8601 datetime string - - :return: - Returns a :class:`datetime.datetime` representing the string. - Unspecified components default to their lowest value. - - .. warning:: - - As of version 2.7.0, the strictness of the parser should not be - considered a stable part of the contract. Any valid ISO-8601 string - that parses correctly with the default settings will continue to - parse correctly in future versions, but invalid strings that - currently fail (e.g. ``2017-01-01T00:00+00:00:00``) are not - guaranteed to continue failing in future versions if they encode - a valid date. - - .. versionadded:: 2.7.0 - """ - components, pos = self._parse_isodate(dt_str) - - if len(dt_str) > pos: - if self._sep is None or dt_str[pos:pos + 1] == self._sep: - components += self._parse_isotime(dt_str[pos + 1:]) - else: - raise ValueError('String contains unknown ISO components') - - if len(components) > 3 and components[3] == 24: - components[3] = 0 - return datetime(*components) + timedelta(days=1) - - return datetime(*components) - - @_takes_ascii - def parse_isodate(self, datestr): - """ - Parse the date portion of an ISO string. - - :param datestr: - The string portion of an ISO string, without a separator - - :return: - Returns a :class:`datetime.date` object - """ - components, pos = self._parse_isodate(datestr) - if pos < len(datestr): - raise ValueError('String contains unknown ISO ' + - 'components: {}'.format(datestr)) - return date(*components) - - @_takes_ascii - def parse_isotime(self, timestr): - """ - Parse the time portion of an ISO string. 
- - :param timestr: - The time portion of an ISO string, without a separator - - :return: - Returns a :class:`datetime.time` object - """ - components = self._parse_isotime(timestr) - if components[0] == 24: - components[0] = 0 - return time(*components) - - @_takes_ascii - def parse_tzstr(self, tzstr, zero_as_utc=True): - """ - Parse a valid ISO time zone string. - - See :func:`isoparser.isoparse` for details on supported formats. - - :param tzstr: - A string representing an ISO time zone offset - - :param zero_as_utc: - Whether to return :class:`dateutil.tz.tzutc` for zero-offset zones - - :return: - Returns :class:`dateutil.tz.tzoffset` for offsets and - :class:`dateutil.tz.tzutc` for ``Z`` and (if ``zero_as_utc`` is - specified) offsets equivalent to UTC. - """ - return self._parse_tzstr(tzstr, zero_as_utc=zero_as_utc) - - # Constants - _DATE_SEP = b'-' - _TIME_SEP = b':' - _FRACTION_REGEX = re.compile(b'[\\.,]([0-9]+)') - - def _parse_isodate(self, dt_str): - try: - return self._parse_isodate_common(dt_str) - except ValueError: - return self._parse_isodate_uncommon(dt_str) - - def _parse_isodate_common(self, dt_str): - len_str = len(dt_str) - components = [1, 1, 1] - - if len_str < 4: - raise ValueError('ISO string too short') - - # Year - components[0] = int(dt_str[0:4]) - pos = 4 - if pos >= len_str: - return components, pos - - has_sep = dt_str[pos:pos + 1] == self._DATE_SEP - if has_sep: - pos += 1 - - # Month - if len_str - pos < 2: - raise ValueError('Invalid common month') - - components[1] = int(dt_str[pos:pos + 2]) - pos += 2 - - if pos >= len_str: - if has_sep: - return components, pos - else: - raise ValueError('Invalid ISO format') - - if has_sep: - if dt_str[pos:pos + 1] != self._DATE_SEP: - raise ValueError('Invalid separator in ISO string') - pos += 1 - - # Day - if len_str - pos < 2: - raise ValueError('Invalid common day') - components[2] = int(dt_str[pos:pos + 2]) - return components, pos + 2 - - def _parse_isodate_uncommon(self, dt_str): - if len(dt_str) < 4: - raise ValueError('ISO string too short') - - # All ISO formats start with the year - year = int(dt_str[0:4]) - - has_sep = dt_str[4:5] == self._DATE_SEP - - pos = 4 + has_sep # Skip '-' if it's there - if dt_str[pos:pos + 1] == b'W': - # YYYY-?Www-?D? - pos += 1 - weekno = int(dt_str[pos:pos + 2]) - pos += 2 - - dayno = 1 - if len(dt_str) > pos: - if (dt_str[pos:pos + 1] == self._DATE_SEP) != has_sep: - raise ValueError('Inconsistent use of dash separator') - - pos += has_sep - - dayno = int(dt_str[pos:pos + 1]) - pos += 1 - - base_date = self._calculate_weekdate(year, weekno, dayno) - else: - # YYYYDDD or YYYY-DDD - if len(dt_str) - pos < 3: - raise ValueError('Invalid ordinal day') - - ordinal_day = int(dt_str[pos:pos + 3]) - pos += 3 - - if ordinal_day < 1 or ordinal_day > (365 + calendar.isleap(year)): - raise ValueError('Invalid ordinal day' + - ' {} for year {}'.format(ordinal_day, year)) - - base_date = date(year, 1, 1) + timedelta(days=ordinal_day - 1) - - components = [base_date.year, base_date.month, base_date.day] - return components, pos - - def _calculate_weekdate(self, year, week, day): - """ - Calculate the day of corresponding to the ISO year-week-day calendar. - - This function is effectively the inverse of - :func:`datetime.date.isocalendar`. 
- - :param year: - The year in the ISO calendar - - :param week: - The week in the ISO calendar - range is [1, 53] - - :param day: - The day in the ISO calendar - range is [1 (MON), 7 (SUN)] - - :return: - Returns a :class:`datetime.date` - """ - if not 0 < week < 54: - raise ValueError('Invalid week: {}'.format(week)) - - if not 0 < day < 8: # Range is 1-7 - raise ValueError('Invalid weekday: {}'.format(day)) - - # Get week 1 for the specific year: - jan_4 = date(year, 1, 4) # Week 1 always has January 4th in it - week_1 = jan_4 - timedelta(days=jan_4.isocalendar()[2] - 1) - - # Now add the specific number of weeks and days to get what we want - week_offset = (week - 1) * 7 + (day - 1) - return week_1 + timedelta(days=week_offset) - - def _parse_isotime(self, timestr): - len_str = len(timestr) - components = [0, 0, 0, 0, None] - pos = 0 - comp = -1 - - if len(timestr) < 2: - raise ValueError('ISO time too short') - - has_sep = len_str >= 3 and timestr[2:3] == self._TIME_SEP - - while pos < len_str and comp < 5: - comp += 1 - - if timestr[pos:pos + 1] in b'-+Zz': - # Detect time zone boundary - components[-1] = self._parse_tzstr(timestr[pos:]) - pos = len_str - break - - if comp < 3: - # Hour, minute, second - components[comp] = int(timestr[pos:pos + 2]) - pos += 2 - if (has_sep and pos < len_str and - timestr[pos:pos + 1] == self._TIME_SEP): - pos += 1 - - if comp == 3: - # Fraction of a second - frac = self._FRACTION_REGEX.match(timestr[pos:]) - if not frac: - continue - - us_str = frac.group(1)[:6] # Truncate to microseconds - components[comp] = int(us_str) * 10**(6 - len(us_str)) - pos += len(frac.group()) - - if pos < len_str: - raise ValueError('Unused components in ISO string') - - if components[0] == 24: - # Standard supports 00:00 and 24:00 as representations of midnight - if any(component != 0 for component in components[1:4]): - raise ValueError('Hour may only be 24 at 24:00:00.000') - - return components - - def _parse_tzstr(self, tzstr, zero_as_utc=True): - if tzstr == b'Z' or tzstr == b'z': - return tz.tzutc() - - if len(tzstr) not in {3, 5, 6}: - raise ValueError('Time zone offset must be 1, 3, 5 or 6 characters') - - if tzstr[0:1] == b'-': - mult = -1 - elif tzstr[0:1] == b'+': - mult = 1 - else: - raise ValueError('Time zone offset requires sign') - - hours = int(tzstr[1:3]) - if len(tzstr) == 3: - minutes = 0 - else: - minutes = int(tzstr[(4 if tzstr[3:4] == self._TIME_SEP else 3):]) - - if zero_as_utc and hours == 0 and minutes == 0: - return tz.tzutc() - else: - if minutes > 59: - raise ValueError('Invalid minutes in time zone offset') - - if hours > 23: - raise ValueError('Invalid hours in time zone offset') - - return tz.tzoffset(None, mult * (hours * 60 + minutes) * 60) - - -DEFAULT_ISOPARSER = isoparser() -isoparse = DEFAULT_ISOPARSER.isoparse diff --git a/venv/lib/python3.6/site-packages/dateutil/relativedelta.py b/venv/lib/python3.6/site-packages/dateutil/relativedelta.py deleted file mode 100644 index c65c66e..0000000 --- a/venv/lib/python3.6/site-packages/dateutil/relativedelta.py +++ /dev/null @@ -1,599 +0,0 @@ -# -*- coding: utf-8 -*- -import datetime -import calendar - -import operator -from math import copysign - -from six import integer_types -from warnings import warn - -from ._common import weekday - -MO, TU, WE, TH, FR, SA, SU = weekdays = tuple(weekday(x) for x in range(7)) - -__all__ = ["relativedelta", "MO", "TU", "WE", "TH", "FR", "SA", "SU"] - - -class relativedelta(object): - """ - The relativedelta type is designed to be applied to an existing 
datetime and - can replace specific components of that datetime, or represents an interval - of time. - - It is based on the specification of the excellent work done by M.-A. Lemburg - in his - `mx.DateTime `_ extension. - However, notice that this type does *NOT* implement the same algorithm as - his work. Do *NOT* expect it to behave like mx.DateTime's counterpart. - - There are two different ways to build a relativedelta instance. The - first one is passing it two date/datetime classes:: - - relativedelta(datetime1, datetime2) - - The second one is passing it any number of the following keyword arguments:: - - relativedelta(arg1=x,arg2=y,arg3=z...) - - year, month, day, hour, minute, second, microsecond: - Absolute information (argument is singular); adding or subtracting a - relativedelta with absolute information does not perform an arithmetic - operation, but rather REPLACES the corresponding value in the - original datetime with the value(s) in relativedelta. - - years, months, weeks, days, hours, minutes, seconds, microseconds: - Relative information, may be negative (argument is plural); adding - or subtracting a relativedelta with relative information performs - the corresponding aritmetic operation on the original datetime value - with the information in the relativedelta. - - weekday: - One of the weekday instances (MO, TU, etc) available in the - relativedelta module. These instances may receive a parameter N, - specifying the Nth weekday, which could be positive or negative - (like MO(+1) or MO(-2)). Not specifying it is the same as specifying - +1. You can also use an integer, where 0=MO. This argument is always - relative e.g. if the calculated date is already Monday, using MO(1) - or MO(-1) won't change the day. To effectively make it absolute, use - it in combination with the day argument (e.g. day=1, MO(1) for first - Monday of the month). - - leapdays: - Will add given days to the date found, if year is a leap - year, and the date found is post 28 of february. - - yearday, nlyearday: - Set the yearday or the non-leap year day (jump leap days). - These are converted to day/month/leapdays information. - - There are relative and absolute forms of the keyword - arguments. The plural is relative, and the singular is - absolute. For each argument in the order below, the absolute form - is applied first (by setting each attribute to that value) and - then the relative form (by adding the value to the attribute). - - The order of attributes considered when this relativedelta is - added to a datetime is: - - 1. Year - 2. Month - 3. Day - 4. Hours - 5. Minutes - 6. Seconds - 7. Microseconds - - Finally, weekday is applied, using the rule described above. - - For example - - >>> from datetime import datetime - >>> from dateutil.relativedelta import relativedelta, MO - >>> dt = datetime(2018, 4, 9, 13, 37, 0) - >>> delta = relativedelta(hours=25, day=1, weekday=MO(1)) - >>> dt + delta - datetime.datetime(2018, 4, 2, 14, 37) - - First, the day is set to 1 (the first of the month), then 25 hours - are added, to get to the 2nd day and 14th hour, finally the - weekday is applied, but since the 2nd is already a Monday there is - no effect. - - """ - - def __init__(self, dt1=None, dt2=None, - years=0, months=0, days=0, leapdays=0, weeks=0, - hours=0, minutes=0, seconds=0, microseconds=0, - year=None, month=None, day=None, weekday=None, - yearday=None, nlyearday=None, - hour=None, minute=None, second=None, microsecond=None): - - if dt1 and dt2: - # datetime is a subclass of date. 
So both must be date - if not (isinstance(dt1, datetime.date) and - isinstance(dt2, datetime.date)): - raise TypeError("relativedelta only diffs datetime/date") - - # We allow two dates, or two datetimes, so we coerce them to be - # of the same type - if (isinstance(dt1, datetime.datetime) != - isinstance(dt2, datetime.datetime)): - if not isinstance(dt1, datetime.datetime): - dt1 = datetime.datetime.fromordinal(dt1.toordinal()) - elif not isinstance(dt2, datetime.datetime): - dt2 = datetime.datetime.fromordinal(dt2.toordinal()) - - self.years = 0 - self.months = 0 - self.days = 0 - self.leapdays = 0 - self.hours = 0 - self.minutes = 0 - self.seconds = 0 - self.microseconds = 0 - self.year = None - self.month = None - self.day = None - self.weekday = None - self.hour = None - self.minute = None - self.second = None - self.microsecond = None - self._has_time = 0 - - # Get year / month delta between the two - months = (dt1.year - dt2.year) * 12 + (dt1.month - dt2.month) - self._set_months(months) - - # Remove the year/month delta so the timedelta is just well-defined - # time units (seconds, days and microseconds) - dtm = self.__radd__(dt2) - - # If we've overshot our target, make an adjustment - if dt1 < dt2: - compare = operator.gt - increment = 1 - else: - compare = operator.lt - increment = -1 - - while compare(dt1, dtm): - months += increment - self._set_months(months) - dtm = self.__radd__(dt2) - - # Get the timedelta between the "months-adjusted" date and dt1 - delta = dt1 - dtm - self.seconds = delta.seconds + delta.days * 86400 - self.microseconds = delta.microseconds - else: - # Check for non-integer values in integer-only quantities - if any(x is not None and x != int(x) for x in (years, months)): - raise ValueError("Non-integer years and months are " - "ambiguous and not currently supported.") - - # Relative information - self.years = int(years) - self.months = int(months) - self.days = days + weeks * 7 - self.leapdays = leapdays - self.hours = hours - self.minutes = minutes - self.seconds = seconds - self.microseconds = microseconds - - # Absolute information - self.year = year - self.month = month - self.day = day - self.hour = hour - self.minute = minute - self.second = second - self.microsecond = microsecond - - if any(x is not None and int(x) != x - for x in (year, month, day, hour, - minute, second, microsecond)): - # For now we'll deprecate floats - later it'll be an error. - warn("Non-integer value passed as absolute information. 
" + - "This is not a well-defined condition and will raise " + - "errors in future versions.", DeprecationWarning) - - if isinstance(weekday, integer_types): - self.weekday = weekdays[weekday] - else: - self.weekday = weekday - - yday = 0 - if nlyearday: - yday = nlyearday - elif yearday: - yday = yearday - if yearday > 59: - self.leapdays = -1 - if yday: - ydayidx = [31, 59, 90, 120, 151, 181, 212, - 243, 273, 304, 334, 366] - for idx, ydays in enumerate(ydayidx): - if yday <= ydays: - self.month = idx+1 - if idx == 0: - self.day = yday - else: - self.day = yday-ydayidx[idx-1] - break - else: - raise ValueError("invalid year day (%d)" % yday) - - self._fix() - - def _fix(self): - if abs(self.microseconds) > 999999: - s = _sign(self.microseconds) - div, mod = divmod(self.microseconds * s, 1000000) - self.microseconds = mod * s - self.seconds += div * s - if abs(self.seconds) > 59: - s = _sign(self.seconds) - div, mod = divmod(self.seconds * s, 60) - self.seconds = mod * s - self.minutes += div * s - if abs(self.minutes) > 59: - s = _sign(self.minutes) - div, mod = divmod(self.minutes * s, 60) - self.minutes = mod * s - self.hours += div * s - if abs(self.hours) > 23: - s = _sign(self.hours) - div, mod = divmod(self.hours * s, 24) - self.hours = mod * s - self.days += div * s - if abs(self.months) > 11: - s = _sign(self.months) - div, mod = divmod(self.months * s, 12) - self.months = mod * s - self.years += div * s - if (self.hours or self.minutes or self.seconds or self.microseconds - or self.hour is not None or self.minute is not None or - self.second is not None or self.microsecond is not None): - self._has_time = 1 - else: - self._has_time = 0 - - @property - def weeks(self): - return int(self.days / 7.0) - - @weeks.setter - def weeks(self, value): - self.days = self.days - (self.weeks * 7) + value * 7 - - def _set_months(self, months): - self.months = months - if abs(self.months) > 11: - s = _sign(self.months) - div, mod = divmod(self.months * s, 12) - self.months = mod * s - self.years = div * s - else: - self.years = 0 - - def normalized(self): - """ - Return a version of this object represented entirely using integer - values for the relative attributes. - - >>> relativedelta(days=1.5, hours=2).normalized() - relativedelta(days=+1, hours=+14) - - :return: - Returns a :class:`dateutil.relativedelta.relativedelta` object. 
- """ - # Cascade remainders down (rounding each to roughly nearest microsecond) - days = int(self.days) - - hours_f = round(self.hours + 24 * (self.days - days), 11) - hours = int(hours_f) - - minutes_f = round(self.minutes + 60 * (hours_f - hours), 10) - minutes = int(minutes_f) - - seconds_f = round(self.seconds + 60 * (minutes_f - minutes), 8) - seconds = int(seconds_f) - - microseconds = round(self.microseconds + 1e6 * (seconds_f - seconds)) - - # Constructor carries overflow back up with call to _fix() - return self.__class__(years=self.years, months=self.months, - days=days, hours=hours, minutes=minutes, - seconds=seconds, microseconds=microseconds, - leapdays=self.leapdays, year=self.year, - month=self.month, day=self.day, - weekday=self.weekday, hour=self.hour, - minute=self.minute, second=self.second, - microsecond=self.microsecond) - - def __add__(self, other): - if isinstance(other, relativedelta): - return self.__class__(years=other.years + self.years, - months=other.months + self.months, - days=other.days + self.days, - hours=other.hours + self.hours, - minutes=other.minutes + self.minutes, - seconds=other.seconds + self.seconds, - microseconds=(other.microseconds + - self.microseconds), - leapdays=other.leapdays or self.leapdays, - year=(other.year if other.year is not None - else self.year), - month=(other.month if other.month is not None - else self.month), - day=(other.day if other.day is not None - else self.day), - weekday=(other.weekday if other.weekday is not None - else self.weekday), - hour=(other.hour if other.hour is not None - else self.hour), - minute=(other.minute if other.minute is not None - else self.minute), - second=(other.second if other.second is not None - else self.second), - microsecond=(other.microsecond if other.microsecond - is not None else - self.microsecond)) - if isinstance(other, datetime.timedelta): - return self.__class__(years=self.years, - months=self.months, - days=self.days + other.days, - hours=self.hours, - minutes=self.minutes, - seconds=self.seconds + other.seconds, - microseconds=self.microseconds + other.microseconds, - leapdays=self.leapdays, - year=self.year, - month=self.month, - day=self.day, - weekday=self.weekday, - hour=self.hour, - minute=self.minute, - second=self.second, - microsecond=self.microsecond) - if not isinstance(other, datetime.date): - return NotImplemented - elif self._has_time and not isinstance(other, datetime.datetime): - other = datetime.datetime.fromordinal(other.toordinal()) - year = (self.year or other.year)+self.years - month = self.month or other.month - if self.months: - assert 1 <= abs(self.months) <= 12 - month += self.months - if month > 12: - year += 1 - month -= 12 - elif month < 1: - year -= 1 - month += 12 - day = min(calendar.monthrange(year, month)[1], - self.day or other.day) - repl = {"year": year, "month": month, "day": day} - for attr in ["hour", "minute", "second", "microsecond"]: - value = getattr(self, attr) - if value is not None: - repl[attr] = value - days = self.days - if self.leapdays and month > 2 and calendar.isleap(year): - days += self.leapdays - ret = (other.replace(**repl) - + datetime.timedelta(days=days, - hours=self.hours, - minutes=self.minutes, - seconds=self.seconds, - microseconds=self.microseconds)) - if self.weekday: - weekday, nth = self.weekday.weekday, self.weekday.n or 1 - jumpdays = (abs(nth) - 1) * 7 - if nth > 0: - jumpdays += (7 - ret.weekday() + weekday) % 7 - else: - jumpdays += (ret.weekday() - weekday) % 7 - jumpdays *= -1 - ret += 
datetime.timedelta(days=jumpdays) - return ret - - def __radd__(self, other): - return self.__add__(other) - - def __rsub__(self, other): - return self.__neg__().__radd__(other) - - def __sub__(self, other): - if not isinstance(other, relativedelta): - return NotImplemented # In case the other object defines __rsub__ - return self.__class__(years=self.years - other.years, - months=self.months - other.months, - days=self.days - other.days, - hours=self.hours - other.hours, - minutes=self.minutes - other.minutes, - seconds=self.seconds - other.seconds, - microseconds=self.microseconds - other.microseconds, - leapdays=self.leapdays or other.leapdays, - year=(self.year if self.year is not None - else other.year), - month=(self.month if self.month is not None else - other.month), - day=(self.day if self.day is not None else - other.day), - weekday=(self.weekday if self.weekday is not None else - other.weekday), - hour=(self.hour if self.hour is not None else - other.hour), - minute=(self.minute if self.minute is not None else - other.minute), - second=(self.second if self.second is not None else - other.second), - microsecond=(self.microsecond if self.microsecond - is not None else - other.microsecond)) - - def __abs__(self): - return self.__class__(years=abs(self.years), - months=abs(self.months), - days=abs(self.days), - hours=abs(self.hours), - minutes=abs(self.minutes), - seconds=abs(self.seconds), - microseconds=abs(self.microseconds), - leapdays=self.leapdays, - year=self.year, - month=self.month, - day=self.day, - weekday=self.weekday, - hour=self.hour, - minute=self.minute, - second=self.second, - microsecond=self.microsecond) - - def __neg__(self): - return self.__class__(years=-self.years, - months=-self.months, - days=-self.days, - hours=-self.hours, - minutes=-self.minutes, - seconds=-self.seconds, - microseconds=-self.microseconds, - leapdays=self.leapdays, - year=self.year, - month=self.month, - day=self.day, - weekday=self.weekday, - hour=self.hour, - minute=self.minute, - second=self.second, - microsecond=self.microsecond) - - def __bool__(self): - return not (not self.years and - not self.months and - not self.days and - not self.hours and - not self.minutes and - not self.seconds and - not self.microseconds and - not self.leapdays and - self.year is None and - self.month is None and - self.day is None and - self.weekday is None and - self.hour is None and - self.minute is None and - self.second is None and - self.microsecond is None) - # Compatibility with Python 2.x - __nonzero__ = __bool__ - - def __mul__(self, other): - try: - f = float(other) - except TypeError: - return NotImplemented - - return self.__class__(years=int(self.years * f), - months=int(self.months * f), - days=int(self.days * f), - hours=int(self.hours * f), - minutes=int(self.minutes * f), - seconds=int(self.seconds * f), - microseconds=int(self.microseconds * f), - leapdays=self.leapdays, - year=self.year, - month=self.month, - day=self.day, - weekday=self.weekday, - hour=self.hour, - minute=self.minute, - second=self.second, - microsecond=self.microsecond) - - __rmul__ = __mul__ - - def __eq__(self, other): - if not isinstance(other, relativedelta): - return NotImplemented - if self.weekday or other.weekday: - if not self.weekday or not other.weekday: - return False - if self.weekday.weekday != other.weekday.weekday: - return False - n1, n2 = self.weekday.n, other.weekday.n - if n1 != n2 and not ((not n1 or n1 == 1) and (not n2 or n2 == 1)): - return False - return (self.years == other.years and - 
self.months == other.months and - self.days == other.days and - self.hours == other.hours and - self.minutes == other.minutes and - self.seconds == other.seconds and - self.microseconds == other.microseconds and - self.leapdays == other.leapdays and - self.year == other.year and - self.month == other.month and - self.day == other.day and - self.hour == other.hour and - self.minute == other.minute and - self.second == other.second and - self.microsecond == other.microsecond) - - def __hash__(self): - return hash(( - self.weekday, - self.years, - self.months, - self.days, - self.hours, - self.minutes, - self.seconds, - self.microseconds, - self.leapdays, - self.year, - self.month, - self.day, - self.hour, - self.minute, - self.second, - self.microsecond, - )) - - def __ne__(self, other): - return not self.__eq__(other) - - def __div__(self, other): - try: - reciprocal = 1 / float(other) - except TypeError: - return NotImplemented - - return self.__mul__(reciprocal) - - __truediv__ = __div__ - - def __repr__(self): - l = [] - for attr in ["years", "months", "days", "leapdays", - "hours", "minutes", "seconds", "microseconds"]: - value = getattr(self, attr) - if value: - l.append("{attr}={value:+g}".format(attr=attr, value=value)) - for attr in ["year", "month", "day", "weekday", - "hour", "minute", "second", "microsecond"]: - value = getattr(self, attr) - if value is not None: - l.append("{attr}={value}".format(attr=attr, value=repr(value))) - return "{classname}({attrs})".format(classname=self.__class__.__name__, - attrs=", ".join(l)) - - -def _sign(x): - return int(copysign(1, x)) - -# vim:ts=4:sw=4:et diff --git a/venv/lib/python3.6/site-packages/dateutil/rrule.py b/venv/lib/python3.6/site-packages/dateutil/rrule.py deleted file mode 100644 index 20a0c4a..0000000 --- a/venv/lib/python3.6/site-packages/dateutil/rrule.py +++ /dev/null @@ -1,1736 +0,0 @@ -# -*- coding: utf-8 -*- -""" -The rrule module offers a small, complete, and very fast, implementation of -the recurrence rules documented in the -`iCalendar RFC `_, -including support for caching of results. -""" -import itertools -import datetime -import calendar -import re -import sys - -try: - from math import gcd -except ImportError: - from fractions import gcd - -from six import advance_iterator, integer_types -from six.moves import _thread, range -import heapq - -from ._common import weekday as weekdaybase -from .tz import tzutc, tzlocal - -# For warning about deprecation of until and count -from warnings import warn - -__all__ = ["rrule", "rruleset", "rrulestr", - "YEARLY", "MONTHLY", "WEEKLY", "DAILY", - "HOURLY", "MINUTELY", "SECONDLY", - "MO", "TU", "WE", "TH", "FR", "SA", "SU"] - -# Every mask is 7 days longer to handle cross-year weekly periods. 
-M366MASK = tuple([1]*31+[2]*29+[3]*31+[4]*30+[5]*31+[6]*30 + - [7]*31+[8]*31+[9]*30+[10]*31+[11]*30+[12]*31+[1]*7) -M365MASK = list(M366MASK) -M29, M30, M31 = list(range(1, 30)), list(range(1, 31)), list(range(1, 32)) -MDAY366MASK = tuple(M31+M29+M31+M30+M31+M30+M31+M31+M30+M31+M30+M31+M31[:7]) -MDAY365MASK = list(MDAY366MASK) -M29, M30, M31 = list(range(-29, 0)), list(range(-30, 0)), list(range(-31, 0)) -NMDAY366MASK = tuple(M31+M29+M31+M30+M31+M30+M31+M31+M30+M31+M30+M31+M31[:7]) -NMDAY365MASK = list(NMDAY366MASK) -M366RANGE = (0, 31, 60, 91, 121, 152, 182, 213, 244, 274, 305, 335, 366) -M365RANGE = (0, 31, 59, 90, 120, 151, 181, 212, 243, 273, 304, 334, 365) -WDAYMASK = [0, 1, 2, 3, 4, 5, 6]*55 -del M29, M30, M31, M365MASK[59], MDAY365MASK[59], NMDAY365MASK[31] -MDAY365MASK = tuple(MDAY365MASK) -M365MASK = tuple(M365MASK) - -FREQNAMES = ['YEARLY', 'MONTHLY', 'WEEKLY', 'DAILY', 'HOURLY', 'MINUTELY', 'SECONDLY'] - -(YEARLY, - MONTHLY, - WEEKLY, - DAILY, - HOURLY, - MINUTELY, - SECONDLY) = list(range(7)) - -# Imported on demand. -easter = None -parser = None - - -class weekday(weekdaybase): - """ - This version of weekday does not allow n = 0. - """ - def __init__(self, wkday, n=None): - if n == 0: - raise ValueError("Can't create weekday with n==0") - - super(weekday, self).__init__(wkday, n) - - -MO, TU, WE, TH, FR, SA, SU = weekdays = tuple(weekday(x) for x in range(7)) - - -def _invalidates_cache(f): - """ - Decorator for rruleset methods which may invalidate the - cached length. - """ - def inner_func(self, *args, **kwargs): - rv = f(self, *args, **kwargs) - self._invalidate_cache() - return rv - - return inner_func - - -class rrulebase(object): - def __init__(self, cache=False): - if cache: - self._cache = [] - self._cache_lock = _thread.allocate_lock() - self._invalidate_cache() - else: - self._cache = None - self._cache_complete = False - self._len = None - - def __iter__(self): - if self._cache_complete: - return iter(self._cache) - elif self._cache is None: - return self._iter() - else: - return self._iter_cached() - - def _invalidate_cache(self): - if self._cache is not None: - self._cache = [] - self._cache_complete = False - self._cache_gen = self._iter() - - if self._cache_lock.locked(): - self._cache_lock.release() - - self._len = None - - def _iter_cached(self): - i = 0 - gen = self._cache_gen - cache = self._cache - acquire = self._cache_lock.acquire - release = self._cache_lock.release - while gen: - if i == len(cache): - acquire() - if self._cache_complete: - break - try: - for j in range(10): - cache.append(advance_iterator(gen)) - except StopIteration: - self._cache_gen = gen = None - self._cache_complete = True - break - release() - yield cache[i] - i += 1 - while i < self._len: - yield cache[i] - i += 1 - - def __getitem__(self, item): - if self._cache_complete: - return self._cache[item] - elif isinstance(item, slice): - if item.step and item.step < 0: - return list(iter(self))[item] - else: - return list(itertools.islice(self, - item.start or 0, - item.stop or sys.maxsize, - item.step or 1)) - elif item >= 0: - gen = iter(self) - try: - for i in range(item+1): - res = advance_iterator(gen) - except StopIteration: - raise IndexError - return res - else: - return list(iter(self))[item] - - def __contains__(self, item): - if self._cache_complete: - return item in self._cache - else: - for i in self: - if i == item: - return True - elif i > item: - return False - return False - - # __len__() introduces a large performance penality. 
- def count(self): - """ Returns the number of recurrences in this set. It will have go - trough the whole recurrence, if this hasn't been done before. """ - if self._len is None: - for x in self: - pass - return self._len - - def before(self, dt, inc=False): - """ Returns the last recurrence before the given datetime instance. The - inc keyword defines what happens if dt is an occurrence. With - inc=True, if dt itself is an occurrence, it will be returned. """ - if self._cache_complete: - gen = self._cache - else: - gen = self - last = None - if inc: - for i in gen: - if i > dt: - break - last = i - else: - for i in gen: - if i >= dt: - break - last = i - return last - - def after(self, dt, inc=False): - """ Returns the first recurrence after the given datetime instance. The - inc keyword defines what happens if dt is an occurrence. With - inc=True, if dt itself is an occurrence, it will be returned. """ - if self._cache_complete: - gen = self._cache - else: - gen = self - if inc: - for i in gen: - if i >= dt: - return i - else: - for i in gen: - if i > dt: - return i - return None - - def xafter(self, dt, count=None, inc=False): - """ - Generator which yields up to `count` recurrences after the given - datetime instance, equivalent to `after`. - - :param dt: - The datetime at which to start generating recurrences. - - :param count: - The maximum number of recurrences to generate. If `None` (default), - dates are generated until the recurrence rule is exhausted. - - :param inc: - If `dt` is an instance of the rule and `inc` is `True`, it is - included in the output. - - :yields: Yields a sequence of `datetime` objects. - """ - - if self._cache_complete: - gen = self._cache - else: - gen = self - - # Select the comparison function - if inc: - comp = lambda dc, dtc: dc >= dtc - else: - comp = lambda dc, dtc: dc > dtc - - # Generate dates - n = 0 - for d in gen: - if comp(d, dt): - if count is not None: - n += 1 - if n > count: - break - - yield d - - def between(self, after, before, inc=False, count=1): - """ Returns all the occurrences of the rrule between after and before. - The inc keyword defines what happens if after and/or before are - themselves occurrences. With inc=True, they will be included in the - list, if they are found in the recurrence set. """ - if self._cache_complete: - gen = self._cache - else: - gen = self - started = False - l = [] - if inc: - for i in gen: - if i > before: - break - elif not started: - if i >= after: - started = True - l.append(i) - else: - l.append(i) - else: - for i in gen: - if i >= before: - break - elif not started: - if i > after: - started = True - l.append(i) - else: - l.append(i) - return l - - -class rrule(rrulebase): - """ - That's the base of the rrule operation. It accepts all the keywords - defined in the RFC as its constructor parameters (except byday, - which was renamed to byweekday) and more. The constructor prototype is:: - - rrule(freq) - - Where freq must be one of YEARLY, MONTHLY, WEEKLY, DAILY, HOURLY, MINUTELY, - or SECONDLY. - - .. note:: - Per RFC section 3.3.10, recurrence instances falling on invalid dates - and times are ignored rather than coerced: - - Recurrence rules may generate recurrence instances with an invalid - date (e.g., February 30) or nonexistent local time (e.g., 1:30 AM - on a day where the local time is moved forward by an hour at 1:00 - AM). Such recurrence instances MUST be ignored and MUST NOT be - counted as part of the recurrence set. 
- - This can lead to possibly surprising behavior when, for example, the - start date occurs at the end of the month: - - >>> from dateutil.rrule import rrule, MONTHLY - >>> from datetime import datetime - >>> start_date = datetime(2014, 12, 31) - >>> list(rrule(freq=MONTHLY, count=4, dtstart=start_date)) - ... # doctest: +NORMALIZE_WHITESPACE - [datetime.datetime(2014, 12, 31, 0, 0), - datetime.datetime(2015, 1, 31, 0, 0), - datetime.datetime(2015, 3, 31, 0, 0), - datetime.datetime(2015, 5, 31, 0, 0)] - - Additionally, it supports the following keyword arguments: - - :param dtstart: - The recurrence start. Besides being the base for the recurrence, - missing parameters in the final recurrence instances will also be - extracted from this date. If not given, datetime.now() will be used - instead. - :param interval: - The interval between each freq iteration. For example, when using - YEARLY, an interval of 2 means once every two years, but with HOURLY, - it means once every two hours. The default interval is 1. - :param wkst: - The week start day. Must be one of the MO, TU, WE constants, or an - integer, specifying the first day of the week. This will affect - recurrences based on weekly periods. The default week start is got - from calendar.firstweekday(), and may be modified by - calendar.setfirstweekday(). - :param count: - If given, this determines how many occurrences will be generated. - - .. note:: - As of version 2.5.0, the use of the keyword ``until`` in conjunction - with ``count`` is deprecated, to make sure ``dateutil`` is fully - compliant with `RFC-5545 Sec. 3.3.10 `_. Therefore, ``until`` and ``count`` - **must not** occur in the same call to ``rrule``. - :param until: - If given, this must be a datetime instance specifying the upper-bound - limit of the recurrence. The last recurrence in the rule is the greatest - datetime that is less than or equal to the value specified in the - ``until`` parameter. - - .. note:: - As of version 2.5.0, the use of the keyword ``until`` in conjunction - with ``count`` is deprecated, to make sure ``dateutil`` is fully - compliant with `RFC-5545 Sec. 3.3.10 `_. Therefore, ``until`` and ``count`` - **must not** occur in the same call to ``rrule``. - :param bysetpos: - If given, it must be either an integer, or a sequence of integers, - positive or negative. Each given integer will specify an occurrence - number, corresponding to the nth occurrence of the rule inside the - frequency period. For example, a bysetpos of -1 if combined with a - MONTHLY frequency, and a byweekday of (MO, TU, WE, TH, FR), will - result in the last work day of every month. - :param bymonth: - If given, it must be either an integer, or a sequence of integers, - meaning the months to apply the recurrence to. - :param bymonthday: - If given, it must be either an integer, or a sequence of integers, - meaning the month days to apply the recurrence to. - :param byyearday: - If given, it must be either an integer, or a sequence of integers, - meaning the year days to apply the recurrence to. - :param byeaster: - If given, it must be either an integer, or a sequence of integers, - positive or negative. Each integer will define an offset from the - Easter Sunday. Passing the offset 0 to byeaster will yield the Easter - Sunday itself. This is an extension to the RFC specification. - :param byweekno: - If given, it must be either an integer, or a sequence of integers, - meaning the week numbers to apply the recurrence to. 
Week numbers - have the meaning described in ISO8601, that is, the first week of - the year is that containing at least four days of the new year. - :param byweekday: - If given, it must be either an integer (0 == MO), a sequence of - integers, one of the weekday constants (MO, TU, etc), or a sequence - of these constants. When given, these variables will define the - weekdays where the recurrence will be applied. It's also possible to - use an argument n for the weekday instances, which will mean the nth - occurrence of this weekday in the period. For example, with MONTHLY, - or with YEARLY and BYMONTH, using FR(+1) in byweekday will specify the - first friday of the month where the recurrence happens. Notice that in - the RFC documentation, this is specified as BYDAY, but was renamed to - avoid the ambiguity of that keyword. - :param byhour: - If given, it must be either an integer, or a sequence of integers, - meaning the hours to apply the recurrence to. - :param byminute: - If given, it must be either an integer, or a sequence of integers, - meaning the minutes to apply the recurrence to. - :param bysecond: - If given, it must be either an integer, or a sequence of integers, - meaning the seconds to apply the recurrence to. - :param cache: - If given, it must be a boolean value specifying to enable or disable - caching of results. If you will use the same rrule instance multiple - times, enabling caching will improve the performance considerably. - """ - def __init__(self, freq, dtstart=None, - interval=1, wkst=None, count=None, until=None, bysetpos=None, - bymonth=None, bymonthday=None, byyearday=None, byeaster=None, - byweekno=None, byweekday=None, - byhour=None, byminute=None, bysecond=None, - cache=False): - super(rrule, self).__init__(cache) - global easter - if not dtstart: - if until and until.tzinfo: - dtstart = datetime.datetime.now(tz=until.tzinfo).replace(microsecond=0) - else: - dtstart = datetime.datetime.now().replace(microsecond=0) - elif not isinstance(dtstart, datetime.datetime): - dtstart = datetime.datetime.fromordinal(dtstart.toordinal()) - else: - dtstart = dtstart.replace(microsecond=0) - self._dtstart = dtstart - self._tzinfo = dtstart.tzinfo - self._freq = freq - self._interval = interval - self._count = count - - # Cache the original byxxx rules, if they are provided, as the _byxxx - # attributes do not necessarily map to the inputs, and this can be - # a problem in generating the strings. Only store things if they've - # been supplied (the string retrieval will just use .get()) - self._original_rule = {} - - if until and not isinstance(until, datetime.datetime): - until = datetime.datetime.fromordinal(until.toordinal()) - self._until = until - - if self._dtstart and self._until: - if (self._dtstart.tzinfo is not None) != (self._until.tzinfo is not None): - # According to RFC5545 Section 3.3.10: - # https://tools.ietf.org/html/rfc5545#section-3.3.10 - # - # > If the "DTSTART" property is specified as a date with UTC - # > time or a date with local time and time zone reference, - # > then the UNTIL rule part MUST be specified as a date with - # > UTC time. - raise ValueError( - 'RRULE UNTIL values must be specified in UTC when DTSTART ' - 'is timezone-aware' - ) - - if count is not None and until: - warn("Using both 'count' and 'until' is inconsistent with RFC 5545" - " and has been deprecated in dateutil. 
Future versions will " - "raise an error.", DeprecationWarning) - - if wkst is None: - self._wkst = calendar.firstweekday() - elif isinstance(wkst, integer_types): - self._wkst = wkst - else: - self._wkst = wkst.weekday - - if bysetpos is None: - self._bysetpos = None - elif isinstance(bysetpos, integer_types): - if bysetpos == 0 or not (-366 <= bysetpos <= 366): - raise ValueError("bysetpos must be between 1 and 366, " - "or between -366 and -1") - self._bysetpos = (bysetpos,) - else: - self._bysetpos = tuple(bysetpos) - for pos in self._bysetpos: - if pos == 0 or not (-366 <= pos <= 366): - raise ValueError("bysetpos must be between 1 and 366, " - "or between -366 and -1") - - if self._bysetpos: - self._original_rule['bysetpos'] = self._bysetpos - - if (byweekno is None and byyearday is None and bymonthday is None and - byweekday is None and byeaster is None): - if freq == YEARLY: - if bymonth is None: - bymonth = dtstart.month - self._original_rule['bymonth'] = None - bymonthday = dtstart.day - self._original_rule['bymonthday'] = None - elif freq == MONTHLY: - bymonthday = dtstart.day - self._original_rule['bymonthday'] = None - elif freq == WEEKLY: - byweekday = dtstart.weekday() - self._original_rule['byweekday'] = None - - # bymonth - if bymonth is None: - self._bymonth = None - else: - if isinstance(bymonth, integer_types): - bymonth = (bymonth,) - - self._bymonth = tuple(sorted(set(bymonth))) - - if 'bymonth' not in self._original_rule: - self._original_rule['bymonth'] = self._bymonth - - # byyearday - if byyearday is None: - self._byyearday = None - else: - if isinstance(byyearday, integer_types): - byyearday = (byyearday,) - - self._byyearday = tuple(sorted(set(byyearday))) - self._original_rule['byyearday'] = self._byyearday - - # byeaster - if byeaster is not None: - if not easter: - from dateutil import easter - if isinstance(byeaster, integer_types): - self._byeaster = (byeaster,) - else: - self._byeaster = tuple(sorted(byeaster)) - - self._original_rule['byeaster'] = self._byeaster - else: - self._byeaster = None - - # bymonthday - if bymonthday is None: - self._bymonthday = () - self._bynmonthday = () - else: - if isinstance(bymonthday, integer_types): - bymonthday = (bymonthday,) - - bymonthday = set(bymonthday) # Ensure it's unique - - self._bymonthday = tuple(sorted(x for x in bymonthday if x > 0)) - self._bynmonthday = tuple(sorted(x for x in bymonthday if x < 0)) - - # Storing positive numbers first, then negative numbers - if 'bymonthday' not in self._original_rule: - self._original_rule['bymonthday'] = tuple( - itertools.chain(self._bymonthday, self._bynmonthday)) - - # byweekno - if byweekno is None: - self._byweekno = None - else: - if isinstance(byweekno, integer_types): - byweekno = (byweekno,) - - self._byweekno = tuple(sorted(set(byweekno))) - - self._original_rule['byweekno'] = self._byweekno - - # byweekday / bynweekday - if byweekday is None: - self._byweekday = None - self._bynweekday = None - else: - # If it's one of the valid non-sequence types, convert to a - # single-element sequence before the iterator that builds the - # byweekday set. 
- if isinstance(byweekday, integer_types) or hasattr(byweekday, "n"): - byweekday = (byweekday,) - - self._byweekday = set() - self._bynweekday = set() - for wday in byweekday: - if isinstance(wday, integer_types): - self._byweekday.add(wday) - elif not wday.n or freq > MONTHLY: - self._byweekday.add(wday.weekday) - else: - self._bynweekday.add((wday.weekday, wday.n)) - - if not self._byweekday: - self._byweekday = None - elif not self._bynweekday: - self._bynweekday = None - - if self._byweekday is not None: - self._byweekday = tuple(sorted(self._byweekday)) - orig_byweekday = [weekday(x) for x in self._byweekday] - else: - orig_byweekday = () - - if self._bynweekday is not None: - self._bynweekday = tuple(sorted(self._bynweekday)) - orig_bynweekday = [weekday(*x) for x in self._bynweekday] - else: - orig_bynweekday = () - - if 'byweekday' not in self._original_rule: - self._original_rule['byweekday'] = tuple(itertools.chain( - orig_byweekday, orig_bynweekday)) - - # byhour - if byhour is None: - if freq < HOURLY: - self._byhour = {dtstart.hour} - else: - self._byhour = None - else: - if isinstance(byhour, integer_types): - byhour = (byhour,) - - if freq == HOURLY: - self._byhour = self.__construct_byset(start=dtstart.hour, - byxxx=byhour, - base=24) - else: - self._byhour = set(byhour) - - self._byhour = tuple(sorted(self._byhour)) - self._original_rule['byhour'] = self._byhour - - # byminute - if byminute is None: - if freq < MINUTELY: - self._byminute = {dtstart.minute} - else: - self._byminute = None - else: - if isinstance(byminute, integer_types): - byminute = (byminute,) - - if freq == MINUTELY: - self._byminute = self.__construct_byset(start=dtstart.minute, - byxxx=byminute, - base=60) - else: - self._byminute = set(byminute) - - self._byminute = tuple(sorted(self._byminute)) - self._original_rule['byminute'] = self._byminute - - # bysecond - if bysecond is None: - if freq < SECONDLY: - self._bysecond = ((dtstart.second,)) - else: - self._bysecond = None - else: - if isinstance(bysecond, integer_types): - bysecond = (bysecond,) - - self._bysecond = set(bysecond) - - if freq == SECONDLY: - self._bysecond = self.__construct_byset(start=dtstart.second, - byxxx=bysecond, - base=60) - else: - self._bysecond = set(bysecond) - - self._bysecond = tuple(sorted(self._bysecond)) - self._original_rule['bysecond'] = self._bysecond - - if self._freq >= HOURLY: - self._timeset = None - else: - self._timeset = [] - for hour in self._byhour: - for minute in self._byminute: - for second in self._bysecond: - self._timeset.append( - datetime.time(hour, minute, second, - tzinfo=self._tzinfo)) - self._timeset.sort() - self._timeset = tuple(self._timeset) - - def __str__(self): - """ - Output a string that would generate this RRULE if passed to rrulestr. - This is mostly compatible with RFC5545, except for the - dateutil-specific extension BYEASTER. 
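# --- Illustrative sketch (not part of the original deleted file): the
# serialisation described in the docstring above turns a rule back into
# DTSTART/RRULE text. A minimal example, assuming python-dateutil is
# installed:
from datetime import datetime
from dateutil.rrule import rrule, DAILY

rule = rrule(DAILY, count=3, dtstart=datetime(2019, 1, 1))
print(str(rule))
# DTSTART:20190101T000000
# RRULE:FREQ=DAILY;COUNT=3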
- """ - - output = [] - h, m, s = [None] * 3 - if self._dtstart: - output.append(self._dtstart.strftime('DTSTART:%Y%m%dT%H%M%S')) - h, m, s = self._dtstart.timetuple()[3:6] - - parts = ['FREQ=' + FREQNAMES[self._freq]] - if self._interval != 1: - parts.append('INTERVAL=' + str(self._interval)) - - if self._wkst: - parts.append('WKST=' + repr(weekday(self._wkst))[0:2]) - - if self._count is not None: - parts.append('COUNT=' + str(self._count)) - - if self._until: - parts.append(self._until.strftime('UNTIL=%Y%m%dT%H%M%S')) - - if self._original_rule.get('byweekday') is not None: - # The str() method on weekday objects doesn't generate - # RFC5545-compliant strings, so we should modify that. - original_rule = dict(self._original_rule) - wday_strings = [] - for wday in original_rule['byweekday']: - if wday.n: - wday_strings.append('{n:+d}{wday}'.format( - n=wday.n, - wday=repr(wday)[0:2])) - else: - wday_strings.append(repr(wday)) - - original_rule['byweekday'] = wday_strings - else: - original_rule = self._original_rule - - partfmt = '{name}={vals}' - for name, key in [('BYSETPOS', 'bysetpos'), - ('BYMONTH', 'bymonth'), - ('BYMONTHDAY', 'bymonthday'), - ('BYYEARDAY', 'byyearday'), - ('BYWEEKNO', 'byweekno'), - ('BYDAY', 'byweekday'), - ('BYHOUR', 'byhour'), - ('BYMINUTE', 'byminute'), - ('BYSECOND', 'bysecond'), - ('BYEASTER', 'byeaster')]: - value = original_rule.get(key) - if value: - parts.append(partfmt.format(name=name, vals=(','.join(str(v) - for v in value)))) - - output.append('RRULE:' + ';'.join(parts)) - return '\n'.join(output) - - def replace(self, **kwargs): - """Return new rrule with same attributes except for those attributes given new - values by whichever keyword arguments are specified.""" - new_kwargs = {"interval": self._interval, - "count": self._count, - "dtstart": self._dtstart, - "freq": self._freq, - "until": self._until, - "wkst": self._wkst, - "cache": False if self._cache is None else True } - new_kwargs.update(self._original_rule) - new_kwargs.update(kwargs) - return rrule(**new_kwargs) - - def _iter(self): - year, month, day, hour, minute, second, weekday, yearday, _ = \ - self._dtstart.timetuple() - - # Some local variables to speed things up a bit - freq = self._freq - interval = self._interval - wkst = self._wkst - until = self._until - bymonth = self._bymonth - byweekno = self._byweekno - byyearday = self._byyearday - byweekday = self._byweekday - byeaster = self._byeaster - bymonthday = self._bymonthday - bynmonthday = self._bynmonthday - bysetpos = self._bysetpos - byhour = self._byhour - byminute = self._byminute - bysecond = self._bysecond - - ii = _iterinfo(self) - ii.rebuild(year, month) - - getdayset = {YEARLY: ii.ydayset, - MONTHLY: ii.mdayset, - WEEKLY: ii.wdayset, - DAILY: ii.ddayset, - HOURLY: ii.ddayset, - MINUTELY: ii.ddayset, - SECONDLY: ii.ddayset}[freq] - - if freq < HOURLY: - timeset = self._timeset - else: - gettimeset = {HOURLY: ii.htimeset, - MINUTELY: ii.mtimeset, - SECONDLY: ii.stimeset}[freq] - if ((freq >= HOURLY and - self._byhour and hour not in self._byhour) or - (freq >= MINUTELY and - self._byminute and minute not in self._byminute) or - (freq >= SECONDLY and - self._bysecond and second not in self._bysecond)): - timeset = () - else: - timeset = gettimeset(hour, minute, second) - - total = 0 - count = self._count - while True: - # Get dayset with the right frequency - dayset, start, end = getdayset(year, month, day) - - # Do the "hard" work ;-) - filtered = False - for i in dayset[start:end]: - if ((bymonth and ii.mmask[i] not in 
bymonth) or - (byweekno and not ii.wnomask[i]) or - (byweekday and ii.wdaymask[i] not in byweekday) or - (ii.nwdaymask and not ii.nwdaymask[i]) or - (byeaster and not ii.eastermask[i]) or - ((bymonthday or bynmonthday) and - ii.mdaymask[i] not in bymonthday and - ii.nmdaymask[i] not in bynmonthday) or - (byyearday and - ((i < ii.yearlen and i+1 not in byyearday and - -ii.yearlen+i not in byyearday) or - (i >= ii.yearlen and i+1-ii.yearlen not in byyearday and - -ii.nextyearlen+i-ii.yearlen not in byyearday)))): - dayset[i] = None - filtered = True - - # Output results - if bysetpos and timeset: - poslist = [] - for pos in bysetpos: - if pos < 0: - daypos, timepos = divmod(pos, len(timeset)) - else: - daypos, timepos = divmod(pos-1, len(timeset)) - try: - i = [x for x in dayset[start:end] - if x is not None][daypos] - time = timeset[timepos] - except IndexError: - pass - else: - date = datetime.date.fromordinal(ii.yearordinal+i) - res = datetime.datetime.combine(date, time) - if res not in poslist: - poslist.append(res) - poslist.sort() - for res in poslist: - if until and res > until: - self._len = total - return - elif res >= self._dtstart: - if count is not None: - count -= 1 - if count < 0: - self._len = total - return - total += 1 - yield res - else: - for i in dayset[start:end]: - if i is not None: - date = datetime.date.fromordinal(ii.yearordinal + i) - for time in timeset: - res = datetime.datetime.combine(date, time) - if until and res > until: - self._len = total - return - elif res >= self._dtstart: - if count is not None: - count -= 1 - if count < 0: - self._len = total - return - - total += 1 - yield res - - # Handle frequency and interval - fixday = False - if freq == YEARLY: - year += interval - if year > datetime.MAXYEAR: - self._len = total - return - ii.rebuild(year, month) - elif freq == MONTHLY: - month += interval - if month > 12: - div, mod = divmod(month, 12) - month = mod - year += div - if month == 0: - month = 12 - year -= 1 - if year > datetime.MAXYEAR: - self._len = total - return - ii.rebuild(year, month) - elif freq == WEEKLY: - if wkst > weekday: - day += -(weekday+1+(6-wkst))+self._interval*7 - else: - day += -(weekday-wkst)+self._interval*7 - weekday = wkst - fixday = True - elif freq == DAILY: - day += interval - fixday = True - elif freq == HOURLY: - if filtered: - # Jump to one iteration before next day - hour += ((23-hour)//interval)*interval - - if byhour: - ndays, hour = self.__mod_distance(value=hour, - byxxx=self._byhour, - base=24) - else: - ndays, hour = divmod(hour+interval, 24) - - if ndays: - day += ndays - fixday = True - - timeset = gettimeset(hour, minute, second) - elif freq == MINUTELY: - if filtered: - # Jump to one iteration before next day - minute += ((1439-(hour*60+minute))//interval)*interval - - valid = False - rep_rate = (24*60) - for j in range(rep_rate // gcd(interval, rep_rate)): - if byminute: - nhours, minute = \ - self.__mod_distance(value=minute, - byxxx=self._byminute, - base=60) - else: - nhours, minute = divmod(minute+interval, 60) - - div, hour = divmod(hour+nhours, 24) - if div: - day += div - fixday = True - filtered = False - - if not byhour or hour in byhour: - valid = True - break - - if not valid: - raise ValueError('Invalid combination of interval and ' + - 'byhour resulting in empty rule.') - - timeset = gettimeset(hour, minute, second) - elif freq == SECONDLY: - if filtered: - # Jump to one iteration before next day - second += (((86399 - (hour * 3600 + minute * 60 + second)) - // interval) * interval) - - 
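# --- Illustrative sketch (not part of the original deleted file): the
# __construct_byset helper documented further below drops BYXXX values
# that can never be reached from dtstart with the given interval, and
# raises ValueError if nothing is left. A minimal example with made-up
# times, assuming python-dateutil is installed:
from datetime import datetime
from dateutil.rrule import rrule, HOURLY

start = datetime(2019, 1, 1, 17)   # 17:00, stepping in 4-hour intervals
reachable = rrule(HOURLY, interval=4, byhour=21, count=2, dtstart=start)
print(list(reachable))             # 2019-01-01 21:00 and 2019-01-02 21:00

try:
    rrule(HOURLY, interval=4, byhour=18, dtstart=start)
except ValueError as exc:
    print(exc)                     # 18:00 is unreachable from 17:00 in 4h steps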
rep_rate = (24 * 3600) - valid = False - for j in range(0, rep_rate // gcd(interval, rep_rate)): - if bysecond: - nminutes, second = \ - self.__mod_distance(value=second, - byxxx=self._bysecond, - base=60) - else: - nminutes, second = divmod(second+interval, 60) - - div, minute = divmod(minute+nminutes, 60) - if div: - hour += div - div, hour = divmod(hour, 24) - if div: - day += div - fixday = True - - if ((not byhour or hour in byhour) and - (not byminute or minute in byminute) and - (not bysecond or second in bysecond)): - valid = True - break - - if not valid: - raise ValueError('Invalid combination of interval, ' + - 'byhour and byminute resulting in empty' + - ' rule.') - - timeset = gettimeset(hour, minute, second) - - if fixday and day > 28: - daysinmonth = calendar.monthrange(year, month)[1] - if day > daysinmonth: - while day > daysinmonth: - day -= daysinmonth - month += 1 - if month == 13: - month = 1 - year += 1 - if year > datetime.MAXYEAR: - self._len = total - return - daysinmonth = calendar.monthrange(year, month)[1] - ii.rebuild(year, month) - - def __construct_byset(self, start, byxxx, base): - """ - If a `BYXXX` sequence is passed to the constructor at the same level as - `FREQ` (e.g. `FREQ=HOURLY,BYHOUR={2,4,7},INTERVAL=3`), there are some - specifications which cannot be reached given some starting conditions. - - This occurs whenever the interval is not coprime with the base of a - given unit and the difference between the starting position and the - ending position is not coprime with the greatest common denominator - between the interval and the base. For example, with a FREQ of hourly - starting at 17:00 and an interval of 4, the only valid values for - BYHOUR would be {21, 1, 5, 9, 13, 17}, because 4 and 24 are not - coprime. - - :param start: - Specifies the starting position. - :param byxxx: - An iterable containing the list of allowed values. - :param base: - The largest allowable value for the specified frequency (e.g. - 24 hours, 60 minutes). - - This does not preserve the type of the iterable, returning a set, since - the values should be unique and the order is irrelevant, this will - speed up later lookups. - - In the event of an empty set, raises a :exception:`ValueError`, as this - results in an empty rrule. - """ - - cset = set() - - # Support a single byxxx value. - if isinstance(byxxx, integer_types): - byxxx = (byxxx, ) - - for num in byxxx: - i_gcd = gcd(self._interval, base) - # Use divmod rather than % because we need to wrap negative nums. - if i_gcd == 1 or divmod(num - start, i_gcd)[1] == 0: - cset.add(num) - - if len(cset) == 0: - raise ValueError("Invalid rrule byxxx generates an empty set.") - - return cset - - def __mod_distance(self, value, byxxx, base): - """ - Calculates the next value in a sequence where the `FREQ` parameter is - specified along with a `BYXXX` parameter at the same "level" - (e.g. `HOURLY` specified with `BYHOUR`). - - :param value: - The old value of the component. - :param byxxx: - The `BYXXX` set, which should have been generated by - `rrule._construct_byset`, or something else which checks that a - valid rule is present. - :param base: - The largest allowable value for the specified frequency (e.g. - 24 hours, 60 minutes). - - If a valid value is not found after `base` iterations (the maximum - number before the sequence would start to repeat), this raises a - :exception:`ValueError`, as no valid values were found. 
- - This returns a tuple of `divmod(n*interval, base)`, where `n` is the - smallest number of `interval` repetitions until the next specified - value in `byxxx` is found. - """ - accumulator = 0 - for ii in range(1, base + 1): - # Using divmod() over % to account for negative intervals - div, value = divmod(value + self._interval, base) - accumulator += div - if value in byxxx: - return (accumulator, value) - - -class _iterinfo(object): - __slots__ = ["rrule", "lastyear", "lastmonth", - "yearlen", "nextyearlen", "yearordinal", "yearweekday", - "mmask", "mrange", "mdaymask", "nmdaymask", - "wdaymask", "wnomask", "nwdaymask", "eastermask"] - - def __init__(self, rrule): - for attr in self.__slots__: - setattr(self, attr, None) - self.rrule = rrule - - def rebuild(self, year, month): - # Every mask is 7 days longer to handle cross-year weekly periods. - rr = self.rrule - if year != self.lastyear: - self.yearlen = 365 + calendar.isleap(year) - self.nextyearlen = 365 + calendar.isleap(year + 1) - firstyday = datetime.date(year, 1, 1) - self.yearordinal = firstyday.toordinal() - self.yearweekday = firstyday.weekday() - - wday = datetime.date(year, 1, 1).weekday() - if self.yearlen == 365: - self.mmask = M365MASK - self.mdaymask = MDAY365MASK - self.nmdaymask = NMDAY365MASK - self.wdaymask = WDAYMASK[wday:] - self.mrange = M365RANGE - else: - self.mmask = M366MASK - self.mdaymask = MDAY366MASK - self.nmdaymask = NMDAY366MASK - self.wdaymask = WDAYMASK[wday:] - self.mrange = M366RANGE - - if not rr._byweekno: - self.wnomask = None - else: - self.wnomask = [0]*(self.yearlen+7) - # no1wkst = firstwkst = self.wdaymask.index(rr._wkst) - no1wkst = firstwkst = (7-self.yearweekday+rr._wkst) % 7 - if no1wkst >= 4: - no1wkst = 0 - # Number of days in the year, plus the days we got - # from last year. - wyearlen = self.yearlen+(self.yearweekday-rr._wkst) % 7 - else: - # Number of days in the year, minus the days we - # left in last year. - wyearlen = self.yearlen-no1wkst - div, mod = divmod(wyearlen, 7) - numweeks = div+mod//4 - for n in rr._byweekno: - if n < 0: - n += numweeks+1 - if not (0 < n <= numweeks): - continue - if n > 1: - i = no1wkst+(n-1)*7 - if no1wkst != firstwkst: - i -= 7-firstwkst - else: - i = no1wkst - for j in range(7): - self.wnomask[i] = 1 - i += 1 - if self.wdaymask[i] == rr._wkst: - break - if 1 in rr._byweekno: - # Check week number 1 of next year as well - # TODO: Check -numweeks for next year. - i = no1wkst+numweeks*7 - if no1wkst != firstwkst: - i -= 7-firstwkst - if i < self.yearlen: - # If week starts in next year, we - # don't care about it. - for j in range(7): - self.wnomask[i] = 1 - i += 1 - if self.wdaymask[i] == rr._wkst: - break - if no1wkst: - # Check last week number of last year as - # well. If no1wkst is 0, either the year - # started on week start, or week number 1 - # got days from last year, so there are no - # days from last year's last week number in - # this year. 
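# --- Illustrative sketch (not part of the original deleted file): the
# week-number masks being built here implement ISO 8601 weeks, so week 1
# can start in the previous calendar year. A minimal example, assuming
# python-dateutil is installed:
from datetime import datetime
from dateutil.rrule import rrule, YEARLY, MO

mondays_of_week_1 = rrule(YEARLY, byweekno=1, byweekday=MO, count=3,
                          dtstart=datetime(1997, 9, 2, 9, 0))
print(list(mondays_of_week_1))
# 1997-12-29 09:00, 1999-01-04 09:00, 2000-01-03 09:00 -- the first
# Monday belongs to ISO week 1 of 1998 even though it falls in 1997.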
- if -1 not in rr._byweekno: - lyearweekday = datetime.date(year-1, 1, 1).weekday() - lno1wkst = (7-lyearweekday+rr._wkst) % 7 - lyearlen = 365+calendar.isleap(year-1) - if lno1wkst >= 4: - lno1wkst = 0 - lnumweeks = 52+(lyearlen + - (lyearweekday-rr._wkst) % 7) % 7//4 - else: - lnumweeks = 52+(self.yearlen-no1wkst) % 7//4 - else: - lnumweeks = -1 - if lnumweeks in rr._byweekno: - for i in range(no1wkst): - self.wnomask[i] = 1 - - if (rr._bynweekday and (month != self.lastmonth or - year != self.lastyear)): - ranges = [] - if rr._freq == YEARLY: - if rr._bymonth: - for month in rr._bymonth: - ranges.append(self.mrange[month-1:month+1]) - else: - ranges = [(0, self.yearlen)] - elif rr._freq == MONTHLY: - ranges = [self.mrange[month-1:month+1]] - if ranges: - # Weekly frequency won't get here, so we may not - # care about cross-year weekly periods. - self.nwdaymask = [0]*self.yearlen - for first, last in ranges: - last -= 1 - for wday, n in rr._bynweekday: - if n < 0: - i = last+(n+1)*7 - i -= (self.wdaymask[i]-wday) % 7 - else: - i = first+(n-1)*7 - i += (7-self.wdaymask[i]+wday) % 7 - if first <= i <= last: - self.nwdaymask[i] = 1 - - if rr._byeaster: - self.eastermask = [0]*(self.yearlen+7) - eyday = easter.easter(year).toordinal()-self.yearordinal - for offset in rr._byeaster: - self.eastermask[eyday+offset] = 1 - - self.lastyear = year - self.lastmonth = month - - def ydayset(self, year, month, day): - return list(range(self.yearlen)), 0, self.yearlen - - def mdayset(self, year, month, day): - dset = [None]*self.yearlen - start, end = self.mrange[month-1:month+1] - for i in range(start, end): - dset[i] = i - return dset, start, end - - def wdayset(self, year, month, day): - # We need to handle cross-year weeks here. - dset = [None]*(self.yearlen+7) - i = datetime.date(year, month, day).toordinal()-self.yearordinal - start = i - for j in range(7): - dset[i] = i - i += 1 - # if (not (0 <= i < self.yearlen) or - # self.wdaymask[i] == self.rrule._wkst): - # This will cross the year boundary, if necessary. - if self.wdaymask[i] == self.rrule._wkst: - break - return dset, start, i - - def ddayset(self, year, month, day): - dset = [None] * self.yearlen - i = datetime.date(year, month, day).toordinal() - self.yearordinal - dset[i] = i - return dset, i, i + 1 - - def htimeset(self, hour, minute, second): - tset = [] - rr = self.rrule - for minute in rr._byminute: - for second in rr._bysecond: - tset.append(datetime.time(hour, minute, second, - tzinfo=rr._tzinfo)) - tset.sort() - return tset - - def mtimeset(self, hour, minute, second): - tset = [] - rr = self.rrule - for second in rr._bysecond: - tset.append(datetime.time(hour, minute, second, tzinfo=rr._tzinfo)) - tset.sort() - return tset - - def stimeset(self, hour, minute, second): - return (datetime.time(hour, minute, second, - tzinfo=self.rrule._tzinfo),) - - -class rruleset(rrulebase): - """ The rruleset type allows more complex recurrence setups, mixing - multiple rules, dates, exclusion rules, and exclusion dates. The type - constructor takes the following keyword arguments: - - :param cache: If True, caching of results will be enabled, improving - performance of multiple queries considerably. 
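# --- Illustrative sketch (not part of the original deleted file): a
# minimal rruleset combining an inclusion rule with an exclusion rule,
# plus the equivalent RFC 5545 text handled by the rrulestr parser
# further down in this file. Assumes python-dateutil is installed;
# dates are made up:
from datetime import datetime
from dateutil.rrule import rrule, rruleset, rrulestr, DAILY, WEEKLY, MO, TU

rset = rruleset()
rset.rrule(rrule(DAILY, count=7, dtstart=datetime(2019, 8, 19)))
rset.exrule(rrule(WEEKLY, byweekday=(MO, TU), dtstart=datetime(2019, 8, 19)))
print(list(rset))          # 2019-08-21 .. 2019-08-25 (Mondays/Tuesdays removed)

parsed = rrulestr('RRULE:FREQ=DAILY;COUNT=7', dtstart=datetime(2019, 8, 19))
print(list(parsed)[:2])    # 2019-08-19 and 2019-08-20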
""" - - class _genitem(object): - def __init__(self, genlist, gen): - try: - self.dt = advance_iterator(gen) - genlist.append(self) - except StopIteration: - pass - self.genlist = genlist - self.gen = gen - - def __next__(self): - try: - self.dt = advance_iterator(self.gen) - except StopIteration: - if self.genlist[0] is self: - heapq.heappop(self.genlist) - else: - self.genlist.remove(self) - heapq.heapify(self.genlist) - - next = __next__ - - def __lt__(self, other): - return self.dt < other.dt - - def __gt__(self, other): - return self.dt > other.dt - - def __eq__(self, other): - return self.dt == other.dt - - def __ne__(self, other): - return self.dt != other.dt - - def __init__(self, cache=False): - super(rruleset, self).__init__(cache) - self._rrule = [] - self._rdate = [] - self._exrule = [] - self._exdate = [] - - @_invalidates_cache - def rrule(self, rrule): - """ Include the given :py:class:`rrule` instance in the recurrence set - generation. """ - self._rrule.append(rrule) - - @_invalidates_cache - def rdate(self, rdate): - """ Include the given :py:class:`datetime` instance in the recurrence - set generation. """ - self._rdate.append(rdate) - - @_invalidates_cache - def exrule(self, exrule): - """ Include the given rrule instance in the recurrence set exclusion - list. Dates which are part of the given recurrence rules will not - be generated, even if some inclusive rrule or rdate matches them. - """ - self._exrule.append(exrule) - - @_invalidates_cache - def exdate(self, exdate): - """ Include the given datetime instance in the recurrence set - exclusion list. Dates included that way will not be generated, - even if some inclusive rrule or rdate matches them. """ - self._exdate.append(exdate) - - def _iter(self): - rlist = [] - self._rdate.sort() - self._genitem(rlist, iter(self._rdate)) - for gen in [iter(x) for x in self._rrule]: - self._genitem(rlist, gen) - exlist = [] - self._exdate.sort() - self._genitem(exlist, iter(self._exdate)) - for gen in [iter(x) for x in self._exrule]: - self._genitem(exlist, gen) - lastdt = None - total = 0 - heapq.heapify(rlist) - heapq.heapify(exlist) - while rlist: - ritem = rlist[0] - if not lastdt or lastdt != ritem.dt: - while exlist and exlist[0] < ritem: - exitem = exlist[0] - advance_iterator(exitem) - if exlist and exlist[0] is exitem: - heapq.heapreplace(exlist, exitem) - if not exlist or ritem != exlist[0]: - total += 1 - yield ritem.dt - lastdt = ritem.dt - advance_iterator(ritem) - if rlist and rlist[0] is ritem: - heapq.heapreplace(rlist, ritem) - self._len = total - - - - -class _rrulestr(object): - """ Parses a string representation of a recurrence rule or set of - recurrence rules. - - :param s: - Required, a string defining one or more recurrence rules. - - :param dtstart: - If given, used as the default recurrence start if not specified in the - rule string. - - :param cache: - If set ``True`` caching of results will be enabled, improving - performance of multiple queries considerably. - - :param unfold: - If set ``True`` indicates that a rule string is split over more - than one line and should be joined before processing. - - :param forceset: - If set ``True`` forces a :class:`dateutil.rrule.rruleset` to - be returned. - - :param compatible: - If set ``True`` forces ``unfold`` and ``forceset`` to be ``True``. - - :param ignoretz: - If set ``True``, time zones in parsed strings are ignored and a naive - :class:`datetime.datetime` object is returned. 
- - :param tzids: - If given, a callable or mapping used to retrieve a - :class:`datetime.tzinfo` from a string representation. - Defaults to :func:`dateutil.tz.gettz`. - - :param tzinfos: - Additional time zone names / aliases which may be present in a string - representation. See :func:`dateutil.parser.parse` for more - information. - - :return: - Returns a :class:`dateutil.rrule.rruleset` or - :class:`dateutil.rrule.rrule` - """ - - _freq_map = {"YEARLY": YEARLY, - "MONTHLY": MONTHLY, - "WEEKLY": WEEKLY, - "DAILY": DAILY, - "HOURLY": HOURLY, - "MINUTELY": MINUTELY, - "SECONDLY": SECONDLY} - - _weekday_map = {"MO": 0, "TU": 1, "WE": 2, "TH": 3, - "FR": 4, "SA": 5, "SU": 6} - - def _handle_int(self, rrkwargs, name, value, **kwargs): - rrkwargs[name.lower()] = int(value) - - def _handle_int_list(self, rrkwargs, name, value, **kwargs): - rrkwargs[name.lower()] = [int(x) for x in value.split(',')] - - _handle_INTERVAL = _handle_int - _handle_COUNT = _handle_int - _handle_BYSETPOS = _handle_int_list - _handle_BYMONTH = _handle_int_list - _handle_BYMONTHDAY = _handle_int_list - _handle_BYYEARDAY = _handle_int_list - _handle_BYEASTER = _handle_int_list - _handle_BYWEEKNO = _handle_int_list - _handle_BYHOUR = _handle_int_list - _handle_BYMINUTE = _handle_int_list - _handle_BYSECOND = _handle_int_list - - def _handle_FREQ(self, rrkwargs, name, value, **kwargs): - rrkwargs["freq"] = self._freq_map[value] - - def _handle_UNTIL(self, rrkwargs, name, value, **kwargs): - global parser - if not parser: - from dateutil import parser - try: - rrkwargs["until"] = parser.parse(value, - ignoretz=kwargs.get("ignoretz"), - tzinfos=kwargs.get("tzinfos")) - except ValueError: - raise ValueError("invalid until date") - - def _handle_WKST(self, rrkwargs, name, value, **kwargs): - rrkwargs["wkst"] = self._weekday_map[value] - - def _handle_BYWEEKDAY(self, rrkwargs, name, value, **kwargs): - """ - Two ways to specify this: +1MO or MO(+1) - """ - l = [] - for wday in value.split(','): - if '(' in wday: - # If it's of the form TH(+1), etc. - splt = wday.split('(') - w = splt[0] - n = int(splt[1][:-1]) - elif len(wday): - # If it's of the form +1MO - for i in range(len(wday)): - if wday[i] not in '+-0123456789': - break - n = wday[:i] or None - w = wday[i:] - if n: - n = int(n) - else: - raise ValueError("Invalid (empty) BYDAY specification.") - - l.append(weekdays[self._weekday_map[w]](n)) - rrkwargs["byweekday"] = l - - _handle_BYDAY = _handle_BYWEEKDAY - - def _parse_rfc_rrule(self, line, - dtstart=None, - cache=False, - ignoretz=False, - tzinfos=None): - if line.find(':') != -1: - name, value = line.split(':') - if name != "RRULE": - raise ValueError("unknown parameter name") - else: - value = line - rrkwargs = {} - for pair in value.split(';'): - name, value = pair.split('=') - name = name.upper() - value = value.upper() - try: - getattr(self, "_handle_"+name)(rrkwargs, name, value, - ignoretz=ignoretz, - tzinfos=tzinfos) - except AttributeError: - raise ValueError("unknown parameter '%s'" % name) - except (KeyError, ValueError): - raise ValueError("invalid '%s': %s" % (name, value)) - return rrule(dtstart=dtstart, cache=cache, **rrkwargs) - - def _parse_date_value(self, date_value, parms, rule_tzids, - ignoretz, tzids, tzinfos): - global parser - if not parser: - from dateutil import parser - - datevals = [] - value_found = False - TZID = None - - for parm in parms: - if parm.startswith("TZID="): - try: - tzkey = rule_tzids[parm.split('TZID=')[-1]] - except KeyError: - continue - if tzids is None: - from . 
import tz - tzlookup = tz.gettz - elif callable(tzids): - tzlookup = tzids - else: - tzlookup = getattr(tzids, 'get', None) - if tzlookup is None: - msg = ('tzids must be a callable, mapping, or None, ' - 'not %s' % tzids) - raise ValueError(msg) - - TZID = tzlookup(tzkey) - continue - - # RFC 5445 3.8.2.4: The VALUE parameter is optional, but may be found - # only once. - if parm not in {"VALUE=DATE-TIME", "VALUE=DATE"}: - raise ValueError("unsupported parm: " + parm) - else: - if value_found: - msg = ("Duplicate value parameter found in: " + parm) - raise ValueError(msg) - value_found = True - - for datestr in date_value.split(','): - date = parser.parse(datestr, ignoretz=ignoretz, tzinfos=tzinfos) - if TZID is not None: - if date.tzinfo is None: - date = date.replace(tzinfo=TZID) - else: - raise ValueError('DTSTART/EXDATE specifies multiple timezone') - datevals.append(date) - - return datevals - - def _parse_rfc(self, s, - dtstart=None, - cache=False, - unfold=False, - forceset=False, - compatible=False, - ignoretz=False, - tzids=None, - tzinfos=None): - global parser - if compatible: - forceset = True - unfold = True - - TZID_NAMES = dict(map( - lambda x: (x.upper(), x), - re.findall('TZID=(?P[^:]+):', s) - )) - s = s.upper() - if not s.strip(): - raise ValueError("empty string") - if unfold: - lines = s.splitlines() - i = 0 - while i < len(lines): - line = lines[i].rstrip() - if not line: - del lines[i] - elif i > 0 and line[0] == " ": - lines[i-1] += line[1:] - del lines[i] - else: - i += 1 - else: - lines = s.split() - if (not forceset and len(lines) == 1 and (s.find(':') == -1 or - s.startswith('RRULE:'))): - return self._parse_rfc_rrule(lines[0], cache=cache, - dtstart=dtstart, ignoretz=ignoretz, - tzinfos=tzinfos) - else: - rrulevals = [] - rdatevals = [] - exrulevals = [] - exdatevals = [] - for line in lines: - if not line: - continue - if line.find(':') == -1: - name = "RRULE" - value = line - else: - name, value = line.split(':', 1) - parms = name.split(';') - if not parms: - raise ValueError("empty property name") - name = parms[0] - parms = parms[1:] - if name == "RRULE": - for parm in parms: - raise ValueError("unsupported RRULE parm: "+parm) - rrulevals.append(value) - elif name == "RDATE": - for parm in parms: - if parm != "VALUE=DATE-TIME": - raise ValueError("unsupported RDATE parm: "+parm) - rdatevals.append(value) - elif name == "EXRULE": - for parm in parms: - raise ValueError("unsupported EXRULE parm: "+parm) - exrulevals.append(value) - elif name == "EXDATE": - exdatevals.extend( - self._parse_date_value(value, parms, - TZID_NAMES, ignoretz, - tzids, tzinfos) - ) - elif name == "DTSTART": - dtvals = self._parse_date_value(value, parms, TZID_NAMES, - ignoretz, tzids, tzinfos) - if len(dtvals) != 1: - raise ValueError("Multiple DTSTART values specified:" + - value) - dtstart = dtvals[0] - else: - raise ValueError("unsupported property: "+name) - if (forceset or len(rrulevals) > 1 or rdatevals - or exrulevals or exdatevals): - if not parser and (rdatevals or exdatevals): - from dateutil import parser - rset = rruleset(cache=cache) - for value in rrulevals: - rset.rrule(self._parse_rfc_rrule(value, dtstart=dtstart, - ignoretz=ignoretz, - tzinfos=tzinfos)) - for value in rdatevals: - for datestr in value.split(','): - rset.rdate(parser.parse(datestr, - ignoretz=ignoretz, - tzinfos=tzinfos)) - for value in exrulevals: - rset.exrule(self._parse_rfc_rrule(value, dtstart=dtstart, - ignoretz=ignoretz, - tzinfos=tzinfos)) - for value in exdatevals: - rset.exdate(value) - if 
compatible and dtstart: - rset.rdate(dtstart) - return rset - else: - return self._parse_rfc_rrule(rrulevals[0], - dtstart=dtstart, - cache=cache, - ignoretz=ignoretz, - tzinfos=tzinfos) - - def __call__(self, s, **kwargs): - return self._parse_rfc(s, **kwargs) - - -rrulestr = _rrulestr() - -# vim:ts=4:sw=4:et diff --git a/venv/lib/python3.6/site-packages/dateutil/tz/__init__.py b/venv/lib/python3.6/site-packages/dateutil/tz/__init__.py deleted file mode 100644 index 5a2d9cd..0000000 --- a/venv/lib/python3.6/site-packages/dateutil/tz/__init__.py +++ /dev/null @@ -1,17 +0,0 @@ -# -*- coding: utf-8 -*- -from .tz import * -from .tz import __doc__ - -#: Convenience constant providing a :class:`tzutc()` instance -#: -#: .. versionadded:: 2.7.0 -UTC = tzutc() - -__all__ = ["tzutc", "tzoffset", "tzlocal", "tzfile", "tzrange", - "tzstr", "tzical", "tzwin", "tzwinlocal", "gettz", - "enfold", "datetime_ambiguous", "datetime_exists", - "resolve_imaginary", "UTC", "DeprecatedTzFormatWarning"] - - -class DeprecatedTzFormatWarning(Warning): - """Warning raised when time zones are parsed from deprecated formats.""" diff --git a/venv/lib/python3.6/site-packages/dateutil/tz/_common.py b/venv/lib/python3.6/site-packages/dateutil/tz/_common.py deleted file mode 100644 index 594e082..0000000 --- a/venv/lib/python3.6/site-packages/dateutil/tz/_common.py +++ /dev/null @@ -1,419 +0,0 @@ -from six import PY2 - -from functools import wraps - -from datetime import datetime, timedelta, tzinfo - - -ZERO = timedelta(0) - -__all__ = ['tzname_in_python2', 'enfold'] - - -def tzname_in_python2(namefunc): - """Change unicode output into bytestrings in Python 2 - - tzname() API changed in Python 3. It used to return bytes, but was changed - to unicode strings - """ - if PY2: - @wraps(namefunc) - def adjust_encoding(*args, **kwargs): - name = namefunc(*args, **kwargs) - if name is not None: - name = name.encode() - - return name - - return adjust_encoding - else: - return namefunc - - -# The following is adapted from Alexander Belopolsky's tz library -# https://github.com/abalkin/tz -if hasattr(datetime, 'fold'): - # This is the pre-python 3.6 fold situation - def enfold(dt, fold=1): - """ - Provides a unified interface for assigning the ``fold`` attribute to - datetimes both before and after the implementation of PEP-495. - - :param fold: - The value for the ``fold`` attribute in the returned datetime. This - should be either 0 or 1. - - :return: - Returns an object for which ``getattr(dt, 'fold', 0)`` returns - ``fold`` for all versions of Python. In versions prior to - Python 3.6, this is a ``_DatetimeWithFold`` object, which is a - subclass of :py:class:`datetime.datetime` with the ``fold`` - attribute added, if ``fold`` is 1. - - .. versionadded:: 2.6.0 - """ - return dt.replace(fold=fold) - -else: - class _DatetimeWithFold(datetime): - """ - This is a class designed to provide a PEP 495-compliant interface for - Python versions before 3.6. It is used only for dates in a fold, so - the ``fold`` attribute is fixed at ``1``. - - .. versionadded:: 2.6.0 - """ - __slots__ = () - - def replace(self, *args, **kwargs): - """ - Return a datetime with the same attributes, except for those - attributes given new values by whichever keyword arguments are - specified. Note that tzinfo=None can be specified to create a naive - datetime from an aware datetime with no conversion of date and time - data. - - This is reimplemented in ``_DatetimeWithFold`` because pypy3 will - return a ``datetime.datetime`` even if ``fold`` is unchanged. 
- """ - argnames = ( - 'year', 'month', 'day', 'hour', 'minute', 'second', - 'microsecond', 'tzinfo' - ) - - for arg, argname in zip(args, argnames): - if argname in kwargs: - raise TypeError('Duplicate argument: {}'.format(argname)) - - kwargs[argname] = arg - - for argname in argnames: - if argname not in kwargs: - kwargs[argname] = getattr(self, argname) - - dt_class = self.__class__ if kwargs.get('fold', 1) else datetime - - return dt_class(**kwargs) - - @property - def fold(self): - return 1 - - def enfold(dt, fold=1): - """ - Provides a unified interface for assigning the ``fold`` attribute to - datetimes both before and after the implementation of PEP-495. - - :param fold: - The value for the ``fold`` attribute in the returned datetime. This - should be either 0 or 1. - - :return: - Returns an object for which ``getattr(dt, 'fold', 0)`` returns - ``fold`` for all versions of Python. In versions prior to - Python 3.6, this is a ``_DatetimeWithFold`` object, which is a - subclass of :py:class:`datetime.datetime` with the ``fold`` - attribute added, if ``fold`` is 1. - - .. versionadded:: 2.6.0 - """ - if getattr(dt, 'fold', 0) == fold: - return dt - - args = dt.timetuple()[:6] - args += (dt.microsecond, dt.tzinfo) - - if fold: - return _DatetimeWithFold(*args) - else: - return datetime(*args) - - -def _validate_fromutc_inputs(f): - """ - The CPython version of ``fromutc`` checks that the input is a ``datetime`` - object and that ``self`` is attached as its ``tzinfo``. - """ - @wraps(f) - def fromutc(self, dt): - if not isinstance(dt, datetime): - raise TypeError("fromutc() requires a datetime argument") - if dt.tzinfo is not self: - raise ValueError("dt.tzinfo is not self") - - return f(self, dt) - - return fromutc - - -class _tzinfo(tzinfo): - """ - Base class for all ``dateutil`` ``tzinfo`` objects. - """ - - def is_ambiguous(self, dt): - """ - Whether or not the "wall time" of a given datetime is ambiguous in this - zone. - - :param dt: - A :py:class:`datetime.datetime`, naive or time zone aware. - - - :return: - Returns ``True`` if ambiguous, ``False`` otherwise. - - .. versionadded:: 2.6.0 - """ - - dt = dt.replace(tzinfo=self) - - wall_0 = enfold(dt, fold=0) - wall_1 = enfold(dt, fold=1) - - same_offset = wall_0.utcoffset() == wall_1.utcoffset() - same_dt = wall_0.replace(tzinfo=None) == wall_1.replace(tzinfo=None) - - return same_dt and not same_offset - - def _fold_status(self, dt_utc, dt_wall): - """ - Determine the fold status of a "wall" datetime, given a representation - of the same datetime as a (naive) UTC datetime. This is calculated based - on the assumption that ``dt.utcoffset() - dt.dst()`` is constant for all - datetimes, and that this offset is the actual number of hours separating - ``dt_utc`` and ``dt_wall``. - - :param dt_utc: - Representation of the datetime as UTC - - :param dt_wall: - Representation of the datetime as "wall time". This parameter must - either have a `fold` attribute or have a fold-naive - :class:`datetime.tzinfo` attached, otherwise the calculation may - fail. - """ - if self.is_ambiguous(dt_wall): - delta_wall = dt_wall - dt_utc - _fold = int(delta_wall == (dt_utc.utcoffset() - dt_utc.dst())) - else: - _fold = 0 - - return _fold - - def _fold(self, dt): - return getattr(dt, 'fold', 0) - - def _fromutc(self, dt): - """ - Given a timezone-aware datetime in a given timezone, calculates a - timezone-aware datetime in a new timezone. 
- - Since this is the one time that we *know* we have an unambiguous - datetime object, we take this opportunity to determine whether the - datetime is ambiguous and in a "fold" state (e.g. if it's the first - occurence, chronologically, of the ambiguous datetime). - - :param dt: - A timezone-aware :class:`datetime.datetime` object. - """ - - # Re-implement the algorithm from Python's datetime.py - dtoff = dt.utcoffset() - if dtoff is None: - raise ValueError("fromutc() requires a non-None utcoffset() " - "result") - - # The original datetime.py code assumes that `dst()` defaults to - # zero during ambiguous times. PEP 495 inverts this presumption, so - # for pre-PEP 495 versions of python, we need to tweak the algorithm. - dtdst = dt.dst() - if dtdst is None: - raise ValueError("fromutc() requires a non-None dst() result") - delta = dtoff - dtdst - - dt += delta - # Set fold=1 so we can default to being in the fold for - # ambiguous dates. - dtdst = enfold(dt, fold=1).dst() - if dtdst is None: - raise ValueError("fromutc(): dt.dst gave inconsistent " - "results; cannot convert") - return dt + dtdst - - @_validate_fromutc_inputs - def fromutc(self, dt): - """ - Given a timezone-aware datetime in a given timezone, calculates a - timezone-aware datetime in a new timezone. - - Since this is the one time that we *know* we have an unambiguous - datetime object, we take this opportunity to determine whether the - datetime is ambiguous and in a "fold" state (e.g. if it's the first - occurance, chronologically, of the ambiguous datetime). - - :param dt: - A timezone-aware :class:`datetime.datetime` object. - """ - dt_wall = self._fromutc(dt) - - # Calculate the fold status given the two datetimes. - _fold = self._fold_status(dt, dt_wall) - - # Set the default fold value for ambiguous dates - return enfold(dt_wall, fold=_fold) - - -class tzrangebase(_tzinfo): - """ - This is an abstract base class for time zones represented by an annual - transition into and out of DST. Child classes should implement the following - methods: - - * ``__init__(self, *args, **kwargs)`` - * ``transitions(self, year)`` - this is expected to return a tuple of - datetimes representing the DST on and off transitions in standard - time. - - A fully initialized ``tzrangebase`` subclass should also provide the - following attributes: - * ``hasdst``: Boolean whether or not the zone uses DST. - * ``_dst_offset`` / ``_std_offset``: :class:`datetime.timedelta` objects - representing the respective UTC offsets. - * ``_dst_abbr`` / ``_std_abbr``: Strings representing the timezone short - abbreviations in DST and STD, respectively. - * ``_hasdst``: Whether or not the zone has DST. - - .. 
versionadded:: 2.6.0 - """ - def __init__(self): - raise NotImplementedError('tzrangebase is an abstract base class') - - def utcoffset(self, dt): - isdst = self._isdst(dt) - - if isdst is None: - return None - elif isdst: - return self._dst_offset - else: - return self._std_offset - - def dst(self, dt): - isdst = self._isdst(dt) - - if isdst is None: - return None - elif isdst: - return self._dst_base_offset - else: - return ZERO - - @tzname_in_python2 - def tzname(self, dt): - if self._isdst(dt): - return self._dst_abbr - else: - return self._std_abbr - - def fromutc(self, dt): - """ Given a datetime in UTC, return local time """ - if not isinstance(dt, datetime): - raise TypeError("fromutc() requires a datetime argument") - - if dt.tzinfo is not self: - raise ValueError("dt.tzinfo is not self") - - # Get transitions - if there are none, fixed offset - transitions = self.transitions(dt.year) - if transitions is None: - return dt + self.utcoffset(dt) - - # Get the transition times in UTC - dston, dstoff = transitions - - dston -= self._std_offset - dstoff -= self._std_offset - - utc_transitions = (dston, dstoff) - dt_utc = dt.replace(tzinfo=None) - - isdst = self._naive_isdst(dt_utc, utc_transitions) - - if isdst: - dt_wall = dt + self._dst_offset - else: - dt_wall = dt + self._std_offset - - _fold = int(not isdst and self.is_ambiguous(dt_wall)) - - return enfold(dt_wall, fold=_fold) - - def is_ambiguous(self, dt): - """ - Whether or not the "wall time" of a given datetime is ambiguous in this - zone. - - :param dt: - A :py:class:`datetime.datetime`, naive or time zone aware. - - - :return: - Returns ``True`` if ambiguous, ``False`` otherwise. - - .. versionadded:: 2.6.0 - """ - if not self.hasdst: - return False - - start, end = self.transitions(dt.year) - - dt = dt.replace(tzinfo=None) - return (end <= dt < end + self._dst_base_offset) - - def _isdst(self, dt): - if not self.hasdst: - return False - elif dt is None: - return None - - transitions = self.transitions(dt.year) - - if transitions is None: - return False - - dt = dt.replace(tzinfo=None) - - isdst = self._naive_isdst(dt, transitions) - - # Handle ambiguous dates - if not isdst and self.is_ambiguous(dt): - return not self._fold(dt) - else: - return isdst - - def _naive_isdst(self, dt, transitions): - dston, dstoff = transitions - - dt = dt.replace(tzinfo=None) - - if dston < dstoff: - isdst = dston <= dt < dstoff - else: - isdst = not dstoff <= dt < dston - - return isdst - - @property - def _dst_base_offset(self): - return self._dst_offset - self._std_offset - - __hash__ = None - - def __ne__(self, other): - return not (self == other) - - def __repr__(self): - return "%s(...)" % self.__class__.__name__ - - __reduce__ = object.__reduce__ diff --git a/venv/lib/python3.6/site-packages/dateutil/tz/_factories.py b/venv/lib/python3.6/site-packages/dateutil/tz/_factories.py deleted file mode 100644 index d2560eb..0000000 --- a/venv/lib/python3.6/site-packages/dateutil/tz/_factories.py +++ /dev/null @@ -1,73 +0,0 @@ -from datetime import timedelta -import weakref -from collections import OrderedDict - - -class _TzSingleton(type): - def __init__(cls, *args, **kwargs): - cls.__instance = None - super(_TzSingleton, cls).__init__(*args, **kwargs) - - def __call__(cls): - if cls.__instance is None: - cls.__instance = super(_TzSingleton, cls).__call__() - return cls.__instance - - -class _TzFactory(type): - def instance(cls, *args, **kwargs): - """Alternate constructor that returns a fresh instance""" - return type.__call__(cls, *args, 
**kwargs) - - -class _TzOffsetFactory(_TzFactory): - def __init__(cls, *args, **kwargs): - cls.__instances = weakref.WeakValueDictionary() - cls.__strong_cache = OrderedDict() - cls.__strong_cache_size = 8 - - def __call__(cls, name, offset): - if isinstance(offset, timedelta): - key = (name, offset.total_seconds()) - else: - key = (name, offset) - - instance = cls.__instances.get(key, None) - if instance is None: - instance = cls.__instances.setdefault(key, - cls.instance(name, offset)) - - cls.__strong_cache[key] = cls.__strong_cache.pop(key, instance) - - # Remove an item if the strong cache is overpopulated - # TODO: Maybe this should be under a lock? - if len(cls.__strong_cache) > cls.__strong_cache_size: - cls.__strong_cache.popitem(last=False) - - return instance - - -class _TzStrFactory(_TzFactory): - def __init__(cls, *args, **kwargs): - cls.__instances = weakref.WeakValueDictionary() - cls.__strong_cache = OrderedDict() - cls.__strong_cache_size = 8 - - def __call__(cls, s, posix_offset=False): - key = (s, posix_offset) - instance = cls.__instances.get(key, None) - - if instance is None: - instance = cls.__instances.setdefault(key, - cls.instance(s, posix_offset)) - - cls.__strong_cache[key] = cls.__strong_cache.pop(key, instance) - - - # Remove an item if the strong cache is overpopulated - # TODO: Maybe this should be under a lock? - if len(cls.__strong_cache) > cls.__strong_cache_size: - cls.__strong_cache.popitem(last=False) - - return instance - diff --git a/venv/lib/python3.6/site-packages/dateutil/tz/tz.py b/venv/lib/python3.6/site-packages/dateutil/tz/tz.py deleted file mode 100644 index d05414e..0000000 --- a/venv/lib/python3.6/site-packages/dateutil/tz/tz.py +++ /dev/null @@ -1,1836 +0,0 @@ -# -*- coding: utf-8 -*- -""" -This module offers timezone implementations subclassing the abstract -:py:class:`datetime.tzinfo` type. There are classes to handle tzfile format -files (usually are in :file:`/etc/localtime`, :file:`/usr/share/zoneinfo`, -etc), TZ environment string (in all known formats), given ranges (with help -from relative deltas), local machine timezone, fixed offset timezone, and UTC -timezone. -""" -import datetime -import struct -import time -import sys -import os -import bisect -import weakref -from collections import OrderedDict - -import six -from six import string_types -from six.moves import _thread -from ._common import tzname_in_python2, _tzinfo -from ._common import tzrangebase, enfold -from ._common import _validate_fromutc_inputs - -from ._factories import _TzSingleton, _TzOffsetFactory -from ._factories import _TzStrFactory -try: - from .win import tzwin, tzwinlocal -except ImportError: - tzwin = tzwinlocal = None - -# For warning about rounding tzinfo -from warnings import warn - -ZERO = datetime.timedelta(0) -EPOCH = datetime.datetime.utcfromtimestamp(0) -EPOCHORDINAL = EPOCH.toordinal() - - -@six.add_metaclass(_TzSingleton) -class tzutc(datetime.tzinfo): - """ - This is a tzinfo object that represents the UTC time zone. - - **Examples:** - - .. doctest:: - - >>> from datetime import * - >>> from dateutil.tz import * - - >>> datetime.now() - datetime.datetime(2003, 9, 27, 9, 40, 1, 521290) - - >>> datetime.now(tzutc()) - datetime.datetime(2003, 9, 27, 12, 40, 12, 156379, tzinfo=tzutc()) - - >>> datetime.now(tzutc()).tzname() - 'UTC' - - .. versionchanged:: 2.7.0 - ``tzutc()`` is now a singleton, so the result of ``tzutc()`` will - always return the same object. - - .. 
doctest:: - - >>> from dateutil.tz import tzutc, UTC - >>> tzutc() is tzutc() - True - >>> tzutc() is UTC - True - """ - def utcoffset(self, dt): - return ZERO - - def dst(self, dt): - return ZERO - - @tzname_in_python2 - def tzname(self, dt): - return "UTC" - - def is_ambiguous(self, dt): - """ - Whether or not the "wall time" of a given datetime is ambiguous in this - zone. - - :param dt: - A :py:class:`datetime.datetime`, naive or time zone aware. - - - :return: - Returns ``True`` if ambiguous, ``False`` otherwise. - - .. versionadded:: 2.6.0 - """ - return False - - @_validate_fromutc_inputs - def fromutc(self, dt): - """ - Fast track version of fromutc() returns the original ``dt`` object for - any valid :py:class:`datetime.datetime` object. - """ - return dt - - def __eq__(self, other): - if not isinstance(other, (tzutc, tzoffset)): - return NotImplemented - - return (isinstance(other, tzutc) or - (isinstance(other, tzoffset) and other._offset == ZERO)) - - __hash__ = None - - def __ne__(self, other): - return not (self == other) - - def __repr__(self): - return "%s()" % self.__class__.__name__ - - __reduce__ = object.__reduce__ - - -@six.add_metaclass(_TzOffsetFactory) -class tzoffset(datetime.tzinfo): - """ - A simple class for representing a fixed offset from UTC. - - :param name: - The timezone name, to be returned when ``tzname()`` is called. - :param offset: - The time zone offset in seconds, or (since version 2.6.0, represented - as a :py:class:`datetime.timedelta` object). - """ - def __init__(self, name, offset): - self._name = name - - try: - # Allow a timedelta - offset = offset.total_seconds() - except (TypeError, AttributeError): - pass - - self._offset = datetime.timedelta(seconds=_get_supported_offset(offset)) - - def utcoffset(self, dt): - return self._offset - - def dst(self, dt): - return ZERO - - @tzname_in_python2 - def tzname(self, dt): - return self._name - - @_validate_fromutc_inputs - def fromutc(self, dt): - return dt + self._offset - - def is_ambiguous(self, dt): - """ - Whether or not the "wall time" of a given datetime is ambiguous in this - zone. - - :param dt: - A :py:class:`datetime.datetime`, naive or time zone aware. - :return: - Returns ``True`` if ambiguous, ``False`` otherwise. - - .. versionadded:: 2.6.0 - """ - return False - - def __eq__(self, other): - if not isinstance(other, tzoffset): - return NotImplemented - - return self._offset == other._offset - - __hash__ = None - - def __ne__(self, other): - return not (self == other) - - def __repr__(self): - return "%s(%s, %s)" % (self.__class__.__name__, - repr(self._name), - int(self._offset.total_seconds())) - - __reduce__ = object.__reduce__ - - -class tzlocal(_tzinfo): - """ - A :class:`tzinfo` subclass built around the ``time`` timezone functions. 
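# --- Illustrative sketch (not part of the original deleted file): basic
# use of the tzutc singleton, a fixed tzoffset and tzlocal as described
# above. Assumes python-dateutil is installed; the zone name and offset
# are made up:
from datetime import datetime, timedelta
from dateutil import tz

print(tz.tzutc() is tz.UTC)        # True -- tzutc() is a singleton
cet = tz.tzoffset('CET', timedelta(hours=1))
print(datetime(2019, 8, 19, 22, 54, tzinfo=cet).utcoffset())   # 1:00:00
print(datetime.now(tz.tzlocal()).tzname())   # local abbreviation, e.g. 'CEST'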
- """ - def __init__(self): - super(tzlocal, self).__init__() - - self._std_offset = datetime.timedelta(seconds=-time.timezone) - if time.daylight: - self._dst_offset = datetime.timedelta(seconds=-time.altzone) - else: - self._dst_offset = self._std_offset - - self._dst_saved = self._dst_offset - self._std_offset - self._hasdst = bool(self._dst_saved) - self._tznames = tuple(time.tzname) - - def utcoffset(self, dt): - if dt is None and self._hasdst: - return None - - if self._isdst(dt): - return self._dst_offset - else: - return self._std_offset - - def dst(self, dt): - if dt is None and self._hasdst: - return None - - if self._isdst(dt): - return self._dst_offset - self._std_offset - else: - return ZERO - - @tzname_in_python2 - def tzname(self, dt): - return self._tznames[self._isdst(dt)] - - def is_ambiguous(self, dt): - """ - Whether or not the "wall time" of a given datetime is ambiguous in this - zone. - - :param dt: - A :py:class:`datetime.datetime`, naive or time zone aware. - - - :return: - Returns ``True`` if ambiguous, ``False`` otherwise. - - .. versionadded:: 2.6.0 - """ - naive_dst = self._naive_is_dst(dt) - return (not naive_dst and - (naive_dst != self._naive_is_dst(dt - self._dst_saved))) - - def _naive_is_dst(self, dt): - timestamp = _datetime_to_timestamp(dt) - return time.localtime(timestamp + time.timezone).tm_isdst - - def _isdst(self, dt, fold_naive=True): - # We can't use mktime here. It is unstable when deciding if - # the hour near to a change is DST or not. - # - # timestamp = time.mktime((dt.year, dt.month, dt.day, dt.hour, - # dt.minute, dt.second, dt.weekday(), 0, -1)) - # return time.localtime(timestamp).tm_isdst - # - # The code above yields the following result: - # - # >>> import tz, datetime - # >>> t = tz.tzlocal() - # >>> datetime.datetime(2003,2,15,23,tzinfo=t).tzname() - # 'BRDT' - # >>> datetime.datetime(2003,2,16,0,tzinfo=t).tzname() - # 'BRST' - # >>> datetime.datetime(2003,2,15,23,tzinfo=t).tzname() - # 'BRST' - # >>> datetime.datetime(2003,2,15,22,tzinfo=t).tzname() - # 'BRDT' - # >>> datetime.datetime(2003,2,15,23,tzinfo=t).tzname() - # 'BRDT' - # - # Here is a more stable implementation: - # - if not self._hasdst: - return False - - # Check for ambiguous times: - dstval = self._naive_is_dst(dt) - fold = getattr(dt, 'fold', None) - - if self.is_ambiguous(dt): - if fold is not None: - return not self._fold(dt) - else: - return True - - return dstval - - def __eq__(self, other): - if isinstance(other, tzlocal): - return (self._std_offset == other._std_offset and - self._dst_offset == other._dst_offset) - elif isinstance(other, tzutc): - return (not self._hasdst and - self._tznames[0] in {'UTC', 'GMT'} and - self._std_offset == ZERO) - elif isinstance(other, tzoffset): - return (not self._hasdst and - self._tznames[0] == other._name and - self._std_offset == other._offset) - else: - return NotImplemented - - __hash__ = None - - def __ne__(self, other): - return not (self == other) - - def __repr__(self): - return "%s()" % self.__class__.__name__ - - __reduce__ = object.__reduce__ - - -class _ttinfo(object): - __slots__ = ["offset", "delta", "isdst", "abbr", - "isstd", "isgmt", "dstoffset"] - - def __init__(self): - for attr in self.__slots__: - setattr(self, attr, None) - - def __repr__(self): - l = [] - for attr in self.__slots__: - value = getattr(self, attr) - if value is not None: - l.append("%s=%s" % (attr, repr(value))) - return "%s(%s)" % (self.__class__.__name__, ", ".join(l)) - - def __eq__(self, other): - if not isinstance(other, _ttinfo): 
- return NotImplemented - - return (self.offset == other.offset and - self.delta == other.delta and - self.isdst == other.isdst and - self.abbr == other.abbr and - self.isstd == other.isstd and - self.isgmt == other.isgmt and - self.dstoffset == other.dstoffset) - - __hash__ = None - - def __ne__(self, other): - return not (self == other) - - def __getstate__(self): - state = {} - for name in self.__slots__: - state[name] = getattr(self, name, None) - return state - - def __setstate__(self, state): - for name in self.__slots__: - if name in state: - setattr(self, name, state[name]) - - -class _tzfile(object): - """ - Lightweight class for holding the relevant transition and time zone - information read from binary tzfiles. - """ - attrs = ['trans_list', 'trans_list_utc', 'trans_idx', 'ttinfo_list', - 'ttinfo_std', 'ttinfo_dst', 'ttinfo_before', 'ttinfo_first'] - - def __init__(self, **kwargs): - for attr in self.attrs: - setattr(self, attr, kwargs.get(attr, None)) - - -class tzfile(_tzinfo): - """ - This is a ``tzinfo`` subclass thant allows one to use the ``tzfile(5)`` - format timezone files to extract current and historical zone information. - - :param fileobj: - This can be an opened file stream or a file name that the time zone - information can be read from. - - :param filename: - This is an optional parameter specifying the source of the time zone - information in the event that ``fileobj`` is a file object. If omitted - and ``fileobj`` is a file stream, this parameter will be set either to - ``fileobj``'s ``name`` attribute or to ``repr(fileobj)``. - - See `Sources for Time Zone and Daylight Saving Time Data - `_ for more information. - Time zone files can be compiled from the `IANA Time Zone database files - `_ with the `zic time zone compiler - `_ - - .. note:: - - Only construct a ``tzfile`` directly if you have a specific timezone - file on disk that you want to read into a Python ``tzinfo`` object. - If you want to get a ``tzfile`` representing a specific IANA zone, - (e.g. ``'America/New_York'``), you should call - :func:`dateutil.tz.gettz` with the zone identifier. - - - **Examples:** - - Using the US Eastern time zone as an example, we can see that a ``tzfile`` - provides time zone information for the standard Daylight Saving offsets: - - .. testsetup:: tzfile - - from dateutil.tz import gettz - from datetime import datetime - - .. doctest:: tzfile - - >>> NYC = gettz('America/New_York') - >>> NYC - tzfile('/usr/share/zoneinfo/America/New_York') - - >>> print(datetime(2016, 1, 3, tzinfo=NYC)) # EST - 2016-01-03 00:00:00-05:00 - - >>> print(datetime(2016, 7, 7, tzinfo=NYC)) # EDT - 2016-07-07 00:00:00-04:00 - - - The ``tzfile`` structure contains a fully history of the time zone, - so historical dates will also have the right offsets. For example, before - the adoption of the UTC standards, New York used local solar mean time: - - .. doctest:: tzfile - - >>> print(datetime(1901, 4, 12, tzinfo=NYC)) # LMT - 1901-04-12 00:00:00-04:56 - - And during World War II, New York was on "Eastern War Time", which was a - state of permanent daylight saving time: - - .. 
doctest:: tzfile - - >>> print(datetime(1944, 2, 7, tzinfo=NYC)) # EWT - 1944-02-07 00:00:00-04:00 - - """ - - def __init__(self, fileobj, filename=None): - super(tzfile, self).__init__() - - file_opened_here = False - if isinstance(fileobj, string_types): - self._filename = fileobj - fileobj = open(fileobj, 'rb') - file_opened_here = True - elif filename is not None: - self._filename = filename - elif hasattr(fileobj, "name"): - self._filename = fileobj.name - else: - self._filename = repr(fileobj) - - if fileobj is not None: - if not file_opened_here: - fileobj = _nullcontext(fileobj) - - with fileobj as file_stream: - tzobj = self._read_tzfile(file_stream) - - self._set_tzdata(tzobj) - - def _set_tzdata(self, tzobj): - """ Set the time zone data of this object from a _tzfile object """ - # Copy the relevant attributes over as private attributes - for attr in _tzfile.attrs: - setattr(self, '_' + attr, getattr(tzobj, attr)) - - def _read_tzfile(self, fileobj): - out = _tzfile() - - # From tzfile(5): - # - # The time zone information files used by tzset(3) - # begin with the magic characters "TZif" to identify - # them as time zone information files, followed by - # sixteen bytes reserved for future use, followed by - # six four-byte values of type long, written in a - # ``standard'' byte order (the high-order byte - # of the value is written first). - if fileobj.read(4).decode() != "TZif": - raise ValueError("magic not found") - - fileobj.read(16) - - ( - # The number of UTC/local indicators stored in the file. - ttisgmtcnt, - - # The number of standard/wall indicators stored in the file. - ttisstdcnt, - - # The number of leap seconds for which data is - # stored in the file. - leapcnt, - - # The number of "transition times" for which data - # is stored in the file. - timecnt, - - # The number of "local time types" for which data - # is stored in the file (must not be zero). - typecnt, - - # The number of characters of "time zone - # abbreviation strings" stored in the file. - charcnt, - - ) = struct.unpack(">6l", fileobj.read(24)) - - # The above header is followed by tzh_timecnt four-byte - # values of type long, sorted in ascending order. - # These values are written in ``standard'' byte order. - # Each is used as a transition time (as returned by - # time(2)) at which the rules for computing local time - # change. - - if timecnt: - out.trans_list_utc = list(struct.unpack(">%dl" % timecnt, - fileobj.read(timecnt*4))) - else: - out.trans_list_utc = [] - - # Next come tzh_timecnt one-byte values of type unsigned - # char; each one tells which of the different types of - # ``local time'' types described in the file is associated - # with the same-indexed transition time. These values - # serve as indices into an array of ttinfo structures that - # appears next in the file. - - if timecnt: - out.trans_idx = struct.unpack(">%dB" % timecnt, - fileobj.read(timecnt)) - else: - out.trans_idx = [] - - # Each ttinfo structure is written as a four-byte value - # for tt_gmtoff of type long, in a standard byte - # order, followed by a one-byte value for tt_isdst - # and a one-byte value for tt_abbrind. In each - # structure, tt_gmtoff gives the number of - # seconds to be added to UTC, tt_isdst tells whether - # tm_isdst should be set by localtime(3), and - # tt_abbrind serves as an index into the array of - # time zone abbreviation characters that follow the - # ttinfo structure(s) in the file. 
- - ttinfo = [] - - for i in range(typecnt): - ttinfo.append(struct.unpack(">lbb", fileobj.read(6))) - - abbr = fileobj.read(charcnt).decode() - - # Then there are tzh_leapcnt pairs of four-byte - # values, written in standard byte order; the - # first value of each pair gives the time (as - # returned by time(2)) at which a leap second - # occurs; the second gives the total number of - # leap seconds to be applied after the given time. - # The pairs of values are sorted in ascending order - # by time. - - # Not used, for now (but seek for correct file position) - if leapcnt: - fileobj.seek(leapcnt * 8, os.SEEK_CUR) - - # Then there are tzh_ttisstdcnt standard/wall - # indicators, each stored as a one-byte value; - # they tell whether the transition times associated - # with local time types were specified as standard - # time or wall clock time, and are used when - # a time zone file is used in handling POSIX-style - # time zone environment variables. - - if ttisstdcnt: - isstd = struct.unpack(">%db" % ttisstdcnt, - fileobj.read(ttisstdcnt)) - - # Finally, there are tzh_ttisgmtcnt UTC/local - # indicators, each stored as a one-byte value; - # they tell whether the transition times associated - # with local time types were specified as UTC or - # local time, and are used when a time zone file - # is used in handling POSIX-style time zone envi- - # ronment variables. - - if ttisgmtcnt: - isgmt = struct.unpack(">%db" % ttisgmtcnt, - fileobj.read(ttisgmtcnt)) - - # Build ttinfo list - out.ttinfo_list = [] - for i in range(typecnt): - gmtoff, isdst, abbrind = ttinfo[i] - gmtoff = _get_supported_offset(gmtoff) - tti = _ttinfo() - tti.offset = gmtoff - tti.dstoffset = datetime.timedelta(0) - tti.delta = datetime.timedelta(seconds=gmtoff) - tti.isdst = isdst - tti.abbr = abbr[abbrind:abbr.find('\x00', abbrind)] - tti.isstd = (ttisstdcnt > i and isstd[i] != 0) - tti.isgmt = (ttisgmtcnt > i and isgmt[i] != 0) - out.ttinfo_list.append(tti) - - # Replace ttinfo indexes for ttinfo objects. - out.trans_idx = [out.ttinfo_list[idx] for idx in out.trans_idx] - - # Set standard, dst, and before ttinfos. before will be - # used when a given time is before any transitions, - # and will be set to the first non-dst ttinfo, or to - # the first dst, if all of them are dst. - out.ttinfo_std = None - out.ttinfo_dst = None - out.ttinfo_before = None - if out.ttinfo_list: - if not out.trans_list_utc: - out.ttinfo_std = out.ttinfo_first = out.ttinfo_list[0] - else: - for i in range(timecnt-1, -1, -1): - tti = out.trans_idx[i] - if not out.ttinfo_std and not tti.isdst: - out.ttinfo_std = tti - elif not out.ttinfo_dst and tti.isdst: - out.ttinfo_dst = tti - - if out.ttinfo_std and out.ttinfo_dst: - break - else: - if out.ttinfo_dst and not out.ttinfo_std: - out.ttinfo_std = out.ttinfo_dst - - for tti in out.ttinfo_list: - if not tti.isdst: - out.ttinfo_before = tti - break - else: - out.ttinfo_before = out.ttinfo_list[0] - - # Now fix transition times to become relative to wall time. - # - # I'm not sure about this. In my tests, the tz source file - # is setup to wall time, and in the binary file isstd and - # isgmt are off, so it should be in wall time. OTOH, it's - # always in gmt time. Let me know if you have comments - # about this. 
- lastdst = None - lastoffset = None - lastdstoffset = None - lastbaseoffset = None - out.trans_list = [] - - for i, tti in enumerate(out.trans_idx): - offset = tti.offset - dstoffset = 0 - - if lastdst is not None: - if tti.isdst: - if not lastdst: - dstoffset = offset - lastoffset - - if not dstoffset and lastdstoffset: - dstoffset = lastdstoffset - - tti.dstoffset = datetime.timedelta(seconds=dstoffset) - lastdstoffset = dstoffset - - # If a time zone changes its base offset during a DST transition, - # then you need to adjust by the previous base offset to get the - # transition time in local time. Otherwise you use the current - # base offset. Ideally, I would have some mathematical proof of - # why this is true, but I haven't really thought about it enough. - baseoffset = offset - dstoffset - adjustment = baseoffset - if (lastbaseoffset is not None and baseoffset != lastbaseoffset - and tti.isdst != lastdst): - # The base DST has changed - adjustment = lastbaseoffset - - lastdst = tti.isdst - lastoffset = offset - lastbaseoffset = baseoffset - - out.trans_list.append(out.trans_list_utc[i] + adjustment) - - out.trans_idx = tuple(out.trans_idx) - out.trans_list = tuple(out.trans_list) - out.trans_list_utc = tuple(out.trans_list_utc) - - return out - - def _find_last_transition(self, dt, in_utc=False): - # If there's no list, there are no transitions to find - if not self._trans_list: - return None - - timestamp = _datetime_to_timestamp(dt) - - # Find where the timestamp fits in the transition list - if the - # timestamp is a transition time, it's part of the "after" period. - trans_list = self._trans_list_utc if in_utc else self._trans_list - idx = bisect.bisect_right(trans_list, timestamp) - - # We want to know when the previous transition was, so subtract off 1 - return idx - 1 - - def _get_ttinfo(self, idx): - # For no list or after the last transition, default to _ttinfo_std - if idx is None or (idx + 1) >= len(self._trans_list): - return self._ttinfo_std - - # If there is a list and the time is before it, return _ttinfo_before - if idx < 0: - return self._ttinfo_before - - return self._trans_idx[idx] - - def _find_ttinfo(self, dt): - idx = self._resolve_ambiguous_time(dt) - - return self._get_ttinfo(idx) - - def fromutc(self, dt): - """ - The ``tzfile`` implementation of :py:func:`datetime.tzinfo.fromutc`. - - :param dt: - A :py:class:`datetime.datetime` object. - - :raises TypeError: - Raised if ``dt`` is not a :py:class:`datetime.datetime` object. - - :raises ValueError: - Raised if this is called with a ``dt`` which does not have this - ``tzinfo`` attached. - - :return: - Returns a :py:class:`datetime.datetime` object representing the - wall time in ``self``'s time zone. - """ - # These isinstance checks are in datetime.tzinfo, so we'll preserve - # them, even if we don't care about duck typing. - if not isinstance(dt, datetime.datetime): - raise TypeError("fromutc() requires a datetime argument") - - if dt.tzinfo is not self: - raise ValueError("dt.tzinfo is not self") - - # First treat UTC as wall time and get the transition we're in. - idx = self._find_last_transition(dt, in_utc=True) - tti = self._get_ttinfo(idx) - - dt_out = dt + datetime.timedelta(seconds=tti.offset) - - fold = self.is_ambiguous(dt_out, idx=idx) - - return enfold(dt_out, fold=int(fold)) - - def is_ambiguous(self, dt, idx=None): - """ - Whether or not the "wall time" of a given datetime is ambiguous in this - zone. - - :param dt: - A :py:class:`datetime.datetime`, naive or time zone aware. 
- - - :return: - Returns ``True`` if ambiguous, ``False`` otherwise. - - .. versionadded:: 2.6.0 - """ - if idx is None: - idx = self._find_last_transition(dt) - - # Calculate the difference in offsets from current to previous - timestamp = _datetime_to_timestamp(dt) - tti = self._get_ttinfo(idx) - - if idx is None or idx <= 0: - return False - - od = self._get_ttinfo(idx - 1).offset - tti.offset - tt = self._trans_list[idx] # Transition time - - return timestamp < tt + od - - def _resolve_ambiguous_time(self, dt): - idx = self._find_last_transition(dt) - - # If we have no transitions, return the index - _fold = self._fold(dt) - if idx is None or idx == 0: - return idx - - # If it's ambiguous and we're in a fold, shift to a different index. - idx_offset = int(not _fold and self.is_ambiguous(dt, idx)) - - return idx - idx_offset - - def utcoffset(self, dt): - if dt is None: - return None - - if not self._ttinfo_std: - return ZERO - - return self._find_ttinfo(dt).delta - - def dst(self, dt): - if dt is None: - return None - - if not self._ttinfo_dst: - return ZERO - - tti = self._find_ttinfo(dt) - - if not tti.isdst: - return ZERO - - # The documentation says that utcoffset()-dst() must - # be constant for every dt. - return tti.dstoffset - - @tzname_in_python2 - def tzname(self, dt): - if not self._ttinfo_std or dt is None: - return None - return self._find_ttinfo(dt).abbr - - def __eq__(self, other): - if not isinstance(other, tzfile): - return NotImplemented - return (self._trans_list == other._trans_list and - self._trans_idx == other._trans_idx and - self._ttinfo_list == other._ttinfo_list) - - __hash__ = None - - def __ne__(self, other): - return not (self == other) - - def __repr__(self): - return "%s(%s)" % (self.__class__.__name__, repr(self._filename)) - - def __reduce__(self): - return self.__reduce_ex__(None) - - def __reduce_ex__(self, protocol): - return (self.__class__, (None, self._filename), self.__dict__) - - -class tzrange(tzrangebase): - """ - The ``tzrange`` object is a time zone specified by a set of offsets and - abbreviations, equivalent to the way the ``TZ`` variable can be specified - in POSIX-like systems, but using Python delta objects to specify DST - start, end and offsets. - - :param stdabbr: - The abbreviation for standard time (e.g. ``'EST'``). - - :param stdoffset: - An integer or :class:`datetime.timedelta` object or equivalent - specifying the base offset from UTC. - - If unspecified, +00:00 is used. - - :param dstabbr: - The abbreviation for DST / "Summer" time (e.g. ``'EDT'``). - - If specified, with no other DST information, DST is assumed to occur - and the default behavior or ``dstoffset``, ``start`` and ``end`` is - used. If unspecified and no other DST information is specified, it - is assumed that this zone has no DST. - - If this is unspecified and other DST information is *is* specified, - DST occurs in the zone but the time zone abbreviation is left - unchanged. - - :param dstoffset: - A an integer or :class:`datetime.timedelta` object or equivalent - specifying the UTC offset during DST. If unspecified and any other DST - information is specified, it is assumed to be the STD offset +1 hour. - - :param start: - A :class:`relativedelta.relativedelta` object or equivalent specifying - the time and time of year that daylight savings time starts. 
To - specify, for example, that DST starts at 2AM on the 2nd Sunday in - March, pass: - - ``relativedelta(hours=2, month=3, day=1, weekday=SU(+2))`` - - If unspecified and any other DST information is specified, the default - value is 2 AM on the first Sunday in April. - - :param end: - A :class:`relativedelta.relativedelta` object or equivalent - representing the time and time of year that daylight savings time - ends, with the same specification method as in ``start``. One note is - that this should point to the first time in the *standard* zone, so if - a transition occurs at 2AM in the DST zone and the clocks are set back - 1 hour to 1AM, set the ``hours`` parameter to +1. - - - **Examples:** - - .. testsetup:: tzrange - - from dateutil.tz import tzrange, tzstr - - .. doctest:: tzrange - - >>> tzstr('EST5EDT') == tzrange("EST", -18000, "EDT") - True - - >>> from dateutil.relativedelta import * - >>> range1 = tzrange("EST", -18000, "EDT") - >>> range2 = tzrange("EST", -18000, "EDT", -14400, - ... relativedelta(hours=+2, month=4, day=1, - ... weekday=SU(+1)), - ... relativedelta(hours=+1, month=10, day=31, - ... weekday=SU(-1))) - >>> tzstr('EST5EDT') == range1 == range2 - True - - """ - def __init__(self, stdabbr, stdoffset=None, - dstabbr=None, dstoffset=None, - start=None, end=None): - - global relativedelta - from dateutil import relativedelta - - self._std_abbr = stdabbr - self._dst_abbr = dstabbr - - try: - stdoffset = stdoffset.total_seconds() - except (TypeError, AttributeError): - pass - - try: - dstoffset = dstoffset.total_seconds() - except (TypeError, AttributeError): - pass - - if stdoffset is not None: - self._std_offset = datetime.timedelta(seconds=stdoffset) - else: - self._std_offset = ZERO - - if dstoffset is not None: - self._dst_offset = datetime.timedelta(seconds=dstoffset) - elif dstabbr and stdoffset is not None: - self._dst_offset = self._std_offset + datetime.timedelta(hours=+1) - else: - self._dst_offset = ZERO - - if dstabbr and start is None: - self._start_delta = relativedelta.relativedelta( - hours=+2, month=4, day=1, weekday=relativedelta.SU(+1)) - else: - self._start_delta = start - - if dstabbr and end is None: - self._end_delta = relativedelta.relativedelta( - hours=+1, month=10, day=31, weekday=relativedelta.SU(-1)) - else: - self._end_delta = end - - self._dst_base_offset_ = self._dst_offset - self._std_offset - self.hasdst = bool(self._start_delta) - - def transitions(self, year): - """ - For a given year, get the DST on and off transition times, expressed - always on the standard time side. For zones with no transitions, this - function returns ``None``. - - :param year: - The year whose transitions you would like to query. - - :return: - Returns a :class:`tuple` of :class:`datetime.datetime` objects, - ``(dston, dstoff)`` for zones with an annual DST transition, or - ``None`` for fixed offset zones. 
- """ - if not self.hasdst: - return None - - base_year = datetime.datetime(year, 1, 1) - - start = base_year + self._start_delta - end = base_year + self._end_delta - - return (start, end) - - def __eq__(self, other): - if not isinstance(other, tzrange): - return NotImplemented - - return (self._std_abbr == other._std_abbr and - self._dst_abbr == other._dst_abbr and - self._std_offset == other._std_offset and - self._dst_offset == other._dst_offset and - self._start_delta == other._start_delta and - self._end_delta == other._end_delta) - - @property - def _dst_base_offset(self): - return self._dst_base_offset_ - - -@six.add_metaclass(_TzStrFactory) -class tzstr(tzrange): - """ - ``tzstr`` objects are time zone objects specified by a time-zone string as - it would be passed to a ``TZ`` variable on POSIX-style systems (see - the `GNU C Library: TZ Variable`_ for more details). - - There is one notable exception, which is that POSIX-style time zones use an - inverted offset format, so normally ``GMT+3`` would be parsed as an offset - 3 hours *behind* GMT. The ``tzstr`` time zone object will parse this as an - offset 3 hours *ahead* of GMT. If you would like to maintain the POSIX - behavior, pass a ``True`` value to ``posix_offset``. - - The :class:`tzrange` object provides the same functionality, but is - specified using :class:`relativedelta.relativedelta` objects. rather than - strings. - - :param s: - A time zone string in ``TZ`` variable format. This can be a - :class:`bytes` (2.x: :class:`str`), :class:`str` (2.x: - :class:`unicode`) or a stream emitting unicode characters - (e.g. :class:`StringIO`). - - :param posix_offset: - Optional. If set to ``True``, interpret strings such as ``GMT+3`` or - ``UTC+3`` as being 3 hours *behind* UTC rather than ahead, per the - POSIX standard. - - .. caution:: - - Prior to version 2.7.0, this function also supported time zones - in the format: - - * ``EST5EDT,4,0,6,7200,10,0,26,7200,3600`` - * ``EST5EDT,4,1,0,7200,10,-1,0,7200,3600`` - - This format is non-standard and has been deprecated; this function - will raise a :class:`DeprecatedTZFormatWarning` until - support is removed in a future version. - - .. _`GNU C Library: TZ Variable`: - https://www.gnu.org/software/libc/manual/html_node/TZ-Variable.html - """ - def __init__(self, s, posix_offset=False): - global parser - from dateutil.parser import _parser as parser - - self._s = s - - res = parser._parsetz(s) - if res is None or res.any_unused_tokens: - raise ValueError("unknown string format") - - # Here we break the compatibility with the TZ variable handling. - # GMT-3 actually *means* the timezone -3. - if res.stdabbr in ("GMT", "UTC") and not posix_offset: - res.stdoffset *= -1 - - # We must initialize it first, since _delta() needs - # _std_offset and _dst_offset set. Use False in start/end - # to avoid building it two times. 
- tzrange.__init__(self, res.stdabbr, res.stdoffset, - res.dstabbr, res.dstoffset, - start=False, end=False) - - if not res.dstabbr: - self._start_delta = None - self._end_delta = None - else: - self._start_delta = self._delta(res.start) - if self._start_delta: - self._end_delta = self._delta(res.end, isend=1) - - self.hasdst = bool(self._start_delta) - - def _delta(self, x, isend=0): - from dateutil import relativedelta - kwargs = {} - if x.month is not None: - kwargs["month"] = x.month - if x.weekday is not None: - kwargs["weekday"] = relativedelta.weekday(x.weekday, x.week) - if x.week > 0: - kwargs["day"] = 1 - else: - kwargs["day"] = 31 - elif x.day: - kwargs["day"] = x.day - elif x.yday is not None: - kwargs["yearday"] = x.yday - elif x.jyday is not None: - kwargs["nlyearday"] = x.jyday - if not kwargs: - # Default is to start on first sunday of april, and end - # on last sunday of october. - if not isend: - kwargs["month"] = 4 - kwargs["day"] = 1 - kwargs["weekday"] = relativedelta.SU(+1) - else: - kwargs["month"] = 10 - kwargs["day"] = 31 - kwargs["weekday"] = relativedelta.SU(-1) - if x.time is not None: - kwargs["seconds"] = x.time - else: - # Default is 2AM. - kwargs["seconds"] = 7200 - if isend: - # Convert to standard time, to follow the documented way - # of working with the extra hour. See the documentation - # of the tzinfo class. - delta = self._dst_offset - self._std_offset - kwargs["seconds"] -= delta.seconds + delta.days * 86400 - return relativedelta.relativedelta(**kwargs) - - def __repr__(self): - return "%s(%s)" % (self.__class__.__name__, repr(self._s)) - - -class _tzicalvtzcomp(object): - def __init__(self, tzoffsetfrom, tzoffsetto, isdst, - tzname=None, rrule=None): - self.tzoffsetfrom = datetime.timedelta(seconds=tzoffsetfrom) - self.tzoffsetto = datetime.timedelta(seconds=tzoffsetto) - self.tzoffsetdiff = self.tzoffsetto - self.tzoffsetfrom - self.isdst = isdst - self.tzname = tzname - self.rrule = rrule - - -class _tzicalvtz(_tzinfo): - def __init__(self, tzid, comps=[]): - super(_tzicalvtz, self).__init__() - - self._tzid = tzid - self._comps = comps - self._cachedate = [] - self._cachecomp = [] - self._cache_lock = _thread.allocate_lock() - - def _find_comp(self, dt): - if len(self._comps) == 1: - return self._comps[0] - - dt = dt.replace(tzinfo=None) - - try: - with self._cache_lock: - return self._cachecomp[self._cachedate.index( - (dt, self._fold(dt)))] - except ValueError: - pass - - lastcompdt = None - lastcomp = None - - for comp in self._comps: - compdt = self._find_compdt(comp, dt) - - if compdt and (not lastcompdt or lastcompdt < compdt): - lastcompdt = compdt - lastcomp = comp - - if not lastcomp: - # RFC says nothing about what to do when a given - # time is before the first onset date. We'll look for the - # first standard component, or the first component, if - # none is found. 
- for comp in self._comps: - if not comp.isdst: - lastcomp = comp - break - else: - lastcomp = comp[0] - - with self._cache_lock: - self._cachedate.insert(0, (dt, self._fold(dt))) - self._cachecomp.insert(0, lastcomp) - - if len(self._cachedate) > 10: - self._cachedate.pop() - self._cachecomp.pop() - - return lastcomp - - def _find_compdt(self, comp, dt): - if comp.tzoffsetdiff < ZERO and self._fold(dt): - dt -= comp.tzoffsetdiff - - compdt = comp.rrule.before(dt, inc=True) - - return compdt - - def utcoffset(self, dt): - if dt is None: - return None - - return self._find_comp(dt).tzoffsetto - - def dst(self, dt): - comp = self._find_comp(dt) - if comp.isdst: - return comp.tzoffsetdiff - else: - return ZERO - - @tzname_in_python2 - def tzname(self, dt): - return self._find_comp(dt).tzname - - def __repr__(self): - return "" % repr(self._tzid) - - __reduce__ = object.__reduce__ - - -class tzical(object): - """ - This object is designed to parse an iCalendar-style ``VTIMEZONE`` structure - as set out in `RFC 5545`_ Section 4.6.5 into one or more `tzinfo` objects. - - :param `fileobj`: - A file or stream in iCalendar format, which should be UTF-8 encoded - with CRLF endings. - - .. _`RFC 5545`: https://tools.ietf.org/html/rfc5545 - """ - def __init__(self, fileobj): - global rrule - from dateutil import rrule - - if isinstance(fileobj, string_types): - self._s = fileobj - # ical should be encoded in UTF-8 with CRLF - fileobj = open(fileobj, 'r') - else: - self._s = getattr(fileobj, 'name', repr(fileobj)) - fileobj = _nullcontext(fileobj) - - self._vtz = {} - - with fileobj as fobj: - self._parse_rfc(fobj.read()) - - def keys(self): - """ - Retrieves the available time zones as a list. - """ - return list(self._vtz.keys()) - - def get(self, tzid=None): - """ - Retrieve a :py:class:`datetime.tzinfo` object by its ``tzid``. - - :param tzid: - If there is exactly one time zone available, omitting ``tzid`` - or passing :py:const:`None` value returns it. Otherwise a valid - key (which can be retrieved from :func:`keys`) is required. - - :raises ValueError: - Raised if ``tzid`` is not specified but there are either more - or fewer than 1 zone defined. - - :returns: - Returns either a :py:class:`datetime.tzinfo` object representing - the relevant time zone or :py:const:`None` if the ``tzid`` was - not found. 
- """ - if tzid is None: - if len(self._vtz) == 0: - raise ValueError("no timezones defined") - elif len(self._vtz) > 1: - raise ValueError("more than one timezone available") - tzid = next(iter(self._vtz)) - - return self._vtz.get(tzid) - - def _parse_offset(self, s): - s = s.strip() - if not s: - raise ValueError("empty offset") - if s[0] in ('+', '-'): - signal = (-1, +1)[s[0] == '+'] - s = s[1:] - else: - signal = +1 - if len(s) == 4: - return (int(s[:2]) * 3600 + int(s[2:]) * 60) * signal - elif len(s) == 6: - return (int(s[:2]) * 3600 + int(s[2:4]) * 60 + int(s[4:])) * signal - else: - raise ValueError("invalid offset: " + s) - - def _parse_rfc(self, s): - lines = s.splitlines() - if not lines: - raise ValueError("empty string") - - # Unfold - i = 0 - while i < len(lines): - line = lines[i].rstrip() - if not line: - del lines[i] - elif i > 0 and line[0] == " ": - lines[i-1] += line[1:] - del lines[i] - else: - i += 1 - - tzid = None - comps = [] - invtz = False - comptype = None - for line in lines: - if not line: - continue - name, value = line.split(':', 1) - parms = name.split(';') - if not parms: - raise ValueError("empty property name") - name = parms[0].upper() - parms = parms[1:] - if invtz: - if name == "BEGIN": - if value in ("STANDARD", "DAYLIGHT"): - # Process component - pass - else: - raise ValueError("unknown component: "+value) - comptype = value - founddtstart = False - tzoffsetfrom = None - tzoffsetto = None - rrulelines = [] - tzname = None - elif name == "END": - if value == "VTIMEZONE": - if comptype: - raise ValueError("component not closed: "+comptype) - if not tzid: - raise ValueError("mandatory TZID not found") - if not comps: - raise ValueError( - "at least one component is needed") - # Process vtimezone - self._vtz[tzid] = _tzicalvtz(tzid, comps) - invtz = False - elif value == comptype: - if not founddtstart: - raise ValueError("mandatory DTSTART not found") - if tzoffsetfrom is None: - raise ValueError( - "mandatory TZOFFSETFROM not found") - if tzoffsetto is None: - raise ValueError( - "mandatory TZOFFSETFROM not found") - # Process component - rr = None - if rrulelines: - rr = rrule.rrulestr("\n".join(rrulelines), - compatible=True, - ignoretz=True, - cache=True) - comp = _tzicalvtzcomp(tzoffsetfrom, tzoffsetto, - (comptype == "DAYLIGHT"), - tzname, rr) - comps.append(comp) - comptype = None - else: - raise ValueError("invalid component end: "+value) - elif comptype: - if name == "DTSTART": - # DTSTART in VTIMEZONE takes a subset of valid RRULE - # values under RFC 5545. 
- for parm in parms: - if parm != 'VALUE=DATE-TIME': - msg = ('Unsupported DTSTART param in ' + - 'VTIMEZONE: ' + parm) - raise ValueError(msg) - rrulelines.append(line) - founddtstart = True - elif name in ("RRULE", "RDATE", "EXRULE", "EXDATE"): - rrulelines.append(line) - elif name == "TZOFFSETFROM": - if parms: - raise ValueError( - "unsupported %s parm: %s " % (name, parms[0])) - tzoffsetfrom = self._parse_offset(value) - elif name == "TZOFFSETTO": - if parms: - raise ValueError( - "unsupported TZOFFSETTO parm: "+parms[0]) - tzoffsetto = self._parse_offset(value) - elif name == "TZNAME": - if parms: - raise ValueError( - "unsupported TZNAME parm: "+parms[0]) - tzname = value - elif name == "COMMENT": - pass - else: - raise ValueError("unsupported property: "+name) - else: - if name == "TZID": - if parms: - raise ValueError( - "unsupported TZID parm: "+parms[0]) - tzid = value - elif name in ("TZURL", "LAST-MODIFIED", "COMMENT"): - pass - else: - raise ValueError("unsupported property: "+name) - elif name == "BEGIN" and value == "VTIMEZONE": - tzid = None - comps = [] - invtz = True - - def __repr__(self): - return "%s(%s)" % (self.__class__.__name__, repr(self._s)) - - -if sys.platform != "win32": - TZFILES = ["/etc/localtime", "localtime"] - TZPATHS = ["/usr/share/zoneinfo", - "/usr/lib/zoneinfo", - "/usr/share/lib/zoneinfo", - "/etc/zoneinfo"] -else: - TZFILES = [] - TZPATHS = [] - - -def __get_gettz(): - tzlocal_classes = (tzlocal,) - if tzwinlocal is not None: - tzlocal_classes += (tzwinlocal,) - - class GettzFunc(object): - """ - Retrieve a time zone object from a string representation - - This function is intended to retrieve the :py:class:`tzinfo` subclass - that best represents the time zone that would be used if a POSIX - `TZ variable`_ were set to the same value. - - If no argument or an empty string is passed to ``gettz``, local time - is returned: - - .. code-block:: python3 - - >>> gettz() - tzfile('/etc/localtime') - - This function is also the preferred way to map IANA tz database keys - to :class:`tzfile` objects: - - .. code-block:: python3 - - >>> gettz('Pacific/Kiritimati') - tzfile('/usr/share/zoneinfo/Pacific/Kiritimati') - - On Windows, the standard is extended to include the Windows-specific - zone names provided by the operating system: - - .. code-block:: python3 - - >>> gettz('Egypt Standard Time') - tzwin('Egypt Standard Time') - - Passing a GNU ``TZ`` style string time zone specification returns a - :class:`tzstr` object: - - .. code-block:: python3 - - >>> gettz('AEST-10AEDT-11,M10.1.0/2,M4.1.0/3') - tzstr('AEST-10AEDT-11,M10.1.0/2,M4.1.0/3') - - :param name: - A time zone name (IANA, or, on Windows, Windows keys), location of - a ``tzfile(5)`` zoneinfo file or ``TZ`` variable style time zone - specifier. An empty string, no argument or ``None`` is interpreted - as local time. - - :return: - Returns an instance of one of ``dateutil``'s :py:class:`tzinfo` - subclasses. - - .. versionchanged:: 2.7.0 - - After version 2.7.0, any two calls to ``gettz`` using the same - input strings will return the same object: - - .. code-block:: python3 - - >>> tz.gettz('America/Chicago') is tz.gettz('America/Chicago') - True - - In addition to improving performance, this ensures that - `"same zone" semantics`_ are used for datetimes in the same zone. - - - .. _`TZ variable`: - https://www.gnu.org/software/libc/manual/html_node/TZ-Variable.html - - .. 
_`"same zone" semantics`: - https://blog.ganssle.io/articles/2018/02/aware-datetime-arithmetic.html - """ - def __init__(self): - - self.__instances = weakref.WeakValueDictionary() - self.__strong_cache_size = 8 - self.__strong_cache = OrderedDict() - self._cache_lock = _thread.allocate_lock() - - def __call__(self, name=None): - with self._cache_lock: - rv = self.__instances.get(name, None) - - if rv is None: - rv = self.nocache(name=name) - if not (name is None - or isinstance(rv, tzlocal_classes) - or rv is None): - # tzlocal is slightly more complicated than the other - # time zone providers because it depends on environment - # at construction time, so don't cache that. - # - # We also cannot store weak references to None, so we - # will also not store that. - self.__instances[name] = rv - else: - # No need for strong caching, return immediately - return rv - - self.__strong_cache[name] = self.__strong_cache.pop(name, rv) - - if len(self.__strong_cache) > self.__strong_cache_size: - self.__strong_cache.popitem(last=False) - - return rv - - def set_cache_size(self, size): - with self._cache_lock: - self.__strong_cache_size = size - while len(self.__strong_cache) > size: - self.__strong_cache.popitem(last=False) - - def cache_clear(self): - with self._cache_lock: - self.__instances = weakref.WeakValueDictionary() - self.__strong_cache.clear() - - @staticmethod - def nocache(name=None): - """A non-cached version of gettz""" - tz = None - if not name: - try: - name = os.environ["TZ"] - except KeyError: - pass - if name is None or name == ":": - for filepath in TZFILES: - if not os.path.isabs(filepath): - filename = filepath - for path in TZPATHS: - filepath = os.path.join(path, filename) - if os.path.isfile(filepath): - break - else: - continue - if os.path.isfile(filepath): - try: - tz = tzfile(filepath) - break - except (IOError, OSError, ValueError): - pass - else: - tz = tzlocal() - else: - if name.startswith(":"): - name = name[1:] - if os.path.isabs(name): - if os.path.isfile(name): - tz = tzfile(name) - else: - tz = None - else: - for path in TZPATHS: - filepath = os.path.join(path, name) - if not os.path.isfile(filepath): - filepath = filepath.replace(' ', '_') - if not os.path.isfile(filepath): - continue - try: - tz = tzfile(filepath) - break - except (IOError, OSError, ValueError): - pass - else: - tz = None - if tzwin is not None: - try: - tz = tzwin(name) - except (WindowsError, UnicodeEncodeError): - # UnicodeEncodeError is for Python 2.7 compat - tz = None - - if not tz: - from dateutil.zoneinfo import get_zonefile_instance - tz = get_zonefile_instance().get(name) - - if not tz: - for c in name: - # name is not a tzstr unless it has at least - # one offset. For short values of "name", an - # explicit for loop seems to be the fastest way - # To determine if a string contains a digit - if c in "0123456789": - try: - tz = tzstr(name) - except ValueError: - pass - break - else: - if name in ("GMT", "UTC"): - tz = tzutc() - elif name in time.tzname: - tz = tzlocal() - return tz - - return GettzFunc() - - -gettz = __get_gettz() -del __get_gettz - - -def datetime_exists(dt, tz=None): - """ - Given a datetime and a time zone, determine whether or not a given datetime - would fall in a gap. - - :param dt: - A :class:`datetime.datetime` (whose time zone will be ignored if ``tz`` - is provided.) - - :param tz: - A :class:`datetime.tzinfo` with support for the ``fold`` attribute. If - ``None`` or not provided, the datetime's own time zone will be used. 
- - :return: - Returns a boolean value whether or not the "wall time" exists in - ``tz``. - - .. versionadded:: 2.7.0 - """ - if tz is None: - if dt.tzinfo is None: - raise ValueError('Datetime is naive and no time zone provided.') - tz = dt.tzinfo - - dt = dt.replace(tzinfo=None) - - # This is essentially a test of whether or not the datetime can survive - # a round trip to UTC. - dt_rt = dt.replace(tzinfo=tz).astimezone(tzutc()).astimezone(tz) - dt_rt = dt_rt.replace(tzinfo=None) - - return dt == dt_rt - - -def datetime_ambiguous(dt, tz=None): - """ - Given a datetime and a time zone, determine whether or not a given datetime - is ambiguous (i.e if there are two times differentiated only by their DST - status). - - :param dt: - A :class:`datetime.datetime` (whose time zone will be ignored if ``tz`` - is provided.) - - :param tz: - A :class:`datetime.tzinfo` with support for the ``fold`` attribute. If - ``None`` or not provided, the datetime's own time zone will be used. - - :return: - Returns a boolean value whether or not the "wall time" is ambiguous in - ``tz``. - - .. versionadded:: 2.6.0 - """ - if tz is None: - if dt.tzinfo is None: - raise ValueError('Datetime is naive and no time zone provided.') - - tz = dt.tzinfo - - # If a time zone defines its own "is_ambiguous" function, we'll use that. - is_ambiguous_fn = getattr(tz, 'is_ambiguous', None) - if is_ambiguous_fn is not None: - try: - return tz.is_ambiguous(dt) - except Exception: - pass - - # If it doesn't come out and tell us it's ambiguous, we'll just check if - # the fold attribute has any effect on this particular date and time. - dt = dt.replace(tzinfo=tz) - wall_0 = enfold(dt, fold=0) - wall_1 = enfold(dt, fold=1) - - same_offset = wall_0.utcoffset() == wall_1.utcoffset() - same_dst = wall_0.dst() == wall_1.dst() - - return not (same_offset and same_dst) - - -def resolve_imaginary(dt): - """ - Given a datetime that may be imaginary, return an existing datetime. - - This function assumes that an imaginary datetime represents what the - wall time would be in a zone had the offset transition not occurred, so - it will always fall forward by the transition's change in offset. - - .. doctest:: - - >>> from dateutil import tz - >>> from datetime import datetime - >>> NYC = tz.gettz('America/New_York') - >>> print(tz.resolve_imaginary(datetime(2017, 3, 12, 2, 30, tzinfo=NYC))) - 2017-03-12 03:30:00-04:00 - - >>> KIR = tz.gettz('Pacific/Kiritimati') - >>> print(tz.resolve_imaginary(datetime(1995, 1, 1, 12, 30, tzinfo=KIR))) - 1995-01-02 12:30:00+14:00 - - As a note, :func:`datetime.astimezone` is guaranteed to produce a valid, - existing datetime, so a round-trip to and from UTC is sufficient to get - an extant datetime, however, this generally "falls back" to an earlier time - rather than falling forward to the STD side (though no guarantees are made - about this behavior). - - :param dt: - A :class:`datetime.datetime` which may or may not exist. - - :return: - Returns an existing :class:`datetime.datetime`. If ``dt`` was not - imaginary, the datetime returned is guaranteed to be the same object - passed to the function. - - .. 
versionadded:: 2.7.0 - """ - if dt.tzinfo is not None and not datetime_exists(dt): - - curr_offset = (dt + datetime.timedelta(hours=24)).utcoffset() - old_offset = (dt - datetime.timedelta(hours=24)).utcoffset() - - dt += curr_offset - old_offset - - return dt - - -def _datetime_to_timestamp(dt): - """ - Convert a :class:`datetime.datetime` object to an epoch timestamp in - seconds since January 1, 1970, ignoring the time zone. - """ - return (dt.replace(tzinfo=None) - EPOCH).total_seconds() - - -if sys.version_info >= (3, 6): - def _get_supported_offset(second_offset): - return second_offset -else: - def _get_supported_offset(second_offset): - # For python pre-3.6, round to full-minutes if that's not the case. - # Python's datetime doesn't accept sub-minute timezones. Check - # http://python.org/sf/1447945 or https://bugs.python.org/issue5288 - # for some information. - old_offset = second_offset - calculated_offset = 60 * ((second_offset + 30) // 60) - return calculated_offset - - -try: - # Python 3.7 feature - from contextmanager import nullcontext as _nullcontext -except ImportError: - class _nullcontext(object): - """ - Class for wrapping contexts so that they are passed through in a - with statement. - """ - def __init__(self, context): - self.context = context - - def __enter__(self): - return self.context - - def __exit__(*args, **kwargs): - pass - -# vim:ts=4:sw=4:et diff --git a/venv/lib/python3.6/site-packages/dateutil/tz/win.py b/venv/lib/python3.6/site-packages/dateutil/tz/win.py deleted file mode 100644 index cde07ba..0000000 --- a/venv/lib/python3.6/site-packages/dateutil/tz/win.py +++ /dev/null @@ -1,370 +0,0 @@ -# -*- coding: utf-8 -*- -""" -This module provides an interface to the native time zone data on Windows, -including :py:class:`datetime.tzinfo` implementations. - -Attempting to import this module on a non-Windows platform will raise an -:py:obj:`ImportError`. -""" -# This code was originally contributed by Jeffrey Harris. -import datetime -import struct - -from six.moves import winreg -from six import text_type - -try: - import ctypes - from ctypes import wintypes -except ValueError: - # ValueError is raised on non-Windows systems for some horrible reason. - raise ImportError("Running tzwin on non-Windows system") - -from ._common import tzrangebase - -__all__ = ["tzwin", "tzwinlocal", "tzres"] - -ONEWEEK = datetime.timedelta(7) - -TZKEYNAMENT = r"SOFTWARE\Microsoft\Windows NT\CurrentVersion\Time Zones" -TZKEYNAME9X = r"SOFTWARE\Microsoft\Windows\CurrentVersion\Time Zones" -TZLOCALKEYNAME = r"SYSTEM\CurrentControlSet\Control\TimeZoneInformation" - - -def _settzkeyname(): - handle = winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE) - try: - winreg.OpenKey(handle, TZKEYNAMENT).Close() - TZKEYNAME = TZKEYNAMENT - except WindowsError: - TZKEYNAME = TZKEYNAME9X - handle.Close() - return TZKEYNAME - - -TZKEYNAME = _settzkeyname() - - -class tzres(object): - """ - Class for accessing ``tzres.dll``, which contains timezone name related - resources. - - .. 
versionadded:: 2.5.0 - """ - p_wchar = ctypes.POINTER(wintypes.WCHAR) # Pointer to a wide char - - def __init__(self, tzres_loc='tzres.dll'): - # Load the user32 DLL so we can load strings from tzres - user32 = ctypes.WinDLL('user32') - - # Specify the LoadStringW function - user32.LoadStringW.argtypes = (wintypes.HINSTANCE, - wintypes.UINT, - wintypes.LPWSTR, - ctypes.c_int) - - self.LoadStringW = user32.LoadStringW - self._tzres = ctypes.WinDLL(tzres_loc) - self.tzres_loc = tzres_loc - - def load_name(self, offset): - """ - Load a timezone name from a DLL offset (integer). - - >>> from dateutil.tzwin import tzres - >>> tzr = tzres() - >>> print(tzr.load_name(112)) - 'Eastern Standard Time' - - :param offset: - A positive integer value referring to a string from the tzres dll. - - .. note:: - - Offsets found in the registry are generally of the form - ``@tzres.dll,-114``. The offset in this case is 114, not -114. - - """ - resource = self.p_wchar() - lpBuffer = ctypes.cast(ctypes.byref(resource), wintypes.LPWSTR) - nchar = self.LoadStringW(self._tzres._handle, offset, lpBuffer, 0) - return resource[:nchar] - - def name_from_string(self, tzname_str): - """ - Parse strings as returned from the Windows registry into the time zone - name as defined in the registry. - - >>> from dateutil.tzwin import tzres - >>> tzr = tzres() - >>> print(tzr.name_from_string('@tzres.dll,-251')) - 'Dateline Daylight Time' - >>> print(tzr.name_from_string('Eastern Standard Time')) - 'Eastern Standard Time' - - :param tzname_str: - A timezone name string as returned from a Windows registry key. - - :return: - Returns the localized timezone string from tzres.dll if the string - is of the form `@tzres.dll,-offset`, else returns the input string. - """ - if not tzname_str.startswith('@'): - return tzname_str - - name_splt = tzname_str.split(',-') - try: - offset = int(name_splt[1]) - except: - raise ValueError("Malformed timezone string.") - - return self.load_name(offset) - - -class tzwinbase(tzrangebase): - """tzinfo class based on win32's timezones available in the registry.""" - def __init__(self): - raise NotImplementedError('tzwinbase is an abstract base class') - - def __eq__(self, other): - # Compare on all relevant dimensions, including name. - if not isinstance(other, tzwinbase): - return NotImplemented - - return (self._std_offset == other._std_offset and - self._dst_offset == other._dst_offset and - self._stddayofweek == other._stddayofweek and - self._dstdayofweek == other._dstdayofweek and - self._stdweeknumber == other._stdweeknumber and - self._dstweeknumber == other._dstweeknumber and - self._stdhour == other._stdhour and - self._dsthour == other._dsthour and - self._stdminute == other._stdminute and - self._dstminute == other._dstminute and - self._std_abbr == other._std_abbr and - self._dst_abbr == other._dst_abbr) - - @staticmethod - def list(): - """Return a list of all time zones known to the system.""" - with winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE) as handle: - with winreg.OpenKey(handle, TZKEYNAME) as tzkey: - result = [winreg.EnumKey(tzkey, i) - for i in range(winreg.QueryInfoKey(tzkey)[0])] - return result - - def display(self): - """ - Return the display name of the time zone. - """ - return self._display - - def transitions(self, year): - """ - For a given year, get the DST on and off transition times, expressed - always on the standard time side. For zones with no transitions, this - function returns ``None``. 
- - :param year: - The year whose transitions you would like to query. - - :return: - Returns a :class:`tuple` of :class:`datetime.datetime` objects, - ``(dston, dstoff)`` for zones with an annual DST transition, or - ``None`` for fixed offset zones. - """ - - if not self.hasdst: - return None - - dston = picknthweekday(year, self._dstmonth, self._dstdayofweek, - self._dsthour, self._dstminute, - self._dstweeknumber) - - dstoff = picknthweekday(year, self._stdmonth, self._stddayofweek, - self._stdhour, self._stdminute, - self._stdweeknumber) - - # Ambiguous dates default to the STD side - dstoff -= self._dst_base_offset - - return dston, dstoff - - def _get_hasdst(self): - return self._dstmonth != 0 - - @property - def _dst_base_offset(self): - return self._dst_base_offset_ - - -class tzwin(tzwinbase): - """ - Time zone object created from the zone info in the Windows registry - - These are similar to :py:class:`dateutil.tz.tzrange` objects in that - the time zone data is provided in the format of a single offset rule - for either 0 or 2 time zone transitions per year. - - :param: name - The name of a Windows time zone key, e.g. "Eastern Standard Time". - The full list of keys can be retrieved with :func:`tzwin.list`. - """ - - def __init__(self, name): - self._name = name - - with winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE) as handle: - tzkeyname = text_type("{kn}\\{name}").format(kn=TZKEYNAME, name=name) - with winreg.OpenKey(handle, tzkeyname) as tzkey: - keydict = valuestodict(tzkey) - - self._std_abbr = keydict["Std"] - self._dst_abbr = keydict["Dlt"] - - self._display = keydict["Display"] - - # See http://ww_winreg.jsiinc.com/SUBA/tip0300/rh0398.htm - tup = struct.unpack("=3l16h", keydict["TZI"]) - stdoffset = -tup[0]-tup[1] # Bias + StandardBias * -1 - dstoffset = stdoffset-tup[2] # + DaylightBias * -1 - self._std_offset = datetime.timedelta(minutes=stdoffset) - self._dst_offset = datetime.timedelta(minutes=dstoffset) - - # for the meaning see the win32 TIME_ZONE_INFORMATION structure docs - # http://msdn.microsoft.com/en-us/library/windows/desktop/ms725481(v=vs.85).aspx - (self._stdmonth, - self._stddayofweek, # Sunday = 0 - self._stdweeknumber, # Last = 5 - self._stdhour, - self._stdminute) = tup[4:9] - - (self._dstmonth, - self._dstdayofweek, # Sunday = 0 - self._dstweeknumber, # Last = 5 - self._dsthour, - self._dstminute) = tup[12:17] - - self._dst_base_offset_ = self._dst_offset - self._std_offset - self.hasdst = self._get_hasdst() - - def __repr__(self): - return "tzwin(%s)" % repr(self._name) - - def __reduce__(self): - return (self.__class__, (self._name,)) - - -class tzwinlocal(tzwinbase): - """ - Class representing the local time zone information in the Windows registry - - While :class:`dateutil.tz.tzlocal` makes system calls (via the :mod:`time` - module) to retrieve time zone information, ``tzwinlocal`` retrieves the - rules directly from the Windows registry and creates an object like - :class:`dateutil.tz.tzwin`. - - Because Windows does not have an equivalent of :func:`time.tzset`, on - Windows, :class:`dateutil.tz.tzlocal` instances will always reflect the - time zone settings *at the time that the process was started*, meaning - changes to the machine's time zone settings during the run of a program - on Windows will **not** be reflected by :class:`dateutil.tz.tzlocal`. - Because ``tzwinlocal`` reads the registry directly, it is unaffected by - this issue. 
- """ - def __init__(self): - with winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE) as handle: - with winreg.OpenKey(handle, TZLOCALKEYNAME) as tzlocalkey: - keydict = valuestodict(tzlocalkey) - - self._std_abbr = keydict["StandardName"] - self._dst_abbr = keydict["DaylightName"] - - try: - tzkeyname = text_type('{kn}\\{sn}').format(kn=TZKEYNAME, - sn=self._std_abbr) - with winreg.OpenKey(handle, tzkeyname) as tzkey: - _keydict = valuestodict(tzkey) - self._display = _keydict["Display"] - except OSError: - self._display = None - - stdoffset = -keydict["Bias"]-keydict["StandardBias"] - dstoffset = stdoffset-keydict["DaylightBias"] - - self._std_offset = datetime.timedelta(minutes=stdoffset) - self._dst_offset = datetime.timedelta(minutes=dstoffset) - - # For reasons unclear, in this particular key, the day of week has been - # moved to the END of the SYSTEMTIME structure. - tup = struct.unpack("=8h", keydict["StandardStart"]) - - (self._stdmonth, - self._stdweeknumber, # Last = 5 - self._stdhour, - self._stdminute) = tup[1:5] - - self._stddayofweek = tup[7] - - tup = struct.unpack("=8h", keydict["DaylightStart"]) - - (self._dstmonth, - self._dstweeknumber, # Last = 5 - self._dsthour, - self._dstminute) = tup[1:5] - - self._dstdayofweek = tup[7] - - self._dst_base_offset_ = self._dst_offset - self._std_offset - self.hasdst = self._get_hasdst() - - def __repr__(self): - return "tzwinlocal()" - - def __str__(self): - # str will return the standard name, not the daylight name. - return "tzwinlocal(%s)" % repr(self._std_abbr) - - def __reduce__(self): - return (self.__class__, ()) - - -def picknthweekday(year, month, dayofweek, hour, minute, whichweek): - """ dayofweek == 0 means Sunday, whichweek 5 means last instance """ - first = datetime.datetime(year, month, 1, hour, minute) - - # This will work if dayofweek is ISO weekday (1-7) or Microsoft-style (0-6), - # Because 7 % 7 = 0 - weekdayone = first.replace(day=((dayofweek - first.isoweekday()) % 7) + 1) - wd = weekdayone + ((whichweek - 1) * ONEWEEK) - if (wd.month != month): - wd -= ONEWEEK - - return wd - - -def valuestodict(key): - """Convert a registry key's values to a dictionary.""" - dout = {} - size = winreg.QueryInfoKey(key)[1] - tz_res = None - - for i in range(size): - key_name, value, dtype = winreg.EnumValue(key, i) - if dtype == winreg.REG_DWORD or dtype == winreg.REG_DWORD_LITTLE_ENDIAN: - # If it's a DWORD (32-bit integer), it's stored as unsigned - convert - # that to a proper signed integer - if value & (1 << 31): - value = value - (1 << 32) - elif dtype == winreg.REG_SZ: - # If it's a reference to the tzres DLL, load the actual string - if value.startswith('@tzres'): - tz_res = tz_res or tzres() - value = tz_res.name_from_string(value) - - value = value.rstrip('\x00') # Remove trailing nulls - - dout[key_name] = value - - return dout diff --git a/venv/lib/python3.6/site-packages/dateutil/tzwin.py b/venv/lib/python3.6/site-packages/dateutil/tzwin.py deleted file mode 100644 index cebc673..0000000 --- a/venv/lib/python3.6/site-packages/dateutil/tzwin.py +++ /dev/null @@ -1,2 +0,0 @@ -# tzwin has moved to dateutil.tz.win -from .tz.win import * diff --git a/venv/lib/python3.6/site-packages/dateutil/utils.py b/venv/lib/python3.6/site-packages/dateutil/utils.py deleted file mode 100644 index ebcce6a..0000000 --- a/venv/lib/python3.6/site-packages/dateutil/utils.py +++ /dev/null @@ -1,71 +0,0 @@ -# -*- coding: utf-8 -*- -""" -This module offers general convenience and utility functions for dealing with -datetimes. - -.. 
versionadded:: 2.7.0 -""" -from __future__ import unicode_literals - -from datetime import datetime, time - - -def today(tzinfo=None): - """ - Returns a :py:class:`datetime` representing the current day at midnight - - :param tzinfo: - The time zone to attach (also used to determine the current day). - - :return: - A :py:class:`datetime.datetime` object representing the current day - at midnight. - """ - - dt = datetime.now(tzinfo) - return datetime.combine(dt.date(), time(0, tzinfo=tzinfo)) - - -def default_tzinfo(dt, tzinfo): - """ - Sets the the ``tzinfo`` parameter on naive datetimes only - - This is useful for example when you are provided a datetime that may have - either an implicit or explicit time zone, such as when parsing a time zone - string. - - .. doctest:: - - >>> from dateutil.tz import tzoffset - >>> from dateutil.parser import parse - >>> from dateutil.utils import default_tzinfo - >>> dflt_tz = tzoffset("EST", -18000) - >>> print(default_tzinfo(parse('2014-01-01 12:30 UTC'), dflt_tz)) - 2014-01-01 12:30:00+00:00 - >>> print(default_tzinfo(parse('2014-01-01 12:30'), dflt_tz)) - 2014-01-01 12:30:00-05:00 - - :param dt: - The datetime on which to replace the time zone - - :param tzinfo: - The :py:class:`datetime.tzinfo` subclass instance to assign to - ``dt`` if (and only if) it is naive. - - :return: - Returns an aware :py:class:`datetime.datetime`. - """ - if dt.tzinfo is not None: - return dt - else: - return dt.replace(tzinfo=tzinfo) - - -def within_delta(dt1, dt2, delta): - """ - Useful for comparing two datetimes that may a negilible difference - to be considered equal. - """ - delta = abs(delta) - difference = dt1 - dt2 - return -delta <= difference <= delta diff --git a/venv/lib/python3.6/site-packages/dateutil/zoneinfo/__init__.py b/venv/lib/python3.6/site-packages/dateutil/zoneinfo/__init__.py deleted file mode 100644 index 34f11ad..0000000 --- a/venv/lib/python3.6/site-packages/dateutil/zoneinfo/__init__.py +++ /dev/null @@ -1,167 +0,0 @@ -# -*- coding: utf-8 -*- -import warnings -import json - -from tarfile import TarFile -from pkgutil import get_data -from io import BytesIO - -from dateutil.tz import tzfile as _tzfile - -__all__ = ["get_zonefile_instance", "gettz", "gettz_db_metadata"] - -ZONEFILENAME = "dateutil-zoneinfo.tar.gz" -METADATA_FN = 'METADATA' - - -class tzfile(_tzfile): - def __reduce__(self): - return (gettz, (self._filename,)) - - -def getzoneinfofile_stream(): - try: - return BytesIO(get_data(__name__, ZONEFILENAME)) - except IOError as e: # TODO switch to FileNotFoundError? - warnings.warn("I/O error({0}): {1}".format(e.errno, e.strerror)) - return None - - -class ZoneInfoFile(object): - def __init__(self, zonefile_stream=None): - if zonefile_stream is not None: - with TarFile.open(fileobj=zonefile_stream) as tf: - self.zones = {zf.name: tzfile(tf.extractfile(zf), filename=zf.name) - for zf in tf.getmembers() - if zf.isfile() and zf.name != METADATA_FN} - # deal with links: They'll point to their parent object. Less - # waste of memory - links = {zl.name: self.zones[zl.linkname] - for zl in tf.getmembers() if - zl.islnk() or zl.issym()} - self.zones.update(links) - try: - metadata_json = tf.extractfile(tf.getmember(METADATA_FN)) - metadata_str = metadata_json.read().decode('UTF-8') - self.metadata = json.loads(metadata_str) - except KeyError: - # no metadata in tar file - self.metadata = None - else: - self.zones = {} - self.metadata = None - - def get(self, name, default=None): - """ - Wrapper for :func:`ZoneInfoFile.zones.get`. 
This is a convenience method - for retrieving zones from the zone dictionary. - - :param name: - The name of the zone to retrieve. (Generally IANA zone names) - - :param default: - The value to return in the event of a missing key. - - .. versionadded:: 2.6.0 - - """ - return self.zones.get(name, default) - - -# The current API has gettz as a module function, although in fact it taps into -# a stateful class. So as a workaround for now, without changing the API, we -# will create a new "global" class instance the first time a user requests a -# timezone. Ugly, but adheres to the api. -# -# TODO: Remove after deprecation period. -_CLASS_ZONE_INSTANCE = [] - - -def get_zonefile_instance(new_instance=False): - """ - This is a convenience function which provides a :class:`ZoneInfoFile` - instance using the data provided by the ``dateutil`` package. By default, it - caches a single instance of the ZoneInfoFile object and returns that. - - :param new_instance: - If ``True``, a new instance of :class:`ZoneInfoFile` is instantiated and - used as the cached instance for the next call. Otherwise, new instances - are created only as necessary. - - :return: - Returns a :class:`ZoneInfoFile` object. - - .. versionadded:: 2.6 - """ - if new_instance: - zif = None - else: - zif = getattr(get_zonefile_instance, '_cached_instance', None) - - if zif is None: - zif = ZoneInfoFile(getzoneinfofile_stream()) - - get_zonefile_instance._cached_instance = zif - - return zif - - -def gettz(name): - """ - This retrieves a time zone from the local zoneinfo tarball that is packaged - with dateutil. - - :param name: - An IANA-style time zone name, as found in the zoneinfo file. - - :return: - Returns a :class:`dateutil.tz.tzfile` time zone object. - - .. warning:: - It is generally inadvisable to use this function, and it is only - provided for API compatibility with earlier versions. This is *not* - equivalent to ``dateutil.tz.gettz()``, which selects an appropriate - time zone based on the inputs, favoring system zoneinfo. This is ONLY - for accessing the dateutil-specific zoneinfo (which may be out of - date compared to the system zoneinfo). - - .. deprecated:: 2.6 - If you need to use a specific zoneinfofile over the system zoneinfo, - instantiate a :class:`dateutil.zoneinfo.ZoneInfoFile` object and call - :func:`dateutil.zoneinfo.ZoneInfoFile.get(name)` instead. - - Use :func:`get_zonefile_instance` to retrieve an instance of the - dateutil-provided zoneinfo. - """ - warnings.warn("zoneinfo.gettz() will be removed in future versions, " - "to use the dateutil-provided zoneinfo files, instantiate a " - "ZoneInfoFile object and use ZoneInfoFile.zones.get() " - "instead. See the documentation for details.", - DeprecationWarning) - - if len(_CLASS_ZONE_INSTANCE) == 0: - _CLASS_ZONE_INSTANCE.append(ZoneInfoFile(getzoneinfofile_stream())) - return _CLASS_ZONE_INSTANCE[0].zones.get(name) - - -def gettz_db_metadata(): - """ Get the zonefile metadata - - See `zonefile_metadata`_ - - :returns: - A dictionary with the database metadata - - .. deprecated:: 2.6 - See deprecation warning in :func:`zoneinfo.gettz`. To get metadata, - query the attribute ``zoneinfo.ZoneInfoFile.metadata``. - """ - warnings.warn("zoneinfo.gettz_db_metadata() will be removed in future " - "versions, to use the dateutil-provided zoneinfo files, " - "ZoneInfoFile object and query the 'metadata' attribute " - "instead. 
See the documentation for details. - DeprecationWarning) - - if len(_CLASS_ZONE_INSTANCE) == 0: - _CLASS_ZONE_INSTANCE.append(ZoneInfoFile(getzoneinfofile_stream())) - return _CLASS_ZONE_INSTANCE[0].metadata diff --git a/venv/lib/python3.6/site-packages/dateutil/zoneinfo/dateutil-zoneinfo.tar.gz b/venv/lib/python3.6/site-packages/dateutil/zoneinfo/dateutil-zoneinfo.tar.gz deleted file mode 100644 index 124f3e14c68f4e94808400af9a8d4ecdb2178880..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 154405 (binary delta payload not shown)
zrb+J#&o?s#oLZO?4`Uw9mz-S$d#3C#=@-y#`T=2XE|r zrj#%AQlS|Ye9^xwIo*pZbFXHErR;a=C-4^YY8Hjo6u@arg42BBQB`%rMkJ@w#xk+RQ%Su$B3}--p1v5C z!mXPV-e0Uj%%z+s{Y7Eq z+st0=(4}T%JF63WYw60^zKFv6^y6223_L!yioKF&zt6(`tmfBbjDJ??$HLu)*LqJ; zLLyI5<~`p}N~SQo5;%`7UxQT{UpwxIp?{Z0YtQ?{8C-d3cj+11E6nlr^5wf%Sk}Ea z;jvO=cEdX7;6=*ug(SLwSS@g|UV|)8$hGU6>BgfB@x( zNgTKMPanVLf%X}Yv_jv?==T}o3V!A)w0w)=vc?n|(sceQX!}}d=wol-d>QXPL)v#& z<53CO6gPBA^GPZo7xG7i4(mlMc6P3xqLkPy$m_)^G8muf)IwSgl@PJ*8yV;4`GJcq{dro zW`M2)=)QmH2%u&8K0`;4`Kl5s3-YmHr#(%)NK#)B(>&d>~ zdu10NKX2 ziCg2A&2$s<3QXtF_{sIB=38auYl=-3R3rNQWA$d+^QLv@ z7W(!}D~jg_@n9Ooh7c>`rpjlhZA4pN5x4I*-+4W<+(r@bN%O@@o}uRW`f3~>$$cw+ z3dIysS9Q%H3$?VI(6r-sY-e>l9Qruis1U{MCSv^|k+Tzps)cMzjgJ;bM1!Bv*fV2W zBCSJ_td)n)YoP4DFPnNynaZL#okwI>>MXRXLZ8B~~HsA(?9q30R40UN`U}oY6 zo!Cm)L>7CfA%8M^=-6qkU5HCo&-%fj0iL6e*LnBU*PC1I^?}kEB~|(1YX>Om_ZqJ9 z`e`WhnP(3%K)rsTJV$$qYC?g?J_L4vushdqqpOUN$jU6FLa3Pa!?28382w8PD4WgTKXl?Iqp-Gz$;qAu033SAese4Re&T$ zAc^oP>IMTMn;zT&TJ2iH^#te+fHnqbPJku_=+^)(v;Wo*C2R>`9A$O|fFUV>A+aH{ zejy#8vF(U-}g0&gpvi~{8LD*<1DTI<{?L8r7GaOm7vV%;vD!QR6sJz95^6W$rI?kycf1w6)d8d3!`dBJK1O9mo^)u#N7u;Eojlgs z^II%t=5@GHK)^po{7Q$Rh_uzga75a@^6xv=74e=6E&B_Th1oC6VjVO5FSC#>%hB3M z#q3UPq&*RT8S<*rrEJZ^n>4X8>#FA>$^hF>%-fY#ti#n>tZa?3z(2oC#u8B7U^Ho+OdSvGwj+d9oV`mL{)zjxyhBEQ=1EE>!c3-SvSyhv2zf0{; zeoymbw>J;xd79b~lk-<);)3?CXS2vo!x4oc7X23~$zj@JOU-OzNCUXNBdtYy%KcI3 z2wVMGijan=trwZi2EKa<&jq6NWJ4n#77|Tw?DGRLK7?$My~K`qr9AvF-rc>GaMa=D zo%i1_=?-Gsc`Q|IxcGBp_~rl;h(BHFmbc0*8t*QOgVY>$JxH z!d1uqdraH=dGCLZY2%zS@3q|Oj9&{DKYZ>eLP$<24)v-kmnHc{>8aHatFRH@O#0jo zneGfJbC+p(W@uDUQaZG?&lfCfTQ;ovL85Y$iyhl$TnV@p< z8P?q8VRDByBd$0Xq|(Qupu(j1kdWoFXw~%cXns^CcBLTqv74lmluXfIOyMbbMzV*A zW1{*2M5kX+@0t^=#M|EF;+0h z-4m*1TCZ&m1=c|V>!O`636Cw)dy=krW3Kq=Rwu;JCjyQ&iKRLe|qC-(G zg{)Y9&Pi-NZ^I_T-*^Ah^kGca6a`pN0}GPQ;6&^6b;)5K%|Gy;=if5Xp`>r2=xEK#E%( zNvD-t9no5M=WCAO2OI_ejQf@Slf{YxnXL*nYH|<$lO(6A1DnUsynvkZ zeM@p8s!}!f1F!lT%xLj5uuuv&yep1AoGzm!v*yfgGA@tX*OOI5xG?nmmwVDO#iJod#f12ViLN&>cEdqB{xaT=lW~L!+rAj`n7}S!*L(A@3!-=(lV|mg3H{cHkWSb zH9VwQbij;HtciU6>P*sqE4S8wSZsCf{lQ?O4`vqIwXzRi>4>DY!@KukMQPsIp)JzYBcNw0 zNixrK*r5O5x<{-dw{QcCz_~Ag7#a}6mNFoF7uv!j|Hj=q@9p8FmBe&DE=rzCkQEq& zE|-fA$ld@l8bJ05kfj1LsdtByMUvC`uwbALCLkaI1YCf?8W1c3f}Vc`AM1c$T2|m( z968V%K=TgJNC6sXQverKPG3Al_WfTHs6za_lys^raN$%{hN?JuSuQsLCd=X>0M(73 zwBCMUeQELo75as9$^<>po7PlmOGO2?iwhkRgxaW&t);?_C2dQ{rm#AXT2OmsqXC6AB|4P`8*YBT;_iS8w1r7V78wCNaa^VyokFgw{IfdT7P{hDx;-}j*I;vx2h*eU zP|~60cZ0Luqp@Z1BQaH?cy4AR_4x{xGIePkFGRb0wc9|;ja-2Fpt*rgOKxM0-JXQ` zE%9hP;jF}LPc@qP4(%`ZgvFi~A&&%iXmP5$y&SrEhD1Zfb!a2qPK*6YOksw5_h|Pl zJ3Dx&Z>uq0v2c+oEe@MePgATu0CAw(%hy&^!)sh}fmo*>;y*@XTx&?ST(S6DWLH_O zu`qE`f}|*(%^l)YpQ5LgvZ;4uYT*W5I>H) zVwWOc>7$J|Auf4Rj7P9L6cG&2+9tzlSdTWKg z=#+f()lR_9-C@Wqc~-f)Lp#3tj5u|{G6rUKj`$3NssyA&9eW}x)*<{Ue(USL@`7f> z({#-HS$biFsG9NO?R z2-1l)>?(}pZXbHiGAWl@jIMN9jB?90?7C-}j?SGl@Iq2bb)Ro}l>dYqmVeF;*3E`9 z*L7X5&4v4Mt1#a^61!CA5ASU1^$-4csb`_hEZ?(C|1hKAf0ufy4*z$lM+RE|-=&^} z5*xx3A8SGb7i+?3f23SCjce9&y~_J`pEd6md+3&<8q_b6+giEz zO#{!ZvKQRSCQckeui}pQNAgg=6DN?z|H?x}6M>OiEEc0C+$v$WIICbguQG-F1t*ec zWHIv_wre}o>@k-gd_Q-o&9ANB<>aifeqbn_N ze@uO4KvZ4Vwg`xHh;*kYjihvUOLt0051rE8-AJc&NQX2?4xIx?cf-tkxS#LG_hVh_ z+N<|I1~X^o@co3a!S$Ya2v6$<4r7;e>rFW(9O1xVUm1 zM#6OzF<*7!+3K|?pyNJl%S!LI&R--4k#@3JI~c^2w=zXbh@11?CXb@cG=O&G`c_FXf&9QxCL? 
z|KA_v?+#n9J%&1?m;PEA+VrJN&W2!SltiboiauTzk?iH3n(W8I%>Inkbfz<0`u(=N zKdpc*WwQj2c&4Z+eiz?dfT!2Ys`{`!dV0wLE;?vNk=WAdf&&+|$X&OqNZeUABu08l z@L4X84-uj8fAn;&Ec!Q)kW+9cl@VdUxc||!wy+8?bh6j|VXvDgpqJB$;@g*uTSx&5QQaHeI0(TWo= zc}4>!i*5Ow!eyRBdMoaU4Vi~%)bWH6c$q=R9_?u-$DT!9;dvK5hZGl5{4Cnn|SntfOETGQd>zVmf^>d1o;sxsZ2QPy1e#%D^OQ_=w}SU&Wcgm9OJs{{2~;=1;~s zfOu8Pz?CPZa1JM69fAE%MJ?-OmRFnB>x&W~@}it+e4G$HaJ3dg?(X$(Z~o5-CF7l-CS7 z>`wCLkd~35+T{4{Pt=22k}>sBBsPS+YwS*AfMay1HZ^{mg?i9PGNvzzBr*UumXMbY zo`IRe$<_jLH#WqU5x@PJdeB-j2K=iRw>}`X-_f1qHzBVthZCA5glA%?HYIk1;k%P6)-pK5D{#q5?Zp%pn!bz}b&sw)gp6#KKw&`UpcyEK5ZIUCc_uRA{DYq8Z9{Zs7 zuI8A~?ZupR4$F}{?)7@};Rn1Gh8>ns`Zlk$$Y-nh1pdelZJ`Z17B1% z&c{v~MbsSW;`f;GFo3VpJ+4Q1bTI_!*?#lt-f8%-Tb1)6!ei07E5?Kj^_!>4X)OM= zufB_yfthK%5=&hX{vy;$qUmZ&$J6onJ5zO0W@6WH)<%@&iJpa*fzD&={Eg83!t0-J z${ejSVO|f)b+E%rScxHwLjibmZZboF>KcSG0ZSAz*p~L?>F>NeR~W1&Ww=@Wotk;; ztvS`{wtpL=If>Xlmb5jqnss6Nbtd_UcuX@U?O3x$V78{}m|A~bNVJRXy2*vZv9TQU zqh%}Sd4cb5F|U0^7}Y^6=b-LM6IAcHOx$i%AUE)#!!lqxP&9oGY5g>TOz>=m+~>Lf z;%*l2dH5+lr6kj?V)9RAVxjhqOG)Wy3L5oN#RTO*QIxUeoP6@0bUM7#p|)PER$U6Z zdi~lEU8|YwU;P_!omtnFxSM|KT!LxI?+oW@eFpHBgMN&2Jp{U;#;J6usJfQ6Wi7aV zTJ$uo;2F?+&xlw>euzl^MlN`+)pJu#j8Ve3vIu{bN;`s7Wo|3iONMrSd`?cMfN^Bj z)Vl1~zUqEY_AEO(ZkcT}igl|~72}SL&rgjZehsHj(8LqheK|Q=t1tsAO?|nYQ_a$^ zDOaiGzCMaIT)qKCZJafIe@Ev|1h=aDeomqPwgj7E)X2L2c3VZqAW!}3YS{eKV*Sgb zg0FVsm~W^?hIOI0MRn_gsah2ze|L!-lXXseVL-3hPd5?P-xK4R{jhv^dkA@3I=8fz zCqXJ@ltW(}auV?~|7QsUkJ}rmvxk)%tY;*tpP1ZfM7b{P+{nwe_UzT!KZn~xi#6b* zVl?Ci7j?5&%rKa~Oegj1bFTWGCdE&(Oe?o@>Lmnty@PzH4+k^m1@KIIY)ooe-Km*o zFdhFIS4iFPZvWgA@VV0w?Mt-d7-pVh%st%ac{U;cKfielPj6#{TszmydAeAf;iSF& z=jat*h2#Hv>POe|-9lm%azseUb$>moq!+M5-`Rcf&eG^UoEvbH0&Yye{T6T|0Pfc~ zxklIFYQkIZ0V8Tk?j7Q&5=r0=ea_~;&$~p-#r2A|5?dr(eCILot)a92ccJKtB;wY> z{DA0*%L;poTZZ!>x5q0$miiSa&fk^6IpUVVode?dzO0AF5vsP!`1;d196{4`488#!?blW`!REnwEa z0X{$M3cd`^>Mh{I28z&1qf0Br5ycf#&{F6dn|+5%=mc0aT*3rUwo1fW*fVg}|1(k< z+&%`7K?Y<90U3cn1_mW31(PTc5CjAulLfgT17+XEU4_SUqDb=v1HDjytoJ|`Bao#l zJk3n8k6$TdMG)tLCav?v=gzO#vgT6O0}j5e<7CtidU8$P?$cqg4|50Jyys+kUXPTD znf&i2EaUM_qxnw$aPe2i?B~QUl1qu0jc4m$ITku?)X0}}>adFiI5L(oyfar_e(mZn z=eL3tLLtf`|Da7z7DYD}(f)7gjrkOqYN%~xq&plK2dVIr>FE6-5zMlMP zH08V@wW4XY%Nu%o{2nEz{;T^#@bK}Ph}S^fO?s*VfLDl)Xx(W{$F(U25Hs$xTlAM%V4^9?BSOZ7tQB z%L5fwe;#D?I=iq-ASAO(=+&4;lyBHZELEfxa2T>mu#6fVcsPcZfs=4<)*dI9AN%EQ z(%vs0`RL}>SCIP{pYAXJI-65_^BK<+y{*BU}?XFw^-V#90?5K-O*~K z^+Ggh1p7X*mrT`A7+glTM4u`>0j8Kp_6uG=cZn^&cB*qy;vYPiHPY>&QZy&<;jjW0 zj(M7qA%WVQ##aXMSAcwmGl+KqqydmCk)-&VjoW&~F!l>F#(IP9PYD*?pHlFMWvA1`s(w6aY~IMCGk+p&FpS0-_0sHXynMQQrN)%;B$P`osyf zZMo;!RU{2C%oN_4o1qUa(E&fU0(TYPnM0I+f1q|ls9S@Rxl|^jTZPZT1Fso9!za9n zU_WQrM3h05NWUXnvs4gIPjkUGH<^}93YQ^Qp`(j_G2pDlHQ=<@Q#f1zu9>e$UW#@bcs27|i+hJPG&I1qKVb@e^K8Yx$QJ zme%rXt;PxlYkrypk3MTYu8+=usRdwOYWM5*dVNO&rVEv2H7cJUhp=WHJWEiECqr|V z1$7TB<&M}L9!CNipEc(ak1CeSnmCI(S|b=#+sm^`^AOy*|x;4ABqJA zOrCD9I?aS!9;lZ$3%3vC5Zvk(0M1IXW-OW;8hTV-Euu(EG*n1>R`TK)JhjqPPQurYR8HO4 z6K^2?zOiUSaHzR#NgT3GPMDqSNnJkc8k_yLfy*8C-`{twWohj%yw=^}&864ShUHWp z47uS{C>&c=tW|jx^svvbcla6=gz_$}*Wa)Oruv!sogSg13Gya9`EZcSe3or}ivC?s z^u(;$G^gb~pzLgkp<8u(OJL2?ZdEbk+?s#lg=7n@a;2R=tX=sS+jcN*2@KTAE@lAbVexg2GFJ8byyvDSG- zSvS>@GF)Hm_rlmz^1HtFr|+wxPBgBQnoD>mt{iMngHcUG{M8{t97Rt*?N(W@7LJa{ zeY)-b@oxX{t4Xdv>j*tLJLPb!qb#1Gf^`PIdpD;;es_Dw%D~pNg5Lz?+``#rkFH1m zFiq#!9c<8a7y73^U^!z6M-fkkkJ^>~{+^?2vNQj^KsizS{-y-x zq@hk{*`4Jb!$tqrGGTg2T8gCJz!eAZ+3{>_OSa=k_elPPmPr1(oFQ;pU)qfX|DOhy zE%ss`>|D0vim)NfbZ{C}V&%oieId8S6Xzb&p3uU4F_O(A#Oh2Y-7eKBy&}~)^fZWh z^8TJ`|5ei~E4T}ZX?RFvA@UHu41OF-0#{f+FH(v&wSVEe1?9r`X|Cd)a6*z90p9U0 z@b^?!NI*Cy5Kak%O9A026oxbc9t?&w`Y+()!DeWd{Q?~?%heZnegRTX00dq@{{?(R 
zJEe&bX|hGvq;-P5aVc}+KYi(;BjYk7Xvw376M{v)Cyvq(rqE$ z@eK;{U-&Gr?oCV?>nA9cXsX1N0@r4(Ut_|F9Q z)cdR#Qqv}o*zNv1O9e?N0n;BL8;*-XEHD>@bgKXXe-G=u<*1kB(@m{I+ldG z3rG3QJ3wMT4cu8WNkMs3fTas0w(xL>TxL05$Q5flmy_(?6dkB>Xq+1nc0QRA`*fa9 z0IPf+B3FtypJaP=Pdxw!iN!^Y6H?_DhDx)+-ET{?L3z1e!HUj%itwt+wk0NWfIFJk zKgXs+S>~r^KV2t}l3&0AjGwouOvE0#a^yswa$vV-J_9hX`{G;}8`Co^5^O@8`#=Qs z>%)NEPCt9L8N$STpS$!OZa^f9Pr0Xs9WyvuUi;|{ZsEy??i`%+2IuYlAM@+>XnrDM z&Ryq+vdrQ~j258Yab#iVwEJpo#y_vi-L~)NaIan#d#!biUqZz=Q+%qFNXb2KpaWpf zrz0DIX(gs}?m1YWNpEkTfj#^`mMcGgE2F|&zqJ);4Zc*Fp`OcaLmKSX@5l#P@BI!G zI;D;)A@xp7JnOTBx|!BMRSc@5a{Ny!vRN19I0sX8@eh5q%Jt)H&SmX9)mG*t1Bjga zEt2Ws1+MD-DvtQ1sYzIyu+CFc?y|?OooD?T5X)J>ZM(WKNBiWhcIRbh035Vy5!Ngg z>M#3zi|`057rM){55_d$PO|qd_Epz1dc6BLBkDPi<~Tf`xNGj`^`}Koi@wsGcDkib zhQoO7)m&3~Ll+2?di-bZ9y+hInnED7x-N59>C$$7S+k2`Q?nRt*sexr=1pkZph*0o z^3kN$(Z_x`i|-KN=Ee=RtD6yni3m{npu z*f89Bk605y52ndLtuw??8jtyGv(FM%vK=^PshazKGNdhIGW7Ed^Jjy+nf5&RA@Cn|Q zcpeWVWOO*oMR-}=GJEc%EG=8zIv?z$n9t3P2(`xBXqDQ0wbpwIB zT~a!uZ;MQcEErV}TShdtAX-TEc~7Y*ae?QH!$2`UaffD+IEQvJBUQ~7b$@)m4OaO)on72+gbk?+fwl}fUyG+{E+38%omMmLXi}>UGmqnb&YT>GHBn^Dp)9dM16x6(ALJ z5|nd3_sZEI2@S* z=aSNFzGQ2N@6Zg<1ErhQztCCnE3p$?Z6UK>lv>>-1n1kcbd%ULxZRvJmdJmxPtvI8 z2dTsf++R^a7o+IoB*oVsohFVu?!MMesu(&i%%=8tW)fxeg?3UD2wiB$UrV(K&;l%L z=z)6j>(pOz4-GLt2pRfweD1e@u~}95LQctH&>?P#E2$W~De^@raX&LJ?C1C_!01Z! zKz(TaFLsvmN{VdyFL|p7kSYQ=)2SY)_kdxts0xJGWq}OK_?vNlPkYbC!TB3#%i8Fu zTV#?N*>F|wgZ+mL$^aoT*Cy>zO_`hPC(OoGTa|dx4b~06tXCiS_gXXchpF03aWi+Z&94X)*_03&VyHn^9U0*rV_ z8Di?=jd+)*12$g3i0_5HfYFy1_5w!yFYE=31eSXPHeSF;&?_c#Ly#Exrhc#GEwny< z_SFqPei{6c2eT^b!lh0jnNeiO;?d(&n?DoLpI^LHmZ zKsBAlQO_!a#=jog>ttY#KkUF9+Qrx=e}Bs#ejgxPja|4cX-thSgt8kBBX^88U~m8d zCRhJDN1$FhiRr+u*-H2{$N z4T!O{0YEbxfOXtYH~(Vr0Jy;hpa~xUCljEns}2Bd0O(hO4Lx0I4gB_}R`b3dO$~TH z7fInCUZ?*5p8t%^^>@y(cC_HTJ-xno@5Qd=^+#{NlMB#OaZ!Et*=Ie|oPB0) zs`09w+mMTdjN(A*4H+BB&h4QPf-TnqI^hG`9wz!=L+b-8{C&MyvMI!cuA>`hEH^Nh%c~$j&IL3&62pjd(mU^ zJaW$wQX%DbDBf8 z=7Zs}1uLTy6-jB-S?*y-30=#`T!GE#+<1`_x@r6o1?~{%J-ids1l<>+`Qexc_SPmY zrE2G+Pra6Oqo8I+TK-+1UgI^2C)#+2RIOfJDr8G!N^jbszokD*HVKt9dsTky{Na>C zul_1v2F_%5O5|uXYYwD!njzIRy9M0?w{suO1wYM;}o)PgTT#*y_yC3y+m$2{VwpHQDGQ`e` zd~~%hby&c}AkoWK@vo<6$#9RrqfAdTSK@=9BIfX8-0Zc3z~{@as*^rfYmxXPJ~21R zy~V{tKHSKvcYi7CJVfBj3{$I}V8hKtaCYO6umGu|u<4IZX2L1B9uwKKJAG~Dm{o(B zS3RF5;`rL7$upWVXV*2ajM*xrS4qnwjlb;t(`J}{Tl4;6sD)BBYDS)ytFQuB_o|L$ zZ%RI5d0j5Xt(0n@<~x0?pq~a&Jw?*&q?3=Hs;(-x(jjuZVxhswG(CbIJ;q}unIR)otYRjSANe~T~*QbW7e_VPnDJmV7PxAzi4pamae-(ad+_2cRY z8BM2mBM>923yZQOQ|9}bB*gT-NsL0^qqrY|pd`e0zfJT;kWGV7JG>rt>wh)eY#0hz zw)KaM{-wMZ{49JiZtjf2X-ne(kKCdhf}l<#-Hl6#<4}Nr%p)Fx5EVxO_j@GHKfqRz zGW_^J$_P2(S0r4*U&Bxu7fWXpHF*?10<^fG0X~{dLavfu15I}ne_!AXQKsdGj~|L@ zoOg$rbSI7Nln4cS!la2m1eH_x0>mzS(P&N`$on9fhmlY-_G;KII~w-}nDH;tu;xdx zi}%BE-iYip>u`h#lXPMiRejP%*a`MIXv4S3a0&KxXv593uZC|s;Sx^K;c3XZG7Eko z593<_#o2@}YA;I&qu7U~@^BMav_c@CfbCV-()=BvArH`$?!+NXIE_FXPGu3hIJTq? 
z)|2i;11upxr7!R_hB<&n1u8Co%hBX<*{@WTUa5DRG-fnC;gFZ!P7{i0OaDg7uf-9ORw{hMr zHMlpF_YQ^(PhuCFozv{ylBy^a&(mhoUA{Pl>lW~Z>pY~>r(*DhhAkNTx8-otlTx8q zx*xRqbP-jDG2*}cK%mi#(c?` zIO2RWU8K7>-bi;z@D;Ax0i{0y`Xiu~NvG{g(DqZQL>{W*D4#>5<9s>HrMs$u2}aoq z4LOjfYw$B?R1x>ViXm5DK8ZX86Hz>mfTX*cfMPR1D}usxX3}ZQ!<3ZI(p`aoQJb%j ze4dHPG0s=3xnWl1!FY8#0ck%JBqvAypCcg7H*lf74j0;iJ5A3E@AY(zyGB0k_w5xK zO#I5wC=X^o;fGnd0^3nagc&?|LFELCEXNa5Y&)pR>zDQzpTA{0E+TPW?ZATxG{?MU z3>W#d^*9d@pA-3f8s}I9D{4AFe#>q1q4x2lk$fatv}Nie)r+(=(Ez^E-;`N8N!xrNvyW`zwk^K`btvk@fLX29 zGuwj3ZgWz&Ta|@u`O8QN@2pN#d|Qu0{oS07LkOEpC$^nqA883=Q+=p*gt9;E;*X^$ zR(t3(4v|r3XOL1`?;B6=-{B8%l&wuK{+&13{Pi%+t0Z;q9hT~wFY@WQKuX|Q^=IbN z4iWd&s2hfj=ENTG09(81h#R}v8lPl(gGnBmeL1cO?p@nBJ4M|OYqDAtt7JJ;P=XxB zRS)AZcd*wl^%c*E8?_E*RhjV*RfP+{w5lx=_i<$R;V_<0st!$#!JR^}wvHUl! zLPhf~diJA3l7^pa96p#Uj)0e6)3Pu@_^gJ{0_shso5s^6b)af!i<)A|+jc>4By zvI>g$;dV$hvI=e_SgT+k7EawZMMxsrL;cKN_&8F|ly20--yGD1s^%k3O`Dx|+1qa% zGM68gX+8KbX8BPyWWO`J{avz{Ojz2H2s79}SA854p8C?*Of%*T{Kk|mm-TSk0z>K2 zlXW3Ow6kv&NDmf~q$F8ad31w~d6(epnA^@-zGHz>H^KO7&~OVKXqC=pa&kD?;LbhB zLSBmnB$r9&iU|`wz<$ScbETN@!|Mhs(7tM3tUbhr(!+i~UvahJ?^vy8O#zl7I#=UcEAT&YI@CJ}&90`afaR?Re7E+r9k+C1wm< z{kl<1M2rI-v-sOLZt+_-D>&QWHo2dZ`V|8^PF3g$jTt1??~BU zF2+DjE@om6i*-+#Gn_1ZE^4rU@=v*``Tmv6Gp!hftXL$Fl3~yG6Q}A#AUP*N=amC1 z|GCb^*U|+K9QC93c>O9^W|&`bD9n0T;l%LJ-U+`D34j=x1+c;t8!ygoz)3L%kbZy|y^taKop;J1BT6RDC=)Lw z#sS;(ONsH<#;@lc;9bg0UZHUZ{NR4Qkw~=SujIV>hOxN^-JKrat{aVb#sAz^F(5k{ zVI>_^_!XI5w=J?=_aW7aKO4@9KLNI#a2f*lh6J{qe=`DiUgKNtyiNq}Zj~V7jiNsE zZ^1@sxQI>7LJVkc!|c1?Qy{*!6DH$&q6vO&*DYy#Cxh~4`799^{ph?)Mclt#MAW}s zIY{@{D`MXS(I2mP2?71%$19ItL9aX_-f-tfB5+g624))dgya_f?N;~~gf1l06Og%4 zOf4+=S2_}N1py(t;Vq>w^E+&c^}&!_3t;)7M?o0yz5CB6{lCTgl^}DXl^~v%0q|FX zxPNWZGZ;C%GXb?A)1ljmhGB5$N5A3jc?Y!b1===YaQCPMWhxi;DU>qxDmWxzQQ(I= zysI|pQ&PR#pz~(wQP86ARbc(+@NO5d|DvR43UhepdI&779o}i|JG`T#lxFUGlh>f! ztDwcuqwp`UM~&^Tbf3Wq%4b1#y0BD7v%(DRCsC0rN z;}CBBalf^^t=J@s6Gg}0WNJYwH0qNesOXdUmfs_h#?&Lh8sUKbN7?~9Bhmr8m!{?I zoOsLIH5|R5ODesf2M0{)04aZO)Dt*{D+la@9s!5YXwgVPy`VWFy&!&4=3ZSuNAu|g z)dD&g(4Bxr2eR3KY(zk_Vl#`fufU7qZcw3xcnQCS*9&5Hz(}^jhsikD zEpN4IlH$50-X?EQspBLHiOZ7=r^5e$C6e{QKa;1hdU%_-Jl)af!=B;2{W@S3T^pMB zPp7#OFuyB~_rCQTUQ7a?#I)^r?|^@B*w8LvjJuKp8c}oKuhMGSGi`f13}642YYkQyr>NrmJ>^BqtH9jJl|;(^qFc~ZJ`NZ&(hGyz!$ zqN`9i*StKtY7lNon32|(`hwjRXwA>xG0$2NFof2(8c0a0zR~>X6N8c5S3IHmVCQLo zK~7YYhJWVN#oUn$mHD~@XJ;VW-z~~p(2#>e_)9Z}9~@KPlFO-eGrLnTaudg1>@0>N zNV#)}b*p+~tJ=in&^I#}=agI_KAAxqd!|zp6&0CknkA@gBIY?-D z@MC^d29ZA5EEN+1IjTuY+M>BnzA752aE;{Wyhh(?tW$ z08N*zeYQs#aJ|S7Sbk%RNxeWO`AZOJsd6$H*gW~ks9=0?U zvEhn!uhak0RKx)U=Y?Re4&g6g)VX|Rua(iDBOX35Gl{JH8!=T83Ac5?h~$_Iqwvfc z6Ce?&n|oBAmDY3|_;8-1$|lQSrXGdkm^Z_sLWSY-Kr7oHpx7lI{!x=n);qg`fcB5W z`=8<@|E}bc!r-%^WVG|W|M{zuP*kWyA|w7^lI_UXD4fyO2$e0XYLldvNFXss^`m6? 
zmn6XCRAiFGBUT2~VtD_v|1%K469EKdv_z?mMr*tENwuec&yC}2GP(NttZFj+*1Un$y!kR05-pTV2^ zSWRK52O)$U>6eN)!u=qeEiNH8QgpSFQkWB(iO^N`T|Ckg8~ze>%B1qzr}4Q{Il6!-_@N8pobhbj%yOC4B8 z*ok!K3guBv_FKUXn2%{m6crA|ZmsRBt1nMofw6x#!B0Z0znox`^usV40pOkjn7rLi zKJ|JFp8DYF`@sG5RpqQ(imd?O*VQWv=ycwN;OH(lSsQ;Fimzd@s+$(L)qx2^w z&!vaTlLs|u#$L+xeZzkE(`0$k_5Bg_=4ldD>RQ{&Nd}7lXb59AeZ2BMTuM8guaYHi zgRbItjb<@muCi8NyAex86SOgmo&O$Zxgp7zJ2`OE)oCPZYgDNcTs{CzuHS4Y3mO{u zG5bBnKl{|p^!>3QFRgv3h$duiV$97WWdLiQ>Rq|~tu76$a8tTx`=&HFHs|&8(9&ns z9eO~rT~%pcUTy!(Z;sV*xxcQm0Cohy{y>#5z z$whkEVk_io6}>O+gESg0=H+yOio-+?j^cQ4CZ(e|j=^oUFOo8IE?CS_R-FPol{FK^ z@MB|^&GXNF@36>c(ZB~@eBI;)mbI2-=T^zi=ZhX{Ud;*1<(lf{EVTyBM#IOC%Ps<@ zza&e%Qx0~0ii!T%Ghy`V zs3}05<(TYZhNsSQ22^s5%By5j_y*U;9qX=}XIB&A)a=LIzuX#kWj)l7-@IY)?qoZ0=a)GzjtBmXb@HaZOzAA!k%@v*m29SvIk1 za;Xi^_1?ItpONbSOx|+h8aXw+x%YZ^)oxLa(RPRbzDX+cB$&+nl-`q1Q1hR5T-w5} zQ*>|BVtn}Vg-*YF5fW|FWF^^l_Cdync0bBL^vq{Xr~UROPojEukGC*Jy1cK$$HMDw zrhKzQU8J*S3dVo$kCsVKJM_U(Nct{_@yB(;n}}z*gqSxxqkw zVOBBRhnqLOH}sA6*kw)T1IV%QD%bn(=RBD*S`(vUhAo2tT6UvOhPq$Tc;IXKdYgX>Hlyrfq@e>!JkhIij`yP7B@$`%bjbAU)ni9c2Xtq#jL^a9WCX z3qJM-ft~4iYZ~c81|c*lJ~lEa8FsQC?^Y6y*r$`So^xo6cowc?+sr)XPK+}$mewzH zG?>&5Sp7yDf;Hv+N(lsAboK(@eaZDiJH?3I`K{s2!UD@lcewvlzQM_Gc<*|{x7TyX zcf=9Tn#!VK73L>9>SL&S@|fVXo|^Q~8ThZA-nw%$6`q$Io4ZV!Xnqd%F$RfT1!*ua zmnFDO&Ic)eYpP6s4x8URo~lQ^_~dQ1=xTBu?|>#i|FDCk98$`@{AiSq!fr339?-+1 z#aS^|q3SW4)2(}4n`3tcIkBM7Zn8HX*<-Nbx^I5#voKNm||t964a?>`*w_T1i2) zIFPD;1d_&6msv47wVO#n?({LJ27O$`V^V<1QD4TrZbiJT&*zjP= z57^aW*90vX)xh1A&w8QV%V8v4;Q@ymPQ=xHmbQwAKmj+?voUMv_ZAEr6l`9L^^n6e zS4uskYrx|e97fVi-wS%n5_I^B2D>^?323n&?m+tRryE3&{uV3}7I27_^%k5j0!+sM zOxFNJVu6UO;WYsxN2Z$MD{xN&Kn(!DB9NE`oY@aXcZ=&b;b*Ca2}D^;B(i);>Anb;G3=M2K@ru4Gq*zW0SzvjY1FO zvIB>(50jmMs|dV6_}vZCvI#xJN`wakuNs^P|1-{XgStJ7R^C*@&wtwCaUlI=zyJo4 z?Rz*XlC$>K$2~}fySBl@O8*|$=MkQTo^HBfY*MjNmKTnLW4nAgS}(5cy2! 
zMocQHy3ztJa~Fl0K%wmrdQPO9F_8ecTJAmySSy?CCAEk zvLRi#J+gZh053`H$nhpLutJ^2(sB%PqV(<;@a&yGhw2s|5-Aik!tHkQ` z1`$vE8cM9Wupq6rcKQx2L#|4;o+7ip_KlH^wrf;J+ALR1>?)WC-p(OGfBY)7T0T3^ zKFc?FAcG+`b0o=)9)5X?i z-Nv*Rdh}~szJc{ej@v&+E?Rm%joeWp61V1C5y4b%MD4W36n))2QRqY7mrm91exh!- zJs4_Xkd2yQ=j5W14QG;7+vex{ceL##Nq*=kJKIC2f&EN97u0X}YyXX;esLr#{hQyWW9PyWZ(yV)kUQ zA#0~RpRep7=kBNYBt)K=b*FMuhzERX!EI2zfVP|(QT?tGVSOc){AyajEF0W*Qj?e8 zTptl);D>my)GMZcyZ!g?a`@nOhc5})Dah10LQxrP&Odvtk8z;04OC^rIA}}q@r%$= z1|xo)y~Y5tibeHVH1n!c%ap+!xTS}m-ye7qB7p>e;)>gO)$1Gi)l!bi;9CCK#_+&4 zjLrB(>c^s?LA$HWAzy~qTjv||DC|C%$*X6+z?w=_|Hb9+Qcqycz$)PvoaX$QF_P%^a$ji1@YDS z{P?R3W=8_~2nV;3^@!-#sO49yy)QlV_&D2G3LKtB{JtBo-}SxteIej5>FanUKxPl| zz#&N>ru$-LswGJkpN@#>8*b!P%K??`dCVS|f&P{xv!HMk&@(dyvCH7a@4^J2M<-=)I*|Mg@xZea1+*#!T*U&a8T%Sx zeW|jcD_)?Z<@Sg-mHwO=IfTd$z^(9Z~Av2wqjX)rLrN3)0Q_~XQgVEHZvphjz6^(e4&Ewc1ZV&IU0 zKOd=%Lszv1R$omI_^=L>HMmO;selHEbrxu*sJW0Rn>Wi36?i<)pd|y}9UE!@Q z?)>x+(e!)W+(+pzzhlj$+XHyg;u~*uYN0(z*Sju;MS35vd(`ty!OiygQ-@F_nx_uK zyHJ0DtZb}t(}`S%!OwFJ?Y5~-y*F)?T}DMtP6};$aRPF@oVqEESMED5PjBz#t?hSg zzbUmkq@_Ad)F*oj7UH|*i(vLi#Ja8wWHO;^P8_bx{qzVa=m>4`Grw&+K%hZ^!4$-7 z#R5WLrJ#SOp<+UJPap7e>$p7Op(=78ItUqqsJVlJ9U#TOuwquyP$sYAiTiuGof1Ft zgQe{7P&bFrO*e^Jtr+#FxnqKDAiK2=(JR7u)$y4_y_`^3P9+@M>KS?%AF;5PG{M4D&M5B)>pHcvzo))mG!|?#nXystpzd%Ud9O z9|L<0l~N!}WQ95!kDUs@D1k>9!Y(gsyrJ7NTd=;pQAMw_2kkq*$4a@TM&-E`zWJh2 zjNwO%=BC%p&-cIAWKg~q-)3OcTy;Yq`B~qX%vA`+*K>@HmJBNUXHl#9XECez)6^XP znk1n9>_RmXF{cUqO{}x}KGHhwA8Sf|VX?+21 zvymd2K7OTNXeD}5=lpy`(PVCDVqzix5#%qY+P=FwA6atDVD8fSUCp0v7@gtIi{sc1 zP%go%A@=vC`Tl8@{)m2|vgwQ9JT2_nN^h z&F$bp4Z-Px+2v|mw(Y>P{O(qb60saf%}z?C)wkbnjjgR$$C%$0y6$I>a=b_9<&RTG zAN6Z%FaKEMFUfCxc7|Ea*sN|JQW^Q}W>J zcr&I$4GxHg=&;&E>tW9CDOMxb7Y~7qsav_mGN{0z(GViG$Ot`p3zc}A0mhF`9cdC0 z)r4~X!yr>+)>c@@>GLiC};fQ}upZv-;vb z`f~P(siQUVmgMe7taGax{rYBOSjx(AF!wBZMP}=lCTXahz9PfP-UPm1NBSqnm8z7= z&dQbFs$;30*?FIzC3ja7yHqmD$UdGni+Q*{V0C&NSIMHveQg{_$JQIg2(}*W(`~_U ztDDWMWY8N_le&vZE&q`1Y*(s}gEn%yHN$d>Py##&fd?d3!UNKHB+T?43O(&|&{xJc zF;&Lr+bdwc`&f!vA*qPw+UBsABetoZKeQ|f;Ned?D7k?L)YMEgDLqB zdNsI8ZUdt&H%|XP_DVY0r;&@DU|t#z3780?wJO=t+(z89LVVfCLssgA)HTV@JEuIi z9=&(YH)6wJF0fPj0oXI&9d!4J-Z1~-X-^0hI@+}X>qpnV7HzD2lmv~U`x)DaQ2Fuy zL*2Yf#-Z#wx3Ynze1Jj(Eb}+EUqfeyV-c8oW4i=eQqD}Tjh{$$qa2#6f9tH&WGg>i zlXdo=$2FRPYCTV_(etm4!L!Bd?es15uskj`_{#Qmxa-CJbJ5EsUTaiT%kPeoTe_|- zR^L7DY}U3HJ%0?|)R&iRhHTvTEPH2;D$X~Vde6BwMgIVou6lcc@;9EhJ{rWSA>?%`Q;kqiI z>E2mW-O1W4&)OtZ=$fg`=;F7_xBhcoF@JD1(A#0*mCY}W!jRcqh9KLV$xPW4Z83bs zO)pZV`kR!!GM}cHx{&b1xlZ!*%FX=K%Uy1hU|(;kM%WORv44Iz(Nv#WSPO2yR8uLF zxbOCeZaE%u%3P86Y&v}J1~)43Q)mI`-zXQf=;&nLxY+#Z>}s#FK&gpHpdypCptC$C z%7!wEadBcWL>=D*^G~T`74uuiLj(0~8*l170bJJKS_|8k7y-nO(IM21-MC0cguf%{Cq-l`* zv(wt1LY|aO#OM3vcgn_FucwA!sbKomhOY*`EtmtadZGasb*s>J7t8(G6Tm(5D;lt8l<~xfo118e1Grr zd_M0Vd+w9h9kX-o*_m@@{OnZ6u{KYi(z}YtNzA&*%k$_{7EXJ6jP6M5jP97-_bLni z*my;KNLyw6sva(pDUBrDx2@DD>cc0EtpHANq}?Lpx`!)Ja?*cDOXW#nboOL`G|qbHWZ*FU)w2G*+j^PO z6(2Wj0&WA$l%E&}eC~<^vco6g#CGUgo)n$<{AF^PvIhYxLO8s!YoXg~>e~{TBN=@h z!Hff*(S00Ai~}3LG2Y=E-OdITLY)-LLSvKZW(2+O@%k&`-Vg#g2PDy9I=IHs<#!^`FL5#7a&m_(vH3k zH|ln#2Ixm)BYfI$&G|>2X{oy$Rb{ATLFTh8!AQUKlsg5k*iIcg37X$6%nleosr;QEiMhTJezke&DX$e&Bf9-EP;9^M)gMeonKu z{7Be8GSoZ`V0eB1C$SDq8*wB4zITS*zuV@x`+;GgyPM8_pem4}SZImrw!KnQT-e*U zIItUK!&~@%PW^ZM2HS7>ISt?Od)f&AN5j2Zdqcha6i|VUXksJBa2?FRr7n27^M;{< zUWYkEg$RToM(E_|hy}Gr{`?ug8#)mc$D_LlEqT!h{=9^JfEd8WJQ@j|RS3$yoVS1C zi(2a`g+n(qp81VIq>r>#VQTS^SaVFq;XhYf8=T!Vy0_8@clYe&SD^+;e8YOJ7qV>? 
zk0$h1`jAp%7Q{=MPBchzG| zXZ9aYWDk;DUk0 z&Wd8>x_<8%sqws_{O`V3=IJHKiC96tx^-XMbooN+de{*4-k?sD3C%k?-!?_M>NoLE z`RfT`-0!|Voihkg3*BZ^a4tI_ze#hKoczPNV*goC$Nq_r**PyV_v_wRZa`K^54R4V z&}9%`9?JLR^YIQ+cPDIfbd(J_Ud22%YC2LnLV;VP{asu;&)@`pYMhhb4&R@BGhy`d ze2aDHC-vjvQH!jtigKszT3*{47fY_B-EG&qf8I9{GBN~qhkWr#S?@UhI#gx2mw8N{ z*;`cMJq)>*3Wy8h*>Ne_eRX@@zJ4w{XYE6-$&yBP9Ee zSU+LAAi-5pzLEdS1k+YRv`f@*T$!fuaIKPjqcI@bC)n6k4nT5We(Jf%V=-!X$=ync zcHtU{E3+0J=2MYxEZa~R=cn7a$Y(KH0*&d>E~TK+9W<)RH|~N)anNYZ(sqM?E@)h~ z@?-Qp;*?v=DL-G3wBhFaph&qYA9}z;z$nPtupQ(

c@`-Fm1Brv8I7!wH)MgW3S6v#}2%>H#|>AEHwWVWtr zt}{p1H8G_O0ls#usIF%90Dn7H!EPWw*|BSWD_KaIrcui;-GJ5sr|*t_5&1s`3q14F)!kP zV&0guGy zpH6pgc~{3Lv}ElnvTSSHI5N4}+L~QDIozZp&e4I|J~l_L#%2Dd$l1diz(w-cZCg?r z2vJgrP1)TS{bM^RN=JD&6Ia*Hp1a_Vef8P&)QYdgiPAtt?LfNCiE4%|`6^-cMTf8H zafU_B#ivJd5LRC)7Cm{Czu#C19W8UDq{F}+Ny?j1bjn#|!oE9Z+dhlwgPog>Ta7!m z3!Zs#JD+04@qP!H8P?Z`A$J$L?7^Rg%F|=aFvW~>9UltmY92cNn2B}o>$A}f6{Z)w zBNN4MYX-d!une1PeAPM57j-Z>S8aW2PJDk#D_2!0??j<^^2(P?RcI&BoXl+uJcdrn;B1VSXkgzX*Yc(7-$%gu(}A!aL?8^Bt>hNTS(O zB+)F9j2P^{j2M4866gkRC>%}`3QI+7+{8UN>%~2A#KDrF;0%R7!?~ef!-(m!2cHj; z=sjvlbp1*Iih(x(HQmREnFb(+K*B+phd&Ud24q31BswRG5%Z0z1LC6zg}d{H!f^17 z(2N)>nova6{f_zZTZ?;kK*w9`14P6s-c14kUc!jU1eS;pRB*idh?>SFfv1bfN(M&Z z2uh$SbA-O#=sUV6BY{4fmO#U*0#Mcy66gX0N%Y#31Uf`t5{-BofT9_qmHc?C1JQ85 z1J0bLIf!-8b(QxIdokuxSrV^fz7uZ|u6pCT?kmo9T^oJ}!&!2`y-?UL4=yJO!~=Y+ zX1amrpIVbpI%n}NZy|D-z##`Kb{1GKx zp;>`@5b~zrYi}qF%a{aCgCik?BZ6znPsVubU?BJg<41zUau6b>2@b))>+Z3)4zkR# z53Ju@S+igyw%Sg(A0^zmH#v0|#aommcFeUG^cu^fqkxeRlZO|(iqp}5V2=??SGOYg1wv`pfNMM=OW%{I7&!l zC2d4Ush&)yyFO$f-j5)QW8Ra=xN+;M>h+9QFh;Y*_=94fV9S3Syvz!X|J&g8t~lAU zmM^i~ZJ013oj}{lXuMdpK{eNiuE@Z}zM=6vRjXZ(RRpggzP*Y51D8XKmhH|5=2`Jr z8{4C0;jWU#1pBJj?!A{@Eqr_AVM&GKSs4ns`nebvAJ#Yv$wue5BynbC7rdEeG6Al! z)ZXw{n{})E#q%sePAc9n&Z}|4AdGWIvvDx0{8` zCT#AHCrg~_h-*w$U{%4A!fZ98rh~JPo@1IA*ybf5b*PlZTtBeQ;aMQp^Q&1WUhOqm zoD?4$f+V610aGxJQFDj3`}Dx?2#2)_xX~&)Z%!e>dnXeUVOaCBJqz=eMB z{PZC!$2YH{h~vY&O29pFmry(7L5a=ryy6-7q5Zp$hP*;%|I|6Y)r8Yh#j?p}It%E{ zfQ>0)T4r{`x61{kb32)R-%dF&a~KbWbe1q{m=$sd?>vXd$1uHyLiv)|&&%Ne=MkFA4!?(}%MiUl>%aRO@Tz?Rd*$LsZe8#JRl!M+0y{I7+Xnf~ zGV)D?M%J=V>?@4EmzjQC`n?({rN@Gd>`i|Y7q8|(JG|L9WjCVmlTU{LxS=kVlhQCPs2ZBfLV zZF0Fv@`73h-hVZxpU6~g=dFIe+xfDcr73F3(x&B>$FZ?nI+?#t7Os<}>ty9RSp#7Gbr0Bn^veb8L@ty1 zl)wEvW(Lu9dex0RYV)?^^Xm9mDtERuVdGMbWf*0(K4Eh#Z6~C(d83Yc_K|t#1NZ z1rR;YpY)u$>c#KTyit6(S;2bHj9uzo!OB8d!5ZkI7cU9$%7mXN>S90XA=+LWO2t#I zs>I=pA7a5SjrkkS$``L6m#g}mGk#P_FU~}FB) z@$tUCX2AWPXmxYf2Rat)*j^;932$T@D}_pX?O1E6#PH^;gi41hm|-B)#ip{YU8^-# z?cs?odU5BTJo6LbkHn0-P2yOw%! 
zzHEAN9M4M>Bm?dX$p8~10`5bF zD_OEK;8z^*qaJ{B0#I!LF72upCjvk@fD!kB5lldS0kRCpWKiG7su$N{p%hm7ZR{?iW{s zt&I#;+_P<_Tk{5`As&R|QipTH(ix5KvcH(07|U|i%tcr$`$kiDRuq}%wx*>pq{l(EQgc4P=6t_jut}=i=PE%%MD(QoxbH(lfKe1j}1!+jpGW4IXskU zQtQ;=9Ns1TPpq=VMO(@r2HVGevyLK__uz_^`~4~GnQD)QU1E_Y3q%YrdS>kle2c>fFL=k8Hq$cigqBX}~BCqz?=kCiI^%ru+7$Fm^wLFPBM zq>T9Z0E$pWT#;OhWQ%7)-VKQwqIy1TzQM=eTOtJc_*A{PdW~MvQ9M>Lv><^d>kh}@ z?Dgk?dorRf7hLzWJ@LZVvrSBw`9fj8*p}LgsD~GArM7d0RpxJSv4vm&)Z& zL?t_z{fun=l{2Z!qn9`7zCR2g@>KpRy#}f;zDio)SH?YI8O&A#MX#@tyZ`)M^!($Y=hrPWq{3_yj&Ok07 z;h=IdjO5y7FOAt^vr;`t;18-H7a(#?;`bcAFW}zFX*o97$)yslXxPs{R(bSAL#3^QMlSTE&tKc*#$dmh<+UxXPkxH54 zyZB>QmunGN^!6tb^LCYifeG}Z%ReYwFpYBubIAq;Ew@kl*@tVnsm625rY&~Pg&KzM zGQgpeKDkPndq{|?WN~24x>q1M`oayuj zlnC_)yt{qy{Ft}mTr9*6sH-wD!d01b)K&CwI{ldB;F5x@ppt^*(2_1SDddQ%407oS z(^gUM>OJSUlXd2*Omi?8QxzHV=4n+XAt*Bdlm>vp1yD|a@-LQRI9YKg&B5j?c&aR0 zRM9{hvuu5N%(S%;1oCd6j+J@qnbbBZZAkY>-W}$cH^MT;*+g;f8iC!@#nQ{Zd6LVe zNT$Oi8xl`P$!*fVA>EfU0o@T}jE7drB%b9FB%bE1q0J9SnR#9YbbG4@bsr@$9vXn^ z6E+L69X<;&W8o@#RcWMVOy9e{H_Wl4gcf2UpxP6dr~*ul0#XzNi3wkSKn}=TKuQ65 z6JK8~JhX&eUkb@9BZK7C15#ER8E6-cq_+^Gpwm~gCe&AB2U2qCacQC267LSw`45DZh-@N z?En=pB{;amx*i*5kbwj-NHYtu+pZR38{o@f;ZeiS zvLzWTZ~NlEg3kX6Zg~yr?;Nx~B+aiqfw%=et;02s6W385?c^hn)W?_s$5Z%0ktlSe zxCw>CFuHl{r*1&@W8NusVj^u-j&9?$%VOD*@^cv$%>PIe@jpb22c8F=!}ZYJ z$xBO_|1fIebT{GEPy##;168j>lf=eNe<$X^Vhtnlr=scEQ2G~0*xBvHljO&|Yf#Hy zs_9@fK6td#czuWYWib3rS=9g`l@D>@+e#g~*TshlcY~hGH!7?vQ2v44ckVd3tnmB5zU4=l?BrHvS`|24C?} z;MtW5^qcwL>NLL@%aF#I73uz&&qu=U|1EXiW7?gXF`qe!v<(+GrExb`E&Jd+MYN*x z%WLIyf8|fi0>dMB+_Ivw}VxT6=9b}G?VwB));_kSgq zJG4_#cphrcm9%TCl=Xp2?zI|RQ_`h#1L{RXrAeENueE9CcCxS6I@^?9g}aZID%o>M zmpa?Q)-2MBk+bn_V}?h#1HOXE#W2GKn%tL*nbBF@0+H0+LLqYdQNnKjG7kurjZ%qU zQICIEFe<+Fs7&B$?4fn0`V0}DM%pe)@x6&|3BRG2)I!44;ehIV*^3yLMZ-w(elbd? z49CeW<+9I5l5;sK!fWa}xn9D5(yiODYWr45Ur%ql+B7w5b#nSDo^p4gHa9lZHGIo5=s-W`q z+f}JnU3HVPv&EOS14Bbf6Qk!g^D4fH``MaWM**iw2yS&7h*g#9A5uT+frn<;Bm3`i zw`g^k2V57qb_E1`=>^2UKZ&7f{uargQ4vRznt#A!$+H~XxxE}xP~c>y=T9wW+xq-h zZ+>)S+P4ltE=_akZ;2D%UhwtjRFl=i6T!_!tuPZ?4f!k{-QQ0_SGrak-!H{Wp$@1l zUrXh(^Yjh3!; zci+V4e6D2|21$yFbo`1Lb%jxf<2#&>d9tR%4{H$|qYYqXU#pz)xM`NYeB76(yu7A1 zR=-C7#!TV1)Jjph@afW2JxU0-KTE%Fp>D5bq3(Xa#`MSc`_+Xyc4=ba6fbX(cSs;> zlqHe5PTi?GkGoUFbz}MZG-CN$3K(s}L0<7rRsJ0KVD3~sj=uVw7 zWep81DdLt35baKN;n%>CLb`-T-3yE5Gcun~W+to3e~oKYFLKXiwoErxj7xXW_0m6} zY?|s$qeB9y=h`l!xt{CbHA6R`1$+p?@}HX+V+AL+L@wFaX6ED zIT?qNgo^t%CXtMoF)d^oKlK^ zId%VgBk0rC?v&Eo=Jd+r0a1eB10r(I2SnIH4~R%b2||cy2|^_3gWGSM9zF8<<%IX@ zjUcKzOFk}!=R1lr})9I0psbD&&uURn_w6k^&uX;)w?fjJ1iR^{!>jy-F_^Hf1 z1h><1R7f8LW2++b6$si*Gw!rA0Yg3}eC2rF?(|b}@Lsm5;6K*4;iFG!$tKg8`3XW4 zfKpjpFd`ryumAVmYZ#&b!DIl{W2S;WzuTN1shbKm1IxR0O$AHT-~0!G`k!jQ2SlF0 z{(9gK4e-Yt_;VLXUm(9cCI}h2LlANfq&ScqK%N5mh`-=IcCfVd{or;LT>aOYK$_wP zOS1z@2#NI5X^7jIVgeUl{cd-f=x%cw1kt(+&_n<#2Z+*5(A@_&I8f!&!c$rQg^HJ- z7D)1bIw@$H3cBdL5scJ`>*UjZBbcJ~fM^9M(>pzS`zc_7)^QKf-QkhS;^GcXH@<1Z~~nB}lXD}>QOdb<#83p%*$!#HB- zd^oS>*~1`WcSgk7zgqH6C`PQGT3j6cJ*8UcC3;uN(E&=ePTWhrf}NJ(Jk9s^R@-L& zVbJtukUzkwx{JbGCEzKZ6c;%eDKnL#%yxaL#@@-8J%YMDj54KRqc4f6n4e9>Z{azM=K3BMuW5 zhu{vHlIN4v(g@2-vw_ls*~HZBf)j(j)-l`pM%{8vw>0Cm%P6l|9^H)-H{s(8N3lWy zk1N7XsPnT@JA2aOsNM0;zen~o{%+_iC79-X&Tk=4w4!U|tgj7!<#0y)YInc57$ z`)%}td;w_FO>BQ;$}58F{z+1K`miAv1QUC^K;Lq*O?=ERRK8_%sO+`BKdYpGXVt)_?T`xqwcF@8~IzG?lcG%B9Cjzb~niPuz(ySK0U)qFpjMpOUU*NuM6V&z*N`TB=ZWfWRVG4;I+!`Ozl z{h`O)!oGYUN$n26^^!HPZhn#uzwv>J!suS(YLHE@s$Hl+>Evxj6nvx^T;dL(SpX-V)P(e@0LGJrNQj%MPjIcpLQEQo* zZ741xF}&c1g1&#bqzd<{eX>5y*GH5`q4}@>vV8yWEN1;rr|7pQEJH~VUNT19o20%C zDf0JL&FF_x0wuMs<(CLgnRnc)q@|!?^tvJqRJ;cjeV{^X4OC=+3Qz7u(jBU>3yHz3 
z2v1fl08aJKj9w9o@<@T`M~Zy>&rZ>JT<+Dl7ZraJM4g_+um*r`u(D(9aI^%e;RUV-XH?YlAKg z*Ig(CcvuZ*`dBv>ndy5dSri{o9w_{Np!f%j`tTc!dI3h2fKjm6}AK{rUk&8 z*Il|m7g5k<`?^aEP!E?-U<44is1)z`$4Y}1RuE;2I50R04E9_GDp|qchah?yJz%gh z=wfl*B>{BFc-_HrZXqB^_D5DUUR^F;gL^S9{ds)ub^B+~UV06&0wA6M2&-#|-p=MU zO$Xl%UCB0-{OIPd2AzVN(?Qg;KW)8^6{s^3zVPLBT*9GZ47tzbxf+ux>2C}?Zv*mu zNtqS$Xdr^vH(_n03ruoXYjT~ihvL~oDgM%tzL@tJJ(d5^GW099G`aNq9DMa%Cdb4*3&t4!6hK~3lU9BKCaY>tf zzGfl}kKojU<|AQDab6G9dkVjMm+t~@+zr{$90=;Q1do$Kfn)1$aqAzabKyC3r;}d~XSQbJv6R17SHfB>gQ=ngHWX2`R^7bjKk9 zG=lOFQWm&4U>e^SJ_gN%7v40GbW0=zY&w&|zzuHDhZwhS{e%YJnj?ej4`6?%QI04$ z&ob1+y)U>;ZG0OOA{#=5K6{P{xgH6?8K7~IwxxYr4>}YBJ)Oh;d@qV&!5%GuguIv_ z6Q_?|q(Ij`j9K{CKSc5YY#0ERK*v%T^Gd~7YR_vcS#nqiG2T>APIDCW``i&YeopN{ z>w4Xk^DPIcpn|5oXrQQb1ZZm7{`Y?RTG>2*V=3@<5gt2U3gPi0;WORfeZChlZ_Lqa zJYS*am-C~C;p6bvXyJLHpdrcMXf?+rcu^UI*WiOGc!pW}ghtFuiW2FRFSvFMLJL@Z1S75^ z_!|{Nc?dVc@}TwlFZ5y1ZvrQ9NK%py<{v-@QW(U}t(f+*1=oM@2M6;hSi44Op4pSE zw%_PI1P=6m(Ez;tjM_Nl0(K5>zjj%=B?^+e8n}BtFNGT;4Xph4e_Pr?oc?QJu*CWw z;{R*;e`DJ3(1(~@AISZ6Msut}VE!)<_)(}i7Q*M>7hJq{OkU(jc_W8r!{8gW zJi%n!upYzLbB`6}(8}T%s&@^?49SVRVmoj>rxb7k-G|~^`*ho&r+lNKP?U$>ZB`lm zrq4E)48d)!^ED9D_aTU<4+C#lbQGoM_A7hh$&ajbk>;9xR`AJu(j2zM@V9txVrcvJ zWpu7c;z+^nv#fKg$PaG1;pa0*y}YNe->+-FeKPZ69o%-#%F=jP9^SAG9-G~edqe)* zuhr{Q|7W`UV>w=(a939?ow}{hCSD`kTkkiuXNqb4n(8^UjfjQ6mzv~h!Ts{|J&z^x zev8if^d{P^AZwmD_c39AHh7P}I%UuDS=gU+wNd5v`pM&QzHuJ%vT+#I>RYG6=v_ok zUCu(j$+29qnh-V5rIf&0-3x(+Ey9+Kuisa#mQEkle}Y@LwybUap{o_=zg?sE4d))c zGtPyL(mzZLEmw4;hJnNDowv2l`Sdjw{y8VqX>(SqtRanGBk0pFVue^Q`Ndv5?4srW z8bD9~w40XnFp6IP*&o^!6XURp#@2?NFXf-z_R{(4p+&L0!0NJ`rC`d$_45L{40CxUG&nkMR`6dx~vBpZK{1Q*yx`2)gAIH5PG=Ip!B!DcJ;^i4W*gX&ehDWi$^V) zdQ0n9lY(M?7wysFTV7kfAu4Wql(oo9*9VL1S;gzv=xGtMTbZNe{roNjcOV&tV{H*< zvmA1zMGE*)?5~VJgDam_Mb65#gZtdlLL6V0L@LbL(KlFxn?=m;DWqOjh(GRLd5|Hj zuU2-E$iik7n)$)5j)m`QOJ<{eBulB8wAfCbUe}Ly*$b2`$LjG-xTTasq>#CGcD&G| zLA$IyoriK-&#WySbgFFWc-M}N%9ihGyDiH#9M4?M3m^TTJKIcyIPzVl30kW43C?62mDe?}EBgyrFzy`y_M zj|?)oYpg5vjoIs?ff4DC@^#tHZuDWky{dIv|}pYaJvE^fdNiC9X!o7j?OjG+WL zU;G(gkmTM9n2?C2!3)5a{Jc z5$6_O9G2AE2a0wdNoW~mZsRRrN%?#vVPurS!(+NB75R~bl~D#C&-A8L`UGpd#Itw> zEGDf;RvigE3YIVml3%z%9#~9oBtjpMNZkrbsl;cJ#FiTUNW#z95k6TQaKZymS43)eaLtXn%+E$6MxR*5TxFfMiNcJ~aHEk8&4fhWdDhhaUFwqb>8XCLlzANv zg9SB;T>nMh`avzLL!rFwjthf^TrV%rE&bAfl}Kat*>Y{P?hj-A_wPQN7?h&CGKata zNZ#;knrG#FA7gBP^}LFNNY?Y1zKY15v;L(bQ*Ug^C@q<^PIib!lRcw;(5ljG{%oz+ zXW!2kmN#oI*!Ko$Xup@7?-k|>%p8{@-=2`R98}q-3e{&1FPCW>jQGncRljG@(1gEg zXjxOTS)A9nX|O(8!!U^LTV1F)pIa{&FQIZi;#HjC#`U(R#cPZ2s+ONu)74cng(6+p z5WlCa^waN1^y`8l<~ks)LaQ^1^jJ?`(i8hj5>GnEVz-URlu4h~XY4nt#1m zN69RQr--8@pMYaVO69A3gEQnjdc5;0*fCvRWsdVtH``KC5$9mSYX6uVZc9mws>(=w zG54sZnb$jxt`8@p9~rq^4ZmgTR5m7^YKZvGERav3WCK4R2iS~S)M`K1@hmK|`8u(- z_f&-dR$yV-Dy1@l#a6J#tf{&{B_^k1-iqj8(tn&`0jaUxgG@}wA(NPC zUy}GMSeynL@>4-0P1E`ko05V5W|6<0#fO`mea){?j7%vJ1-x!y40=T*zkmJ)ZDA|t=M*)pE z-eQyih$gos$^Pq?fPz9g&JrZTpn2xjtRN*c4MM$9V}kEpX!KpdqY-E7qFGPoS<01^EKwJvt#oQ3No!1G;O%)UgPC zDkrfxfH=T$Op{6Rjo2w&fS~D|t&c&WWyb4J)D@z62&x}WYCVkrv!2wt?Mj|)h8bP( zg(}|`@_jt!vYIA1FlqEY-`UX6>&}s3YPgQpNeX?D(&>_j+LxaBxSjp#WaaPQKG)ON z>sHW?JpPV*8V=nVe9Xx+&51aEF&X~p^zklk>byfi!FStu9s4Wk%*_Fq%qdfsrnxcE zWUyqW;mEBxqtL_A@CfF#SM!?e72=bD3f?VB);$!X)#EUIU0bL9$XRG<#GqZ}ayv?A zZ;YD9d&Hr?RJ9ND+))Cq=6^Z( za%TorfwWnBBPj>wdXz6vXpOyl2r@Qwyuq)q2HGom@jwrn%S+iXsqe4hAj7n&p1kLKjgO< zrcD~0Ib3beBH*7>$M7>#GGa8Gr^dy8jLdw(J%X9=Y92OfQ*a549B=3)YUYBzw8w~vcLE05R zpSPwMOMF(|szXW_1wvmrRisn=@!JM|VX&C;!oKb4gRHWKFEn{}B62C@v^ zQ_7_^??VtZF(&t{m7v0IUZ#3BKHb}5Ujn~t@SY@Do1xrCMa7nS%1)l~DLab^ed?;2 zDO@?7xng+d>AE&?#9xNYPp7ucYP9g@x7Ss>JpPhULv>!d%_~oUv=YMm5mHO(fDj9SUq%305kNBlrya>61Ov<# 
za|)O&2Qc6+DFjtK#K04c(2xb-1_1!T1SnJl05}6PIVU?I_G_-IjQEd_p=_i780kf1 z2wZ$p$sm}a4-7vM1?d95c?CLafnpL0dI!vG zqd$Nm02%rMZ3TdMwSjgKP_Ccogxr^H@zTHp8UQ`~>wqpp0o#y6^9o>aCJN3(6QqWQ zm_T^sMHiuYU_;g>@T5c$4LyxPL4Cjj4mS`6faMMXJwQ6ICdGk)mjIG_c>sw4OPsupgd{<<7dV5o(TM%~H#sqo`88Gj$49RL>4)vHFWiR^2IL@! z+!zzo9VQBh$LAAc1o*uIKQD?wll2pG)EEfS4^hXG%+21tg8!(*3;@A$IXvE4E3}%j z-Ag}hb@@*`CgTdR6M1=DBs1{EajI!x*Je$qzBU`|TAkNCejPVm^J8#{_v~xaUyJ&J zl9oZ8ez$((xy?H53fc-?ErIFBnBvTqs;U{C^Sn>=CIhsF_s?3~5VL9x#f=&}W5@wF zT{wn5=5YOidz2?freS_ZqoYd1!6@zUFC$LwLCvPptR~er_tj?e{p-9wM|*B27nv2q zhsWps{d<-Mb;<7#3iw*s)_666Y1J6Nb6L;Gav5%2J~pvuYLwwhD;L^+U`LF>WnO(t zUri#y&Rt@`@Kj>nfA{Crz=R=fm`nYP;P{-4bpY2}#nhIQR6N79FD=`f0gZM4Mzz!w zknarNzD#E?fs743B$U-;y5$TlI=^QY-twMN4?!H&h#fUH&5D3CA70rpYdZGcyNfVY z#LLgWWn?L5Ch2eTPKNbBmFq^+;2fV&53*FHti`*&tZsL{e7^%H%B5=?9l~B_W9>eZ z>Y_Ga-Z#^dnPRM559d3?6V5pllON6;tJcQ)s#olN zro>l9Urmp)olO<&Y`#eo=N%JgLB!dwh6-Mm$iL+0vA05s4E=51lH!$yaUQ6_$dt)D zszmsBXUlsafvN}yLV~H51>lQ5AkD$fX!5oWh~29Ls&@ycH{i4RK`zXER{0kX50Z0m z(z`@J?niu~-3+nak1+H0fXgI&V5FM=cfKVBPBkI^1LzE#Vz3noz>1U*Y?=!L(SwF( z4&VnIoZpo|J_C|#W)Z>$WOoH*u?LuFO#uZ*p-{OQIkaa>3BmsY{wf4CBo7d$cNQVt z=?*9joZXZYpd(;M6QBXA13VT85C_2Q!=D6#lE46odK6?07=&dO%us+S>|j6v;4OI| z&w$tEw&YOOA2>q*m~Zv70HOpurwlBN0*Z#q0|)Hkrg3jnhMMjZhoxB%!9us#jA*EQVJRn}>q3ffY{yQIgE7{o=qN;y@@0fc6~_Gc|yB>D>YCE!&2EM?tl~ zI568P0n7--yzRW6W&Hp%)&KDsR0Br7O8bxa^sb37;0Xv6_|bYuYBg`>ZHt1S(@?rs zhsxU+rz^P3vLhPq44OIh;U{Z)I8vc8|@{7J= z5yu=Ur0^F?K(SlKs69uS93H4;Bqn8MYEh$!@d=ykpn4~B@JhvFxMetEz*|tXyoT}l zv2512Z|-EdIJS?+Ma0`i8-8HFXM^KOYe7db=fmVnp(!-IP?L+%5w@2kXvqY#xer2o<#?Y$lDru(a zOm4nSDilnkt4ec>5#4CMN^tM{&M!;o@L&&*%FZoV=Ycg*gwm^aRr}}iDKy2T3{kE! znB!>P$B?n}G`7k2oz?meHS-3`+p+q6LkByz$JWK^Lrya9)n+x6np5yeTKbfsieIV^ zMc=H^GiiKw^ZcEE5!qBJBVSOfbopZDH;xWp+{L-)!&kVA^w{sS{C|u`x*+_2tVO!a z`~NtTn1M5z-H@UE7>Y}BaD(}wL{FEpk0>l4{C@b83c~-Iek24}C`Rel?AP?eZSWoJ2=@+#-?8wtpy$ zMCgVL{f)T063f^mLRd1!H{xg{mTyrxh-2h=#s1{U zL?M)@fo^)K)YJW!eo}~wlkGAXTxV@-L+n7ZGT(on!}2CEx{okco=^OYHFFr$#9o*f z36vsM#xC}Rj%fPom*Hl*CdIt%CsR`e88oV;bf`(0lv8N3N|6*=3!4F0IZZ z32FxX^>AfhTPy!9dnha2&~D|4;T^-mJ-29=mM!BvhZfuHl`E8G|5?nyd;1t3je3!o z2De+mBCw6r?Tc)4wv&)NScBh_tTN2KZ+d>h&7+>1$g(ocyBQlm0L&RV0=u!QN*Xo0%_1`)D)rk=`p(SIr=ip)lhvuo8-)A28Azgiperw@+ z%hQ@$j`veM`pFs*SrjPL`^&FRm;XXevpMTYO=skAikNii*lGs#)k@Y0WokZxOJeHg zA(CD!e=$%FUY{i})4dc)UnwRGsZs6Giz-5l8%XrGZEb1CQ4-;Fjxy^rD0LE_` z>i`CqA6M`mOQYWff;oTV!D1*4fKg$^#b#U0H)#7MPTd`Yfb*r%HeF>0Roz{Rbxew# z{N{Xp-Q99RvLBN*lz*20E???*@!Z!h z05>Tyo5N+r768f9X@KSuNU{g^Kbv4Qv~Rr+lA5C2iScT1W2!SBzmj(K@@AwsZ0VIk z^Mk@2_p&Ek10H}a50!~yY;Bm#M+V`z?4Us@hcoNk@lB?>`%%vPnnc4a;WMe%I8~nB zwEU>QlPG-z%tlMk&rieg-EJii5h1zMC!dI6}}-huh+dh?l6Kw_oibOR`dN*w&w7Deee z3?ylnnOJQAGkGOn^l$bXfp)H9+m+u}a`$IfE6;#>g9RX}X9Yo*8YQ8OgdJrPZU9e- z%;5{mg3tCN=H^yt{8?wn8aapO&Nq~X4C88|zMoYK{o3o2BYS=OmuX->^>c-f6&Pg^&1b{_gn2{ zY6Rdg)Tc3&zcR35b=P_bTy*xO?YEG~XP!G%N*uSNE29m$_F|>c#RA%Oxf1VTZul5}d|~zwUGDg%z|{Fg zLh;={%hDuk&ndN|y*Arbg`rBB1}P_%soF_`XXVpXGYf{L9&~oO6IsPaiDQoKg40cV zu$$O$ zAY$HONAt1fwn6=Q_ePpPfuL0D72nSft(WL_*Q@FLOYlZ%>pfSWQ{y#<(&#>d>*t$$ z^=0kz$Yxff`sh zs@m*zno4Quv*1yVo|+Ukd{Ty6^4*$*v9_rFW2uzOglgI;AE)$0dS!!FlWT;?MiqA2 z@UO$%EBA79a%(!NT0hyHL1J!??Ye$%?H_B^+jqntw)_aD22@!sK4QCg4I(ei^>eeQ z9GqC6Ok8g5#bA5!iyV4RnwXTr7zp`|vY?ZUZHN~O@^XS6m59EgO^JZK1Ldkq$67{y zz4=92sq1uZ1JS|^bzZFA6XnVhz~vtqw?ib$0gBFFQ_=zPU&Jh-l|g&kTcFZ3918l%%(#Yzx7 z9GCsECVZE?11CGgQWR2w6AGT?ONaNxvozCqWMixQ!;u~7LbD-zFJ`A7a|Q_(GUOGS ziXQq=d?J~-u&8E~+rYmkJeZI@v@iv<0y24SY=|Rzm=#(9tvoj_WUup9lK>+u3ax-W zrifFXn*j0yBdi9kKtP`R5rh{rY#gmXT%Ma4@&hyM46Q&`o*M$;#R?;Pnna6UpcaF> zsiewH4%x#BQ$R1!ljo*{IAVucp%<9Sb5lX~u){*p3#{b1X(Yyr-pO;*7OLLh+F~j! 
z1;2X6Y(R`xjHz%K{OUQgK|FG%lFr{A5<{Jo);xpt9?v z+ie1-3gjn?%X$XrIo!a+o^RHi%Y9;tc>vnLkNx0t9Tf_ujuLj>b%M_}=Fz@Ku68Im ztJX3dNGI|7yPY}Dd2B^Tq5cj<&Pcmo91;?%k;`0fe#ouYv9ai zq4L=YjDo!x=vxoHKAme@WndzSi0}VwdzT>}V8ARZuyG0WcU(lft-u9!C?+gK?ekx*?5rb#qERx- zOm>qS0e(5!@t(WxoH8SYeI^&r!Wr%UZQOs%k%{+2xNkon$?h{be`d#MH@p$>F^50i zbIZNt`ABk~$=NeiM!Wuvd*2*}c+U;@ch5(n5vL0}%VOs@hjh@vo5Me_EHnDDz4wx` z-D89GuZd{S8=C8_f6bTl)xOK&i1*xd|MGk!zHjdInFOO6Y*LCidXoHdWAZSGLy{KS)YR z%t=a32@G7>Aeo1q`GJo~L?rlNC-^=AZOgFtJw0z3tw8p6?Ad(rU&?sVVAOi_a|@|F zdp7#_G{DW~oAb=4YF{nR*8fRRQd<{_G10#(#iUUV49Ma zksY!bSgi8NduW<2uGT-!dak%Xz0CrCHA^&&-;?wkcFwKO&?n(_-^y}uZ>>qq19+-0 z2bsQKEPI!8p%Ufw^QmAL7k|sGYLS^4R7WBJpHDLJ=@b|Fdvr2_h@y1|#}wn|+ObK|&O{s;v)Pi2 zM?L!A&^hLsgeu%UwblFnig7SX%%lV?YUyKOB!+>8)k8#kp#QHZ2Rj=F2iTRdXs++! z;i-*32y%JAhVR1f-BPL(e+s|6d30A)$JuAfl&t z?uO1G&c-1@LiZ~4{3kkxB7VwCh^|MlrxYKqm)kdx`&h~F!nfNiXX#>lWhM09n7uY_7Zqy4SvzT zfNe#=O(>%B`9^h`56kcURbhsIKa5*AuRUN-QDrY`yi|D+yQp|%Mpl8{826-R%Al}P z7XDIFfOmc#?M_Lq`FdABQkCnYiaS-zt0WAIj3FP}d>j?D{0Jh9K;C;cSsJ!LDP~y~ zNIqGF4ncP*_BuIc^`SnHieN?!0x1_M_{I7F5iu(F&NV|9xJLPB>3sJi8A1`WmVL+tkp7+@EK_oYOtj=Jed` zam?(Yc}rdU!dUxCm2E9;OTJ@n;>|?_>-CxAbhq6%l9qf)#-tl_`GUjdLoxFF>YWA| zU|c5>K>5`ke(q^J@H<`FYDo8h&DNhaOEwsOl3m!!k)CdlVuj5NUCkYd)iW(zMtQ{` zQ}auXO-p-QW*B@$pF$nhMqr+%>wfY1$ok?ga)|@tx^~%e{dKzi@>%h?_enR~Y<=&m zUfPmNfMvfH7TZp%8p(_nma8^voiryg7SgKWi1JJ=4OtepkT)6QtN!X zuZ3<^#EeeL%^ZrY^g9@j780^y%79H0frde<#EgNjBoufyF5Dq8wa7_i{h}gEH(}zNF=7xnVW7hSwvzIUWAfWTjD_XCGE;S zSPubw_xOav9;*RIIR&G>15Dm?DA-%lwJ)gj5>&b%s<%+#)bkAhX9wOajL%$|$Tb}& z#3E30ag@w~L%LTT`JWDoQD?5N$tHA)uazIRM?UQ?_>CP%=JVZBFt0_#IX~F6th9?~ z9yN~gPTUjE31n{jSjaB@ZRXUi?M)d=qzo%CN(Ly)J{0~40)g4f~ zcsqsbe>QsQUL`lyb2g7^os!Y5HoKy)w3#zImY!L@EZgf4`TGRPB=>`Hw(-yU%bg0; ziJIYDYgel6ZL=vKqAcj}$?WH~y6Oe5yIX=05mhJC5!8A9$d_Z8w+<=SskOff%ZKP& zM&~xG2GU9lWs}AHZV4wFa9N{dsSm=?@ayh|wUTSKu)@sh^ zjaeXG=(e2409;P4RZy&`nF4Ar%<6x@iFz|i8bbwsd^Vkmy=t^0It>6 z<^7{;=BCr=!iP*P_nJ~TPA6Vm&3rIP*zVHg7I>7=&suj5$aVmXe85P9D1sbouID zeyTZ$A1O{aui>UH_*aQ8>F)D$eEsY~!2SFyc$9G_#SAHI1_5sPSq4n>B{_qGQrRfKomb=q7k4(IYR(qbW z%JIXT5P#5}IT#1DuRQ)Ky42ENak>9b^Els3g>IFpi0jdPe41NBi!Ap&gY!S5%9L&u zXck$cf&Lc?m#0J)N4iIgK#s%g0mI-)?70?O5>!`3fX20^hiXUN6X94G`es_Sw8UY+ zkZOh{R^*5OKdu6sN&ozcKB#Dc}3o&_Vdoc zQh_XLr^fXGL+}p;FL~8^t3{g@#Em0mW4hWrQ2gdABb9VSkU2glfd@y1JsuWiGoHeS zwhxuo7UVvVh^)mI%zhrYA>4!6OQhN#Yo%YM0txT{f^Sb?HsAOezY1I@M06~glYee5 zV~&A(vOf_Q?<*I8al=@ySU0&xU`SR$HJrO z#MHwG%K-`>$SjUONtjEV37qCrr-XQX z?z_ypByhk~Sf*BA4^EI&Ns_YEnd1xPAikUZxFu8J2>;X|N3jOSZ4PR7ir(bLIm1es0DDJ?2dcJaue^*C7e7nKx4t z*&pL&+On5nE4a|8dxRudFc(qb-S%LTJqmlYfzE@Y5O*5Pj>$6QE1wmTVArd##%Gh2 zSVW80{EmG9N1-7k!G(E~4i6t!p*JMK!+Qh#%6nZ5RqeGNd2hcD$rGWAX|C;nJq6@= zIM_t|Z4h-8DF<{1fFKlLAR}W`LCM{m{Vl5o`b4K;=ln-mqHhgen|e47_6w-%ySM>5 z|HZ^yr=9_=21uA8_{ek|oxjp?uql4H-myID{AFH83_i1Q8ThN~vyI2{bsgU|>~*2s z72?j?_Nk+3!+k#ThjD>k&zWuYOu7w&MAvg74CUvJ8L*{<&q4ap`7;~p;P#{Lna%s- zQ{WLR%3|K$r}gJLwkNmE@#B2L+api0fYlKy_jQ<8%WqP~N|rq8+_e3F4w6rDbx$&- z`MVto&#gYZsj@?C9sdY~9p=THHmLxvqx3Y%pNW{k^$Us$Z`Rzx65saTxPqa`XuD4P z5-0-DB5EBwhh=eydx%Mk>bJ`bZnium>r4f667i$E=D3~R1`;wEn$bI~I!Z3Dzu)p3 zA`^WVK0kjNQ>-0tWMG)>gzw^8ntY9_N+|uJ99wg;yt2)#*{#}~KFsGR0ne>{@Hezz z5PryISb5_Zd8EH{aFLMTGd_QK@Vqg!STom?*BkBV?i{_|KOm{R6&WK^(yO33T&`2z zc746>Z@7#!Guu9RFpc9|_KvR|wfiQ=*JkL6~$F0uCq-d;B<J+;|c1wVe1R)JVCQWgj z2^qM~jC?qk_9Lg7J0@qM89m-{PGcDF)V_KEY#-A$4a(-r*X@xqDcRBF*f(?hd`paL8nv12mFYOPG#SrKE){R&hocTw6bzLM9QzUk9tnNy! 
z?%9o~MmX7%AOpskz&T&6pF_8t>=SS9Rc~pt*kxg~P{r(S1PMquc3q%S&{r)BZTljA!%Iao0lPtxMji;8 z+Hx83s*5p(6b)nEbXhm9VzS4GM2*=u*#>`1&%}-@47z3OG?leV>{bN;7v-3vXN?IT zW|}{nb4-Z1T^EyaPKZFT%fbdO9w{mEz7>|i)CC8>FALWF5F7z%jG^G@)dw5LtQI*K z)Rbb)`UA>7ys7?M?qVJWfayrkN|YT(?8`v`q6Y-|Qcxj~xYm0Eb=U)X^ndYD$icBi z&HsHj#LXF{@j9m8=5zi-JX)9h z9xpA+i4mjtHrU@rF}e|NU44l1caIx5w=e+D|2_C9@c9x37XjFwiHaWhAYkS|yu&cS z9S5YO#Ka_-pr3>z;BVdlhX2LlN>Ki&=9N!!b(8LT4LeSm-@Y9NJfg(k<1oYgY%$lX zX)7qux5huh)F1d_5STQhX@yv@0bJ~o&mAELat@bPveXS`P# z_4TeylROPvlC}FK?!GglF`dC<>fL&slEdiZn7jx7y4f%wvXzHQPcjPp&f}_6BD~c~ zTvAqjsb3N}*)RIAi+>kmI4j;!ExRA0SVAZr`rsaNKG+czmDdC`B725R!r@gg9-q^f zrxvXi#-|=)G@ixb&($oRrvrGx8hh()E3|fLlRd6!UG!O5MU0FCGyOJY;WPS0j6t*P zmnUwSG#Z)wmnTxfA}PZ3BsP+e%n?JOL2qW^yu=#Dh+)L)t1HY-G-hKotp6)WAuohU z@)Hy9%PM*rS|`Qx-;bH8yVNSsLnVEKW#onMNPgnsaiVu}GE-w?l?Pish8$pozLo#^ z2(RSVEAoY4OM13n#CY3J$&kU8%#4;S5Up;^_u$4G-{90m%1gg@Z!+Mw2p!j(`iD{W zbTMIZ9w&qCq6)ogAQ3aBtqElm8ct4!Iu5u`7Ew(d#*sH71>C^m1Jx-1CAkt(VEtwy zka|w8k{t&Pb#os-bPqHBt_$#M17IB5Wm5k9?n0t|!%3-o&ao})ZF{}9=0at3<#ce5 zi`jXM7j{Up!~%xIu|U%ed+bz!{nQSZk$V8I=fZ1B{<+{hDuy3{Voa)0D*WpoudVNl zZ&U+BRqlM{1C&($&rkLcrp(iM*{k~FF0;M+QH_C-fhU<5Crs0@uF0WZm_qC+%ykVm zw~jCe0`F_zTyenbBu;I5!k0TNO>9LzM?3$x9Q1wIU+BkyU(JO0N-ra~2Un7oe(Exy zYVY$?(nNoJxPm>3jc5u0`XGf5*J-EunI+H}K+EjL#{{shK%S;~$tf!{6+y!d<+65@ z4CN*s)0PLhbQsF@@-kVcl1P}mX&Xe0?#x|UY@8$RnYLm6LpLxb?xncseOR)_U^eZg zTyxJ-lJI}o3!{wLDgK3{06uZ#qvrQOwNMxEAA4dQF}PuxvpapWJN^H0C4!baFQ%zd z^UNCHygWd6%w1cw!MDR+Qns@u>3$R7g%kduvxmF}`tX_1y+ zWMMucgZL0cwDzd^Jc?+=>CVMmKSFoM4QYx#K<>P(IWBA+=!ie!M#_SvijX@aXW`#k zhl1jdgdjed4~$C=%tzu#nU;+h@ZSWzhEHA~(^BA7AO$)IY!LV$h(M5npnMq=^=N}A zGEVT}&ur?p-#)N;Fg%_KAf=IVUsRN+kpTGp#)ud7j@;tD4Zr1|=9!$Ez|z0rHSQBn z0e{EvtIoFVmvbb-!}p%q<_`wvr72N+x)1+r59R+~_fQIuLkClNpd|`DdN;hhSz~nC zy#YkIC9MbC_Y@NKs34e3bj|gerr3JEhdd*Rt@wg;*(s)C<>AWF| zHfhv1UBurihsiMGXC)!&Ao6D=QxZ+ARw<<;6ooY(-CN=QQhyg2Vt2rtHt^)>i8ik) z*xF`|`(hEqy*2KbKG++6KyTU)_9o_jss^+4+&# zcl40eJ)cj#8&<`fh=W#uY5ro5v$p#yuK5dAZA*5GRRS^xOtxK2qa-!9rX(e)&Q$hs zf-3gJH*Oq}qM!KGi|EYNSr1n=$aZl-isFBiKP}p(f7JZ`P_mCxsEkvBx(z}}9slva zfP4`>byy_(9E8)cI91g-z#rUEmQKhSJc8hgza&op^ za-#&bLTdHWq9lXvnSXrX?C;z;B0p&Is(%h3dPvep%C0W3)>EG!p7|#N%BGtH7dh#v zdkmlwid^;7{WL(S++>aM_Z?rB!3n@;_v@l03UCc_oWP1aX5cQr>J=yd4PC=x@I>z6>65T|O1WxqdjgRxu>a}fCBr)M_?0Y!@1Oh3+laG|H!sfx zCc+1gOr8F=F7c(xd}fmFaN8YResH2k=D~DV;HPii%6{;h?s&)bFNu-9?z8{$kbx5l%j zAGO4z-9&?tw@3yqJLhru=m*Il>f1V$^-3wdoL7qx(ERs3Zorh_RNu3gMp>0PvR?7G z!xGjhwcr_th=yL?&$myvzftm2Ot6K#%SYa)2!Avjw&fC^8REcxHi-27pnF@+eVER5 z8~bp0%X+aG4||+1=y5v0`Z~$h=~ShiYH}741)eZ#>+!8Sw!?jZR5xP0_(*@+nAJ*N zi728Uom7UIwO>tl^He7a%r*SaaL2>+3_ji`vkqGl(uO z>zTX~Q^X_7q9ab*;R#?}|?UC|2$|P<)%5amrlDd|@n8Qgw!GV4bY2!>F z9EbazXWy5R!j|!$bj@gn7$nWDK-o*I38fX)=f6m1rXpJpZtdnAnuKSHHj-cQtU+7`{~Yp|d?S<;UI z$T!*zFis7?eW?~odPW}}KOklK;dj*fsOGm-ms}OoGgn|0dfj8QYx+v(C9`Am&gIMG zC&&>jvunAac@N7KsdDLp>aB29T8ew)pdVXY>F*wzO{Hhbf__cwA2ZhFT*d@I9Yvk%8g*TxoSjpy+sl6yLPI1k%#4y-kLtxM&zOw(X8NYTK6TNoa89cSDm>b* zXPG{P4O+o}#%3K*ZR@xB|GRDP8aZZX_YeOJ-O5dn?7nso4)={w--eKQ4$xbb8*W4P zg?%!P5{6l-oKq#c5d=2Gr|i+gEW{0i^y?FkGZouLcs(PKrZ&XxpD;2NpHH!RMu?RU zv$!CVB)fmT&QuKe2e!mh_0z9s31liRPJ(P(0nQ`Qc&xFd?L_95jpt-aJ{BsWM_aJI zB}5`!6k7b^D~jL*lDZu?%_|f6vQFZ14KqtNvm4VT`VuYKUKwv~gQzIm?J=8YqywWQs&@kC zSm*dYvb`D8bnUHCa_t8(2;x!OHwj?BYP9|B{b>Kokh!Zg6w@|<(^tA#96R!Ug2TaF zFMqfqhvE~!@fcwI08_nER~L*_4~B^Xf){6ZP^G4V|I*m0;zj>TJmmHQcei_#QbyjE zvadPL8{-BCy_aU{{;vH$s0h)MaelLcc>d<{`|HSnR6*VrAe~P16_PK|=Bv0*)zer0 zcqXIRZRr76E#l#D$LgQD0DSLZtHI(+$u*v#>%vJBNy15w0BFsrj)qRA zDFG6O8~(-{Wn^q`Tr>upC*46kC#sP^Om#L?W+WFjvTh7x)e3Zm#eLM7)bJW@ES+k< 
zB$m3aVzOHHDM@Pq>qKv3A{q6U@C&w?9#cuep2+>VPv$cLf1}y@%7Q8<^^aYi=3gzX zJ#Sx6*%%%p*qeimxQPZFqu1*$BjS4F+)5=c*i@5S$0th+8>F&0ct_raA8aqM=}5T( zn--p%I>*ln&yK|!WloCcuWWQYk(J6L#FgE7^7g)iJ)XozZ_*@$v$rH*~2uZ_W}Ov$y~$s4f$_o6fghv?~${VRS$l}}gM zwxT;vK}X@1_F7-%lS+oID7OhXM8zs>7d2@A?=d*^a21d{s*k}jd}S+Y|Aktv$p`wyo{mX6E~$01W_v|>Gwg| zUF0Qti_}BBx(;owaE<183SBkumW3sK7p4~RfBc5|tw8VFO1!ObUJ?txp_+_xTCHL*G=5cjE9tXT zLR4|d%iuP1EooN?Td%0>?8JNx#9uGay3E@+*Uj5HwH-K=e;ikr7^R}ViEN!~vc+!v zTxuw*7We;Es?25Y2{_s_O!T& z^!y}t$MotX%TYo_;q}>Vw12UfMS7Ii=SX?sXO&fXZN~z>(selpPEXZcF-#A<3%mg%o5b4hAGi){ zh))EaG^ua0#MXw58eXjF7~=+RZM=M+0!_HNN~L4= zjM)5ml&JXxyWz&pS=}?-Oci^{jxKk8Aj{k_&w^R=g4#k^jE+QDC1Jmjz&$~FmexCc; zG&-+*F&}`|?9GzCPNq^S;x=sTcw5eY;r@5$Kbup3E zoGY7R?VE1|`SW!NW&!ua$QyR$5~B(|8&m6K)(O^ficH1^^+cXgUgeSjaUNm*Ckd<* zkZg)f55l5;(E)ztl5PwhVIptM#4?Kzip=?z;{Hu2vvLVe0MSd2#7JKr(7g{@<)TWD zD2(aW7Z4^jYVKDGNM)Vi%>1a(2HJ@LB2?52ciu;Z=V`1HHK1Chr)J`A9_Yxdr?Kj5 zfU?u`Mg0_P*;3@A#r+hHZQn{`O&*!~N5ms+OR1KqE?)5Ei&~;XGpIpc zsb`b*-<_j@ci#Kov@YNptzCG82ecS>Vkj~d{=4r#;C-87gUolytojj#QCeNjdNw~C zQdz-7$S;r86U*?SipfR&2Gg9%B{Mg)cUWO6Pm^Av7Z}DA8Ohr}hO}UZd7~FVVzr&lUmDBH&9x0Mi}3UEAf<6W&+2sQLa7y@Y@@6-G;%A0PEXy*mdCTZ(8@dYN1e> zb$hC(p~~y_=U9>2pntaC$?IO%96;n9S*X3~^izx8Sz6_%HhrZWn=w~h13#SjfLwJU zxVlrpyQW)-dA9#$8ufr$g$x(*avYA0y#I9gz>YGgVIt} zQ{#7B+GKF<6CMo6kKZrTQbH0Z(eXZm9lV`!X(^!${AJkErh1RO>xyU(?A0Y_$CCWO zk=SoR!XJxd8H6+N^N@gA zOrVxLqWm|Efs(iC>BMM){msnDv}5+yg!8_`dVGSb2fjP_@PXbBKqU+d+5l*(*?eCE zG=8DXif`!RNuu+qj)7@3(2sU`El>R zQ9enIn{cz^HA*x76$IR98{FKHE%z4&{QI-bl=bFsde-S$rS$QB!+vs}t+gD3e_z!o z20ZHFdBb>>Me0iI5`&w~%n{%(dlgMfNe-X=WxK!pAQ_3w%ZIeWHokUap5OJhq$_bS zY7xfCh}H(Jr@J@yqU>(wx~sDQYX4^?M^)9XWxp=7rC%Q3ZFgUr#T(4F?)UfQyF6T- z^jfGI!CNk8rQFUcpeC-o8idNQ^f z(!n2(bCfUM-jn!4)y#!nwtZ^k@$L&vDQ(yGUcvUi!<-XS;c=z7rQXl!k_;0VX(w*p z8w2SRU6=tQDq&Bw;ww(J=Dy>M6zYaI<~aLr7Ze^Ye>{4R6!eJmHom;Nh@-<{h|L2I zE?&l~GSu5FOSGRKgzQ?}#bty0(D>(y$5xjzFt$F%Ty9?IOf_%dcb3 z!wLI|&f*fo?j|3xSgefdG7Et`XI6^0yGlAa9ezV5mp7-0(J$h!j`dMR5TAuDl4W#< zy9sOXP}5q=A4;N z`tSi*Y>`P}P|a#k#83SdjkL_AN z?Mw&kjy2Joni>JF1_*0mnu?OS6P5$#kha1nO)^p?9MTv4=1ILANr~DDLZ(G``{lZh zWwPz_rgtcQ>oS#k^Lhg+FLfT%r}l=F3?FnOHvyTIWXMqY76+N%ao|8^=YG40=g4i4 zEF$MvQc^i10ADVCAiY>JBVadxF%UWrYo-zgAW=lwf~*)>DO7w)cl!t0PB1;m41hzRjT{)}kNPd7!fgI=hIcw>5mY zWsVmgwK^&WU;Lo`PhN1F*^uEsl~YMIL(C+hTVZD0WWm45pwo_*EO>XG5kMLxbbI#S zpo()cbuQJFb4>9{C zW5E9d9TdHRm6(kL;U|{v0TKNbK~^UeTGzPJr!j)#%r%pAv}-K?>d8y|6%Yi zTcb;^3!<9;@vxG?NBrzPgw=yH0#l-ql+~I|DM*+9?W=_JXU{fbgu3e!sl`52LkxYD zW2OOB>eH7U;x(%rV&2oZ^WUA-_jYc2Oc&2Wen5Vyi*CP!^!f^r^>M1;oyM71DP*K4 zxV;U_*jZLnTcl#o+A}BBI&O{J7^NrUe6m--9Li zyLBwf?w>{2B0jCcQ3tQWkq{A!4;Lj%Bd!c?Zb?G+j+lZA2|CL)wjZ(7OO7$muth4Y zutkJ&*dh`;Y!Mob*E}=f`41MhNC-BUiiR!j8-y*kg7(}nY_S})4#w9?2be6^uP9lr zHAq=}f};|ojgUg?zuv3#I-g75RRK7o(kNq#pj@ znnv;Pn(07$&3eIk6`O#R!%)}@AY!@HEXKoEo9YsAGlS^hkbb}%fiE zFuy-a)_Rl&z~=qY0kFGY*mw%(-GpsjOHT92xSVXmM+05uE@V1`R#=_R1xysqALLGs z5CT6Ku6s=tzu^!TRNkF$MSTLGtpq#xJ{w&Q0^tGZw9!m=jJ=x!R0Y9pgczhZ3KY!A zg33ds{Zxf$Ndb}s^h_6ovbTX7gLAM3cRieKG{*V|PoGKPZhjjuD0`UUeJIHMId1)U zT`qfN_i(Y9B0a~AO_!-w?gDw4ATwN>eV zi%bE}IGQ#IU#WLsagdQ9mESYGbDf8I+j`A5`2aJ)(gSiYIOyZ%!4;merZ zqy$>|_5PaO(6iK9pre<4!n3zx?+@_Mekp;FyU@@P<}A(boj-sJ=fmEm-U6+|u(J(I zl%`uRwYBXE+QNAsB0oXuDaYTGl$5ibd^qek!zlFF&I~B`co59j^yHOssqr{BabMc9 z@jCY$`xD-Y`?(DVrLcS{IiHT;#8=&R9j8`FX(PA$CF}5)edQ;@7~W~b(e4kOUO{9S z47NDUPklR1O6oECh0m<7(c!@N8^_Ij9_#1hC#(N1NOqoyV#MG@`t4K`%VTuNsx@H^ za`xK>hiMk@XZLSe?w3f?I?u=pM8CEf*Zh=@_b4Jn6AM2bpG<&~-CRJe$tEH7!yM(p z_0M6HrrE(q!!&^8-#$|};Asi?Wk1LX1JJ8`&EkwC8;$as%ay+D+DlkP7GgzClA~Ps z06;&NL+=377Bu1M2*_CqC;Mz3%vKJ-vvySQuo^53Ky}EXHy#5M$UEfh36yeO4mr-` 
z%aO*|iqT@y@gw?TTNM_$ElE^NSqZ73OIj7>gdjGb3X=4Ul7yo&yx)!rY_H$P*F$H@^xX5x zU)QuK**>On@^0puqD?leEHQpENmdnI|IzTf^)r@v&wPR_z)@h@S|Z|d8Twt)-jlb@ zTkuYmi@(=7p0;*BNxaQB)^y3~)@@`yH#fv_runpi&*%Q9YiJ&Wt9I7d2F`^<+Q>f* zcWK1gK;7!w&2YGh7H0K$i(4zM{)?8b@^??2Z3%uvNVlQBH8C2Hv47`iFyl*e$t`=O z)0L|q&9NxS5#1#6qcu#r@LoCR<3Ei*nYbgYM==CeZgwg8Uqnb2slisQu1wrVpQH<` z9GgV;C_pF9p-IG%>XQhG>@>*ZT^nIHBJ$yUixhhG(NELkS0MFwS+!R%T1pO1d40C$ z4J4TQUnQctuHIUbF?T-s&R+5Y|I?Slt{?t13>oxlLejjEyCGQTm=1ik{x`y{eX}yp zc~ZUOaZwxO+yv5_BuTAV=d9@gPZWUb{SQP*`6K2ho0fxH%Z zzx3LY&f+f2%&TtvH|cld^Egj+s@=!yTO#M{%Okbc=G-cG)Sh_;y4qDfs}|9(>D9GP zdzRF5W>nd`3C+w(rj4kNxh$T)SN;{Z7HTtU0&e9 z*TrA$7(s&@Szsi~9=)pc_Y2Js|0AE{TVuK)R3oKTr5z<&|3ws5!dFB*26L$M>h!G+ z$D$Nmuee>g>#aCB%?<~29I>7WkzF9fE%=d$V z=h36e74D6yFTz{0$2a`f;rB6o!xCShqF%RZyG1;E@90lnxUF3!v-W&WPIL0<`rO!u zd))1&InV+r`|O&dF^%b33D89sqle>0kxI^u|NfgpO(U(hP9?)ZOt2?FG;*U&zdo-K zMeAoA!&{U5?ee45MZ8OQHLS4b00-7#LU%gCv)-3(|Et!U6p&G-V}M8992#p!+!zRN zzl*khzLMC>8Aq|ffKbDKyVMg#xKX5Z{n~21BPeNL`K4(4iZU&1yYAUr-Djt8j`{B4 zk?!NXNxg3jk?^H5?QuZk{y>AjauRErWPN?^8Rod!ZXAsZO$GD7o=k}C9ubgML zw-^q4i9uE)7mplF-%z>V?Yte2;UMaPpQlUUp@8Kb|HIa61T$bJ`LObAX#5kdZg&)# z``3y(CURZlgLF~7KSAocKJf&vAMdaczzBEPq5$Tv$8U7I{XdlzKME~V!!qo4gJ^wp z!Yg^KiC2OXg;!F;fu)}lVd+>DX1T{h+G*f}-#O{Z8d~<6EVQf(??*%dOU9t$E9FPT zos$xHKfE`(a5fZ$yrNzR)hj6q)knV&5>o~(cuiT(l5uCCAe73_n&GFaAS9-zBqRpf zDTWu6PmQtkDh%?cKCjZm z(05Lv+pu@fl4`N|;uNY1)k8r=+!vJYXm};Pk$5GL=buF6|6}H0gqHnp#Jy!u9Z&Q& zn4rN4?(XjH?h*(N!Gk-&xma+yxLa^{x8M!|fooRfM&3A6cO_B~f=+1O9D`Brc%_^PmCLUhZewPBVObWsBLzzVaWWCw+`Q z4pMGzQfozq(0~B{3q2^Tzr*dPMXOi!_O07K-yx8LV0V{hH*^lS7gT}FAtxnkj%wIF znkMp0Pd{7P!d<`U{Sp6#e=5Zx$EJFvX6b$aq;U%Jd+qzh2W{xw?nA>7-eXpm8+Z}$>7O}?f z!pzouhWM}!zXPJ3u)o)RE0m*=`5RL^L+vxAol02~=EqXM;3%Pm3Kp4?3K}~!EbVNd z=1)MIC_=#>fn3^;-Rw8|t%$)H{j9%}GX{$`LNB-GidnzismmuXZhT$gIFR1%L*Bd( z$bYz~ef<>B z+qjC^Z-wM+*bR#*rmrSBin{0iw6;N7|MH@%BK)e@8qn9G5>YCwf3L>1XW}n)=b~UG zg8XGHcSe>ox2=cOYaE|;_MqwDGaq_dcPahE223Ax&7SUiFE;VYt~%Xi8G^&|K#@wD zDE{i_zV!(>Ms&CP8mP7Jlra0f5nm{0u|Mm5R*T9X(u%SEtA%3BI^BaGU;)OP2`9cO z?uel5MTho18yLI8##c6icNzMO7y=~BvitiJu>AL6DhpMvJ_UL-=i#T9yPI&aHm74Di*R8imhz6-%HK!n_?#Z$V1%k0 zIr(*o_BtfVvTc%fN%1F;%w9{vP%_=Vb(N*Gv&-?V3M-c0ca87O8YR)O5RK636JuTe zNL4i{<7Av;)pPwN34>SLRKb}2PC=l*RGy7$yFg)U`m%D^< zQg=)%VMsx6;S;#*z%#itK@I$w#WnS}+b4KuZSFPLGQ6`0t%NTFuEn7Fv19^lAdQL* zcaAX8u1F+|*1l>=`wJbChP8LFR47~t0#Y;@yM`0EVEYn=77-~L9V>;DhCw7%=2$Xh zHx%vw5eeZ9R_q%Z0vH7j1h6S4ML&Z?h}n0PSXdf@cirC=Bti-`6l39O2vAdpY$QT< zfEq*^0!#%CDzK>nMSn&)lnMd4 zsHpn~~Iei4FsrSCg*12uDG&G`do9jgoS5=1Jg0mXyTAo6%0W zT^?|vx4AL3E1BNx?=a7}p(qOK4jM{oLOF2^5@fEVS|An#c*~rDCmfeVARPAuoRgq& zC1C@D5g@Uo`6@t_CqQONGwLcM;wmww&^kG$z(s3G!)$3u!wC!;-$h%*Rb@;8%SGm8 z?@xhEUv1={32CS8KE;t@R4w1s?Y=kAt{Z6MFsKR(DwqP%P-Yc%di^+$9D9rW^-sQu z=tC8$>e3%^*xC#h*-thfd1238V~Cf-gH>A2FN{YafzX41?n(dC&v;$L(wXS`t_;b> z)K6WEXlT4;l7(qjAap2}h|BLd35|Ea33NoNT-0~stsE)4dyXu1L_h4aKF@lu!f1&` z!~}_XZK3b0e5&yS`R; z+T+*v*Vp&G?vNN($fwkox|h}ypOR5h3bN}1BYf_qXAjeu>j4j1d!dFYER5XE7_=24 z{aa67de0Pl!p-RxM8Ppn-$m|q_am!k5PbGY!>7sHUt6{AKX5vFJ7+ewe~T^6acyn5 zObRU>Z?*`1Irhhss#muvYlu^==%gDQ{gV|gf1KrsW}1I@xh&X5NX1`wRPA0-m~Je@ zZ)VJPuVav7=ln%H^{#r@Y> z=>_Z@zp{Ie^Wm@25rfP^2_bt*W$xck?XirmLACc?m=NtknNO}0x^DM8JUXKo(M1O+0ex1R@7SaGcLZ(&2`eJY1NrCgFj_+qd-11=HR+7wO*v0R@G&du|Q=q zeQ7pxxL;MR3{0ARo_=Re#L@;^!*nb8ZErzp;~LeBpXDE{GhQl!Mh_K%#G+eVsL0fP z#6PxJUOJ4Elaey2_EZv#k6rB<#DXZu)xYDbV-l)=Uv_z*1T9sGt00{x6u~-g_V|ly zYq5!*ztL_yDqF?~%1JwE%GP0hU)0rNR{b2?r70M=Yf6Tg z&F_y<0NUXBMkaJXTmJJ&9}xTJ*(=RZ$x!iW2-p=kOu?o@6#Zby5YF^jfa*fctF}nV zOI9UqM^Xa(eW&8RTLxq*3#VcIh@X1uv;CKeRQEa@qw~LF1=GIVkdVAB85<&`kcF(q 
z;&d=a;5;h(qR+f#{Hss;VgH2W!#=x*LZZ5hLgLl}r7W8^myq6fC=je*MY-VNW}_goY?ujzkF(W zIuZaacH#wAues%5)bk&j44<|AEL;DmryCVadhzPg?1=;L5W)-}*( z{ko^iv5sD+v<-UC$YCd1NQEf5B@Fc%eZl|q$?)~F=MD%Z^LYdMY3^D-=T|^#L9$29 z+=fSp(zlk8QkxJtn3mF=V7RaCHtK%um^be!`05O0H00gq4jP+&?phZ9_TA3<={D** z=lFX6p$d0h*0>Z~5|??1Pn!Kb&sEftTE4o*P_{utDLLj1hn73L7=@2O?4k%VpHlA3 zS3BTom(FvXoZ6QypGCBZQ|2a1%xD+S1G5L4n=rq*^P96Cs^r@vnI1p>#wQRh!b(@t zM`*Qm6>m)}8T|cK?f0*h#WIS}F3;#{4b|Uvp7)+iaLbj5)&?k<$PU&Tn@3$i#?;pe z%V3}FUf6n9OR_mst^^6ROnHjXsXvx!mJ~*6XBH;JIths&u-=zkPRfQH4?eKgUKbN}vhyrZ*q1H}GD;UROW( zx!>hdNEBErMh)XGnf%SJ?)l^uJdiWXq5|7FpOpX0((0CGg4M$ zeU(EXUS*ICQPE9%|2Jst-iQNj!Y*LHw&*+;i`v;=wcPpywz` zwj2B;vIl*9-%N#OZ|4?XIz!(wS=myCGv^Xdk1 zU$=PNCucwHY&7=rwsD={=3WH4pJdY2s=oFdwsKa_-=&n>le_5KLZ?3qsZAVq8_H$C zLRO044yTtqMP0K_d^_^bS9@rcG?b*dC2F71rIr~O88hG+PMeV#PQfjxUmwBbTM_Nj zZ^(AoG)tAvaZ>l4g2RLnu+E7b3DwR?Quoj2p5)vRD`>nDmjsi96;DXUH;X;v_v7b< zC`GN!j#IPITJ9LmVcn2xYDS8q*A2ZB(MJPNO5+R>jd z##@5IP71GJ-Bh)cPGFFhEM9*AJ#U73 z>5-Qm71w|1HD6i8&$u*D9loSS zQt4X&MZa$8v7iZvXvB_6uX zb`i+K!hEB5&isq?=8?7IkZ@T>BD`|*ihfsy=6b13(Vi}+>!bxQ<*wFvMmB5wmA^bu zw!nv2uN-RHFi(7Jw%BB>O~a_D=O;xvKjFEXKz!Sm%!S=B*&UU62$rL{^=CStO+{AP zwZ!*FXFutVK~^8MbnI+ZF*uGBI}CiU;2F6uNjaNL`%(@>9+blbBn6Vy{0Mk-XA5=8xyyH*e{Cchr|y@ zt9e|LRu%0Cgm69!ALepQ1r2I&ff9veu0)8EX1d(Z=ntIV3{s-s-+t4MEhi{3FKIfJ zSd<9q)Kt_)H!1j{O(w|#_XiP68j3zyCdvVfS9~@6WR$|bw{f89B#bRg^Mj6IspFMOd8kz;% zyYF7K=(3WqvB+Mp@2L|Y=r8J!v2bV=Iy8MNTcGYH!>O_>|8WZ>c$-|is9O9S)NnMP z+`X)8mG^qy^L)rJkl_CfFRd>H$t(8f0Kb~mlkw$Jo=2co;FjK*l9UC;B^#p;^r2^6 zGlkald?_Br1fzNQKAiAv1eY2*xnbhLozOmnIXliTU(2hTD+u6EUAw z^2SCq&=!;MSr{M4M}4UKDvR+A@4y|VfDC5~?fc(ydNF!%gw}v0FC&>j2!OJTWK?!a zSD2a~qa$J?`y{Ox$^Lu=^q!I-PajBRvK$3mH!OXz zo8qG38Xa-?4?UwJR0!nYQ_H*=Nb-fs#fGJ2UK!D;=-yyW1?^IpBWs(CFalW#aWF`S z03^b}s2K&23I}8QA2I=E-3z^K&|8DvHWhmR7Ib(EdnoOyK@Om`j4LK5EmPf2iVFuL zQ^HObe5vdeCMSDz#GM5S0S9+<#F9CH{LvAW`Qw{I?14ds?*e!7=5&PuJ31g`^n~;h z$OXttpqc0RC-NB`sFv|AD6wSJ$y6}G39_tl*EbFIZ9M$6nGq`MN4WX<6HU0_#?HlI z;S^4=$aL~))|MjzGO3>wV!_5h0VJF2C+lk=ivDrKdX0OfKsvp<+M)(tksbzL*#uu% zxzZ8NW`?dM#s$%x=lPMbwPusfN3ii`1|wYK+6;DQ2SmD3lx-y?@3njx+O-IJ{?n2D z2(1s>57V|ta(u0uI>IQ~Qe^z73Eer_bC-tz%qigQ?&{N{n}gOdr!dHUJ8NXzg0(Z2 zyt?MI8ecPE=jhs(7Z_#XS0el8K*BV1)lZF`UPRWe>?YHG0v;{iI8z70x%Otq-HtF@ zwBkRuJ48Z2)_MP~%JqDJSYNyV(imV(D;XVeh2!^P>Ui4Lcvxeb8Lh04PtgZSm zv-^f!cv-BQcmAdUcU?!Uk|*eYH`_)GeW%uk?v0j4%@y>A?z4E8eh72LV8mnahIz+r zOG|H{73!L_hWj8G4*X57*@-`6-68g|??VB|OwKC%h$K#*Bgdll5uIu+B4ck4AD3r{ z5CFml;%9#|g85x9F}vS>;pGF)k2bH&;lIgi^(;0Z7E!C7V?x~zR+EulV$Ja^3avvk z26m(^ac!~j`F3%J=N9j&(k@3f6b1=q0seuEvvjuwOFrh(3fQF(F@I&pOL|v~)VcDy zrvD}oE$}6iH5@5sqgwG9L!Zu?YiZw;9QYbZUSsringQLhm%f%(slj|!ZDXl?d}A^G z#XdJ|837lH&R_kRhVW{~4*$DFvT(*p)t8^tLeeugkeB9eqVVG)(MQ4<5J$@0kT1;z z5!~6ZDJ*fR#~|ybzcR7&?Kjmn|Kq#$2u&V?3-y#ZbIxxrJPYzE=-dIxXc@=r7Q-rA zFeuhfOM09O>1O>ZTgp09H)u_!rOv!g+tkrfnY|q%)=vg{91Hxxqbi>Y-f{=L2NXuA z8OPV(jtbfdR{y9&p9{Vdtp05_x12QpK9-h@OKs>p=dNKF>)_aAYRSA6oygh*&mBMx zxVZ0zr#3|NiFFt@c($cdSS(E z{S?BQaje{GURu`!80p644!Facx>4QdTv(n7%IsAj;|}orH=5ETQ;CdY`zedk3lv}U zk+96(``>~DxaNDh0N?}p@PEw0QcF9$H2xck!FytQLrh}!7An3&*2uUrKz`N(Hzu?V-k=4jf^7(a4%e)Lt|(}$QM04dZis9A+whiVQ9n&SSr#+3O?)VjN@l; zUg&#jJ`+1&3E7y<_bTv)MyjeKwb9jbU z;y_ck#QXYgGIS0V()q3@udPv>13hxKh;VE+J1c5-uyyfI=uzy6TXFI9+sa-o>-}|2 zH#zs+2g?jumvVcdv-S3?hD)1qP^%Scla|24_SyQj3)+)kC?}|&c!(WecQ4b^*guKT z{>b>|(w~*@s`ollzvXLVljp0~_Uowz;(7b7>AP>UZM?7cbUhif36uY4LhILF(@K|@ zO^;sFb%vdx4I2~VUs?jT$ouN5&a64;UOONHW37heQzNt(m2!^Dlp9rvvNr zs@x4k??=f_*gt=%I>M-HFj;9b=xGlC*od9RA$Ny%kooS90|$iekIanh$bG}A+n@vz z_Ln*C5a@z^kV{L^_*41<_drYMYt3@)ofgWhuR%<_hi2V{XCvMbueP#IGHqZ(>Ds}e 
zp}mS&`4SFqZ`x&*Mz;9a@}eWopN1H3#6#YG$;sW5s^qT-^7sa%tb<>}4Y_L6F09K> z=LzhKItG81HFoI}XLnYrhIbichqq~D!{Zs|pL-j7IxU1ao7{)}G55;#>d>ZX7Vru2 z64jt+-e?H*GIV}#P~)AwY-OZlIGZ!wM)(v5+%u8wg3wYsvU_`=eEU794Botj=THJ=6vSkQ}~lT z(71|$pg}QznHdJN?L#_$O9%B}&>2RchK9YF%gFn{NPAUEyWjHu+B*qS%@8!PCwXDs z5!P8C%%>T+Z+f0YdVQ&)MscfdXws=FYK?1+kI%0fVddBR8u>AV(+fv@JO1NTNSsvKwYQl4y1mrh;@g*$gXFIj@_9S% zr5O072MsSbbF;|Yc%Kf>&O6}WfwoW?1t^^u}BN@+JwA|wU| z$qAXJoe8{MD-kmJEmtaP{anBBg(b+I)+C{=vYdq2Ik|gnhQ_+K*Wyk^~+_|GW z8AWg;@r2WRE>n3h=%c9nt^(HfZzC)^F9k@N=tL3IqXsz*=VoD-0X+LH*sdXjliaRJ z65lZj*zl?nAaRewjj*WIaTPdpB8#v%{{{_kX`twz4uR7}!P;PTw39Vr1B#7*g%SI| zE8w$nvPK+BU1HRxJ@z19^z#h97AiJE`E3tPSGf3OTy9LXk0iZOET|hiAHN$}nr+wG zB80R%C@Dv+mlD1s()gG!c~*uI;O$7hiNe! zJIH~{jlNl(;rhGMe58L7Xn}V_VkAB-fO((r@A}%5uz(P;5l2HY4HjHa6BK6kmWBn5 z!p!Flk}_Q5@g!5@@v2DUv9q@20(13PDSy>i=_iIWmo6DuzTBYdEEejq(m_Cvk1|#& z3ZVP}MfQ10m_z4h^u}WZ#j#SuBZ}+oIB>6}K07WH<iK+=$fDGV04pF|VRxo9aU!I%N5J)}tMW&al%=EEjC@>*1)ge-#76{I`&!fJ+%2vxCdR?nrUB9pNWR-*%nKKUbY+F#TrkvA5bx*Gj{Mt&EqF<{IW#6aIL zu=$#+}D#^^STwujjFe26E}8jMUe4a#mm)N6P*7uh#W_Iog<@f48!&+nKHWR zujbeM^d-lil|IY-+Rtmr{E*t1-N3CDWu@NOH;A%<7G5$^cfG$1mG1JsRT%tzey091 zZ8NacQoO&RFHB07-a&(-I3oG%4syeFhvaj5h-O!3}SH~unx*JmCWC0V^J5(DVV?0 z#nLKyO)5*7wooJDS8dwH*0{ude0-3#X`;7gQOTgww6AQTwi zz8gQwvY*x9`YMnheAMzEblp1RxH;rmu_`>$<5J_T^$)Bw?u$w~>E%s(q=)a0MFKy?cau#MYNg@ z?N(`jZq8ANx^4tp<~HEYDo%UtEVMF~EWPPlY}+H2s_kKKZ4;id+=(#?eKt|Dl;fU4 zCan+~hR0DtPJ*w^CrO{-+ipL_)aX>>dt}Bg`5wX)PXEv{Sid^?`Q^#j;5qPm5nWZ+ z_B?l9`+hg~Tg0HoVv%Xd%CS+{_1abZ`BKSHFNMF0ZARfL5fpcku?MF&1{)nV(u}zmivN-jV#sdv)p{YCiwA5>Qoqm+=|b zkBzv9=mTG5kM%UBuRI%0=w6&uq;4qz5f)3BiGI;npOFf)M|Rtt_@1Aaaf$|gD2|H| z=P6u7625ou1+j;Mv)A7xHoT|mD^KHK7*i(6ag`(3T~ml9>eP*7>ZX!Afl|vre-{t8 zpJ`frF`f^{uhW$8POgh%@m7q>y$$BGGfWZ^3T(h@^DkhMalne}zz*Zgiy3N&dN0*& zk(PmZNK0cxKuOohkA-y)A5|)(3%Bs>wEMsv5r!1gX{-3mUV+VR+_LZaXgBf0Ts7xD zSV*zXu7WXUvAHzg?l^JP&i4w5liMW`tY0rZ6q@|r&Nqsp6->M;wgr>6UQ|YwE;?y@a zCV#AUK*DNhv}%t|W#uH&X%(q5w^HUuV>UFX)LY%HG_z8U0&0eVnjGzhM%l&s#uB~n z6KN-ak@A}t>xg!B_-c>U;M|=^O)6WE;gC|gAdF7u6kfyp0V^)EF_vt#M<%deJBdQ1 z(&+!3*}r;!Z-h+cf3n!S~3YxeMVGiIbEtfIw%0q z=RLFW9g5!Z-}5!DF5vV(odQp zvQ}&b6s|+33MeEp@aPClG~`u!$8HjXO6dno){1P=8MKMoQ%R;r;x1wtwEZ)n?Jy4I zkX-0@LbEg)Y~jK8WnnqQMX%dAVE4W-U{R8#W^#+oN|X(81^MNk`)w(YMr_0d*!MRG z&JgLtbJmrC1>**W#`ACR6_iK4AwVbhK_&dyyU^tSpb7Bp4~*0VSgZqB;37%8+C#tE ztYlIGm?U46-Z7WtccpY$DxFT5cY4Q3U;kME%)VbCj2BR>_CWe)THm}RCL36W6(mFw zdYw+Ae+&QdpJBkn50m~(^AeObW`U2@9{s2I0=m^6l78^?F{x}eb=wXN^YCZ5Icz$e z1wA3x!%Fzgx-J92!m7=Z+kZZlK))?)HAsQJh0?3U6Xzuf6=q$6W+Db3FH$F79Uq{F z*ZfhXLzS!0y`wWAiv@6Uc*Jx7Iv@tpACN6P=1*SZMLGxG#MIy02|y2GvLc_qAb%uI zXu4{j|JC?$Lwm`ES+8Ie?YDEQh|4=txxfdvT!T}xA)FwW5|bR;@1hJSG8>_PQLH>n ziQ4a_v7I-cHOtos(&Efk36{jO%|s8K=0{rc6d>tboP~{IdnONC>=fH~F9~&ZHIP20 zD^L$W^cV-C&fY*lg8W0#ui`#~EkK(5-EKwy@hk?|Cd+ru_uu*o<xGT4`q!|M;iyG^IG=U*jqIm0FH*h>d)&LzVY zch<|HXDMmDJHrsW%a9H+O=cWONDVzy$|0EtJi zk5YBY_uhME>j=X=D1e;pM@s|SWm&w@^1znorkMLSa718e44#)f?)8kRjWE(Z?r_MQ z0lTRk^PWh%UyRkIY(JD7V%D`N*^Fts|4vOd$~eL@)`O_AZMa6W`R^&!Zhh4=hVp-5 zfJOhDN$vJa2f>NVEIu~3<}xN^Dl`NuwLjD8kg|41Nt?$ap6O4uNUNxZJ#@3{|BU0B z9iVfS;$JBYda~g>$^duGa77ZMWUcH0>G(iykGL7o45Xj z{2F{x$11epW?L+p>WKRwn#!3^H%=sS`lEDp8=vk0$WJF@c;=F*0+V)6-0D;%^9BX2+IjbSqhiKU76q+%gT&GoTZyd3 z#b};`^0&6gKyJQfn<7O^C<$p=wxq> zm7EQX-sJ_&S5+IlhLx%QBNb?t^Pp=77fIl*UnuK;J?N<8dx(V;@7w zPOgDi53AM?58AO<5BM0x{yj=#B*4*QIm>|g2gBq|`9;fnXG6{P|6-%qgdyg%Lg(3e zT{q=xUoL7CUh=b&L~669K)2}I{;&kOUKiSXI9f@%3R1Lz!iJEt)5o%e7!*!51x0?} zj|12L2w>ZfCxRK!YJfsm=>zG(=1Pj)BO`<^`#o>wL2q{)9{c{0i;qAoOrl>(9{WCS z?(?^;DIW^f{*7J+rS;~lON@92al4<{AsBk|g)<6q#rE{J5o_fv1OFuVlrj`hme|@d 
zDmWj74BeArv?o`LYNzoPJyE|lit19VLY3KR~^*a92UDzZgPjeMqMG`+p0%SHf%$-3U18%Oty=nG{W*rigsD?^v$3`f#Bjiyog( z_tm?HDI$DGu~7F*lUbf1oMxT%AgMeyjcEFrSHA@2jgrsz9lhgx4&rvKU-$f)2k~Bt zQIEVE>yGmBk`l2wBnyIS0{QdkM_NJ)0lIMFa+nVUlHaZ1K<*}1f&#Yprb}|`n~Qb@ z+Zo$VT&kQ{lY^Z7wOsK?5dDD_3E0P_l_2iW;UYp7yo3=BJrSYU;oUhW)DhIqMgs2d zI;f@*v4rgk1l%@ieyBmW^BHd&Onu*yw3@$B9?5tMK+JzIIg-);1;GM= zeT;xxuitd)`;__A8$-^ZVT-9Zp$n#u|6_CeEr)eb4Z(M8(ip}Q{9mnn|9`b14KP1ZFF6R9 zF==CQr_%mwv-e+{qyO4qgjJ|I48O^HX92$w{Z<64ln-jjp%Vn_^Bgpl*u-h7!F6hM>QQcA>*qyU^foXi%udCDh_@IN$pPBfCGGAQ_8JhY)hV z{Jj3pjd;ZsKw7K&`2d2iAvr|hdvP9wwmyI!AAq{`ODI6U(lum<(Ck9Tu1YN}%u|0t zAK@MzHyfb&*9|*RrXlD7;$4=|~z~Sw_6PnT>@VDrkeVbP@ z#-~v>huC%7-816G!5A-W_PPdY6dmk~Krm^GBiq%V?3MNd>`4B$iIEK>+t?!nABS&> z(OyOj^kKK@SZtOPQRjIdc6$j{EiY&?e_hZeh`TFh)!PtcTRJLqyXo@z24ZCpG7XnH&6l zTc6G1<09;RLS-Y`8xTizMVj$&FH({RytBfKDeIrxKOZo>2Z;yrwZsMX4{<^xiIY*u zj4WJ?#`L2VCEX=L?tO(?9c9?qtluMLLp#(=sKr4b*k$RdT0b{_1Hfi}s$`QdaiV;aQ7`$+T zBe)oX&+pB1&G%_cT_auNCnH_)sDPdk1oT=`v7tF|Eblqd-LUEvh6Ip+(f@JEMBitJ zr6NO3PpN@+bFKhW!4dPx=Xz*kLNqrui0BtMVeB2QqJDn<6Sh5@(wXoyFZdi_eNE9UCw9yFTN^2`^T}Ko1$4)c zXLTdLCjVUg&=VV4h6PEtBf9>5B<>d2>oo&He^>wC?=9OTjZ~bs?~d7LUFdep(?um+ zm9^F9XosLVt0l7TsgnJnrY?lS`)cvc!|ScdJ2R3-E;LGq{)GlhE9!Bw^S!s?{kjFo zuVc#v57GX((V=6@g^*_uR0&$GZ^M5IGE$OiWUR^>0^LANp+BHqx|Oy{N{w?sNaX8_ z)`tbrIKn*tN`O*0Kpg-Pg9NsXf^Nk2(brx;w_{LQAU{cn%LDOva&%VPZwt?c=f!Qe zW2&v){kNefZ<)8;M&&}+pV0iPaO(&?uJQl=JJ~liLiF{J{@)hq!=B+)1j{};@a8N3 zkJI=2=q?eqE4;2OUZ?JhCEPl>=?EZ(%73aK`N8#_8dgh{{CKd%qeUnB-?q4g`X)b@ z#h{jzl=W!70$AKaPLywS1WgZ%L7XT#mB>t?{{xkwoH1x3BP=FGM+I^OO&P#mrM2dg zZYRtCclmUeG3;`?lrh|7_m}}+PCtSvg+?Gv5Ugb%RjC7a)aLpUz4x&AbDvD~f~^lS zS|WnaFT<0?B(!k%{b4rZr6<7=a#S<|Vsz~pDbQLGG@abySjTt6bPkj!o_)llGT9}jvFcXQwo?>x)L{`! zP8^?GXaCUqSeJIuM&mY$vQC7YmV2Z|^2tJ@$k(-x)wTJ>L^$esFv;$r$qfJCPk~c- zRsMndGKuzNi`NOm*Q0ELXvhT=c+KiFbD4b7w6#KS{q3K?oFe{Hyjnuy$ayl%J|*SU zt(TAoIu&xgLs7an{wO;15hm;_a|y5QzFh;KBgJOKTU|7)S`r!~(bRqPps+3p+74`r z9jBl$VHm7)WQBHaik6Ij9>&DNRayLvi%wNFM(PXvL2z@=58k(w$$Y&M46c~kaVVS-qa3Fdf zdlf%Axd4?x+ZI8(CD6K$Pg;~Bfq=anSQ@YBSR7Wg}!)4mzgyCNGGRU zvxD8EK~j)DnhqZ;SuKAeY#MZ?)CrXCVmlvgM$zqPRZBBYF$Tg#V(;s|2|4 zVbNY`T=$tp<ZfMO0*w}7xCfm<{t|+lB(oMA{ zEj7q8R?^KmiER`hkJQP^Kelz%CogYaA-8wQz4+E_v^=;?hsRiRc#PUf$lI~R$cyUO ziRb2s2r;!;ei2v}12;57?S+?j{(g*kp2Ia775CB?5qp(6zByx3K5F}=-Rb}EZTR{z zbAmf&)UbOb_M-sf^7f{%_SJh8gst-}x4>`+LclBL%ogV?rp6H%YfOL;rmuR^^R=U? zWF0i)IvaRygXDY6T>G5$iB)u@?!dR#`@1>8->>ub8*<9dTlXp3y>`5>{I2fDQ zo&gbr50x@$-X;))!{_XT*9bdDA>|dBHvX(FHLfADawZz7G%owFb$3Tgn?D6p_Bt-F z(s5;>652>?67|;IO ziibU~=06T9h2*$o$hS~9D?}t@46KwqCuhoN^|m(5gR&%oHy=ze^9bC?fT@Bm%4+ z&Y=v65J^o;^;l#Y0z3r{Hn6EFMgJJUV*QVwiij19PD4PZz##@U4Wj6OSJ54|$N8P0 zLL>+P2HUhC0=j7|%)MfL81sUZHB9P~pWy^dC457Aj4hN1$`xpw21<^dyWXfACRwx; z2r5l*Ly;9U;VMn=>8{mi1Uw2Hc3HF!IHMLpt_v*vh5}@;u{qXAd81bZrl&m05~z7k zAoH7l7x~smkyi^WZ(ZL2QqCI%7dVQ3!XUVaH&{6^sY9GWa7S;jNZ=G)fP5I5L2#en zVv!&yxDZkFyZ-o^D}a%eN`W3`O&f__sE@R(V51WvfXx&o{T4~>tJ1EXM2P!aByq33>-5{H^Pyx!U1B&=aHjyF1U^pP=+f|8YQj$cA zkT<|QN9(qRk#P_d8dtjSAO+=OuD>`6*zN0@hTbi&KYaSQ+t-%~?UC}mF6tZV(Tza* z)T_s0c9HLEV7wVvLDnVdVY;M@zV`o0iJ^AUJ5jH^&?4u)kXhDP^pA;CI({ipW9v<^ zP+b*s*BpVew?QZ2VA2-pS}|wj(Q(SsBB0OEN!Cu+vf*mvVF>N{X*c{uGyQm|Ij=*? 
zf%^t);mZrXfpCD>Lhsi9&~Z48 zu77=7*=&tVdMT~#tS*thQi6YvuX!^r4kX zCE~~UVScwP|LKAOlYuU4I^@p)(`34U#z7tZJ(5uqPHrlSio^T^PVoDtZ||mXRA>9U z8DOaC2eSB_1T;PH2a6O z!;O{W5p!m5b_+(^8U2ESvK#5E__iHjXZW^_YJ#sVPVy_c&3l8PSR+|^RmDu}ev6<1 zvMIx>{JOrjU*XV7OlWFd?CID>IaFn8^8a_jNN14v9Ye5Blc%wI&S59y)H3jka}-v} z{15+Gr4d6rBK03V{be5=nlyA4erq|=UxserR}-jC4Mxwo;4btaaO=cAx$1Fd)dGqZ zJwc^&LY>$=?Rs;Mc64UBY&Shlg94ytS+JVmw&ch~yusX~8K~F;WKV93VH2Jqt(-&vmVyHR~t!_31^tM;5H+DvN zCBq?>-xrOuRF$JNfi#_?1NeCE+8roG4BDJh9D5VS2|FAH88O4J(4F4bOz4cpFU(h!!4Wcvx(%sS>hd6?OfS`1X;Gw(Y zNFP+X1WD;`kVd4D?(QxDY0lZt{@(X{y??>;(|BT0KD;Xm2d~}!HE%K zWhMH9GZ*tn&N6-&(A&AtjKo(-g{f9Jv;LgDApF}BfCS@jRsF8$+0{Mfqwlw= z+Wxb@&F*OIvrr0cSCe;L^}p~dVz3zsPq5HtRal#JW1ZXH#oJ-y&8o9DdB!>;-^Keq zB00h6RfZGqO2l}4zE28 zm0TZ~15sY3!1WclMXyK|a5B47{C$((cO+=@$!!zyL6Nk>Gr7P45EQd}XnSxU4mh9G zYZw?QoE;dx7_J!_xpn;#1Fl(R7dyZ^xpZK(R{*-$*VNUhTT-FOu5N^$6K} z57XM;%=*m-7Oq;~S0V|e)=H0EySe#lF;AEUnZ5w?zB2wrI}1;cZhheSajcmBiW6d!MDtck^{(Ms#p#)yyjEy!5({ zBvNNRC$8rujP`>+b!)_xM>U?4br;B93xD4^?k$Kw^;iqPpL<N&W$af_<*Pple_^;DA)>6U6hLeFuEi~cYc{xa)R zNv|YY%b{m2*f;mqNhxBJscFd>CN^$iz{<^b#q-zHZA84`5KMJ!yoDzu!p~V?BCp2m zxgtG7FrVV!84{2P)3U&%UX6LHwnWanT~HU!nsEGqO!Oqh&#wEfkCwmOO(GX8@(J_H zBRpL+5*`c|eVn9j1-am)Pnc?Gcr)lEJlHJyPhVBh#G1=6{0c70WzxkY;lX3kCw*1L z5o@l)@T*2H*h5Q&=Lw5GRZ{l^(ePTUS(SLKxd+3qbGhKlAWVErJY8H8p12CRTIUr$ zeHJXEGx1Xj9#R&4_E%MJV$Jj4k*Hze&8V_g>&KecGyHOu555e>#K*?deL})R#iB3t zstOisJ`E039Kka4l!S+dMPC9ONj|v#?Je&}Dt0LjUNRv`89j^lqu4bpxnTB3cq@-s zyfI_f$mN1Rf&v?YDXzlMKm^Ej2^e0%w~6ygC~4u?F*T5n~z9~x&qwM_zO1xHv43chB%k=cOwnwj+j zPlc_Y_Z80lUCs4I75u|X60zDhMZzEY&m6$t(S}=y&Szk#arir!WW(Non2mS{SdL~9 z*t?j1tN5VKo@}s@Q|niaX2e|DUa^xG?aG_m(sadu)1*=9YwdT^3Z80b?k00(*R*oK z=s}RiG^;n8N=>ueCuz9z9ZZk(Nw&4<_Pxj$6;0zl%pBCPr2ek$p&m|X_5J@9<>=t} zzfhFp78qTrer9{zYk?`U8=IY4W-8wG1gy0d61Kil9tV3QwsG z1+RLccA6PiQc^*2YU;LwjpyXSJF`mzgTnSoIE5N8Y89gXCNxxCM#{0HHppJ;^!{OG z^`iasLOLut?2v}EnTGTA@VC=fr6&5{Rv^FQ0DMyAXW>`ik=qVWIsa7pe_Yr@>+<^k zD5i^QJuI54MdR7XLqM_ROvx<%=^e+YfZxbLS4(4*^B*cN&Gn^i7Xr!wjNlg@m}b8u zOH$t2T+aE@Y|zKK=))JH6{V%a$C-cPUBCLaxl1g;kUnChJ)LafOdmOIw6-Glk}_&P z49CK1T3-5Wry}dQGv4o#{pQW#6G^8iN0rJpct?cywZdjXq73enVJW{iQm$tGdw;{j zBgFV+ec$rSL4_645-}C?tkn6Hz(z>k{2PAN%=Cw#%v_iAtu12TXeHHz2=gKYtPBCG zLcr<}u%8H6D+1Oj{QI%eS48-0$7hs6O$O#;Th>8Z3E#28)lbMN#hUcY*S1W9;u8F@ z!ZmPvHE^*tPo7cAG#Q!SfSZ)h5?Zh@RI63zc_#7K^q1vDNl-o;;|=0o8P%Ita99Q2+tFD113K5Xkw@!G8cgXz>CB zepR(x!vE1khA;6jP%ALdcs2&+@4k{gatwGi8uw{#c|l_|4l%cEC_fq(JGbncgpws8 zZ86fEaAV!{*10SgA>|aJFVfMmDd<6j^5Eugi z?f-x%5D;^1G<2qW%OcU$As)jSPs>O|*JL2)8!G1yPU$udyl^mj7R%6Nbn%~>vB}6L zy^l;K4(zD}x&n7-qX)Bi;lGLp0aXy-!5jV0n|jx8&}<;VnRHj+B?&7yRw8Kd7BqMR z_R#(hNCE*p5HJUUcvi1cV$c#S!q>$L&OU_*kpkWYGX(~7&?0nT*|5jwb==p1<+R@* zPaIf&-skh$WQ&H3(!V&@_f-^^^wN>PW;gln-UibwS}PR2(yMRvr^X}|`99Ek7QoZc z`sZKSKTe^5W5DLJ!+#vrxc$L;SGU*iojIWVb>JNhAR!)b*PZ@wRL3)vWcGbxp`!?i zV{mc)kdL$h3atU11(eEdvF?|Sx{mRpdN_F_T-ADnOsxVA%9O?b0RV2F9HU0?7~*g{UZJtff@zSpfMG+h(e)~1?P53Uj&KC+VTf4H?GYvH z(XPe6qTHLs@V#sHkS|$J=mG_APy^NkJ-k;}7=xE0F0SMqT)UpD?+ces{wDe8ivBr~ z;ilFoW;r8a*s#hb!*J##I&)>%V3qG43Pz-a6TKj0GG&6czD9T#g9^qCR>kh2a6}3L zF(3Wtx$Yfn9)~sW=gsFCfQriKquVTtf%u9C4x^bHdRiL}Er4~Q=&XbAc2#K9!wplr z2vfTZQy?bvfwC?bjXtc(pMLjlr=ID4^v`HX84JGE9Q>Vq)g?S54(WD9(M%EzB zMXx$uM$-9q+kb`?cSFh3EK9TWHqz)_djl@QCXh$gyTkXZ?;Is;ysqYcAR14qB>ZG+ z^p5J8dZ@DAGF)YfO)q`yM;Bmp_}1jq_*$U4av=R?wJ;H7;^=h+ku{>yU)-a)nGoM@ z5Q<6<(OqF>3m~%6$du8gc~Dinh?Vk--01^G{rzI7dvb2+QzPn&*b1J&htplJ>|8=^ z@_o8z(FTij7t?dE0`kAgUMF5e2R9}PWDR?+NI10#dd`aP{;YdBc%yTsA#iIjUgKQm zqmXjnGxgPUr*0vk`yWayZE}<)l8Six4^>3V=a~bu@8#d!H}afTL^T6{)W&=Kv?SwEF@V%Y&{%h}D1 zKdT9UYIW5qPVb6JeXi2SH1-(_lYX5 
z$ur^>UPgBl=uvyEtrS6lj2$Sc;Eh0>w4$z| R+?^>J*5HKbLj12+fLcm@iV1fvkXsEd) z0wx=3u7H3kg_^4(U>XRRHUg%LfEglSrU=-31k4%%vqiuh5inN-%rn&77Xb?lH4jF> z!b8nJBVf@8SUdujjDV#fV3`Qm4@BVm0 zw!McStx&u2*WHLGgfUC*DXxD5b$&F_pOrE3NEKr9JSgC$R}@X>xxJ6QX(Y4P?IuD} zYpa_CQF4_fC`@C$=e24lyE-$>zsh`4JaXluy1aYx)TahU1qSX5p0 z=sN=&$}41{W5_!nIK!C=s`ul@FdNi%lA2Vas&kD?{3qY~P1>(U_$Fy)BUUPb^@3D> z<5;FyMe(A875iILz?5RT47*~ppAomq$j_jOrR`w-tK=VC zQX^S#*)aA_Y5Y5EXL&QOZE87E^F5OH{2m*xetwtu*_`f%AoLw!TnKD!6w;x@Nm%!d zE!h(BLhOWy@iY2Hgq5j{CBD)Rk{Dm2`$oo;M$VM>j46$RSoRxb&Y&cw`cvZJr^M<6 z#KQ!{>V(9@gv9DZ#KT0y>cqsu#Kh_(#KZAys=tJZ$EAtKp~T~_@qWD{9ycZ)|3Ex$ zPdx5UJnlbGi9^cPLHFm|gD<}octlb<@pvBbcp33{or+$t4{2~@LItJ}JLX&V0KA=% zJJ+1&3X}Kd^jp)e7|4Lq)>4k4naVUp)-?G2%SmEcsG;MvL~U9c{rr0 z{}ElMs@*;87Q?El-8jFh{Xy$}04H}DUk1tH5Z>cMwF>;t?p=5BhrvwcRqg5Ef{7qg zeKW_ke}!^FWqkDiaj`PKTF|2d*-Xk8mYI|@ro*8Du=5PaKlnzwci+`~@4h>yj=Qvy z(SSex2ev!XQDm^smsTbk@P*G89ZXi$?MvBJ?a~!B?f!BW0k<`ScY-$yqH~}Ea99t>D{9r=!XFIA0T@hUeY_f`cp+fVt{ z-Tn-v`3H!19I8mi&#v!iL0-7lJoKT|7f@MT4gm4DJ1Mwf^RU^?>Kl8&TCK$6hlmas zgtUj3^8PkdAu~cl#r%4Jry=!B?qmMc?c{&lnRA^){y6y0TAI7^_TSY6@a(3 zreC+m%Hvr$!3$fn{`_5VueOntlhZ~37LeNWa^wt@1 zl}~bT-SF7?=;H($Xy|_LX(`7%-7i#kc}$wyuN)KFLUj_LHhM?QV5}=h+j7{N0~My2Cz-# zRi`f=PhetHGO!-_U=>#e>BaK=3xv(J26s2Ts!OLg93eDSnQ#sKs~XSe8i=XuqQqvs z)q+t$#OIf9*?e;0pI8h`*4DTdXpS^!w4Rebvf2is1$vB&0L$FhX{hTQ)b-Mo;E>0P zyI!C3FQJpuMn|t#9gSI)qE!X^&!bNWfBQpBHcx2Hy`3uYoiaGY%Bf21o>4N2-REbt z-KL0aKkHLC;$6Klb-e8Ix}u(;NxqykJaBCN_+`oW#>eElN`$#LN?{fZxAOCJ6o1)+ zMXcYr`9-{;e)8LCPAV`3{r9P)-|lAolGU`o-)u_w;PTFgxu-EoLvd6W?pjSzR+d0y z^IlHJPyqt1`CF0Z*4%Si50_zhh=uU$S>#b)*YG`jYT4Bl=gIe+DK2AI_qG@Pp41R#JmVm=8}G>!d{}iIH1jxSeto}9Z00o+ zWqV2f#LUAzceIUlNh|xH*S~=-ywtL}y5-p~{(2p;!08W^ndzQGU18^;FZdST3YEDU z9`_yXBXI`yKNidjF*Wy7MJUggiyREjsEaMK2VB%j-$uqY^L(VGv~E7EX)NMz6`oaz zp&>0uL(R;)Hgq|kH@$G`CskN!s=cyt;TxQky`Pe0eh~2KtysQF?a)m4SZ-+9ahR)h zek5@{f4bsXEB(>9IpE~{nX3p5b=9+%N%TwWOVywMzT(Vsmb$%Ch}L-m3*FXT7Whn7 ziOAPWuz4#lXQ0|T&TXuL9<`iB{(?P7Nyt2G zNF{rmMt{Cxb8FrFS(&}!oJ zaxaQW_;@Sd34Ks4v-b(YQ3rS9#3aEC%j{WGlaBp0=m@NP2Sq~?B0`#~Ycl!NZ$ZY^~Ezx(Tc6Hb{YD4k&7ZqK0NN%Ir@Q zLA$!znq$t|n)(HU`2#E6hLHdC5bnhn;1$6>B{`UAJSTAP9BdF`e~M#hZe^t0H<)av z#Qd_V#)Pk`My7Y*chC#{dDjWgyr;Si8dBVgVS?O?6vt)u<$~Ns8Vk|xU4zL94b0_L zHQRZ|ulUOBC2XW7z`GPDsoUTIUPgKytxZG{FS~>C9ix@q256)Bzc$2-Z%9B-;FPjl zC!Dj{|JM|}gP>{id#fRp#khw52EMWjZC6-luO850)FmZ-%VlDjXaKMtblvgAv4uPX z4%Ltpd(AC4s?wDPEezA9yT7cPn=eR?n_DUU+UjuqkcrNIAzSUu;k}UKs0DwgS=%mS zq=*GNrWq|mC~PA)myrQCm6KR3`n^inx%=C+>d*>Rq2w_Wn_iM;o?(53#(XNZy=Gxc zyycXCH0~A?Zevj@qw;!eGKJ(J-_^J@!w4rjrv9U6<@lRm( z&b#{@C!Bo($ZU7u2Z!L3y$M*x#Y#i-wSAOFA9mlO$g+3aUT>!@%K6igC2;{Et}A4N zyWe9VvQCnW=*`NACKYezBivuOMB_w6!hic!4EA{jHfgV0ULW z3Fx^Z6f4xm{&f@pBFM^pxH^y`ADx3&1ds|N;p(eOcRi?>ev&(g>JU(N42-!ycr*eb zP9E-P^8K%w>yh+nt}}_X4Z!cwJr1%=DkWcg0O9^+vsC>#`_7_k-ap5r3gEqZf$J84 z5bW*6^-U-MN=lG^n!m{WY?3);#{jmsM`8CJ>(bhjfb10DVeHk~b|l%M6&^uuVFK58 z*$9!+3vA^*+7Di~D4l71YSdQd^6mGE-&vYXM?`tj0*|TKMnV<3+ux0k?>)n17CrxL zToOC+tniaQnl>!u75&BFK%BBbvWyVxegz7S5S}Q_56`|p|{5s|m&<~Q(4^q$%($Ehw3g)uV z4-n`FIp_y@=m!Pp2SvjobfpJnd>z^*XwZ(QonzyUrx~?lqo|;1WZd|w-uM#A%+oM> zSXW}dW5^f~P%`}q3{jR&1OLDS+mD(_4Vp#{bAao>(}duiMPt@O@f6K9;O0@y5Z8(< zRvncB^m*^@s~qOY7c!g)bc2&|LTjL-zx`P82v*L+VYo#i1)do^5ip^nSlGgjN41iYaqxQv3mtqtd^A z!v@DD3x~5G+4xR;fO*1DnLAP`T)debnl?>>nl|;~CQTMsxG%G&&6>_e#PdD0aln|C z^`Lr&9jAj)Xvwi{h7NNua~{I%cRs->z~H%b-8N-&$XV?6%c6M5vLz3!YW`~ZoF<;p zA(BkOv+%i$w`N1~xBnS$F4ah7RzD)lElG2a!yTDjTZNMBnU=|!tU@jP z$1kc-uFf8sd(g5D&*l<8*CdsrH)S2xC1o8S^gKodzK;FUhr56vtgPebAd9E?jG3_& zi4<7fr$R82Huf@wp&C`PUUD*^!&a>rk5b*{N|+Nc_@5rx|J&o;j2YI~F$)7Y?;8WS 
zOkNFY12hRT(5xpjfQW3_#-o-}75zF7Wm=$i7`{4DfKTfFd{g^8K<)6AO8~GGw7Faj zkU&K=pLqLqdaQs44{lR$AJ=MfFJ3#jv|p%|oL4wvROUOq){ZHAsuwM5n$yF~9GzJttjK1se9+MJ`o!}?afa{myS@yGmgWQ=pQU2avl7;Gzr=pY$-L#@c8JV#}{aI#dgg`$-*#gwdTg5kB*4-$BAwVs?=4a<1KUgpw=0gvjvn%1vM{IV4%dV; zZ5rK2K5gp4p=~&jJz9Ewtu@Tx5lpXCzc-^1d~fF}vZu`o_chpM|5Lrl->z?#$fgB1+!rx;N0Bvib+pBm?Ym z9+lQ$8M*KQZVLb(Y0|kB~sIUoC*bFLc0Ts6Fn&f#ro^Hb_kAFAO zUB^~X8Jt~dmzG%{(4Bd=YCTZ9V{K4i;GY9TfgjgjqNAtI;rH4=#LpAQxZvGOt}Dg~}hkwdbtj?B7V6(Xut-wLXV5n6chII}t zdiv%Q$`=I;pkar9{zAx$lu=4{?MLNTTqtqG7(F#st$`fwdFgNizqnLRLagGKa1L3( ze}^iruBsqCPZCktWxOkW+3eesT=EMICf?l;HbCSSUVp^G^%)D7piBMbvitxTRlfj1}Wl`I+)6j6BW}Z^aOAH9pFG zK^S+@X&Mzv(-EFOvBJQt$g%&_DVF$@NIaqjZDfNIZ;Nga8~5S&{3CU)j~K@C2;=Vg zN2k`98XNYXHuoi5Nk3v7sUv85<{vE*G3mL8cAEP}r-M=g4d~Dd}E2UaKd52$7-n zX_q32mZY3|e~+IyYTi#=HGO{*Q7Zk;Iq7nHV$7LFMtJr>OCxx1Y(K{%;J6_KIhR^S znFZPw^dy@~5Hnbn1JIW#e*ij%i3bS80PyK_L+(y@Su!sAq4bteld5O+qx2+RG#Sh% z(tp#>?-~jdO{nSKdGckIA2q>0pZ9)s8C?0pIQTtxBPjrbDXDS8ko$g7?9B=Rq4t4; zuVxwKwSd^GifwzC`)BZziA&7U8S1F*(73eK5$1ZMXSY?*mohlK(yHm$?uP$$mVmM} zrBghB@9TIIzii>JIcb{S?LdCEzAo=2y55Zfeu9WDO8a=`9Y-Cez^CMkzXe*Cn_H{5vR zp|3VVsH(f+8WxcN8Dkom+QWP(%#i1-kCW#tf`jL56K{Mtta-|(7L->?rv7V&w?zk}RI4qD5GIcXh9tC~fjp3dT_rJJq!GI>rJnmpHn zH-2PbOYxbfo?fJO%9N{SYDBDdYNf1ds(q+f#X;TnK544m(?;jZ&ei3eW?@)Ia>$TYZfK%^YyK##=^5Dv>8@}qLdbcpAd&_o; zf4S&ei2zh2*dBIw+pe$&1cDo`T%(Q&LvX~s1$x{%^yRV-ix%V{l5$eOz4m?gXS1^f zMmBGj7foe1TTFI}^RoMWDTc-~nfz4>6)97{)|<40pQ&)Om?tIbJm#rYa2YYL7p^5t zRuj56Z_D)S8T;IJ6&@sF#zTflTux2QOG8{vL(EG{Tuw{OOGjKzN6br4T%PrJx2=La zm{Q(|aZDFixUol5QMJNAsbirr7EIt9+j%PaHt_xH7$>vjss@I#w}G>K)HP3s;IhZa zEj;Zt$qLf7#QAR{%lg=gMQ}k&CDpP?(lq3A>8HPSKKnGr@m_rSKT2Cpgw2~qY)DXS zmr@HKg)fl@_#BIb|11>t`Vey|D&&grnW@|YI+4r0k4@?_mLw&6&ZKWIA$)XRYnd}w zc^tEOS31m3Oz!v?Cm*Aa#h)hJu2L#;UUV3u;9w{q<1o50!esaO4N#ueDtV-OgeFmS zp>?*r@AX~n4=G{8mnfzgDe8!<&rT$+6H8m8&3;`SrJMG?f{g9+Im#KsUXoMKM)|ai zDq8g%>c#ciEIiXnMxR%#cz=Y{duW#}6e#;$7L2Itcm?Em;;B47#=EW0SVA4Lx#FBb zDr6Xyhg;dVur7M8L+pbE{svM4bV1clBwzjBTKq9$=nIg1(%by{c4tUH}O^fpo(5_swN>`nSKNUyY=;P!200X9vubrKZ6y z2O^ZYbP;DZtY6jn`UeO{g9P@qd|ox+T57fX-}ld-kCnxpSIxP&s%m@rcqe%KKzDb} z5vztnxzBrkiIcL#pz5i<@~!q;t^oMGNw)k2{@9xXv;4_NK`vVkl!_Vyq`opZmVGZI zj>B;*{y6uOD$Wd1@t!6Lp=w`^JV>cG(0$|4ysOKS_ED@!uZNKLhB&%j)kg9YOeN%F z5qAP6LHBm(r-z>|Iw;OPIv&l=#ugTzV)SJte-t-EbA)h=av2k(x2t3#w{&k$hGY8O ztYsNT1H4OUh$#JYPD(Wvv6hX_x@;-vrv+Gkx-O%3pvS|gh(C0)7?JI{jGr3rT%v9L zxAZ=3Qxk8Bc|dx5aO!>-ik2_OoaimVgP@0D_?a*7(Gwyr!|oY=x#3NMpW!V%FSl}2+cukoYx*rtaW1KwFTkh1`)mr!C;TY#xc#F!E`i# zdh*5~gip&LWVN&gdsN3Dq_DW`S43&quX~VN>ddSigQO19%63t#W~*r4gK8ZILu2eS zoWQe{Sl6keHpXo%w;s_S_9XfVZ^c<$NbJs4zILjY4N&x#4a}#=XTDb-$)!Fgw>w|7 zn%T+#NhwvthZZDekl63w{Z?fIoJ;ZIQ6jgR=yvC{f_CTEm@`}Scx3}-@1@VvI@2Cp zf<$grLBa=h5}=JT=*3+Efn$&`nrx5|lL%6RL4sF`K|Oj}~CWi#WeW$Q>G z{+TS|llCXgzvfR`Yu2AMX#;EZOs*=nQ?^<*O)~w2$tyLq+ixO1dO;!*?BsHeV1Jn* z@-^7ZmdJ;htz~kRPowydwFdEcpcH7tdws5YgO8fZVL~Qqc7Xhp$IjMc#*DpmHwuEs?ODbT3Gtp zi|^mks)LEMJGVM-1d>dhTDVi%JGb`**JhZtcHEXUOrdTZ{SQLa@Dt~_50zc;h z49)r_Zo$j@=6ag%3yBS)`NzQ91=HH$Y5#ik2N?X7LThVjfM)` zrcZ#A?~q>~ZvnMZz)DyN@2+2!50$?U*So)N)TlD3SyR2qMygjAXm7ylA1YBBz_C{b z_%`DCorYT$ccxO>)>+!*Z-M3XU?FX3iin%|PDg>6&@o@%!oQ?D4=-A0vL`D$-UYJ; z$XbV&$U6k`?kTO*p_o6=Vwvl_RDA|zMs{eF-tDfAOgQ=Oeha*~`*p_~of_R0hKQq!{EJvb<@BY&;!Uec$R z?%Wg|{=SPrc^d;Nv?{>BHDpgSaJk#%V2v1Ac#m@SZ@84|+x<>`nAArg(O&h`*kG|iRfxg2Ak59q7nVh{r> zeK!eob{Y@%0E^j8z??}LC}0fNPk5bzpft- ziXo0mAdXAnyu|^A^$ZhIS(+!jlM(2*|h3Z$g-4@V3v-YmX1E` z7GNMSu$fm%ETACk1}Nb83XC1V^-=){gADgWKMufj#X=OKflUXwDT2v&@PwH34&W z$GWjnxF+A0sP=ldM&$H%L?&un&1}w16Ry)*E1RpbN~7Z|+NE`0AjSB_xC8u0y3YoZ zkGN*x;1iUa9YAMnwgip^#uNh2~I7i 
z!@kfsbxtG3c=hDx2q~j$UjV#6qal#p>xDR{m(Nk`lE=!SYh&wh4WmOnjl>m$xa8>L zcEtW`l>-(ZtJxwt<7Jb_IYpkDPy_pgsfb;dGh?Rjzs>)SBUK+^ z{l*>MyVUhRsh3!wUDjstl%o2Y={s9mC4VbL=DR9k*Y~98b4}sP;-hRKSFRRyvRFh$ zd@H;IPrUWj&LKX=)12Q4*Mu3FZNTY=E0pHo5t2TB{vK{(eG0GrvjW(rwgDmNe!%GC zBcvHUTKH^|Kn>)%FV$v;NPuB}lz>$aoa!~j0mSlM9vFcgI_qdS)VL6~_tH%SW`~ya_P)x{u!xKd4x=qVt6vBHmCp(+3HDZ-yVp62kTayMZs=5*(>EBodR{refo{q6vC0 z*VHPw6KzS-eb4r3lAEO))WFhYU#56R8L1|c zU&UE^m1s3sm7LA!*!~+!Eh)|~>xD*XS)wwtdquJsdjb~pjsvd<_IG9%iyPCA7%#m= z<(oZ}xr=v$8?-CQgUKM(;ZuI5`1P6_Hk^!?=?jg=@|*(lTK8B^@pp~$rB}cit%rF% zy2n}^ytpx-UXHfzmQiGXOp^0 zG(X$3s8SL)H&Tbs#*vRtfbLa!QDyp$gkPA~Fa{gFP6xT$#pn&@Y@~knj9~3we^)~M zQD*FCnGpt`6u#H|CA+HnD@>&iR#*GoO9S*99DQaaBv z(`nG|iSS2Lx$rd&tkxyMiSNkpN2y5gN87pZN2Nfc<9E+8Wk8MxV!9wTkl<^|zh5^{ ze7`=guw7g+`H$wF5q~rUgkM^%H>tc|Z_=|`zl|R3s{KTK$ALeJ1v-iVog9KrB)ISg zlHHdds!zkOK4o1U{dgIE-*=PsVK^gp6LPj%z+nu05#F6UKNsRLShz9UKZbC^6=qn} z8hRiLkQpNCFK0l-2HC#BPyURGdiZudT*EVseje`rRcjH@%0JG6Fb52Vru!QyO*eLM zvXwyZmA}{Bowol24t}0^FYZV>Njn`EWTXS9q#oN|he?I_nTbpH5MAzjmWZ3L&DJBc z(e);WrH9rA9#{o_Qy+UTYaoj=c@;5p9QJ#Z*|K_@oXc(yB{k}f;b*AJal`%F#^J64 zubI;Cv&wizG>Um$VNb&-o~i4aB|bSR(LEpzOQZLcwM!63$s@AqY1jt#pmPP4wCYy3{2Z?aDVDPhBc~N!`IrL|TcK(=R z{G?)|Oj*UrB*?{7!{woAt_$T2%Wp6);Z3=7xx#YDD_eWDuNf(5f-1EQ0RynlpQj!m^mp)jFdtj)mhsjiN~f|dzrxd=e7TKG@lHWA*z01mErmk`l)LrA!ZrhU(#JJ_?B-l!Lq)3FW_GS><{PDT^ zC*FQBv|ff)O#KjlQgZ?jkO2W~##QJCGm_L$5F>?tAnTfJJ^%7Y0ryCu9P^1{X;P=4 zJ7ds)8Q8hPeGRb0yJY^YaKBudQ|_&3nv>h*jp#zZ_Co&;*6WUfYdDML`SqZ0Ow9vC z(H;*NrOYh{Zo%}%pYezEH3NYA#LK29QgLvwk)TAgg9}mmEF(b|yl1UfjVi`|AIs`q z6sLMK4CVQjSNd$}LXzEI%d7}ZpirHb&7{2W1lhf+*T^-$dDMSN1~{deE^yG}a8;zLV-#jEj$uIo9CA(?4>!M{=nhL5xIkthhm(L~*>hK>|c^B1CZ#M3FLo z!=D6;nG-h$cd{V**%194h<+|aKM$h+g}X(NMLz^HnGv&qaTv>9F>u3_Nd?Uy;sI(- zcTO4<@Tv+8BQ6V<2a00kC%%p7RNA6PXO4s8mi-4L?9khya(3wNV&L8ZQT%DEUKuxV zQ9IuPE7g2^w2DsD>8sp6)TyevM#V9Zf7NKct8ot@B8+KVZs5e{Vv*jR7Wy_&sUj372 zwI4X<2%ZegE|9m5N3P!n&mbyXeL!8DIO=0qv&B29{if$$ zr8Iz$DRMT#`tr8Xue8;xENalNFIXoUu;D#hsb<*U4qBelYYt#;d!EGNQ0gbQ;2GFb zhb)Ymu%kyDM8{l}N)fh}w6{BiG}~Qz+gzXHOvz{#A5aU`Otqz2PKzl&)xExaoV@Z` zsOa7)P$9r?>VjnYlY^jA!1WN>_8MEMp+X$y%G0;gd0sIH1DUy#AcF zXPa5UoCW{C#K$x|yZGo;khKT8!cuV5s0BmWZXnhO^(6oN(gjU~+y(137AD!#Puue@ zy500e7i{#e{GSl}gz7SUQo{|>gt4NFhv_nX(reXYk}NUhChink(SoRycui?i4@ez+ z#A0+MQbsP7`zj;97Vh?|&|FR$U9RlUD1m>Tm4Rqoa;sM})_-_ZVDv)|*q1~ZNn?)r zVzVeK3Tz^{l#xez5(m3FNx6ZdMKqT}rW-|BIC~PVw93d2#h^S>U+c(!T7+@3SwtT) zs0C?d3YOl}IV8V{B@&PGTtMS9O?JeoXyt#5@EY`>X!RPj8HX%9Ml_(is$_NY@6*SB)Jw`qzdv{dNj#L^zMtGo-LZDEMhtt6xVcvJM{10aOLPu2 zF-b{E=zaK>;QCS&7@x10p<-qFnJlp}`=%P%G5U=!lOnJf_G(B>IeNJa%F5^R+^L>> z+{87bAX)U~#TUTB@$30JBf{n%IJlQ+EQKnOqt!qHABL!0`;$4edqH*bE2}Hl74<5-E33!e z4jA?!0jXHlS5`Y;j)p#@&8G|y&8MX9elY9J@N)Z+5J2U%GK{8iWp&4WH017H!?!uo zLbU^Gle|ceKKoNu@m^WkHqA&X>?6lU`cpW)Hm>uT?gei_hoY0K%6_1w#_0Hq`IKtT zG``$hLD``X&PitrNq?qJCg6&$wXR(1mhV^q%RIDya3>?%OcC+~QtLmj8|T>mydItT zXO_PQukEoOzSO$^{$BL}fQ9G+<7QWYy<0}hEI`-#Yz83s@REad1{v<31G9tAo-LR; zH|>fj^2_)RXQ$;b<|MtP-#NGK$5b-4HJl8Hp2jxXYZ}QutWLc?^frC}xfaI#WZ2_b zQVcQI8?jP^syFA~1{@Q$AVOQUAXF_GV#?b<4KA0mK$6{nS=2N z_csj^64D(a4T6MH3kXPvbeABKA|)LQ2)KZBD&5^BAxndFgLEU^y}S3hpYQMaV_tLT z%sKCK?%lm}r_LECDC)2yS@>-E(cTW(1=mgE%&ViwN)2%0i}}}U*-vR(F!RMfDGXG} zZQ8K=eTnT{x2ILeeA^4Q6l1z{N*Q|%FK-PzCWY2A{zRi0`?=0;Z2b+iIVZkXfexB& z%a^Q9f60Drq(?ucd4mXPW!l_mmgYxI`!hO!I0Ia>@?z=mZ}LG0*-^g7z*#iV{O|Hj zNpcUV+qZrf|F5CDzuG&yLu?I0-kS;^p;0P+eq}&&G>)y>%6mg!9z((#yJQ0oFL&Us zei+uG;`_O5!@GuFS zPnk841LztMK>IY1Nwbwng|b8S`(BVYsXJemAyo=?f`$WhN;P@RiIm$6iS7dOX&2n{ z$$hF@Y4wW0eJ#y^o=z#L^R1I|%G}%1raiY3R5d*R{!Mbpg3ob7t4)Iu39{9zvT~yv zx1{B{p?*b0ek~6RL%xmdty<-U&p+Cwma-WZ6@umq#uAk&ISiJ>om%EsCNp-dL4Gi) 
zpum4!n2$&?NGHN9nc3u+pBm9%Yn>ahAjPNx1-Ln!Q&o#tG$-)6Vkw+>QIThORP}Ru zDh)|mBQqD;`eSYs!knonY&|!H*LnnxTjn*1QdGz%99%x5^7N)0V;aIQ`b%lGtoet-E9mDVgagD!ZwrVP-2IH z0(=K0&sd|(F4{?kd{H?IB5;&W1dh_fzTlRKXdAq437$3yzpbvT4*MQ!@Y*akmROwF zLbH0Ih}A*avwGokTtmKufn(Hw(Ii8;M;4e-l;dQ4Z^-8{AQGqhkHtuc?Th4We1iOJ zydb{ApQmvRR=>K8YRf>@r?tt`;pOaUB$m5R`5rMOHdry-L+3S>Rpo3x^4+xz7@enf z8M!tL7`c{#`L`~k^GAyR2FlrOILq00xy#w-l*`%O{>r|!V7hkp7}-Uw7hEJ6fqe#TtF zYeG;lQL`I>UkTvQa{T#+!NuSnutC3?d7tGD+-Mb56y=X%e=FdRIf`!?U1W71a#ISc zXV==A&#+H){9)hbo}&a`}#<8>(#>i34h zphAYhYs-A!(VR~M*D*lJ{Q#AipS)2V(n+SAzfqFT8iX~tdtRs?tic^VlZujx%Y3BL^33vUuAyotO*-jE$UXoIO6Or~JsBRlZ@ z)^i^a4yK~P`^??Zn%6b$StYgYDY~;aCS=qS)CUhVAY(iC0XPfQ@worK9psmmwe(+A z0h8WFv!(1t^9C)*LdkvbFa_prVEz@v{f~X{KA>vw9>tJ!<&Vzx#bEY^Kxg)bxTIju z_pnE}iGZP5>Eyw|X7DjB zVcE}k^$z$eA^1*l@V8FI*A5NI(~n+S9*0Jb*^_huMS{8|UVkvhx^Ul4-byI_>5d;- zsNKc4x%e5eu0J#t<#lnAUZBE?=dz5~}k?p8;OyNKIg za1*8OVO$!Cqf-EdS!Jx`bLy2Q#F0E9F~Qium}M85Y+J$Ipy}Ls$=EL}FNQ6@Vf}jk-Ai*< z8kh2u&SB-6_ZiPXB&I-D6#IZC!BA`CF7(1xi^FEeO6@EN^vJe+%f1io0X=#BO8_ai zEp>1y4q^Vkb;m5*8rM0q&oMmd3y9jsvg4Wx&!{VcRYIt+T#C~itjGpS4>XqDi{il9 zXp7HW9D3R#oqxOCI5fQIE-fCrJ;)Pek=37asaZcX3_m8sue{HLr5c&OT-T61l6i3x zzCyk|cLBt^fSZs)rqDxO6>KYd-Gp_{uESt3MxLAF+An|?f@uL^n6us z-@4rHvm*3Bo0Fguo0jk;`}E`XGIB^|2RbBCd@FpDc4SCBB-;7X1M=M>22XIt^M{m3 zZsDV$@etXd8Rt?en}D)!r!Esu6`ZFlOgU~<3OF$uDAa!P+*`&KW2l|uM=e44jNBc! ze(w*$_fY;f;=Hb&5jI<;zIQVpW*RuF=j14pI$mGE+vn@C@xd(|Ebn!wACp_<&>}3iG}?Ww5O091(IOhaXs#8Lb>Y43u+jY!7#mGSh5@w zV>vb{Ul{$PYT5;QjO5p8uRDBf8;v&9m~9pEZ#v_>8t)Z+qA3;d^c2LIcq6Y+UU63_ zO5L=IS?e+pNOt_WxOvj2V)p%(<16UrZFxYq92K*R0z%$8REjLdSM32R4#2wM3%DHCw)``q09098Sy<68DO5vh(~uB#;J z)+u@i@3zJ=!x3xV?~-TaPPn^B^n70WbG%NpS>UaL)tn38h{RDmZXsv+!eeGNUaMF= zrP*FiI)9|RW%k?De2B{h3_!1Etr*S6F(P&WFH9xq2L?F#P-7{ZaGQy1!^#W2fqY#1 zcMU1?yDSh6@wsxXm4|z$E0tX+?6x6hbE76;A^r2mzWe4#Z7-|_9pYcNxjPhGV@szH zs;m~~ohmgpIjI{qIL0Q%w?<)i&_~Bio8gyHcv*6IxEXq2FZ<6(D&C4ceH8p(LNey3pV|uEeO!VGPH(ULEi5`+6D^N1U7ezfOKqZdr}`n=R7-8?W76 z9ruA0cP!9rdsz--W;u+EDpjQ~?=2t%nxL5?Gx0|PM@|tUz4mwTvRY)t-rv8{r2rWN zh4vsyU<9}+08R{n3zp70l#RiY2*C92%=%#pTVOHLQa0c(Uq#Z-z1b^<4+8GdYb%Z7 zyt_9mOY+M#2^0Hg(>o?Nbq{GR#NwZ3{O6m0JRZ6hQHs`}qs_>r)b|^iUdwk)>wS1) zQiLAV&RJ`C2% zpp=q6I87wj;hfZH$US7H{S>cMyD6H#IP+MAKAX`qewMe#`=swjrt*>V<)JMJtxtY4 zfhV}<$E`khy!uvhP4NcUSUp2Ypo?v+y&yaO8vnbSR&{5Ee>6Lfa1|g ziX>V!yVm`C_591uLZ!X0rg4+Fi@A8t)m1}Ui1%)&hUkAxq-(Z7O5S+f6XER&t`pZH z^qF7hwar?_=9lWotc0YvuL_iY*??*ZfVXReC-1VI0ThTr$PlBk&ac~`zEq&1-a z<&#xnc8u_@m0|5v{Tcjx5qPW`#^)crS`U3&FIxtfH5>_S&%0WG9DQ4LfBmg7np+bP z5e1jIL1(RkUh$=n1V_5G$`|izMm@y>4n%gLTUW&SJQNR`6jXkHjLa7@GgN*8{arr_ zrbo(Y$Oe;MVDdN+w`G=o+Rv!W70t<~I~f12k}lj(WXs*H-hnTdRHJK@5YAP^Dy8o zeci`)uiY@YkB4kD8^0b>xHsK!A}p;lEoml|)1SW&iC?H_Or4c}r(AWB@Umvb*=6IT zA@O_Ghu&f0G0xeWpAiXWQl)SCC3&p*d}V)F0y@K)sN#6GBTDufjdbz*e9K64?i~-P z>7S|H@H;hqo~aPhje5wjW+9Ku#`hTQq#wRN!|NJ3xILw)@`lYMWZQf-e+libv~Mg8 zLnzD`JEf*67BEdsNvee}?k+a5{piJH@Wi3g`h<8=Q0e9GK1UQBrOo~-G9RXGO$F=_HTd&?n2g3NmU=@leyn6aOwsD`W7#RXGIi7fP3_j&yvCu>*A6}#A9eb9WndK+g5o$Oy24NMs;V8pz4009t|Kz2-ev-Doxtxqc3Er;uvOsIx(_eCSv%U&;|KF*H7Cq0`uR$^PTxn7T3no_ z1yeEksxY&FsQ`cINnt&Lo+vIvIAzIg_?p{H z{+Mo>w7wL=WM?bV?+yf-{2hjSI@LxvmU#M<9 zGyt`w0xm(_K^o;z#9#XWR2MK~x@$2|Gjvd)O}`9itlshKNa|HZJE-&0qwkL0y!^7x z9JrhLRpu2=AZ_Kv^QZS==7-UW=})FU^RNcs%Ojq`ZtZMk0@b%(k?0T*eu0*MOW?oh zK0|vSh~D9RM2zTIS z#^AJ7o|+~t@DK}>H6MF99EuYg-yf`0sKp?6J~iGXNR2>#{QFenv74F9gBn}fU(xmb z{Of?NF5o=+maZRkjFtMgcPrXdF1$Kz_t|f8$8Gq?-f&|vgJOhqm7}K#xcOxB&G&Q6 ziI-1cT|zH)%v)3Y&U_hJub(jAZpOD%=(QO)QEwEzi?7c8eCU}@>DD4|`AE*vm^Yea zdz?aGtN5gF8rA>_c|;iOmXGu-jdi0*p~vYvY?YQ2{))xVSH2Ac=0!nrroy~2Bh$mP 
z!3gIL&>h@_lmCT!7_~UcwL2CPAFdj>0!4y~k-#|>l4@%Ym}4TXFEVz%kxS_+^*|wR zlU~M13Nh}+O^tAto4M2;tYZ5V_|E=fJ$Vx$;PH~^{54)hh1-7$UmP>&Em387lY{s44{n*?y7bj%(>C@l57g;}VDWG*=FTW8Yl zSnF_xtLWMydD`8_-$~YgfdN!SLV3~^C+QTa?MNKkdp)HU7yd z$b{nRTPEl0`|YDbuTxbgWb0-sXm{diSzFO0%#FdY1O_Au3Yn`U*G6#c@Ak zGb=L}76ge*#wLa(3&cd0JgMuM{w)x>hs258b4UrNIU=;|`zpXSyMLu{u+^WWY!0ZW zOWL-?yXzFa-4izYvx10>*FK z(5NMczi_2+*>QT&YuJ4q;pzWUG2M}u5k=NJk*3WYf;h7cTu!5rVT zp&h-!bA-W;l<*vtvz{y@kSvgsE+&yIP?9dDkStJ>F4B-L(vmLHkuK7cE;5iVGLkMb zkv^cYUSrDrBY@2_;pM0^Tf&(`+Jj)ouO#<)tk+NE{_()(%}Hv;zzW%(_OCD`F6jda z>ouF)KULU#7AV1D31z?*+FeEYQ12a*`IPs$# z0kg^1ZCyyxH#3~nx_P!=j?(FA!hOZjS{2=+_9{q|eKcYC+Q#Cto#+B5zL%4Z@eM$w!~huFBCVQ5I8KSa*53C{&H;@M=OOr#r{TP54da09RT=GV57_~0o1NP&9 z{i}MWxo_8yg4&fl!iN}PYL)MRBG{cTLHZu|eEKKu6e5mzf4CV@cffk}ZQis?T@YSo zqmS3sY(^S6hFecI8pBY4KO|0ojyx~u*mDoDKT=c?$^R%9_4=g5f<-l?`Y(aR+zK+) zgu_4Mi(A)A(hs=&FJcF)W62ohWOv~G4{Vg@@4H9_Nj zh5Fex_`fH6y00Wi(g!=9&U?6m^Xfm&pG~{3#3Gw3jQ?gikd9ac$4E$ZW1YP=r{-5e z(fOexKHRcP7PCJuHhTa3v-Gx8En*ykln!{Aq-zCC@g?vY?#+H;nul`T0jzm~0Q6%3 z=LqWe0_3j{*J|=68!85Ldll;gheqIgVGcWJ(?LOe9H5W(roKJ|F*2O@mYh#MHOw6A z)>+kPr!OrU+mpT0-@h0TA|aZBh4E+5pTMr_gMec%|D~YYB-)J% za@pmd+sGiVQHdDsAo9iwS7tJh)wiu@wLl6@@%7lT7+u4ogTZR3!@`%L;X&^h{9!=B z68R<_ptJrF|6$?~KDh|3W?xMfxGhBHv8k2=KUo`YouEGeH>dt``0?U9*>8tM#Pq5# zjf3T1>k!7fRlU)^Rg=0A_G~$2@fjCojy#D!MmnJ@$ttuhTR>;0#R zZWOnib6~(D)`zAYeZBjX+M`tsOVk*3$sH7B2^_nf1Mk>V=$tA??xzG&~}$1%K`*YY^xLKR!=4;9+hSKscksKJ8UKobUTx{A@mkG5CIY(EoYRiy~**rp~ABeo#?3`!_Q_ zs28>l1I~`SZi}9ew&AM{~w@SQaiRQ`n3Xj@-4fR^3agn`j?KLc+)nfH3gykL(59^ewXyaM1OEcSRMmD|_tW z7OL{OfXfkT^@FL~jm{o+VA9`Z;cG+r_`Q;OBBBS{32gd+D1&zE^_rz+_*uy|ry+qtj<%Y>;H88yDwgJ(|M{-i=oCghq{f8 z>DO@!1)RX}V784Gm*+d?jqdfofaGnyn_1`j!B5SIFp(mLd(>{qs@It8n}Y$$b;oDO zH_(J*6R*X^m6;~pq*GY**a2I*Z}+m@GQVq}SDL(pLY-oB9?E&nHc(=J%(U*8Enw>n z=V{GtVjwYDr6Glju?Bd`BTz6eBD{5R=8XpCkb@xDTnI@e0jn9E+%IXESJc0eb#a!B z2DzStP}m$HX%!u-nS^{>04PFwCeFIi@HFHg95$CoQt3cCWVY@Q`M^aeB_h;rbO%lC z(Nc^9NKH-VJzOZ^iQWC`3@WK5aD=sk;Z8jJJjH50Ls> zCin%w_=0$K#|#*f3dBdRmyK1WNPMVrxg0xnREXHhh3ZaG>RJGzLti4R4k3m4NQVm` zKnkc!o-9FJAb>04M;}qTd;pYU9x{sDGR*e_+tatoNEyzeFy&?B^cUdcc zjy}XlEw0jdB0leq$g=FDIWBHE-R(=b#pI~>#@OHGxJ~o^o@yANB&UbrQa?PyKU;p< z9C4!=To@>xxy$bsBlK&Kn3VWeqe=3~YoUtidBrm5sm0m1+aS`j*K8ZO93jLpFmp6g zI2LOFsobrQ!sQoscq}l@&07mKMs0yR{UWx2i*w+K&;`%YlpTu6bR#bLk8~u!qX5vb zi6D39?wz(NCZX$Q_f7@iz|k5oG@Lbo*8tSu$FK4xEYoJ!A~UJf^LXdeqqxZ&{Wk;O<~&oPU%H0o5u~ZkfGm)>nu=;a7+w-z{r& zeTp&f+eG7bQtqao&06gie%NVvshp%3)w4TBDMD8C3vy=gL`;ETK3mL z@3*bL4!Z-M0wZy|OFV;aj!i=f6#Zq`cbzPF#7EtqkvbNE4sIp8ul7e*!u`*JhPF)t z{CwgvvTH{iC7W2klQG}CX-xb_PoouNZ(Cg z-DVbA4Iw~0;Xl=sIn|cmDGOYyf(_fCcRHbWdZKqi(L2APcgCz^nB!nUaj>$&2oSXV zr;fim;G{ody0HVIy{HKY{++;vch@oC-w6DeNDxdUFVRT6&`5rv*=3xeGdp82pJFgO zV=|v&GJ~g@ono zbz&pH0=-Fxn=|?xSc6OD%*cGoxSXg%@qre$G3j?DF1EUH9qsejyA-1ls z{);JBr{kohkcT2?P(xwv5-+gYS$_p!zJyxqwTp_?;aH;|7XxLN01T${YZNFexCfGE zhCHzn8L8_{ecqpNE_aH(p_Mi%nqUx8i(|~3)7<5^C3L5IP?q`?jT2+OL3BmoPILu1 zdT%>^N%#6|#WBtp!2=-O318G_q*$LWFAtA@KIeY@ zu#4hIB2N48FB|9?ubETTf-gl*zGJXaMQTT@NX*gj5aBIw6TL*#2p4y1*^LLieVtOL zo!_Yn5&9x^Ue00_kj--Gp21T7Qn}RNvRL+&mV*XGFLl?4CxoP!3pA|$NFw{Bp6qtB zRg$Ako+#8tdCsDfSpKpn{qxJ09$K_@I}&z>*PaC~Zr7@tDHU<{wW2D%qYIAzlIk^n z#?}nRQnz%*CO+>UU;Gd#{8wNx7I82%DZR>{Q0OOWzTkNxY`34bSzvdds63T*AaEq6 z&UOS*=GqP+4f}TYr5lZZ^tr|pYwG-1I>tD$7lg?(f`siZf1Yo3*>#UsNedE6Alla_ zOr2=MoFv#8YEZ>rwkB`#^9YR#^4t&d{YJG)JpMhI6!b4tdXamP|09Sk-_UGtvS#E< z*rz6w*VpPXw65<8XS1#04qu;FhF{iLh~+DA ziSC7b<|ycueUs0=a-zZ^I?tQo^)c$ph(4SyCk+`qtMj+TuPp zJd7sBhR&LjNMyS4_EJF7Ve7{gNx}|q=&eow-KI81Pf7ZKH2K z`VphWIGz=I$Gwg29WrdgMXbbO)voF-lNxj$C=3FQ5GLZ53u!Q(LO^ymu;6EzExE=H 
z7N`mmafg#ZWO7o!jPt;Xkl|e}Vgr={zpV2>E>My5e^R`x&gk3d;__=)gP4pW8jRUs zBKeRl$=oU9Eh7QqqhK9WiMZ2Z8jNc^1(IvLE4U~SKM>TO0AjQ!Xfd8vf;WN$3$sdG z#Pgg5lJA?q+E9@2&keIOM>0@aY>j>pvz90k7kv2v4Mv$wd`Wq4v?!ia(rt9G1yN1J zoe{}HLT(Vb{HiC?l8NCx&%ctxF~(dw^>4Px4w z1nqJXXS-mxgU>buH0KL=|Npg3|5Hh<^`A;VrT;;&?x3qr{lb8Nb~k`#J_F4Jr3JYD zlU~XH5B4_Uf3TW^m^;c`#N%KZ0DqtA`TsM4=YPO;+rBOPKGVShxD#D>YDzUkv6ei` z&srF4cbD+}hR86ko$=~@1I@wc8nko3?+90p?-P`gUdSqqH=+6yK;_kHgCZbS_6`Ze z>>umaGf%0t&~o*QmKj9zi8sj^J=vc3<$|mfH60fxdaZZ@0>eYx)XN-V5CDA_SYH=A z0U~s`OfH}ey+D?$2Wqt??o53fKv4yhrUUR61tTtC?zik6FwP#S7>OzWTaA0%IRRfdUHEhd zUpCZHr;@f4IR!jg2o^L{P!OrcZKK_VC@OG0b>04Ikp9mI*rNvGb90wy|6+Qa96-)k z^ilc*yHN96`6B4r0c81Mvqh*%TjdYm9Ml?j$pdM#E1p;K*xkGA>FJWP|M)~fJU>%= z8S#pw!sJzw$bD=t+tVB%vxt}{I)Srqck+I$)6FGHKSUgN&ZETms#nBR6aJ;54dtd! zB~=NSOen=1Ky1?IHt)wY^^5?T8}SO@Q&Z|JbQY?1u5JwMDIu5a{~%6GYD}QKFIq*M z5ns}+RerK@osE?PypJn*aIv6wZ^{8HX_XSQXVOf!+yy?K&8URC=LJ49#=s+Kvl6r+ zKL-9&&$n068x!IGZc_IJSNnBh?jVIe-!Y%he2|p6Xm^o6_)g3@Q&$C8waJuZ`Xh^I zjfa!KlB8@2V+DZKo=tv2JPQ>W3cRkbvc9Wx_Wn;qjtdmw*rZZ9J3N5U zjA=yJm;--q!?Yyl?$uS!Pyq)J;4xw!eA1tgS;c%m6Auzm5K_&iaiH?f+-4t$;Aj+! z)%gdw72RdkibL|MuabBUbxZf-TgBu5P6aHsmw%oZf6!GHFz4U&st~KvDQOWeaGm*c zak2#{v(wy(xS_3OARwiKcf_Rl*Eak>6~i5rKsgiz8G3Nk=3UMC>HmH_vlsVzXIARg z`duXPT*0j4_+=3#QKP8cM4zK=r09W)5!M~CE$QWcWv_CKquJ8phn@{9crN9iU5+Lt z7p7^}?eSB}kNyTcV|YS0Q|ocY>h;(QsS9{Za75#8qWAJoA7~gzy`An1|2}jeK2;R` zz_UsV#*-IG!?PN*WQ|>Fk!OCz2b0k{&Y@xg zMJdO`7`)ZySGe`P<<^JlF5r*fZyOnrp$SA5 zF97Nk$3}prF#`DOTLPH+@2RE@=&tV+B+e~_<3^|dha-Wlh6rtR{SJON63beX=) z;5vBvS4+nD*E&W`Kgy~w;YdtJ)w#>boJz=?O39o;WG-cYbr9E()ZrcAti?D7skiKZ zuN2QK8~s=M0@bkp9U5I^dexV91vSb_r1goG_y?R0IENk}iWbU%$fKq>XJdr4-Y1F* zF2}2*gjZ$^iu-NXJG=ERk#h}(a{~RlR=B2%-VyYx%f~Gq2~_w;S&JInOwaHbE`8k@ zeEF*HwlleprfWKOvd+y)fY8mxz-;#d^lzRE!L*W^w3qw%#wm5)1)Zv%c&0JbUszhG zLdWO&_qxLNv3qlcGhD5HW^?1ug!YHwlVV!6D4GAr)-#(fsWK(c_5Ycu2zAl<*Ldt; zdc$BG%Ge*B*jN6_GIF`SS@`p)wK1aBMpeXcY{6&L=k*+qvW@D1L9QyHDeF`Ax06@b zup0IUqA7OZ{(Hq((}k+GG*8XdvP8S^)d)*ty*pAB?3Y*{h^ZScK;vVgp4{&-?&O7{ zr^|_HOes?5+gAGKgYku(r-oWO#oEckOm20=E_ouKAD)UX=e3%Vf&CIAaZBW4W1MPJ zaOQ+^}fVXa*rT<*;!f1{&{dPdO|FH zxI<}Ytg=$yfs)eo5DUv+**9`paj)ej8BV?iCB!aisK{xNC@CG!#KoGtXJhd$O$@OL ziVE5C^ zi#;?~k*iIM3dyfc49Sm)3bBHK*tS6-`EqaMYNKT2YG1A3zIn#tZTLp+Ts0_Us|bXr z1anc4Ed`4#{tbxJ?#RZ{+yKh|QIvDF<9hU`-|^|wqQv;v;rh5(dQDJ469{YqRuF=c z&YC>PZLOR`|nmjP3dK9EXpXf{MT}>|KV1RYW_p0l9609JnkRuUhbxd zW;_+MQyPP3qlWkfhH1^8NHL#G11U)Tk3iL61FEj^A6zf~;TOkWnTaGi(zx!lmY-|s8%<5qZ{fdBBbGm)W>QXi& z{s}nM0BkCc4xov|xi8lj01E&%Rd?P`o=i?5d3pw)LB$lql=Kl*>}RWcyU?b4kKW}h zK+3LWi+I=bqIA(s3@hMP>f?#L$AL6Dd44tSk(0eE`wu9EUdJpWgX=vaBw_tlB=hqTEJ ztVVXK3_L6E_glxMzSM>CG&}rrCJTHYcevzUjp_Lcbjus~;O*Q)TpuPL6?Z8TK8)3&@GeXKHU`7tq-Az9l} zv{NbEFfN>|?#=f^15<_VM^5o%X@Vh#BbLFPE6>?!AGSGr{?c)h%Lm{C@_yQ)8?62EK>`W#pvb^V4?w_N4W86-7$S%UdFK24OWZ zH3Vi1+-T=+CHkPQ*0Qr(3xX*7bii{Dz5@~jEk z^VRJ|1VN;I?`;|RdK_s|AAIK-hv}vh0Q+X8###Aew-^T~W8-;%j<6T~kH5Pfnonf^ai>;asaWfeANl@M{rcDH-%+=9leFM;R5)2l4p7bxCV?&9?`cm`0$ z>U0d`Xz`ft!e7>*^!rh!XN{Lnn69i%8c&|ZVQpUeP+=cc6Y7PmUchmBMQ*9qI~Rd6 z=P+e26PN>Sbsqu(ZEN(ZtwWg8=oSD@zl6Yl!PF1bi5(Ee(-mOc*y+1ZWvpkaQBRoq z*?#+~rM%%%#QT89=DV4VdRkkI@6ZDMu*ltEs;ynC;NnKBUva^5FuCf3-7?=f;=SrO zQb#_9DOzKM590G4xnJkT37e6)7-E1-RU#3gjoBO#5p?)1QnM)Fu(4w@(nQ2LSI_>Mx>kn9BTn~ia%544W{cNA3N>M~EwCNM%z3nr%>n-cLzt7NcLIu%7 zT!PHI=Fz%t(YmNvYF#iwYA{01FhT?|LtHRJYA{32Fhc~fLR_#yYOq4iutEf}LtL;! 
zYOq7jutNlKLR=KBJ`mw;65)M_9_V9e&We&^{tjWDM`NBxXP(Dkp2uuqsCy(+O(0WE zBvVa{s7{SS3(i0bu3#4OMaN2$werBkYsAF6#>5lF!t=nwYsA94#=;ZD#`6gE8~jwp zn>!=-5NuEyO`z$kw*j1q)Sevh-$5kff3Kwd7n|Dw+4SNLzHw<6j`Y~9D3flO<@w1w z>vU=UZhI1M}$XDC9{XtDkL(MfA= zP{VTWf!{;F!aK7|idBnY36p;GR&V&UKt+a-=P9a_(qXq)PC9#Cb+#kyNl!=BgR5-O z>P{;?wF6NAkLuz5s`P9uReZ0;GNr@(;afAoFHyzAha+V^?cLuvaL!2?^e*BuB)uCJ zO{DDyU-(Vm2!v0Vt3PS=Z;L!ZjdIrX)A8J;>PFtq+gP8u)J5Fg79hRV`rfzObP8Bj z)w2q+JbV52P+aNlKFzc1&-s)I62easpqFIYlCSR)G2!9<@RYL>04ewOVU0Db{T6@q zWIN9f*w{n_iYdBavgw_E#rZ_ZWOpVIP`z*Ha(V^8UZ)-O{_;xA`2X^7DD)eyYuLjlj(FO+9GUV1DBV(?3NFs%_dLHGZ8>EXS z3)tn7ad;ect0Nj9RSwgkbN+=}9Kbv*>wM=mVj6Sz&Tb4ftiHOzHJW-$193ZxvHZKx z2|>6P3HUx~H1?lyaf3GrP)ZL!1p_v=R?t6d+T)PK5KhJ_!%T=jcFBbgYq78fH*|e{ zUlaJgA@F@OY}hA`^anPZ5DyNPDh`$d4pt-%Rv8Y~5DwNsAM6h=J=?Q7*YByrlY*8) z4*ej35hVD4gmXd5!pV!TxbJ(c%mPUhyfGA2c8I9Ce!O88s_IbsBZ&!-++hIA-`?k2 znFYtid*g!)d?2BpSyr&B6Xd{UW4~bho;dvVxt5SX7f5)-_VnVJ%qge*jxNY&W07yQ z5()}ZgMuGHLeYPOFpxkG65K(;uEmzc3N9Wa9-bKNn(IWl8Q+(k4Zv}MPh(Ol8Qw#kChQ2h|gS&4^9|Icksi$Tc8DfL<_P* z3vxsYazP7nM+@@SU_lnl_a`h9X?6;<6n^`r7^WCf!s-n6^lhzD&#R1oFeCM|N77BU zc#%a~3c9ZVWtu%BK;QTr$h=Yb%7^K z&f-iOp1-M`@4N+1FSU$g-+dqu7uP+kbDY009leQ8FRQ9;rjApwgR-pO^f zqOamU-|7Ca?bp+-f2Xu6-e%+-tZyzc@}{YoRS|k7y?WHNX?eqNaL3s15h3URjy4wc zVJ>d75obdvcKcdF%P~WN^WX7(;g4v1q$eP1Z&j$fZ?5qu5|CtfTNfpM5xzFk+ZuSQ zH;IYr%hQSK62-Lzz(d&Dz}|jKO_hoi(Shno=@rYvi)ZSy8yKxulu9etu-G=o208nt z8GL01spDTqVQ&_ctu9|a^HQOBhxyT3=v1XN(*gHMR)ss{XH@QZ{=xJYFv)6WtgHuh&XuN$s&~ZTDgaiIF5!%h0QupN8TXjrjtaI zR`5eUYZrc81fGKeH!DIlpEXz(Bt+fpik;4;ge#U|QQh@qbmF0X3G6;ZysN!s&9Pw5pag!jjhYGjG7-l%y?XiQu&EYH91Y=UFjvZ>6@?^Cq`M(18R&?_9Iyo{I~ zb0)7SsYWZ5nea(anz(-ym)oG39}mq`n)r!N@SauYyr>H!6+a09@;v)G;ln{e+3}@# zi%B{3dsA^Wnzj7@*xo7S4CfE#+8({&DZYKv7b@?wk_-^^TfK?|bn!pV+^yoxA*#UM zK@M_JaCJWR7HaW0R&6NoZb#3I^6qOCdfh7p>qkZOcD1Dug+|>Un3@y`sqLaJN9s#; z&-AXYR%UIpNKG!oWgMEyTug2`)Iy5z64R~-u7wm1CVjfo>WW@n$1Ak_tCuYab0f`;xVJG)4@TO%4$e;n} z=HY_E^GG88l@hpdVPQNOSSD@%x>ZPC+xL9CtwbDnOhO{W0%{QN``I#fqX| zNG!lHdqgAfuFb!Gx!{&Z-gwtcu&+`Nda8=Stj;@)c_a{L2A7uWYTHcNXSiuEe3@ zhP_t2ZM-yJ(e5}LI<9{#+PoxZE#n#okPaO;Jjd{tP42SIkyHfG<}P z-y>S`hfrSrryim02T%?rIkSPp5PpL);}`%ZQNNst|afw^2hrbu_(<$XpadG34qXW>&j)+@wIQ8IKH0rH_M z2Ze8Yczk_Vlho@y=hTz+G_*OKgb{nT7hJE1E_~}0#HiSDCs88$t{kz=P$TxLL2gfG z%iDk(@ol6Sz)qp{>TOitKp4?OHU{_QlmBq4X-WM0(kD7dJ&n9GsrumFuitbKH}?+S zG`P$5gk|Dj7V2tVw{EPcbfhf?kt!k;#0RLBhy;-e(gRdg#F|tE`5`;~<)tVu$=^E` zcO!u`%;~?V|MXBfxag#~7^JwETDl825bDdYv9inzJ~!LXPXNNQ%YP7 zff5lQ8V{_VtP?m38hBqwMz*PQ=bYHp({&ByH66N|A7}l1} zv?{|wQuaD#Wy|BOu{>o-8H%w`{A0lum{}et!Tp`VN%-+plurNGydG8kt(o7k;fEU+ zx89ozPQEGofu`BJEDE-R%t%ff6j1${ zllN~w^zLjsoql0;!tM<8FLJue_g_!uZ1B52yNj7)!IEJ=T_1@wavqY4zB)PG@eFC} zybqco<67kFyyz%$_1JuiQK}}WV%P(x;34-~!DFKKorhVx!d$4nc}&?vuI(r(F^&$w zSGaNXCtWcIfLZyT8 zO{5#O)b6|{r|iWCMID}C8J!1&oIE!8-ommsT%}6ZKT2Mw$LE(_7dv5_R)Bm@ewn3Q z)X*y;4lZLE>+Vm1IK9YWuZW0wsfqcFOlJd37e58MAO-~@2E`)=<yqyBBR%^)%VszY{ zq*yr}EdXmx#0HC6OqBuZzgLW45*PV0!u8<`|1+dS$}r)K9?;`$^hD16+3CJY_{S--Y`2vLj@KhASi&KP zaerSllEY{19g?$g<$l|j2wKYvBia39#mJg1a@oZ-J}6WSu}p88Lsfb}LpdK%Qq}yW ztLz-7pJ53pO8I-m%Eof4#l{j2vpVpx3t{lk3S&6guvC5gML+suRync<;#=LCdF5Al zbF_L}?&Mc;dDAQB5bT9^>j~%4dtVv9UA&F+G&0#1yK-)*DaEyo_hBHo#Q#s3VAuNq zGYb{gOtlfkM4tdaW}5uIzirUC)HZC9feQ@o%;2?WrdGTm+!sD$CH1%hV5`78!i7 zg6V~_art%lVn^d^gIZlVaFRHR$%v` z0?zlMgASXB%l9sUUF#8ndMJkN3IkCeF!^g%=K4RaS?p4g$b2E_OZ+2_n1-PKG_iC@XOOZZstxsGWkyJ`cktsJpBAQ z;2GP=RGahbI|HrL`1$Pno*qe;VE@WNEXSoqk?WbWd$Ux|_f)HGxl~`T>5je;tbd~u zgqTtAyX6Xfxxi(W{1RM}4HFS${2G|X?aH0H)Fm|*nLQMfJh4~r7w)>W9<%3G^?A?r z?q+W=`{U?NrQYSknY`g38Rgc?`KDSzYhdi9Ta`YQPfPmyq?;m-psj@WS(ddaLlQVl 
zVL2O&%WL=^s-7+)M^{_mqtVK;Te>y-w!X!7l5?lnd1T94CFj?qQQFw*Lr5;leZQp~ z)oVdvHCXFIL$1#?s*?WtQWy*C)hnrUOsR8hsdHSZb9||DLaB3NnO**X4dHKNK5#w3 z*y%*@K}_&Lg787|@IiX;K{oI~pEvPIuehYnd5pBW#W2ouCf`^Xqk0v){qE+~c;G&h z*||fb(1bUR9EtOO{aSLjvgA2!F#poRI0^(fK%f%@PzpdG8U%P=OYDx9+^5OY@l+4I zvC4_@`Zi&Ph{e>u_g=|*SMg%CBYUwnX{;+d&0CU1{K?aN0HN^ucqM{_?EAEv1C!@5 zxf>gU+yCWQkZsXTU3lHqlrX(AoH)H=!B8Z>mjYk@ecDHvzR0E~VYGIH21!pxZp4+cKcrI-vVwK(}2$H`p)8>09(BT;L8|-~n9V z30&X>T;L5{-~(JB6fO`RJ`fo`5Dh*M13nN3K9E3zn#&pq$y3Ji6&fmA#K)qy_5tkh zf!y$c{P2Oo@PXp+fp0Xt0S5i+-K%MQGxR4H+v61FtOXuut|$!?=cxXTzm<0K18{!+ zDHZnlCoFXt!hOEvJSyNd`F(Ej;mo_k`CJYgH{o(j z581O>9j946zrw=Yt(vF){rvoD$>j3)XMv&jSr&+@{jL1>B4*u+W#he1T<7$2t``!- zaEguE9av#fn(QkwnkNyHrObj}REmuqAVbs{S7`9KM?Xh-@K%%k*e$_M!%v}%|0%05 z<1|Lj}b$e|2cB#;Tl4EUXPJ=~nOL3RD-&`>1;~5k! zU}i%RZZqQbINbVpVqmBI;Hnm)Ln>@0jN#)IuTo@GquM0^R^o(Fm414F@IoO~P_mcu zw=3g$Fwrwf(WW!WF6#3igBHdpsU`DNEKXfX0zwM(h+N-(-iATcsL)-lyfFJq+;6ja zux*@w5q$n~8p-sy(7f)1ob=nfU60!ISFFmaaazdt9q?+^6TwQOky3l&>O=DPe*tM#F%ctNqvm!{>S!U56b&qzBtm0jj+w z-UmUO9Cj{Pf3laaSsV;VRRlD9Yw=?NTwhgtjK9!dm`k*xi5eW1w!umbB`6-oPp{?K86qzzW9}iQTv;II?p`N8 z0DpM8bAn~ng4}XX?g_H~X1t#52I}*EgE$s(7VE?2^G4tMoz^*{GOyyZe?u3X%iA|!nn zc$xI!Ckab?G{2@NiCy7wU1xlqaymE!Ww>z0IMUPKd3w((EbaKnjc_!Rva}o76dqq% zkXWVqVE2S@wOd+;45>uajqO?T(U^YMND6-K|%i~B))E}N^XSb2#*J^kgXmM27nQKR`b12rU_|QJoSBSiRp}1=}$O4GAyHi zm^ z2PB#i$@g3$O_=aHy>y)>vodBEQ&_6G<-1cA`3B{dzu76VBP@i}4&n3QV>Q@8E*A$K z4DBGY82}z{o&SnHoe;W7!USq;dpW@kue*cMZSm`;eQHxk9P%l`&65z_J4jq7$av}h z_)2w!e0WzoLFo0U$i2UhCvbQZZ&P4QbpSFr)*xdW^jbU`D-MHzRyyM%zKiG0>OSr| z^qABlZk+SbLKhe$Zhv1Lmnl9&Y$7ICJv>Y5gQ+xor=-HgDC0iQ&`ez?jx2chcKSulpF zl$B=yeDV^)3vFvG;&FJZcxNprG&Ami zf=F9WC1d?Iz*K&R;q~6KrU$DDCX93FD_b-5ittTt*&al;rVpZS6)yW;W~(D=Evz) zB_owlCJy0kL$KIHpT`O)k(#?qOq;yhI>^PoeYOuGSlB(+nkFyi&;0F7VYOu{ORsiN zQm$=w21TB^9NQDssP(cjl=8aJrpFw3jnaXJPjwvgF~P*S#8EGUJkdz_A?2i*)sJ3py_MQX6qfh*F~fZGE*lpb(|Y8P zKtxCHv@{qh!driH%A)Gv(I|N+9+X=7B zZ>=eoyB`@6Q+18PQjE@zdVCjj3Mzl= zh`aajaX2h%gRUx_a@U3sJr|t$^OoudR4YuPB`2(ECs+`F*#r;w;j|gYql!=MN^x+@ z=^55vpAO+k(t4*V;`VEHj)|J=xLn`0vf-#bpV&0h(9=D%+M|}f?~T5iS4ZUTqUxu( zgA9P!cXiwQG=jFwJ7^bXYFiP1?ro1PW?TB5&x0RkTbi4C#U3SPLn77pCwi0*V<;H! 
z?L$v@f^}XTZH$sU|K8oM_+9C{*w4r6&O-H-einUoSiXy+URv*RiZkTX?-(P|MixQF z-HNn}eRXnos>wqbH_C^>`W1)GA1hu}Px89^JQAm~WV-ttIlw6{t2+Vbd;Vec)xJ>u z-iqI{9n1yc1a09&#TYw3?C<-kR$C+X48OI>ISTLNN<4(G3-4n}+yo50(K|oLtk%yX zyC3T}F|eOQzl*eA70oYut``j(@%@I``c#ei`4zXxFN(Kgxt|8HuUP(Y_dTP7(E2|J z@~Ig6id*WOSL&Q!>ReFjT=;(n<(eI2-@^&tBM9Fk2|p!Yi>>G( zd9ITcYz9L|m56fAdSIu%=b*mld~t##Odv$2k&WX^692$EptL*lExhy;r3-`TUXc1; zi27c5{*53uO)$hl{F+DgebBmvc+!K!{M&TE0-ERoH!qo6*v9k;eqNG<$`r~QcH51> zX$nptf`Ftcw4PxOBajK3L=VI+kU`AmM=jS{_aoCbW(626`j5c|8Ok1z!F@#$t#PPuXG1MSK`3q!d zE8^6CzHJXoe1{kPgF?Mq^*xo}Y{z&)6|iwSjv1E;Ebzf?qeiox%}Bb?FX{ImUk z%_VtXY(L<|} zs1E3^4d|{9=xz+?Zf4dX-a|k*qvbh8MuN#cM}sS|5D|zF5vULmm=F;-5fKCn1Yl8D zSK~hMw4=AQvaszt+IZOH)UGf$0Fi|X;!XhC65qB$%3-4tTYsL7BCZA`#lZedkhXbX zKIM6`_iOu9zH`SdxtFn({e7qO{_W}2Z3Y_01ZPkGLxY1&*c7)J$%iJDy|o-vK1MN; zdHW^2d}2%W$gMDO7w?{3j@&FK$*HVnQJg!Iia6bKctxdtXvuoUMBq zel=G3Pd_A#^uiY)_`uzK?1D8SjtJ<+Euby005r-!JVCOC0fGhU$(nViCTq?1DvXKF z*I#npTRPqw{#@KYxoaILZ@*B@5wmp^SqV=~zHSy6ZJ7|HVs^zU@`+;qwZ3Wac(Q9K z-jHE`(iQ3KXUnnYVTXAJb-`TNU3JKL43)0N-X}uul>TADe2pIXHSafTVSMtJP$|f} zeS+!ly~Z@flP+)M{r{Hg?Tcq^7f%n*F4&CZ`w@~AsDHFtty1IjTu-Mya`7$Uwm$i2 zacsC6FV6eQ605sN%cjWd82*(wc;d^0LrH>T8ONsdkskJA=B5 z_<3k_aQh1q6rhX?vv#5Z^GW#K?f(4ez-F%3h+JC6u$&W2L%?pAVQIfV-12!}-E#S0 zoouiV!ZJ$i{X1<5=B=-L%((sjfY73;dMXXN>Nb#dBs=iOFEM6Y?G?{%C6&{dKft}@ z^|qqc^h8SEOwFf&OHlK6v6s6pzqZl}FX_axUs2DJVVu4$!i+rz&(UPmUG*#fVTF%n z;v|sk66sy(aG2K|J(J@o_}rc{K`s_f^frFg`#*ft+hAU^pbcsZOMWmC|yELxl?t9a05Yzz5fqfagp^5 zA9f2Obqf-83o>=f3+fh>*J!)6CG=^TKY~oFf`&sGojtIW?<4dy@ePqoiYedu5NcMNT%P;=v6=!IuFS~7Txa>D&t){-O2md4Fc zX1DDHH>jsKV9T*PAvE4^L|WXMX1L8q;m*ybw4qI0()k zg7beUWO3ZQv)0U=z(jVc;^wp`Iv(_H+bS)0K7n`epr%N-Er%Q$sQD4TxRscHgO8{H zC2Q9lKXAqJzM z!ZszSv5#`q`B5+uFI@C3T5(mSK`lo``_PBsg>cmq^t%eIS$Z|0-TiI zmx!C&Z1u2@IUk{F**$s^D35|GafSoR6BIeNqQD>7VdN{aAjdigthg9b`@z>e?eBDm zz^@(3m)DY#b<+1mA9$hFhjdLQ&h&~^UUPU=S${YmPLu>!<^qhf+?RbdzN|Vpo{!#j z>@9z?UujmBU1$1Q*Cgr{>^vzsIZWjrbN0Qif7xX=DnHkke&Ij^`3x*oMqT0oyC$>m z#tQG3iV_7o;I0@3uaDriOKi#=waY=qLRB$v3H;XTUw2ht`ME{%lqiLfCoC->pP4QmBm*MER0)^D6~3`PaKKeKU=P3Jtv6G>f+ z#4EmHP$fdcdz(YVWXw$H9#b6I)*Bo;yeAi_WgJ8J^D8l@non5_7c?1j>s9u$z}1ov zOs($=Sw_Y7F1*Q|A2K|}q+z_zlH=Bjb+hSel8gv_SS01$sVp^n@!4@yd16oICl8)0 zW~uYnQs-<^=NwY!|D39S8bGHib_WKw80KWGwejaC-a`N0WaEtp_pob~4lGJV8wE%p zz~RMM#bvP;!8(=}K8;>4fKbV0_@kJ>n)O`5i>;DNRGM`>ZEtIYXScKAmjfh3H_fAW zJ($U&F~*yh_UdEO@>scs9q*)^?Pv#C5&PArq^7Y^jb$Enc@Urg0WT0Z0)feY096sY zML^^PO#+Oakd5l8Zz(!>fuQ+*IzDv=&53ZP8}$4tY}El#ZsoDB`dn0|6Hh znEwZ$fB;qWWpp425K4mpYSHO9Ya1~u4Ue^S=@VVH8OEzn&4t?hC~g9!4+FZ)9wZid zvsM+Mnu3?vBcNh%-mFK(zPU=+f0Rf;B|Gq1>S=g95s|{)TC#;F+~HOhA~d}ne&Pn{ z{M+HtG%N;A3(2I8QCkLr8fv9A+lB=6MQnyMR0Boiif;*mVY-^|qWQCq%B-aU3>ud_ zKV^nT7jYTRNdC=7Bz(pK?cB-?w`YKQQTczZR3a0a1uLVGWqx-R3N(tmTY^G9syGGCJVOp2C(lvO{TE;W3O`KWA`(4Y z0EUAz^7;u#im$pcX$%G5yL0|_EwC$H@u~{g)jDTiW$2y7IS+0$XcW1N8gOI)HHJO#{qmQ8U8kcr9`VpRL4FiBg7P zsNR)<`HeqkGPTzObKtL(X~}`L0oCNZryE^66UU)baBH!tnVLtOmpMIbL^Az9CY zb94vT*_Py<@dk4Q>_C43MC4Z^Y$g5Kq!v(b0llNAgA0U$j`y9zN5{o-XWJMIj+mI> zF6p-!EeKX?&ZFP#loljz=F8#TMHeVlEP&D-{p^`VcE}e$F`7JMDsNoSO~nB+W{UxD zl5@%ijj3p8z8v37wUl-}Ev@cSW;SP<>jIPH?Ov@cPPZ?2@3%-8Rd?OpCf{_F>TP{= zla}GsGB3{ArC*=AlsBjn#JXjk#ezL3UVZ1qI%0JUZ9U@roD%ZsUX_BTy#9fcf$d>qD>@m<41Np<}CPSM{@_{!ML#Re@+UaX`QciuPZtHQ$B1IC`HDK%H7sJDB`oGr zEiBW`?TVFlwwfl+_K~+5!v&E0)!7rfm)>_2`K5!kCV#o@t41xF{W;SE_t>x z+BbLXRj8|WCg=|1@ie95LidIeaZ}R2_*jl6LVuzM3CmvKGMu|!12Eh^HZQK$ob$iuojEcI5k0|B%i{@z@Ayxif^T3>fnz73(QAbF?L-&NN*eRynO?pMO4X7V z25&0*;#7NtoxR2vHM2UM#5AKjJ=u`z7qC#z;IY?GCTt}G?zV<0%P>4l+X3}?bOzap zLx93~=ehb^p~xQU>{tr4f^`zs*X-^zkA4G79M0Y>;Sb?5JV&DU;%-P_UuNYKgi165 
zk?!+$ZX)6Yo)QCfIm~t+7LG4w>PoTt<4o3T0z)2;>r_wvx*Eesj$h~R4S0cGn8hQf zm2|uS>-rABFAghR&cFjc_X3+ZI(Pmp*SY@^A8IRNS5&0{{c7Kzd9+d#5i)e)Lj`a)1?2E**HbFV&X050%Tx3XUm)2Uc*pR8sf{%aU6&2Njw&>^ z{T$+2$9qyp4^g1lanK)oz?hcg{-hTuO6EMifpmk?f!Iy(SL0V=owJ06GmAm7Y6PWB z*9!4pqz9vB@!EnF#Phbjbm9f6C|URDD7!}FALlpcM({cwmu)55#tFC|qwXG@AQVLH zXYau>vQ1ji`e1U@YL|3$X1#8-$t|Zwq7}B!#FzN9WIi>Oq2E^#Fk5rYU&LL_@+E1< zsY6V;>N{G0M6X!FrSH3n3w;lN{`6gblgtiX7YtdU$h$|0ITLq+@yZ!TU9=opSyR_l z>ZVZ|IS1A;Lfyp>7}OrgyvT>uN-ftnj}3KJcXnq1)t4VeGgW7# z7cQ-5{u@Sqn-|#?cFxT1#qKPk`qFqbQ*9<_;qvE9!$xDrfjhZ6fuD2E>ql1^E{?MO zg*QlaZ!N>Z0>6=v>0Gaaf5+XytC`f2^)FT+fQr-VTI6`2a55V$eN$7`WJF6A!UCR4M4sYRD_hmPu7$OW zQEB8$jWQ$d=bk=*)=^F^<^&Gn2CSD|0q6@4lM~SZ0nvg$!}_hC`9R~`Z}}?)m#ZSC z`KY6#YoMBTcaGleI8sV0H-}MHsn=3#LBy~9z@3BL>A8(%}2=JVkU zIRE-xjchuExr4KCj?~vV=Qt3>&$@0EVESumACN&r#2k(~Yq9ra8cp zl~XNubwatetAc(3Fz`O!?)vVf8|@yC6F=vLZimk6Ct&^kxb(FS+}Y_t&7BmIJJmkr zrk&KMW|&FvsY%QORihHl=02d;`}c1KU`FMF`Q*!FBWepXA?WE_8{Y#;=!~AvWMIw? zzyNv@)H5o1P#vhTn)69WfkzD0HNNVbpKfiM*IV1_oNyxQh|4BgA&(~V^(-W@m3&tc ze~UOA-9WXU4>A<|(_ep7$}MAE*GAtWcCunk?+n|j|90b9#^ckM^9c$Y{iyz~;?*4z z0Ryp%`foUYx>GdG|A#GK{pbFfAV&L`!o7-qtEdc7~+y~$j{ zm9RWCZt4E3YnIIVFAUM-!hq=TV-2NeAeyfD?@23b1NNgu3tVsyczJ=6C3$C#Ur=!e zx%Q6lmN;$cUTZi_05y=}-LO}V0)vH!8@xl1(!dJxYm!`|QcH5qcf&&CAU^ow=exd3 z3>BV#2cj5l9#02T5xc^KS_{M(UAp9G3bM41@n0TZ{QSJ5%`-vAwlZ{`6|FQ78>5nF z^-tN7l%oJtrqikb(c2i6G0aj+hxMVpb5DKQZf%ak5Sf zhG$u^R`YeWSj{PVj|NCvuvX8ptyL#9c3q}-U4A_$cUy_+9qSr>%2;9FH*=D#EZ5ID zvp?m^k7%_x#HcsH%52p-SFDFj;<)XFon;^%8oxs_A|wbbhD;+J?Aiv!b`|W`?P18C z@6JV5m8UXG6He-YIgo{@tP>i{JmD-!bT*kfvIXOtwHD(zh+i`3{GQV0UQ5{x1tU74);hEPUgFax zG!`*C7o-9;^s~68lD#y=O$^?|Uq|+gp$ok#=+))v_&J=k?@?Gf$gDNJ1bA6G5#y-p^9(#Yh>t}UgZRZ=PAqzo8vklTu2?7T0k8tsPYEg zxhWl=(vb$mo-S10aH!`{VeONZkId3>)sUR6PybrXIvF&p@%@)* zF?DFO`kRr{nxLdrTV9s-1^mL}HNnI>AGtRpu}w9>HwkqG?8m14V8jDjI)7r_kqx8O z-M93l4>>_eACl7XS4q4Z39@G@o~u2!TpgnUhGpNA{f2}KX~n7FH~`%v88(Tt3I2?z z{=EWLs0C}*2cRZyHskt@;_bOpTizM#zyJxXd#0dB{xQ0_@ynx4>2^2U;wvK)S`u+$ znDfcdC41MB>!%wBVIlu2wP|uZeol9vKN9>k4y?{|?Ardjp>Gv3^S)Xl=G6UKgF9t6 zb4I{kL#6H@`9nUihRuO*>Wu!D%Ew}-9vf?aT)d@o#1oht8m?@$Xgd81XlUpx03q$q z>;JZb#mC>?t{7K~=})$Zi!RmJ+XZ}GV4!lk8;W^c%}s0hybbym3?-YonEVxhwduz{ z#?6+)Ys0IRPihoNzFfYk9zYJ+BO-Td3!*w!a)9CLp=oy8r%#v^JtJA+!a5NM^V33@ z2sO@)NlRD@ZxcVfH(AG7-LUksK z@AQ!Jdir)u*IkgkGzf=Eh0KHTB=p^OQC>QlS6egQ#qNH)bd22-$SoXaPZ1jZmsd|k zAJ_?-;pHvw2LB@*0S=a5Ip+20Z&#yZ>FF*31P5FGHkbbW;y1%!(%kM}OcO8a);`aT zt*;jwb0>eoselQp$4Sy7oD=*_iw~ur8Wvca)HMNCUxO^`C$_IYPOfTXR`!Y@XoIBw zwB%3EFE{U71w%QTI@z;?t~*aglLsA499PZpfW}_W%cVUBlRK=s>)pYfAiO8w)_%YMb z57M0*)=_(s{|3iP5e#<9nh`sxtd(vr5W;fqMSbyPo5e zuZDPjoNYRCzB2-z7{{B)I3Fuh*^XPr9m576MUXw3TjQQR{Z_A?o4n~4Fe-?4C_g~1 z1Osnu`sD;mnmKEFIDHDlAxqec=QF7VWStOdur~H;ekRdwHPbK8cB|LNx@T1o`UDqQ ze?H_bMmI6c=Dh-bGQg9$ zM(a5&*51I8eQ^-)4x^f!vtoopwGJ3$<;+Vs+Y3-36Hu9kIERN?4F(u z7gP!t)Cw0g3>UNn7j$4M&txs(mBYzPBlX`25i-`A0j9d&#u{q)5qLQrV!Bmc8Paj_ z_PPb8mzvolSvxI~2BwoY$(dS*SK_1{{DZ#!gQ!n+ZLGh65Q{ko#V+CorqffWF*M1T z{)`wJ!vk&L_0OY@slfMW!}l1$Pg!mv)Q3i;bHs}Ann-b(NpV?7aal=m+2Bi$6;5HN z>C^Gd47f+63)1qs;G>U?)fx<`P+*TyV5?Bpq-1n5JvAOB75l^j3~Et8^R$1whSiln zKbxA-BbLOLgX7PpZpH2DvNk}{h+xM6FfrT(aLIXP$U5f0Sh)EpcEtP^U1RT*`DvH= zc|F~a9TQzc-c+LIwr;=Uu9ot!mz|Em9dT!l2Ez@GhTJdu-}ka{L|y^f`)O@@=SQ`w z`}ogw8)x+@`+q)lVZZxa&c1OeLY9M4dBlk(@)%M%n#0nxG49<|RW83MO<24lZ8Ny^ zE3(%iQl(W@m`C*;0W#`lZJu;rzzgZZYL$1a{@Ak%9OzRLH;z*#e90lovE;ct- z{tsy~5F=x7!D&lZgdnJ)YrM9lJ5KXLAHG*5F`(g!Ff5Z1{)&wcO=vh7o?-fe($? 
zx^NgL2HcfBp_YCylyWoCLr&C36>VGm#5kebM zNo0gKsRd12gBB&MK+`t=PO9-kQ7Rj>MuZODL`u)f3tcHg}=HT$w7wbY&UDQlE#u2U~5f=5XwCvX3 zqY`kje!i>mw}5?wth(;>#}AxTN^nzh-5+iq%cNl@Dxk-UEl$=w_>BA^zu9*m6j4d3 z#84!etHHIDNh=QFRzJ~hK z89hnbMFHT8Fy&w=ZgrINXx+958`J1n*W7Q(V52*L zAtichx9u6iI0!LulB4Ne=7rgX8uScEmKK)08!B5$-6QoEfQ90{yi)p!Z#k!e_d_%X z(DQ(rge=d2g)=UIl)(YV^8`a!`VqinKTX7_e>xLyi)E2~d`cv{?n&d`_yV`w*MaMU zG89l$1`CPpA&2>zlzrj19jhXMVu``jhbR({Dxm&4^Zv3WMn^z4DIk8bJ|uO!QkOi~ zDJnj6Pxb60DYsqDM-xS4FKsh96+)ms%gj^HKN26sR+fK}a=vJ;+!p{#QCROGY*ez< zFo|oDdAgpF$^B??;Hi^$(6yaWG^vtyr_AQO|@=aQ~C0NAO-)9B_-)CP`|agM2xwFG<#6x^(55^)J(jMW0;NOacd@i08tZ!H7AC{h&68DGv4%7&^51Vv0YhMn+|qIW#TsO&oH+HRBOtE@g+jMu=OthK$cPY z|G5ClGV#;d!o+?bbZ9;+d$VxJGv=Sa6z)nGCP!O&pL$!5bs~yBwJi&U!m71qZRpgdfav&qz!p!|-->aethVp3qCXzn};=jA7c1+3h;I$$+Sa2eoM6*eUt`><}=pzXVXhXj!t3+z0c~!!V=&nLRAr4`xVL*AtDe4I=(^ z#8l&XI!<$RY_IJ*Z1=5bTn(a`znyt}zw@wKK_TZ}>#1^8``FJ{=ZW6sbn|~?mx3vT z>LxqE?&bGC3QSx-+-7YA3sP;hSFQDjvk6?~)$3iF^+$Qh?y?yRyC#;r{v!PqR0remakBQNaU5c54unoN zeEgoA?XnkFIRhbt#}Ze_a*hH5_>Fs+EOv%}lM8Mc@MKS-tRt7JRYvM0&lkOhZ6I8{mhtre8WyhPDU*Z>ET;hj}b6n0AIj>39t5vrxZG>Xk zz6BA89%W%Bm&_P_J>(+y44Hexvhr%NfU0vkzVS3Rij6yvLT)qrif&g<_SWAsFRdN24hIgG7uEcItWUcu8gz0`FKl^NWVTm;tOUnNM{QVVlJiMGC0)H3_ zH7R@aPnvE7dckj<1sFzZIMlP?As5=O$4^i-4DW9wJo+!+_OQeT`d+Ykz#rbb ztn#z=^9tisSd4@x;U2T3VDBYwk#WkG*|L5Yaa$Ud=qiNP^fHD!ap^yIe6#%|q}7?E z$w@Cq;U`EYPyf`4TJ$6$c5IjmcdJvdp$xeuF<%|+!L2XUlmGZI=^8+G!-mpg1TsLe zjc~XAmlQ;{S#WBu$29;;=so7S2FfVB+NT-gt@VNudv(^JYOQ%57B%izEAoAq;X`8N@ynO20Z5kX+WEsUiJj$mD8^RemI1UG0 zPZJ6|U6kT0CG2i-HEL21z51Wy)=Ox>lP1P%0JlHkh=ML#_fdN zNCvez0FvM}P6Iy8G2h&B1+z{>8cs@Th6%)={+Zu02-#&FXRS1zP?!c}1a{5U{5U*; z6Wj%hL8sci2t1?eRLSix&g(rl-o8XClPRB?>+XC;zMoWSK%Cl^4!@D5N=v{vVma3e zyCT9$I`r_gmnSm#-}Hx-SnatI;IN)VzQ`|XKcY5H5&rH2s~Za}-gCx_x56~QrQLbI z8-S<3`-9t0aypI&Me5B6IUELy8=Q9G82s;e9+Z_*^puaoJSfUSDZ;2@DW?jKGKHUA z$SMBdr#?Nyz*4FUABP2Yi;(*OdD+sDDj;bOfWHCOqF!gVPCoFfQpMeVfB~X@OudG` z8np`N$~P;zCSjSQ!Nzp!`Vj!Xa)l(NpAX~wsz)tX-GaIlkX*oO6YnhelkJ!oxk!I17k2V&ZbSB6n?b@h-*5AIlBVuQUkz_=QsTsZI9dBE?~iAjzrcj z_AMEbp?~rr5R9AB3`=N+wROJdut9L10*Z<2#}2wyEDCV1Bx8Q$J>-%DPx7#b^rZuD ztDU;{d@n2BLUJr9P7GpNTrt! 
zh)2aymU|f9$zM+(J~<}s_U-6&GfmUCYGI$5MAE#ap23rNQ9_~2YQM21bCR$tJo&v#8e;0u27>IfTeDi1g>Lv+A%UjD$p1pb7 z>2&#%#H(KauF!l*bszP+tcbGlqvuTNHwQZ$gW+~0BNbjLD$VfI+_^h3pEb17vj;NI zScq(U`gFB+uRyi6V=^J0AycP#h;T({2s6)ajd25SxW{`_%p+`JRD2p?Hf%b#_oRWs z6@w>ZX6&DM!f(Ai=gJW;q^BWEMH$w`}1GvW6GWL8=Y$ zm=4lsEH7{Qu5YxFyh$Iwybh{z+CN&_3Fp6NZ*Pw)j<2@)Or=g|GI=VIBiEI)>L(WC zE8hM;3OnzpCZ2fhE24BEfPyG3h;#vIA|(i+OK;MC7Vk?d<7T1Bn!a8Wt z#|MpoXJyaU2gD98EBLvN1@BSXY;i1|oU&dJxQK6Tbt_LJP=8n6@dv&|>>)s8#CyNT z=?>U=kI?M~V!j!rB=c)H{Z>9zB)^L2M~okliytGt@-L)hp=xkbE*OH7QGU8p&8YTW zynwuO*q{B4(ksSypIk6BomZnR^c{o#BcqZY%+)3bTAIDro_&RE0iHr)DNlc#FE7FC zms{EgW_*GFa{aRP%f4k`u7e%S8Frd>{M|k4B6(uCClPGv(AMwzzQU6+y5gLHjWgaS zU8l`AeD}F$TFISTjjryZ&zh~A&(;3t)j2Ly zH?wcrQ1+pL_d`9fG5-mDNcyGAXpFe?eF80+6KyokNuxuxGi9gOL?3E;Kh(K2Pt#~E z_Ictj8IcgG-L@?hkrb+ZxJ=AsL7Xb}oBBmyF^4vuvsN)av`C$(S2c$cnJLXm0ZSFk zz7?t@b%!|d6EQXCZF+WXJS8S54@DeZh!i=o_a~^eG{ed`K1))Hq_IUeo;`5->_XsS zir<>L=pgA>&Eb;VIIZ4K{~EM@Sgu-T$?Tu)wa2EoQm@VK14=tw>*< z%$ds5txKbL_?zL=n!`h7;d0LBUsfw6J!uq$@}V5dr;_bGc#*W8L;38U)d}f=)@22p zPhFIVg8t|$eI)PK4Epshi83J)PhaT>WlW8%wc+SGFp=n+;JABAZrBz^+F6SskE?V+0v{1-T`2AR&?Y9cBVuorH+ z7+lZAxz|lTK5g=w%Ja%e`ERBUwm#iEwjO+g6isc!;z zaJOQzM%}3*Fw{qtfaE-vlzVEAB}r9-vsaTt$xL>KdPBe3EyAVp?JL>q2=L{X^!Qim zjXiOa^Jhl|T8XaU0uxH_{wirEDjMx*vwomc|5}P)n5d+KlbP>aHsMZYX$BuaVHq_S zdbi#1wudEn>$U)>jR5!=fT_pHp0Dx=Ozma*)VBkBmuoVa-s^+SLbm5G)9Lpeq8H=X zg-jO=?W-Hhrd*aYCNc!P72~L&b zOMd+|Ez}u>)?Bart0FoaE>AH_>03t`da7G3;sU9tS9=WYhOeQ#`RAK?&#aV`l)vTg zO1#yKv1PL}a7(Hkr2Bdi)2#W%%20AML3uZz;MIy{zedScc;%G|FJkm^8ON})bjCSzbw?rvnSF<+Oo>j!58(OkrV$#{jiiMNZY~$Hjjo1_Jh3*!-4wF|79%V0J z-fK|~CkQ)MRFhTWkhuV?lbq2fUz>Q}O4W<0G;YnEsw#N>TKOo{2K?}vd93pTFu;dQ3@q;fmd_xH^ZpIYCZ+&RLjnLQcXj~23B-W4A^O0*lNiMS zzM!2vXg7QE8|e56!sjiCMDML}2r_V64sJ6+ySJIwfH-I=9e$0FkTDZD1(d+tAy`2V zSd;~9N6GD@4?kU6gXi1Sk`I5}RMrBcB;b&+Am;~XTz`N1Hx7XG@#O`UQSE||8490yl&#+YAe$t=aqb7DQD|MgtM?eJ2@n0%*e$iKYj&SDY zJ>d_HJ7P@JOmD7If+8Bm&-=Dp8$R|KKG|>b($Z^RcT)OrFqjuVMI*94tH~AC@pqZ= zfFuN3)Jf4BfF+~w_z?ho-BNQz{JGB`j$;rbVF}eYyJ`x)`}y%1H{arcS>QiE2@+C0 z(YI!eBVBksqNIVV+*C|2!n$Lzl|Iv5Pz^82n~pr!>oyeFnFmG+zYtoWJWkB>2<2_dEPw@}CB~$KFhV zAASUD8S^>*%d{C4vt9j;OYe0_6Egm=%?VZa466!ka1Y~SB0JrFKTY;E%==8{-RAhp z3xSySuHWL#)4#>he)VYt3Vq^D>sFcK3%|sh`hJOfQ_3*Wovz1|{f>9Y_#Mwim%0Tsil zo8VdcLJjvshUxj~`u;eX9Pg35cTX}s@91#0*A0l>?$C_A z>A0JLLo~hZ$EF=|05;ua*$A>w=24*VF*-}AyQ(p=oROGj_4_9)g0fFmEuU9KCo65$ zk_t7^F|(hLv#&31eGi=r{w%oZrShAh9Fii@Xv;eTC>gD+v}J`ffb&1nPKePlwQq(8 ze<@z(;mUHsXAU6l44@fbVk=j1$J@}%S&Uh~_HpgJ+0_rd2jdlLeeKkjPEf3KC zXs^p_`r_EgXf-MtlzL;i+IwL?+RR_Z-dih+sC0%^+#ivo8`n}5ZiA)xH8~y%nM~U* zot>6I5+P5TvKhM(ok{tK>3}GRgWHWv`EkcPiwd8th&mI%nDhqsq55K?(CA9OsOc5C z@xNWi8rSnfY{;7F+mUHhO?->xJooq4L)Jz;{FlyTfZrE~Cjzk!l&`)-+9p1e`xd^M z7D`HyUXl4!E?`FEJr3UbXZ3)u+FFj_cw|e&Iii|8yrQ?qVlN=FQCNTF;|D|7K;2_E zq=Tt7bT()5p04N7k_~kF$&-0j&Ec-*WZ6>Dt4k^ntiKD_cIC4duE5iC5U@LMs6RF{ zIH$)t*yy@ep<5M%2*gm7C7Wh;jbM`8n4)C!uL(h!=yr|b{)&4j?aIj`T~~EWmgJAl zq9R8_z}h|kmywTKD`Ox3IF}stV-;L~mTaiAuhG`rAW`sOwoV*X=NUkuYdYVQ`T;O{aw)_C^{m%nZ$=h|@t1aKo)=pfA2aC9NJk;5xI)y8k{; z8SiIYV`(FrY0S zTj9lIc`P-v8u+{K&RitHixi5tZ-_k1Ua-!{=zDX%NIN23*rG^PA>1#-7vuXzx!>uc zRCQ;PqwD_rA`oO!?px}s+)e4DP9^5D>bc>)fC%J04 zSXqJ8a4x*1C<#yS3*|v37!*{h^|j}aT4NtSEGf7(Y#IJPc1+5(|JhM-jNhME2=^FM z`2QB~C{3(fWXTHPH(_tS7ncaG{TW>Mxv?$33`hl6?!{h9>f3m(;ZWdtQ<5w610c@^ zcq=7*ueOK_US7c+jGdJ|F6C*(CvAlTFAP3wsWCD!FT!vR0xRr>_v2 znmMe)>j*4w5#U*C@RwRcrvDcj1-ssh5^dUP->z{3`(^9j;N&YN2ClQD<`F1*_4uVN zOeJsmT7X2ult;1WlO+c{>M@OJfs;$;}DYTZEqa)AozpO zLRz^>2z@aA*hIT_ zbVP@T^VK0?iN3bm&qtKeTPaqSD6fDaBU_r+WH8Pv=lD)4Ry^Ghx`eL;J`{Me6mic& 
zq#nTYNuZjapLt0=PWyY?-8P8eEQg3elaM?cqB5piHS;p9rt$SvC8{YXroF@t7+A6h*>&Te zfkE{IFsKEpbwJ=&0D>eC7=pn4R}iw?T~sp#%v%bZqhlo*civDv%NA5g_%tJ^foDGn zC)!paq%IuL^z3`5E4iQx`DYTRfT6%v8T*)QJP&iu} zSuBSvRzeo5!e6O#yfDtzMHU-`WSbz1Ei(Tc3#->unH;4{mR5bb0U(7zZ?Xh$w`yRD zrizEA05c5R4UpXktch0Y7b(#QGm{)|-#TjHTx%%p&XQ{qyy{)GTNZ)|RneWqqyntz z#3V1+c2z`o0%ic0UD)&=z&I1Q*27JzT;Am0-H$rxDEy6P1iMOjxA%eM$NdRErrz_2 z4E5?P;wFs)u6oN35OZpMzcWkKXy$HT=>l5Zh2Os20{=DZ3j=xDgW%b?+i{6*!5y9+ z1-Mf-2HYuCbheB013XNXj-3%lcS;YEDd+tL*^FNHlN0-=GrCIkbI}0%2Du6C*)2?MGfjbA^b+Qbgi%@Gec2Vk{SM|~zhnBH zu5(^Raurh!3CvUvr9t^vEN26ny}f4K2%=f{vaQ?u(24z@yje}31Dn-g;sce%*CruM z0%@JWi`y|$9)ivU*UY)3cB}VYp5p6I^wO^@622%P!aS&%*udbl=X97$ zPJ<>qD&L=!ofB1@)|u4VemE`4p_DB!*F>-g#GL5d`K#0m*zW_}H^AQrU^FI`D=B*F zIG}Y))z5krf$9Bm3`}QnqIM5kEY(d6lK3mr)jIN4&$E;gUnzYD&hjwhWb0i$UxlXE zsjf{Ma9o_tztQBc!Ef;sI6YUuYm>%n`QutBRfki6O^)RXCM2eP?_!RCSx36-T|?%^ zuepy#>Cv=z>P?5Htx;wl5PsYO;U)rgirU)2I+L0H zAGid7Aty{$%_14>9Ay$rPl|R*TNk)QSYl`Ght2@4|GKcxV80E4{)}$ZrTCq3?Fyyh zBkPC<3xn(@gh^G?#2 z?z3-atx^LWtylkbfS6)DyHk90aEx1)+**|-+L*Ac?GM-?K)dP3=oUK5>i3(=IZtgP zKu6;QjruzYh}vzv^A^-O7sGMNZG}%xB~BnPa3?G;ptW&W888NXt1U0txxPIPUVZ>` zRrWo^E%12Z^EB_Gnh$((UV_~&To?)QmdFzgyZFWMxd?V>VN#Jx z9hZcU%4k#DUz`J+qzX8ibbljyz+oNZRJDz8j|m#_Z=5HAOt(+Ueyb&XUl~$F-zS@t zotIYm_Kab5d9q-gA*z{jMXn+E@8e??jIQ+6JBts_lfL!-Dt+IjquB<>ft|u2JVuq< zoN<808k$iQSQEL9Q2wQYe9f-ikv z>F?S5aGmxP{3>y7=(Fz{zaK`-Jx5?ISIUuy#ek{;bxtIzs~di z41t9{T}?tWvND@WkvMTnV~i%xP61ZAfR!A^%46*Mr9o9td1zB7IjJ89tr+mtG?*)- z0#2$jFloDW1wycn!ODe>bkh!ley6H;%Jpmpr;oQr{)wdCC{*g}Dy5$j+m*f0n%6~@ z*H&ydo~@uRmBf~@pAfDK47iE{as@Hdx5EoG)Bjd7g_99%uJ<||-UxTJIC0sjy`k)A z=y$-h^P`k*B-^0}`&} zkFTl{il+4Ha&~0(2}_c5bn;3<^q2R&jH#ZCp4!*6o@d7e6V=sO$Bd+}1{qPfj6+0* z>Yn6#bo*hRfya`3vdFHaWQURGZbxXdny~&sQSxX5qWP~z%jvk^jZKo^%~lD^fsGt# zRn`H`2S9pmJm9;#)=hHUadV};gS)BmPPQ1J5^}f~U+1>cdYOK>aImu7Zs@AyY5tFS z=@Rf!-b$-}ec5AGcG#~-un%KJRRK>LMsSh#;n-PWwFO>bii2e7qCdRY1pCUGVk*sskJ7EXhxtX$iSlv3MCF8%R(SA=ZKCY|if z(M81l>Cc8@n2FK(W0J(W$>aK&mJ7ij%Wc`N~#bMvw z-#YOlRqs3?STDvadbZXpe3rB5cpk>EX$j4WvykLJS*m=K=+pYB|5993Znw@pcj_jJ zK{IQvT60Bqn03kjL;fHACGVz@+WZm0e4oUzbVq8#wbCqbB7dpIX>&}~{J(aePH{k) z#>4+Qm&M}^x~mV(@ei|d^CWAWHuUI9QZDA|%|%Jh~`$yqa2J7w#t z1f(qwWZVLpI}L{qDLxhqI)92!5`omEhVXDe4Yc63#kEQd<(yChU3l&HT6+d=Zm5Ai zytX#P>;q(c6)YwTABsfI&_H3fm3QIK&cORkO_eCP{uCRKMYIP@7lR}?ZS zjiOu#YVaOjJ0D`!1R3|C%CuMu)cr=mEebVom9E-G+D;R7{b0#;XTv9nLF#5wa7#c9 ze7pNoTgP@Mqp8iVA>(M4T!L)e`ro@0<5UzuB2w^}Pe_y75Q#enwzrqTEk4wT2!2LA zb@H(FF4TtXRj(vHVnS}q5_ZL_lhJ!o!Oz%{+d35EHR_!7tt3z%TKL%u z(nel)euq2+X@ZFQklu(~%@H189b`Hh3 zGIMSb^!%<-K4S1QFXZ+)#rP?6?j;L8ITC3?0g+&a`pCe~kjU)^kZ}QMt|uH$t{4dQQiPu+ zAh*GH$`YD83x|(}BbVbT#My_VyCNxr*ks{O5lDsykTy=Jk1qV|J966`GCl;&Eh>0) z%gYenr2J`-^^*&~i80>#Ku>ZU!9`uu})1^7N{aN}t1L1B@%0?@(VR#UZV|{xm@5s(Cw5(U@p_m+} ziyqYHfl_WSdJHS`P%>|=du>+f;hn!Y*5Bg^QE zD3gyKja*CP|3+uYGP)_BWT?WN8u#{8qgz(e9Jl@6e`+84>Pu>Dx>t<4P(Xie9zIMKeb-@;$|=m8hkOcJa9>%V4XH{mIZah-q@v^dMjE5sSaesk+SIzyUge z%am5m?%|%iKVFI4rL)9tvVWtkkL^X=C2gX9i`e@$ z5&QaG+3Z-Fgh0$acKp!RY-;M0APeo3o!%?*`>k1pQDSAR(4v=4B@X zMK3|o$$tP20tg5cK=48X1hOD_0D`w5aM36xA(kUlk^`OSnAsnL_#=oXKwX49sAB}d zClFA6{eUl<5AH);XzR*^gSOJc6NbXs;&0sRb>j}&nqV#;C1Y*PjPmh`QW}yr2{*|* z`4KxST?sx|W*{Bhr0Nfdg-S9eriHftI-^TW2ix{hTHS@%{q;D}>p708yP<6O1z^jm z|7&#sv3J)kaNp%VNt^KDgVpnc%_qeRZ8f6?k429ZGcPT{DlaXeG#@3{@*F07OYlh0 zl1lT@f!cqd_6q^1l>)Ua(x8?Sv`Yc)?uCMOoS?)!2b7S260`p#;1Q#xljai!t-6@; z^JG9P25CM?P7S`v8mD5%vkd2;*I#YMAlF5Guy@SXs+df!?f@lmy$>ghKXd#7AfZH-)>5XeQa;LDmA8!Ep*Q~R^Jx!p^cW|p^ zbWc3B#pR3m*3H82YYdj1vl*jA)W(!#A);Yd(xE5VNB2`NBzgOOZ2HMl)y&>s@r~JO zy?5)Pny~L2B%=Vo2RNNvoK~5J1;C5IFLJ?>KVI$n*&43+6YPJyrE7l8b+;g5T}>?B 
zO<8cm`i+{1fu_H+`wvViCjN`vj((CAbb$zuNJ0U3U2t2**rV{9*3bcS?5u^| zcq~D$EP^5VA#Xdhy%y2lxNfdt)clUzCL@LOx$nu>M)3<}XW8Z8)j{F7#i1Ty7l%?T z^Y`m(R>vhA&mwwS$uZbH$ksQPxxx96)G=P4!n8)5&2eUh*-uLqn z5H7a7G}PAgf$MT+!pM1xgxBRFRCaM;lHu?2uiyVXqEb5#4i}^rsV0*b7S^mCS5gjE z+haPYF2l|qUIJq}ekkyfoPo1Zi-Are&G#cLjS18~tlyV-+)qC*G~@Tj!j*p6Rl%>L zivAKeFP&`JiW%}2jhhcYpdELorCCZJ-syItqvlpc1dU1tIam2P^Sh2)URQ)>ZAET)qgjLIsDl# z*4^asG=wl!pUDfY6Q6}qXXOta#ZMP1KQL4qzc;HkCeE;GIn#=$7|B)mei;AHs7-$y zYC5J-o;9Q0%EBsbPvI%c4J_}c>w0GfJRX_!Z^siWf5%f9hDtI_K4s^0aqrdU5uePL z4B`2H|5Ea@^-i_8+Gy=|nu7uQ(Mmrp;#M6Q1$&#X$Wno#Dn}c!SV)-{RQ|h#5#>0C_JQ3Fc-%DN;NM%U9%q}J@~_>Fc4~d>Y%sJ8h**D% zot2ZW%`C_5u|`iLK7gX+ua@VgRB&BjIu=X_ywn9)kziO9^jKp4fv$80%b^)%s_H@W zbkO|51x#@D?{>Jz>H_D`HaV`}yB!RF!P@SF@Y%<5Tv;F}LxZwC>;LMY0W0?eO?HRD zyX7(?2-XuTI7i>N_YR=?8)tX-?ol`^2xz8_3T)06=QofO>SWk6{c zm>++x3+QwnIX~mAMT12}z{(^afV(eac5doK0um~jqkCYTj{>I=)`Va_Flrhh@eXX$ zU141y(Gg7Ky)Vacb^YA!umBTf@nBr226UbW;}3YM(3R1oUGd~S?U80J22Wh4FUhpkE>XbKWm9!z@tWC{O`cvd`c)9?^JNi7fzDXKv4J{=Q|XP zR-c@T0qYwC*ztrDd3y%y&8sFE;h`l|Li>K%y3fUFC#|Uz3PTpCeLpof{|H5$tlMBk zHaGHb@^9_mLsx2-B!I)Y8(0`#DIl~66CC%<5`hB%0?T$WFZBMKe}l00pUH}3f2Ta& z5PyiJ`%Q%Odz8s;$pQlX<)AIbVEcY+UBAaihq%Mo3rLO~7Ur%Jb5JF8W8MIDQAr!a z3~x=ZkfAVOYn0$DGq3){E!$I?{TPwy0;rkPt}8Mez>ThrvwpJ#9;yg{e5X zj6&Z+6<1bOnkBa?9<-I1@$Ze1`a`FwvGc2|=klZ1C{f?G_-vGfbw$nSi7L8%?c%Xp z%*;&xh1c}h=v9W~_qh%-tmVq=B`|D7J7%93&>8;kbD(+HlRKUpuL%6xy7;~N__ks% zH^nF!BV9DlH0>V|t2lGD`xF?|<>q2r`0;~mJR$54LNp&!z87>^44j$NdXo0$^;hD6 zslG1F4Y+dGAF6Xtrd%XP4v9r!$a7>GWiykb(ss}QM;wU-8~!*;?iCoK&JKA#_KC7d z>aNmXJbHF#l7ws`!LcCLGY@q<$j)mbiEKjFvro?w#_p7=OOtO)gMbtS!XViF56FOk z0t9j(;M4#CoG>95&M+21Q7j9FXuyyK7)k>}#d08c3<4Jr%*SoUn2}Y*27qZ|O=)sl z1uzH&gV7;Uitv0^X7zYVA1O7Z?c1^(IRt_QAq?t?AkYJWH3(||1IZvTeX%2D4#Fr9 zY8QqujHObrtAJKQ%<7LJJO7QOfb1J6bO%LCpeTX&{(NxE#Y;o=bB$l)J;T?)4Yz*$ zmnAHEuz7Tkv1fl@#6Y^$;#p` zUc>c)cz@EE?rPG;(7EAg^m@I7$Kvry8(ZVSWt3~{u@b$re76=_Wb3Dkle^zSwW~{@ zZRL3+?x1&Qbzx;>47EESy5Me`VW}n&+-QTfLAQOkY4q2W>2K*zbGhN+eYL#sjh#V3 zG9PwTK0rV5F`C%Jvg>2w56XH9T}R5F!ub>l(#~IVErR8hUz^>sD+x>EdDIn{U%!G2muX*ASEfNlZjdls)BqTIJ^AKNhzbZ^mO&oO zQ9?|KaH#mKlRrRhZ8;{qy5#57t#!nZKEU%yIrb89l8149@#8xHoaO*}cHh4PI(E}1 zI7HwXAhZNa&~wfb>A|e{1qb}e4YB(!%jhA@yUf3W`RbqT@M{q^tX$#QRQ@^Hc&PW| z?8$e}M%~{EZWUAFs|HOK&E8HL?gAB~k{Uy39G(#Bil(2wT5vPw{s#w~nF84xaWg|C?F7C7Ep3~{V(Q5+zC!TMj1VkDuv;KTsebYW(6>g+3t$7_> zI$hS4XYcOSa4&PJ*dSxA^jmHVxkkZY=dM>~I+yhB+KrLrg<uGTJnm4*4Amq46Cx+>c!2zL5woX{s z+^&2AK@`(Khy`6N2^qt{J@N!{=pZ3^0u^+S7_t$Hd?7CQ=C;8F3^u!6 zLu3ZdYMTP|F92Ku!bklVOo?M?CFTTW^IPa$dXOA$-Zn~e1i040c|Qu(zx@^V@W$i; zP$pzAiX%6}m>&Mfuf!@E&R zhr?m_X&mP_5|L+(yZb1v8UI~**EY)@+4de{*P4p5#w5tI+0H+%tDPbS=>!dSKgn>H z&Zn_j9{kSTG@JQb4dF7=r$#*w4J*)TPH)dONFJcSOwariV63|jY=3fx*Rjq(M6YMo zw;#={pI4OsD!Rw6Nhxn2FZ{@fUUagA_S#-&uXtp9MuX<&tvnmdoE{uJ?@N*6h~Q`B zEVq0BGwhF6tdGXZmeDrUMYu2a^+ov3&sAJLiBbOodtyTm_!h1%%L?zc-NbN_Ch-#o zyB7-8p%`R$u^%4TMY)#SVYj#@Ivk1{d0+fYtHwfPf0C=p&S0Wv$}*K7Y~I9iA8=RWG2i!OB=@GaQ${Arcv7b=K$-;EcYvGZ*E zo+7+yxp$^q@j#$iBG>EhQNZ4v4pVjUs}t(J;Qq`y$@iiaUS29rvZBfdX$O1he3Pd9n5Ypv^o{#i(+Et zViIU6DRc3Ea|xJ>$)Tkb&{7Drl=2_>J`lz!OK2a8ES0_DZVAxlmsS^mYYR!Mi@>!- z;o4$wZ3$_07+m{x_wfEJjR!E#P`oid%v(L1djHVZMD z{@7oG-n^vcTBVbSZ#Ifd){IMBc$IS}HAWMb>&LMd?hy*HnA(?A)Sv1$F_|WL(5jhl zWyS@DuJ;8@D}~rN!_`=xG~LZz#EXkJWYTp!{n`=NWm%TBrZi9HlSbwdzqh_|1EU4f zPrl4a&g0rN&sPj8T5yUa*h#D7ohPekqL?q@m<3||hk%(c;$K6rq&;?Wl| z{w89U{&?*3Jyx!gp=HWu4#_WPM}Y?Ix7gTaiCT-_lxntLbFTQEpAYMqZqM;>8N7YV zcMaT5Iy=Lg#NIvzHu?Zgl>~9%G7m%brQ}&82d{*}dgb)s9s3s`+NFx}`H8X5C_@kED=T(s;QH=|)}^Kqt8E2pw#d#k*URjj+QexzzNb8OZ_ z*Yh*#{_O1Bbz;rokU#x7PHVVxbYylm>$Kos^cS7zd#N{MWCSY*wt`1GGHWetyXkJ2 
[GIT binary patch data (base85-encoded contents of deleted binary files) omitted; not human-readable]
diff --git a/venv/lib/python3.6/site-packages/dateutil/zoneinfo/rebuild.py b/venv/lib/python3.6/site-packages/dateutil/zoneinfo/rebuild.py
deleted file mode 100644
index 78f0d1a..0000000
--- a/venv/lib/python3.6/site-packages/dateutil/zoneinfo/rebuild.py
+++ /dev/null
@@ -1,53 +0,0 @@
-import logging
-import os
-import tempfile
-import shutil
-import json
-from subprocess import check_call
-from tarfile import TarFile
-
-from dateutil.zoneinfo import METADATA_FN, ZONEFILENAME
-
-
-def rebuild(filename, tag=None, format="gz", zonegroups=[], metadata=None):
-    """Rebuild the internal timezone info in dateutil/zoneinfo/zoneinfo*tar*
-
-    filename is the timezone tarball from ``ftp.iana.org/tz``.
-
-    """
-    tmpdir = tempfile.mkdtemp()
-    zonedir = os.path.join(tmpdir, "zoneinfo")
-    moduledir = os.path.dirname(__file__)
-    try:
-        with TarFile.open(filename) as tf:
-            for name in zonegroups:
-                tf.extract(name, tmpdir)
-            filepaths = [os.path.join(tmpdir, n) for n in zonegroups]
-            try:
-                check_call(["zic", "-d", zonedir] + filepaths)
-            except OSError as e:
-                _print_on_nosuchfile(e)
-                raise
-        # write metadata file
-        with open(os.path.join(zonedir, METADATA_FN), 'w') as f:
-            json.dump(metadata, f, indent=4, sort_keys=True)
-        target = os.path.join(moduledir, ZONEFILENAME)
-        with TarFile.open(target, "w:%s" % format) as tf:
-            for entry in os.listdir(zonedir):
-                entrypath = os.path.join(zonedir, entry)
-                tf.add(entrypath, entry)
-    finally:
-        shutil.rmtree(tmpdir)
-
-
-def _print_on_nosuchfile(e):
-    """Print helpful troubleshooting message
-
-    e is an exception raised by subprocess.check_call()
-
-    """
-    if e.errno == 2:
-        logging.error(
-            "Could not find zic. Perhaps you need to install "
-            "libc-bin or some other package that provides it, "
-            "or it's not in your PATH?")
diff --git a/venv/lib/python3.6/site-packages/ddt-1.2.1.dist-info/INSTALLER b/venv/lib/python3.6/site-packages/ddt-1.2.1.dist-info/INSTALLER
deleted file mode 100644
index a1b589e..0000000
--- a/venv/lib/python3.6/site-packages/ddt-1.2.1.dist-info/INSTALLER
+++ /dev/null
@@ -1 +0,0 @@
-pip
diff --git a/venv/lib/python3.6/site-packages/ddt-1.2.1.dist-info/LICENSE.md b/venv/lib/python3.6/site-packages/ddt-1.2.1.dist-info/LICENSE.md
deleted file mode 100644
index 26127d7..0000000
--- a/venv/lib/python3.6/site-packages/ddt-1.2.1.dist-info/LICENSE.md
+++ /dev/null
@@ -1,21 +0,0 @@
-Copyright © 2015 Carles Barrobés and additional contributors.
-
-Permission is hereby granted, free of charge, to any person obtaining a copy of
-this software and associated documentation files (the “Software”), to deal in
-the Software without restriction, including without limitation the rights to
-use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
-of the Software, and to permit persons to whom the Software is furnished to do
-so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
- -([MIT License](http://mit-license.org/)) diff --git a/venv/lib/python3.6/site-packages/ddt-1.2.1.dist-info/METADATA b/venv/lib/python3.6/site-packages/ddt-1.2.1.dist-info/METADATA deleted file mode 100644 index d60e3a2..0000000 --- a/venv/lib/python3.6/site-packages/ddt-1.2.1.dist-info/METADATA +++ /dev/null @@ -1,25 +0,0 @@ -Metadata-Version: 2.1 -Name: ddt -Version: 1.2.1 -Summary: Data-Driven/Decorated Tests -Home-page: https://github.com/txels/ddt -Author: Carles Barrobés -Author-email: carles@barrobes.com -License: UNKNOWN -Platform: UNKNOWN -Classifier: Development Status :: 4 - Beta -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Topic :: Software Development :: Testing - -A library to multiply test cases - - diff --git a/venv/lib/python3.6/site-packages/ddt-1.2.1.dist-info/RECORD b/venv/lib/python3.6/site-packages/ddt-1.2.1.dist-info/RECORD deleted file mode 100644 index e9dc9c7..0000000 --- a/venv/lib/python3.6/site-packages/ddt-1.2.1.dist-info/RECORD +++ /dev/null @@ -1,8 +0,0 @@ -ddt.py,sha256=uuBmXZnE1-IwX95u3sbTVaa4T83vLI4M3Zi4iy0U49c,9895 -ddt-1.2.1.dist-info/LICENSE.md,sha256=L36M-0oop3-nNaTsncDVmgF_Ph19Qh8ryFShEIEO93Q,1138 -ddt-1.2.1.dist-info/METADATA,sha256=X0HxNw0ZXT9o8QRpPLdhyPMusrjtDWpY_aAEHqZ3Wt8,831 -ddt-1.2.1.dist-info/WHEEL,sha256=HX-v9-noUkyUoxyZ1PMSuS7auUxDAR4VBdoYLqD0xws,110 -ddt-1.2.1.dist-info/top_level.txt,sha256=n_Kh_cBI3hQ8OTNnC_34Cq9ScGLoAMUfBzkDTGKLYl0,4 -ddt-1.2.1.dist-info/RECORD,, -ddt-1.2.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -__pycache__/ddt.cpython-36.pyc,, diff --git a/venv/lib/python3.6/site-packages/ddt-1.2.1.dist-info/WHEEL b/venv/lib/python3.6/site-packages/ddt-1.2.1.dist-info/WHEEL deleted file mode 100644 index c8240f0..0000000 --- a/venv/lib/python3.6/site-packages/ddt-1.2.1.dist-info/WHEEL +++ /dev/null @@ -1,6 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.1) -Root-Is-Purelib: true -Tag: py2-none-any -Tag: py3-none-any - diff --git a/venv/lib/python3.6/site-packages/ddt-1.2.1.dist-info/top_level.txt b/venv/lib/python3.6/site-packages/ddt-1.2.1.dist-info/top_level.txt deleted file mode 100644 index 96b1ffd..0000000 --- a/venv/lib/python3.6/site-packages/ddt-1.2.1.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -ddt diff --git a/venv/lib/python3.6/site-packages/ddt.py b/venv/lib/python3.6/site-packages/ddt.py deleted file mode 100644 index 636de02..0000000 --- a/venv/lib/python3.6/site-packages/ddt.py +++ /dev/null @@ -1,310 +0,0 @@ -# -*- coding: utf-8 -*- -# This file is a part of DDT (https://github.com/txels/ddt) -# Copyright 2012-2015 Carles Barrobés and DDT contributors -# For the exact contribution history, see the git revision log.
-# DDT is licensed under the MIT License, included in -# https://github.com/txels/ddt/blob/master/LICENSE.md - -import inspect -import json -import os -import re -import codecs -from functools import wraps - -try: - import yaml -except ImportError: # pragma: no cover - _have_yaml = False -else: - _have_yaml = True - -__version__ = '1.2.1' - -# These attributes will not conflict with any real python attribute -# They are added to the decorated test method and processed later -# by the `ddt` class decorator. - -DATA_ATTR = '%values' # store the data the test must run with -FILE_ATTR = '%file_path' # store the path to JSON file -UNPACK_ATTR = '%unpack' # remember that we have to unpack values -index_len = 5 # default max length of case index - - -try: - trivial_types = (type(None), bool, int, float, basestring) -except NameError: - trivial_types = (type(None), bool, int, float, str) - - -def is_trivial(value): - if isinstance(value, trivial_types): - return True - elif isinstance(value, (list, tuple)): - return all(map(is_trivial, value)) - return False - - -def unpack(func): - """ - Method decorator to add unpack feature. - - """ - setattr(func, UNPACK_ATTR, True) - return func - - -def data(*values): - """ - Method decorator to add to your test methods. - - Should be added to methods of instances of ``unittest.TestCase``. - - """ - global index_len - index_len = len(str(len(values))) - return idata(values) - - -def idata(iterable): - """ - Method decorator to add to your test methods. - - Should be added to methods of instances of ``unittest.TestCase``. - - """ - def wrapper(func): - setattr(func, DATA_ATTR, iterable) - return func - return wrapper - - -def file_data(value): - """ - Method decorator to add to your test methods. - - Should be added to methods of instances of ``unittest.TestCase``. - - ``value`` should be a path relative to the directory of the file - containing the decorated ``unittest.TestCase``. The file - should contain JSON encoded data, that can either be a list or a - dict. - - In case of a list, each value in the list will correspond to one - test case, and the value will be concatenated to the test method - name. - - In case of a dict, keys will be used as suffixes to the name of the - test case, and values will be fed as test data. - - """ - def wrapper(func): - setattr(func, FILE_ATTR, value) - return func - return wrapper - - -def mk_test_name(name, value, index=0): - """ - Generate a new name for a test case. - - It will take the original test name and append an ordinal index and a - string representation of the value, and convert the result into a valid - python identifier by replacing extraneous characters with ``_``. - - We avoid doing str(value) if dealing with non-trivial values. - The problem is possible different names with different runs, e.g. - different order of dictionary keys (see PYTHONHASHSEED) or dealing - with mock objects. - Trivial scalar values are passed as is. - - A "trivial" value is a plain scalar, or a tuple or list consisting - only of trivial values. 
- """ - - # Add zeros before index to keep order - index = "{0:0{1}}".format(index + 1, index_len) - if not is_trivial(value): - return "{0}_{1}".format(name, index) - try: - value = str(value) - except UnicodeEncodeError: - # fallback for python2 - value = value.encode('ascii', 'backslashreplace') - test_name = "{0}_{1}_{2}".format(name, index, value) - return re.sub(r'\W|^(?=\d)', '_', test_name) - - -def feed_data(func, new_name, test_data_docstring, *args, **kwargs): - """ - This internal method decorator feeds the test data item to the test. - - """ - @wraps(func) - def wrapper(self): - return func(self, *args, **kwargs) - wrapper.__name__ = new_name - wrapper.__wrapped__ = func - # set docstring if exists - if test_data_docstring is not None: - wrapper.__doc__ = test_data_docstring - else: - # Try to call format on the docstring - if func.__doc__: - try: - wrapper.__doc__ = func.__doc__.format(*args, **kwargs) - except (IndexError, KeyError): - # Maybe the user has added some of the formating strings - # unintentionally in the docstring. Do not raise an exception - # as it could be that user is not aware of the - # formating feature. - pass - return wrapper - - -def add_test(cls, test_name, test_docstring, func, *args, **kwargs): - """ - Add a test case to this class. - - The test will be based on an existing function but will give it a new - name. - - """ - setattr(cls, test_name, feed_data(func, test_name, test_docstring, - *args, **kwargs)) - - -def process_file_data(cls, name, func, file_attr): - """ - Process the parameter in the `file_data` decorator. - """ - cls_path = os.path.abspath(inspect.getsourcefile(cls)) - data_file_path = os.path.join(os.path.dirname(cls_path), file_attr) - - def create_error_func(message): # pylint: disable-msg=W0613 - def func(*args): - raise ValueError(message % file_attr) - return func - - # If file does not exist, provide an error function instead - if not os.path.exists(data_file_path): - test_name = mk_test_name(name, "error") - test_docstring = """Error!""" - add_test(cls, test_name, test_docstring, - create_error_func("%s does not exist"), None) - return - - _is_yaml_file = data_file_path.endswith((".yml", ".yaml")) - - # Don't have YAML but want to use YAML file. - if _is_yaml_file and not _have_yaml: - test_name = mk_test_name(name, "error") - test_docstring = """Error!""" - add_test( - cls, - test_name, - test_docstring, - create_error_func("%s is a YAML file, please install PyYAML"), - None - ) - return - - with codecs.open(data_file_path, 'r', 'utf-8') as f: - # Load the data from YAML or JSON - if _is_yaml_file: - data = yaml.safe_load(f) - else: - data = json.load(f) - - _add_tests_from_data(cls, name, func, data) - - -def _add_tests_from_data(cls, name, func, data): - """ - Add tests from data loaded from the data file into the class - """ - for i, elem in enumerate(data): - if isinstance(data, dict): - key, value = elem, data[elem] - test_name = mk_test_name(name, key, i) - elif isinstance(data, list): - value = elem - test_name = mk_test_name(name, value, i) - if isinstance(value, dict): - add_test(cls, test_name, test_name, func, **value) - else: - add_test(cls, test_name, test_name, func, value) - - -def _is_primitive(obj): - """Finds out if the obj is a "primitive". It is somewhat hacky but it works. - """ - return not hasattr(obj, '__dict__') - - -def _get_test_data_docstring(func, value): - """Returns a docstring based on the following resolution strategy: - 1. Passed value is not a "primitive" and has a docstring, then use it. 
- 2. In all other cases return None, i.e the test name is used. - """ - if not _is_primitive(value) and value.__doc__: - return value.__doc__ - else: - return None - - -def ddt(cls): - """ - Class decorator for subclasses of ``unittest.TestCase``. - - Apply this decorator to the test case class, and then - decorate test methods with ``@data``. - - For each method decorated with ``@data``, this will effectively create as - many methods as data items are passed as parameters to ``@data``. - - The names of the test methods follow the pattern - ``original_test_name_{ordinal}_{data}``. ``ordinal`` is the position of the - data argument, starting with 1. - - For data we use a string representation of the data value converted into a - valid python identifier. If ``data.__name__`` exists, we use that instead. - - For each method decorated with ``@file_data('test_data.json')``, the - decorator will try to load the test_data.json file located relative - to the python file containing the method that is decorated. It will, - for each ``test_name`` key create as many methods in the list of values - from the ``data`` key. - - """ - for name, func in list(cls.__dict__.items()): - if hasattr(func, DATA_ATTR): - for i, v in enumerate(getattr(func, DATA_ATTR)): - test_name = mk_test_name(name, getattr(v, "__name__", v), i) - test_data_docstring = _get_test_data_docstring(func, v) - if hasattr(func, UNPACK_ATTR): - if isinstance(v, tuple) or isinstance(v, list): - add_test( - cls, - test_name, - test_data_docstring, - func, - *v - ) - else: - # unpack dictionary - add_test( - cls, - test_name, - test_data_docstring, - func, - **v - ) - else: - add_test(cls, test_name, test_data_docstring, func, v) - delattr(cls, name) - elif hasattr(func, FILE_ATTR): - file_attr = getattr(func, FILE_ATTR) - process_file_data(cls, name, func, file_attr) - delattr(cls, name) - return cls diff --git a/venv/lib/python3.6/site-packages/dns/__init__.py b/venv/lib/python3.6/site-packages/dns/__init__.py deleted file mode 100644 index c1ce8e6..0000000 --- a/venv/lib/python3.6/site-packages/dns/__init__.py +++ /dev/null @@ -1,56 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009, 2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
- -"""dnspython DNS toolkit""" - -__all__ = [ - 'dnssec', - 'e164', - 'edns', - 'entropy', - 'exception', - 'flags', - 'hash', - 'inet', - 'ipv4', - 'ipv6', - 'message', - 'name', - 'namedict', - 'node', - 'opcode', - 'query', - 'rcode', - 'rdata', - 'rdataclass', - 'rdataset', - 'rdatatype', - 'renderer', - 'resolver', - 'reversename', - 'rrset', - 'set', - 'tokenizer', - 'tsig', - 'tsigkeyring', - 'ttl', - 'rdtypes', - 'update', - 'version', - 'wiredata', - 'zone', -] diff --git a/venv/lib/python3.6/site-packages/dns/_compat.py b/venv/lib/python3.6/site-packages/dns/_compat.py deleted file mode 100644 index ca0931c..0000000 --- a/venv/lib/python3.6/site-packages/dns/_compat.py +++ /dev/null @@ -1,59 +0,0 @@ -import sys -import decimal -from decimal import Context - -PY3 = sys.version_info[0] == 3 -PY2 = sys.version_info[0] == 2 - - -if PY3: - long = int - xrange = range -else: - long = long # pylint: disable=long-builtin - xrange = xrange # pylint: disable=xrange-builtin - -# unicode / binary types -if PY3: - text_type = str - binary_type = bytes - string_types = (str,) - unichr = chr - def maybe_decode(x): - return x.decode() - def maybe_encode(x): - return x.encode() - def maybe_chr(x): - return x - def maybe_ord(x): - return x -else: - text_type = unicode # pylint: disable=unicode-builtin, undefined-variable - binary_type = str - string_types = ( - basestring, # pylint: disable=basestring-builtin, undefined-variable - ) - unichr = unichr # pylint: disable=unichr-builtin - def maybe_decode(x): - return x - def maybe_encode(x): - return x - def maybe_chr(x): - return chr(x) - def maybe_ord(x): - return ord(x) - - -def round_py2_compat(what): - """ - Python 2 and Python 3 use different rounding strategies in round(). This - function ensures that results are python2/3 compatible and backward - compatible with previous py2 releases - :param what: float - :return: rounded long - """ - d = Context( - prec=len(str(long(what))), # round to integer with max precision - rounding=decimal.ROUND_HALF_UP - ).create_decimal(str(what)) # str(): python 2.6 compat - return long(d) diff --git a/venv/lib/python3.6/site-packages/dns/dnssec.py b/venv/lib/python3.6/site-packages/dns/dnssec.py deleted file mode 100644 index 35da6b5..0000000 --- a/venv/lib/python3.6/site-packages/dns/dnssec.py +++ /dev/null @@ -1,519 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
- -"""Common DNSSEC-related functions and constants.""" - -from io import BytesIO -import struct -import time - -import dns.exception -import dns.name -import dns.node -import dns.rdataset -import dns.rdata -import dns.rdatatype -import dns.rdataclass -from ._compat import string_types - - -class UnsupportedAlgorithm(dns.exception.DNSException): - """The DNSSEC algorithm is not supported.""" - - -class ValidationFailure(dns.exception.DNSException): - """The DNSSEC signature is invalid.""" - - -#: RSAMD5 -RSAMD5 = 1 -#: DH -DH = 2 -#: DSA -DSA = 3 -#: ECC -ECC = 4 -#: RSASHA1 -RSASHA1 = 5 -#: DSANSEC3SHA1 -DSANSEC3SHA1 = 6 -#: RSASHA1NSEC3SHA1 -RSASHA1NSEC3SHA1 = 7 -#: RSASHA256 -RSASHA256 = 8 -#: RSASHA512 -RSASHA512 = 10 -#: ECDSAP256SHA256 -ECDSAP256SHA256 = 13 -#: ECDSAP384SHA384 -ECDSAP384SHA384 = 14 -#: INDIRECT -INDIRECT = 252 -#: PRIVATEDNS -PRIVATEDNS = 253 -#: PRIVATEOID -PRIVATEOID = 254 - -_algorithm_by_text = { - 'RSAMD5': RSAMD5, - 'DH': DH, - 'DSA': DSA, - 'ECC': ECC, - 'RSASHA1': RSASHA1, - 'DSANSEC3SHA1': DSANSEC3SHA1, - 'RSASHA1NSEC3SHA1': RSASHA1NSEC3SHA1, - 'RSASHA256': RSASHA256, - 'RSASHA512': RSASHA512, - 'INDIRECT': INDIRECT, - 'ECDSAP256SHA256': ECDSAP256SHA256, - 'ECDSAP384SHA384': ECDSAP384SHA384, - 'PRIVATEDNS': PRIVATEDNS, - 'PRIVATEOID': PRIVATEOID, -} - -# We construct the inverse mapping programmatically to ensure that we -# cannot make any mistakes (e.g. omissions, cut-and-paste errors) that -# would cause the mapping not to be true inverse. - -_algorithm_by_value = {y: x for x, y in _algorithm_by_text.items()} - - -def algorithm_from_text(text): - """Convert text into a DNSSEC algorithm value. - - Returns an ``int``. - """ - - value = _algorithm_by_text.get(text.upper()) - if value is None: - value = int(text) - return value - - -def algorithm_to_text(value): - """Convert a DNSSEC algorithm value to text - - Returns a ``str``. - """ - - text = _algorithm_by_value.get(value) - if text is None: - text = str(value) - return text - - -def _to_rdata(record, origin): - s = BytesIO() - record.to_wire(s, origin=origin) - return s.getvalue() - - -def key_id(key, origin=None): - """Return the key id (a 16-bit number) for the specified key. - - Note the *origin* parameter of this function is historical and - is not needed. - - Returns an ``int`` between 0 and 65535. - """ - - rdata = _to_rdata(key, origin) - rdata = bytearray(rdata) - if key.algorithm == RSAMD5: - return (rdata[-3] << 8) + rdata[-2] - else: - total = 0 - for i in range(len(rdata) // 2): - total += (rdata[2 * i] << 8) + \ - rdata[2 * i + 1] - if len(rdata) % 2 != 0: - total += rdata[len(rdata) - 1] << 8 - total += ((total >> 16) & 0xffff) - return total & 0xffff - - -def make_ds(name, key, algorithm, origin=None): - """Create a DS record for a DNSSEC key. - - *name* is the owner name of the DS record. - - *key* is a ``dns.rdtypes.ANY.DNSKEY``. - - *algorithm* is a string describing which hash algorithm to use. The - currently supported hashes are "SHA1" and "SHA256". Case does not - matter for these strings. - - *origin* is a ``dns.name.Name`` and will be used as the origin - if *key* is a relative name. - - Returns a ``dns.rdtypes.ANY.DS``. 
- """ - - if algorithm.upper() == 'SHA1': - dsalg = 1 - hash = SHA1.new() - elif algorithm.upper() == 'SHA256': - dsalg = 2 - hash = SHA256.new() - else: - raise UnsupportedAlgorithm('unsupported algorithm "%s"' % algorithm) - - if isinstance(name, string_types): - name = dns.name.from_text(name, origin) - hash.update(name.canonicalize().to_wire()) - hash.update(_to_rdata(key, origin)) - digest = hash.digest() - - dsrdata = struct.pack("!HBB", key_id(key), key.algorithm, dsalg) + digest - return dns.rdata.from_wire(dns.rdataclass.IN, dns.rdatatype.DS, dsrdata, 0, - len(dsrdata)) - - -def _find_candidate_keys(keys, rrsig): - candidate_keys = [] - value = keys.get(rrsig.signer) - if value is None: - return None - if isinstance(value, dns.node.Node): - try: - rdataset = value.find_rdataset(dns.rdataclass.IN, - dns.rdatatype.DNSKEY) - except KeyError: - return None - else: - rdataset = value - for rdata in rdataset: - if rdata.algorithm == rrsig.algorithm and \ - key_id(rdata) == rrsig.key_tag: - candidate_keys.append(rdata) - return candidate_keys - - -def _is_rsa(algorithm): - return algorithm in (RSAMD5, RSASHA1, - RSASHA1NSEC3SHA1, RSASHA256, - RSASHA512) - - -def _is_dsa(algorithm): - return algorithm in (DSA, DSANSEC3SHA1) - - -def _is_ecdsa(algorithm): - return _have_ecdsa and (algorithm in (ECDSAP256SHA256, ECDSAP384SHA384)) - - -def _is_md5(algorithm): - return algorithm == RSAMD5 - - -def _is_sha1(algorithm): - return algorithm in (DSA, RSASHA1, - DSANSEC3SHA1, RSASHA1NSEC3SHA1) - - -def _is_sha256(algorithm): - return algorithm in (RSASHA256, ECDSAP256SHA256) - - -def _is_sha384(algorithm): - return algorithm == ECDSAP384SHA384 - - -def _is_sha512(algorithm): - return algorithm == RSASHA512 - - -def _make_hash(algorithm): - if _is_md5(algorithm): - return MD5.new() - if _is_sha1(algorithm): - return SHA1.new() - if _is_sha256(algorithm): - return SHA256.new() - if _is_sha384(algorithm): - return SHA384.new() - if _is_sha512(algorithm): - return SHA512.new() - raise ValidationFailure('unknown hash for algorithm %u' % algorithm) - - -def _make_algorithm_id(algorithm): - if _is_md5(algorithm): - oid = [0x2a, 0x86, 0x48, 0x86, 0xf7, 0x0d, 0x02, 0x05] - elif _is_sha1(algorithm): - oid = [0x2b, 0x0e, 0x03, 0x02, 0x1a] - elif _is_sha256(algorithm): - oid = [0x60, 0x86, 0x48, 0x01, 0x65, 0x03, 0x04, 0x02, 0x01] - elif _is_sha512(algorithm): - oid = [0x60, 0x86, 0x48, 0x01, 0x65, 0x03, 0x04, 0x02, 0x03] - else: - raise ValidationFailure('unknown algorithm %u' % algorithm) - olen = len(oid) - dlen = _make_hash(algorithm).digest_size - idbytes = [0x30] + [8 + olen + dlen] + \ - [0x30, olen + 4] + [0x06, olen] + oid + \ - [0x05, 0x00] + [0x04, dlen] - return struct.pack('!%dB' % len(idbytes), *idbytes) - - -def _validate_rrsig(rrset, rrsig, keys, origin=None, now=None): - """Validate an RRset against a single signature rdata - - The owner name of *rrsig* is assumed to be the same as the owner name - of *rrset*. - - *rrset* is the RRset to validate. It can be a ``dns.rrset.RRset`` or - a ``(dns.name.Name, dns.rdataset.Rdataset)`` tuple. - - *rrsig* is a ``dns.rdata.Rdata``, the signature to validate. - - *keys* is the key dictionary, used to find the DNSKEY associated with - a given name. The dictionary is keyed by a ``dns.name.Name``, and has - ``dns.node.Node`` or ``dns.rdataset.Rdataset`` values. - - *origin* is a ``dns.name.Name``, the origin to use for relative names. - - *now* is an ``int``, the time to use when validating the signatures, - in seconds since the UNIX epoch. 
The default is the current time. - """ - - if isinstance(origin, string_types): - origin = dns.name.from_text(origin, dns.name.root) - - candidate_keys = _find_candidate_keys(keys, rrsig) - if candidate_keys is None: - raise ValidationFailure('unknown key') - - for candidate_key in candidate_keys: - # For convenience, allow the rrset to be specified as a (name, - # rdataset) tuple as well as a proper rrset - if isinstance(rrset, tuple): - rrname = rrset[0] - rdataset = rrset[1] - else: - rrname = rrset.name - rdataset = rrset - - if now is None: - now = time.time() - if rrsig.expiration < now: - raise ValidationFailure('expired') - if rrsig.inception > now: - raise ValidationFailure('not yet valid') - - hash = _make_hash(rrsig.algorithm) - - if _is_rsa(rrsig.algorithm): - keyptr = candidate_key.key - (bytes_,) = struct.unpack('!B', keyptr[0:1]) - keyptr = keyptr[1:] - if bytes_ == 0: - (bytes_,) = struct.unpack('!H', keyptr[0:2]) - keyptr = keyptr[2:] - rsa_e = keyptr[0:bytes_] - rsa_n = keyptr[bytes_:] - try: - pubkey = CryptoRSA.construct( - (number.bytes_to_long(rsa_n), - number.bytes_to_long(rsa_e))) - except ValueError: - raise ValidationFailure('invalid public key') - sig = rrsig.signature - elif _is_dsa(rrsig.algorithm): - keyptr = candidate_key.key - (t,) = struct.unpack('!B', keyptr[0:1]) - keyptr = keyptr[1:] - octets = 64 + t * 8 - dsa_q = keyptr[0:20] - keyptr = keyptr[20:] - dsa_p = keyptr[0:octets] - keyptr = keyptr[octets:] - dsa_g = keyptr[0:octets] - keyptr = keyptr[octets:] - dsa_y = keyptr[0:octets] - pubkey = CryptoDSA.construct( - (number.bytes_to_long(dsa_y), - number.bytes_to_long(dsa_g), - number.bytes_to_long(dsa_p), - number.bytes_to_long(dsa_q))) - sig = rrsig.signature[1:] - elif _is_ecdsa(rrsig.algorithm): - # use ecdsa for NIST-384p -- not currently supported by pycryptodome - - keyptr = candidate_key.key - - if rrsig.algorithm == ECDSAP256SHA256: - curve = ecdsa.curves.NIST256p - key_len = 32 - elif rrsig.algorithm == ECDSAP384SHA384: - curve = ecdsa.curves.NIST384p - key_len = 48 - - x = number.bytes_to_long(keyptr[0:key_len]) - y = number.bytes_to_long(keyptr[key_len:key_len * 2]) - if not ecdsa.ecdsa.point_is_valid(curve.generator, x, y): - raise ValidationFailure('invalid ECDSA key') - point = ecdsa.ellipticcurve.Point(curve.curve, x, y, curve.order) - verifying_key = ecdsa.keys.VerifyingKey.from_public_point(point, - curve) - pubkey = ECKeyWrapper(verifying_key, key_len) - r = rrsig.signature[:key_len] - s = rrsig.signature[key_len:] - sig = ecdsa.ecdsa.Signature(number.bytes_to_long(r), - number.bytes_to_long(s)) - - else: - raise ValidationFailure('unknown algorithm %u' % rrsig.algorithm) - - hash.update(_to_rdata(rrsig, origin)[:18]) - hash.update(rrsig.signer.to_digestable(origin)) - - if rrsig.labels < len(rrname) - 1: - suffix = rrname.split(rrsig.labels + 1)[1] - rrname = dns.name.from_text('*', suffix) - rrnamebuf = rrname.to_digestable(origin) - rrfixed = struct.pack('!HHI', rdataset.rdtype, rdataset.rdclass, - rrsig.original_ttl) - rrlist = sorted(rdataset) - for rr in rrlist: - hash.update(rrnamebuf) - hash.update(rrfixed) - rrdata = rr.to_digestable(origin) - rrlen = struct.pack('!H', len(rrdata)) - hash.update(rrlen) - hash.update(rrdata) - - try: - if _is_rsa(rrsig.algorithm): - verifier = pkcs1_15.new(pubkey) - # will raise ValueError if verify fails: - verifier.verify(hash, sig) - elif _is_dsa(rrsig.algorithm): - verifier = DSS.new(pubkey, 'fips-186-3') - verifier.verify(hash, sig) - elif _is_ecdsa(rrsig.algorithm): - digest = 
hash.digest() - if not pubkey.verify(digest, sig): - raise ValueError - else: - # Raise here for code clarity; this won't actually ever happen - # since if the algorithm is really unknown we'd already have - # raised an exception above - raise ValidationFailure('unknown algorithm %u' % rrsig.algorithm) - # If we got here, we successfully verified so we can return without error - return - except ValueError: - # this happens on an individual validation failure - continue - # nothing verified -- raise failure: - raise ValidationFailure('verify failure') - - -def _validate(rrset, rrsigset, keys, origin=None, now=None): - """Validate an RRset. - - *rrset* is the RRset to validate. It can be a ``dns.rrset.RRset`` or - a ``(dns.name.Name, dns.rdataset.Rdataset)`` tuple. - - *rrsigset* is the signature RRset to be validated. It can be a - ``dns.rrset.RRset`` or a ``(dns.name.Name, dns.rdataset.Rdataset)`` tuple. - - *keys* is the key dictionary, used to find the DNSKEY associated with - a given name. The dictionary is keyed by a ``dns.name.Name``, and has - ``dns.node.Node`` or ``dns.rdataset.Rdataset`` values. - - *origin* is a ``dns.name.Name``, the origin to use for relative names. - - *now* is an ``int``, the time to use when validating the signatures, - in seconds since the UNIX epoch. The default is the current time. - """ - - if isinstance(origin, string_types): - origin = dns.name.from_text(origin, dns.name.root) - - if isinstance(rrset, tuple): - rrname = rrset[0] - else: - rrname = rrset.name - - if isinstance(rrsigset, tuple): - rrsigname = rrsigset[0] - rrsigrdataset = rrsigset[1] - else: - rrsigname = rrsigset.name - rrsigrdataset = rrsigset - - rrname = rrname.choose_relativity(origin) - rrsigname = rrsigname.choose_relativity(origin) - if rrname != rrsigname: - raise ValidationFailure("owner names do not match") - - for rrsig in rrsigrdataset: - try: - _validate_rrsig(rrset, rrsig, keys, origin, now) - return - except ValidationFailure: - pass - raise ValidationFailure("no RRSIGs validated") - - -def _need_pycrypto(*args, **kwargs): - raise NotImplementedError("DNSSEC validation requires pycryptodome/pycryptodomex") - - -try: - try: - # test we're using pycryptodome, not pycrypto (which misses SHA1 for example) - from Crypto.Hash import MD5, SHA1, SHA256, SHA384, SHA512 - from Crypto.PublicKey import RSA as CryptoRSA, DSA as CryptoDSA - from Crypto.Signature import pkcs1_15, DSS - from Crypto.Util import number - except ImportError: - from Cryptodome.Hash import MD5, SHA1, SHA256, SHA384, SHA512 - from Cryptodome.PublicKey import RSA as CryptoRSA, DSA as CryptoDSA - from Cryptodome.Signature import pkcs1_15, DSS - from Cryptodome.Util import number -except ImportError: - validate = _need_pycrypto - validate_rrsig = _need_pycrypto - _have_pycrypto = False - _have_ecdsa = False -else: - validate = _validate - validate_rrsig = _validate_rrsig - _have_pycrypto = True - - try: - import ecdsa - import ecdsa.ecdsa - import ecdsa.ellipticcurve - import ecdsa.keys - except ImportError: - _have_ecdsa = False - else: - _have_ecdsa = True - - class ECKeyWrapper(object): - - def __init__(self, key, key_len): - self.key = key - self.key_len = key_len - - def verify(self, digest, sig): - diglong = number.bytes_to_long(digest) - return self.key.pubkey.verifies(diglong, sig) diff --git a/venv/lib/python3.6/site-packages/dns/e164.py b/venv/lib/python3.6/site-packages/dns/e164.py deleted file mode 100644 index 758c47a..0000000 --- a/venv/lib/python3.6/site-packages/dns/e164.py +++ /dev/null @@ -1,105 
+0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2006-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -"""DNS E.164 helpers.""" - -import dns.exception -import dns.name -import dns.resolver -from ._compat import string_types, maybe_decode - -#: The public E.164 domain. -public_enum_domain = dns.name.from_text('e164.arpa.') - - -def from_e164(text, origin=public_enum_domain): - """Convert an E.164 number in textual form into a Name object whose - value is the ENUM domain name for that number. - - Non-digits in the text are ignored, i.e. "16505551212", - "+1.650.555.1212" and "1 (650) 555-1212" are all the same. - - *text*, a ``text``, is an E.164 number in textual form. - - *origin*, a ``dns.name.Name``, the domain in which the number - should be constructed. The default is ``e164.arpa.``. - - Returns a ``dns.name.Name``. - """ - - parts = [d for d in text if d.isdigit()] - parts.reverse() - return dns.name.from_text('.'.join(parts), origin=origin) - - -def to_e164(name, origin=public_enum_domain, want_plus_prefix=True): - """Convert an ENUM domain name into an E.164 number. - - Note that dnspython does not have any information about preferred - number formats within national numbering plans, so all numbers are - emitted as a simple string of digits, prefixed by a '+' (unless - *want_plus_prefix* is ``False``). - - *name* is a ``dns.name.Name``, the ENUM domain name. - - *origin* is a ``dns.name.Name``, a domain containing the ENUM - domain name. The name is relativized to this domain before being - converted to text. If ``None``, no relativization is done. - - *want_plus_prefix* is a ``bool``. If True, add a '+' to the beginning of - the returned number. - - Returns a ``text``. - - """ - if origin is not None: - name = name.relativize(origin) - dlabels = [d for d in name.labels if d.isdigit() and len(d) == 1] - if len(dlabels) != len(name.labels): - raise dns.exception.SyntaxError('non-digit labels in ENUM domain name') - dlabels.reverse() - text = b''.join(dlabels) - if want_plus_prefix: - text = b'+' + text - return maybe_decode(text) - - -def query(number, domains, resolver=None): - """Look for NAPTR RRs for the specified number in the specified domains. - - e.g. lookup('16505551212', ['e164.dnspython.org.', 'e164.arpa.']) - - *number*, a ``text`` is the number to look for. - - *domains* is an iterable containing ``dns.name.Name`` values. - - *resolver*, a ``dns.resolver.Resolver``, is the resolver to use. If - ``None``, the default resolver is used. 
- """ - - if resolver is None: - resolver = dns.resolver.get_default_resolver() - e_nx = dns.resolver.NXDOMAIN() - for domain in domains: - if isinstance(domain, string_types): - domain = dns.name.from_text(domain) - qname = dns.e164.from_e164(number, domain) - try: - return resolver.query(qname, 'NAPTR') - except dns.resolver.NXDOMAIN as e: - e_nx += e - raise e_nx diff --git a/venv/lib/python3.6/site-packages/dns/edns.py b/venv/lib/python3.6/site-packages/dns/edns.py deleted file mode 100644 index 5660f7b..0000000 --- a/venv/lib/python3.6/site-packages/dns/edns.py +++ /dev/null @@ -1,269 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2009-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -"""EDNS Options""" - -from __future__ import absolute_import - -import math -import struct - -import dns.inet - -#: NSID -NSID = 3 -#: DAU -DAU = 5 -#: DHU -DHU = 6 -#: N3U -N3U = 7 -#: ECS (client-subnet) -ECS = 8 -#: EXPIRE -EXPIRE = 9 -#: COOKIE -COOKIE = 10 -#: KEEPALIVE -KEEPALIVE = 11 -#: PADDING -PADDING = 12 -#: CHAIN -CHAIN = 13 - -class Option(object): - - """Base class for all EDNS option types.""" - - def __init__(self, otype): - """Initialize an option. - - *otype*, an ``int``, is the option type. - """ - self.otype = otype - - def to_wire(self, file): - """Convert an option to wire format. - """ - raise NotImplementedError - - @classmethod - def from_wire(cls, otype, wire, current, olen): - """Build an EDNS option object from wire format. - - *otype*, an ``int``, is the option type. - - *wire*, a ``binary``, is the wire-format message. - - *current*, an ``int``, is the offset in *wire* of the beginning - of the rdata. - - *olen*, an ``int``, is the length of the wire-format option data - - Returns a ``dns.edns.Option``. - """ - - raise NotImplementedError - - def _cmp(self, other): - """Compare an EDNS option with another option of the same type. - - Returns < 0 if < *other*, 0 if == *other*, and > 0 if > *other*. 
- """ - raise NotImplementedError - - def __eq__(self, other): - if not isinstance(other, Option): - return False - if self.otype != other.otype: - return False - return self._cmp(other) == 0 - - def __ne__(self, other): - if not isinstance(other, Option): - return False - if self.otype != other.otype: - return False - return self._cmp(other) != 0 - - def __lt__(self, other): - if not isinstance(other, Option) or \ - self.otype != other.otype: - return NotImplemented - return self._cmp(other) < 0 - - def __le__(self, other): - if not isinstance(other, Option) or \ - self.otype != other.otype: - return NotImplemented - return self._cmp(other) <= 0 - - def __ge__(self, other): - if not isinstance(other, Option) or \ - self.otype != other.otype: - return NotImplemented - return self._cmp(other) >= 0 - - def __gt__(self, other): - if not isinstance(other, Option) or \ - self.otype != other.otype: - return NotImplemented - return self._cmp(other) > 0 - - -class GenericOption(Option): - - """Generic Option Class - - This class is used for EDNS option types for which we have no better - implementation. - """ - - def __init__(self, otype, data): - super(GenericOption, self).__init__(otype) - self.data = data - - def to_wire(self, file): - file.write(self.data) - - def to_text(self): - return "Generic %d" % self.otype - - @classmethod - def from_wire(cls, otype, wire, current, olen): - return cls(otype, wire[current: current + olen]) - - def _cmp(self, other): - if self.data == other.data: - return 0 - if self.data > other.data: - return 1 - return -1 - - -class ECSOption(Option): - """EDNS Client Subnet (ECS, RFC7871)""" - - def __init__(self, address, srclen=None, scopelen=0): - """*address*, a ``text``, is the client address information. - - *srclen*, an ``int``, the source prefix length, which is the - leftmost number of bits of the address to be used for the - lookup. The default is 24 for IPv4 and 56 for IPv6. - - *scopelen*, an ``int``, the scope prefix length. This value - must be 0 in queries, and should be set in responses. 
- """ - - super(ECSOption, self).__init__(ECS) - af = dns.inet.af_for_address(address) - - if af == dns.inet.AF_INET6: - self.family = 2 - if srclen is None: - srclen = 56 - elif af == dns.inet.AF_INET: - self.family = 1 - if srclen is None: - srclen = 24 - else: - raise ValueError('Bad ip family') - - self.address = address - self.srclen = srclen - self.scopelen = scopelen - - addrdata = dns.inet.inet_pton(af, address) - nbytes = int(math.ceil(srclen/8.0)) - - # Truncate to srclen and pad to the end of the last octet needed - # See RFC section 6 - self.addrdata = addrdata[:nbytes] - nbits = srclen % 8 - if nbits != 0: - last = struct.pack('B', ord(self.addrdata[-1:]) & (0xff << nbits)) - self.addrdata = self.addrdata[:-1] + last - - def to_text(self): - return "ECS {}/{} scope/{}".format(self.address, self.srclen, - self.scopelen) - - def to_wire(self, file): - file.write(struct.pack('!H', self.family)) - file.write(struct.pack('!BB', self.srclen, self.scopelen)) - file.write(self.addrdata) - - @classmethod - def from_wire(cls, otype, wire, cur, olen): - family, src, scope = struct.unpack('!HBB', wire[cur:cur+4]) - cur += 4 - - addrlen = int(math.ceil(src/8.0)) - - if family == 1: - af = dns.inet.AF_INET - pad = 4 - addrlen - elif family == 2: - af = dns.inet.AF_INET6 - pad = 16 - addrlen - else: - raise ValueError('unsupported family') - - addr = dns.inet.inet_ntop(af, wire[cur:cur+addrlen] + b'\x00' * pad) - return cls(addr, src, scope) - - def _cmp(self, other): - if self.addrdata == other.addrdata: - return 0 - if self.addrdata > other.addrdata: - return 1 - return -1 - -_type_to_class = { - ECS: ECSOption -} - -def get_option_class(otype): - """Return the class for the specified option type. - - The GenericOption class is used if a more specific class is not - known. - """ - - cls = _type_to_class.get(otype) - if cls is None: - cls = GenericOption - return cls - - -def option_from_wire(otype, wire, current, olen): - """Build an EDNS option object from wire format. - - *otype*, an ``int``, is the option type. - - *wire*, a ``binary``, is the wire-format message. - - *current*, an ``int``, is the offset in *wire* of the beginning - of the rdata. - - *olen*, an ``int``, is the length of the wire-format option data - - Returns an instance of a subclass of ``dns.edns.Option``. - """ - - cls = get_option_class(otype) - return cls.from_wire(otype, wire, current, olen) diff --git a/venv/lib/python3.6/site-packages/dns/entropy.py b/venv/lib/python3.6/site-packages/dns/entropy.py deleted file mode 100644 index 00c6a4b..0000000 --- a/venv/lib/python3.6/site-packages/dns/entropy.py +++ /dev/null @@ -1,148 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2009-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
- -import os -import random -import time -from ._compat import long, binary_type -try: - import threading as _threading -except ImportError: - import dummy_threading as _threading - - -class EntropyPool(object): - - # This is an entropy pool for Python implementations that do not - # have a working SystemRandom. I'm not sure there are any, but - # leaving this code doesn't hurt anything as the library code - # is used if present. - - def __init__(self, seed=None): - self.pool_index = 0 - self.digest = None - self.next_byte = 0 - self.lock = _threading.Lock() - try: - import hashlib - self.hash = hashlib.sha1() - self.hash_len = 20 - except ImportError: - try: - import sha - self.hash = sha.new() - self.hash_len = 20 - except ImportError: - import md5 # pylint: disable=import-error - self.hash = md5.new() - self.hash_len = 16 - self.pool = bytearray(b'\0' * self.hash_len) - if seed is not None: - self.stir(bytearray(seed)) - self.seeded = True - self.seed_pid = os.getpid() - else: - self.seeded = False - self.seed_pid = 0 - - def stir(self, entropy, already_locked=False): - if not already_locked: - self.lock.acquire() - try: - for c in entropy: - if self.pool_index == self.hash_len: - self.pool_index = 0 - b = c & 0xff - self.pool[self.pool_index] ^= b - self.pool_index += 1 - finally: - if not already_locked: - self.lock.release() - - def _maybe_seed(self): - if not self.seeded or self.seed_pid != os.getpid(): - try: - seed = os.urandom(16) - except Exception: - try: - r = open('/dev/urandom', 'rb', 0) - try: - seed = r.read(16) - finally: - r.close() - except Exception: - seed = str(time.time()) - self.seeded = True - self.seed_pid = os.getpid() - self.digest = None - seed = bytearray(seed) - self.stir(seed, True) - - def random_8(self): - self.lock.acquire() - try: - self._maybe_seed() - if self.digest is None or self.next_byte == self.hash_len: - self.hash.update(binary_type(self.pool)) - self.digest = bytearray(self.hash.digest()) - self.stir(self.digest, True) - self.next_byte = 0 - value = self.digest[self.next_byte] - self.next_byte += 1 - finally: - self.lock.release() - return value - - def random_16(self): - return self.random_8() * 256 + self.random_8() - - def random_32(self): - return self.random_16() * 65536 + self.random_16() - - def random_between(self, first, last): - size = last - first + 1 - if size > long(4294967296): - raise ValueError('too big') - if size > 65536: - rand = self.random_32 - max = long(4294967295) - elif size > 256: - rand = self.random_16 - max = 65535 - else: - rand = self.random_8 - max = 255 - return first + size * rand() // (max + 1) - -pool = EntropyPool() - -try: - system_random = random.SystemRandom() -except Exception: - system_random = None - -def random_16(): - if system_random is not None: - return system_random.randrange(0, 65536) - else: - return pool.random_16() - -def between(first, last): - if system_random is not None: - return system_random.randrange(first, last + 1) - else: - return pool.random_between(first, last) diff --git a/venv/lib/python3.6/site-packages/dns/exception.py b/venv/lib/python3.6/site-packages/dns/exception.py deleted file mode 100644 index 71ff04f..0000000 --- a/venv/lib/python3.6/site-packages/dns/exception.py +++ /dev/null @@ -1,128 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2017 Nominum, Inc. 
-# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -"""Common DNS Exceptions. - -Dnspython modules may also define their own exceptions, which will -always be subclasses of ``DNSException``. -""" - -class DNSException(Exception): - """Abstract base class shared by all dnspython exceptions. - - It supports two basic modes of operation: - - a) Old/compatible mode is used if ``__init__`` was called with - empty *kwargs*. In compatible mode all *args* are passed - to the standard Python Exception class as before and all *args* are - printed by the standard ``__str__`` implementation. Class variable - ``msg`` (or doc string if ``msg`` is ``None``) is returned from ``str()`` - if *args* is empty. - - b) New/parametrized mode is used if ``__init__`` was called with - non-empty *kwargs*. - In the new mode *args* must be empty and all kwargs must match - those set in class variable ``supp_kwargs``. All kwargs are stored inside - ``self.kwargs`` and used in a new ``__str__`` implementation to construct - a formatted message based on the ``fmt`` class variable, a ``string``. - - In the simplest case it is enough to override the ``supp_kwargs`` - and ``fmt`` class variables to get nice parametrized messages. - """ - - msg = None # non-parametrized message - supp_kwargs = set() # accepted parameters for _fmt_kwargs (sanity check) - fmt = None # message parametrized with results from _fmt_kwargs - - def __init__(self, *args, **kwargs): - self._check_params(*args, **kwargs) - if kwargs: - self.kwargs = self._check_kwargs(**kwargs) - self.msg = str(self) - else: - self.kwargs = dict() # defined but empty for old mode exceptions - if self.msg is None: - # doc string is better implicit message than empty string - self.msg = self.__doc__ - if args: - super(DNSException, self).__init__(*args) - else: - super(DNSException, self).__init__(self.msg) - - def _check_params(self, *args, **kwargs): - """Old exceptions supported only args and not kwargs. - - For sanity we do not allow to mix old and new behavior.""" - if args or kwargs: - assert bool(args) != bool(kwargs), \ - 'keyword arguments are mutually exclusive with positional args' - - def _check_kwargs(self, **kwargs): - if kwargs: - assert set(kwargs.keys()) == self.supp_kwargs, \ - 'following set of keyword args is required: %s' % ( - self.supp_kwargs) - return kwargs - - def _fmt_kwargs(self, **kwargs): - """Format kwargs before printing them. - - Resulting dictionary has to have keys necessary for str.format call - on fmt class variable. 
- """ - fmtargs = {} - for kw, data in kwargs.items(): - if isinstance(data, (list, set)): - # convert list of to list of str() - fmtargs[kw] = list(map(str, data)) - if len(fmtargs[kw]) == 1: - # remove list brackets [] from single-item lists - fmtargs[kw] = fmtargs[kw].pop() - else: - fmtargs[kw] = data - return fmtargs - - def __str__(self): - if self.kwargs and self.fmt: - # provide custom message constructed from keyword arguments - fmtargs = self._fmt_kwargs(**self.kwargs) - return self.fmt.format(**fmtargs) - else: - # print *args directly in the same way as old DNSException - return super(DNSException, self).__str__() - - -class FormError(DNSException): - """DNS message is malformed.""" - - -class SyntaxError(DNSException): - """Text input is malformed.""" - - -class UnexpectedEnd(SyntaxError): - """Text input ended unexpectedly.""" - - -class TooBig(DNSException): - """The DNS message is too big.""" - - -class Timeout(DNSException): - """The DNS operation timed out.""" - supp_kwargs = {'timeout'} - fmt = "The DNS operation timed out after {timeout} seconds" diff --git a/venv/lib/python3.6/site-packages/dns/flags.py b/venv/lib/python3.6/site-packages/dns/flags.py deleted file mode 100644 index 0119dec..0000000 --- a/venv/lib/python3.6/site-packages/dns/flags.py +++ /dev/null @@ -1,130 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2001-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -"""DNS Message Flags.""" - -# Standard DNS flags - -#: Query Response -QR = 0x8000 -#: Authoritative Answer -AA = 0x0400 -#: Truncated Response -TC = 0x0200 -#: Recursion Desired -RD = 0x0100 -#: Recursion Available -RA = 0x0080 -#: Authentic Data -AD = 0x0020 -#: Checking Disabled -CD = 0x0010 - -# EDNS flags - -#: DNSSEC answer OK -DO = 0x8000 - -_by_text = { - 'QR': QR, - 'AA': AA, - 'TC': TC, - 'RD': RD, - 'RA': RA, - 'AD': AD, - 'CD': CD -} - -_edns_by_text = { - 'DO': DO -} - - -# We construct the inverse mappings programmatically to ensure that we -# cannot make any mistakes (e.g. omissions, cut-and-paste errors) that -# would cause the mappings not to be true inverses. 
- -_by_value = {y: x for x, y in _by_text.items()} - -_edns_by_value = {y: x for x, y in _edns_by_text.items()} - - -def _order_flags(table): - order = list(table.items()) - order.sort() - order.reverse() - return order - -_flags_order = _order_flags(_by_value) - -_edns_flags_order = _order_flags(_edns_by_value) - - -def _from_text(text, table): - flags = 0 - tokens = text.split() - for t in tokens: - flags = flags | table[t.upper()] - return flags - - -def _to_text(flags, table, order): - text_flags = [] - for k, v in order: - if flags & k != 0: - text_flags.append(v) - return ' '.join(text_flags) - - -def from_text(text): - """Convert a space-separated list of flag text values into a flags - value. - - Returns an ``int`` - """ - - return _from_text(text, _by_text) - - -def to_text(flags): - """Convert a flags value into a space-separated list of flag text - values. - - Returns a ``text``. - """ - - return _to_text(flags, _by_value, _flags_order) - - -def edns_from_text(text): - """Convert a space-separated list of EDNS flag text values into a EDNS - flags value. - - Returns an ``int`` - """ - - return _from_text(text, _edns_by_text) - - -def edns_to_text(flags): - """Convert an EDNS flags value into a space-separated list of EDNS flag - text values. - - Returns a ``text``. - """ - - return _to_text(flags, _edns_by_value, _edns_flags_order) diff --git a/venv/lib/python3.6/site-packages/dns/grange.py b/venv/lib/python3.6/site-packages/dns/grange.py deleted file mode 100644 index ffe8be7..0000000 --- a/venv/lib/python3.6/site-packages/dns/grange.py +++ /dev/null @@ -1,69 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2012-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -"""DNS GENERATE range conversion.""" - -import dns - -def from_text(text): - """Convert the text form of a range in a ``$GENERATE`` statement to an - integer. - - *text*, a ``str``, the textual range in ``$GENERATE`` form. - - Returns a tuple of three ``int`` values ``(start, stop, step)``. - """ - - # TODO, figure out the bounds on start, stop and step. - step = 1 - cur = '' - state = 0 - # state 0 1 2 3 4 - # x - y / z - - if text and text[0] == '-': - raise dns.exception.SyntaxError("Start cannot be a negative number") - - for c in text: - if c == '-' and state == 0: - start = int(cur) - cur = '' - state = 2 - elif c == '/': - stop = int(cur) - cur = '' - state = 4 - elif c.isdigit(): - cur += c - else: - raise dns.exception.SyntaxError("Could not parse %s" % (c)) - - if state in (1, 3): - raise dns.exception.SyntaxError() - - if state == 2: - stop = int(cur) - - if state == 4: - step = int(cur) - - assert step >= 1 - assert start >= 0 - assert start <= stop - # TODO, can start == stop? 
- - return (start, stop, step) diff --git a/venv/lib/python3.6/site-packages/dns/hash.py b/venv/lib/python3.6/site-packages/dns/hash.py deleted file mode 100644 index 1713e62..0000000 --- a/venv/lib/python3.6/site-packages/dns/hash.py +++ /dev/null @@ -1,37 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -"""Hashing backwards compatibility wrapper""" - -import hashlib -import warnings - -warnings.warn( - "dns.hash module will be removed in future versions. Please use hashlib instead.", - DeprecationWarning) - -hashes = {} -hashes['MD5'] = hashlib.md5 -hashes['SHA1'] = hashlib.sha1 -hashes['SHA224'] = hashlib.sha224 -hashes['SHA256'] = hashlib.sha256 -hashes['SHA384'] = hashlib.sha384 -hashes['SHA512'] = hashlib.sha512 - - -def get(algorithm): - return hashes[algorithm.upper()] diff --git a/venv/lib/python3.6/site-packages/dns/inet.py b/venv/lib/python3.6/site-packages/dns/inet.py deleted file mode 100644 index c8d7c1b..0000000 --- a/venv/lib/python3.6/site-packages/dns/inet.py +++ /dev/null @@ -1,124 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -"""Generic Internet address helper functions.""" - -import socket - -import dns.ipv4 -import dns.ipv6 - -from ._compat import maybe_ord - -# We assume that AF_INET is always defined. - -AF_INET = socket.AF_INET - -# AF_INET6 might not be defined in the socket module, but we need it. -# We'll try to use the socket module's value, and if it doesn't work, -# we'll use our own value. - -try: - AF_INET6 = socket.AF_INET6 -except AttributeError: - AF_INET6 = 9999 - - -def inet_pton(family, text): - """Convert the textual form of a network address into its binary form. - - *family* is an ``int``, the address family. - - *text* is a ``text``, the textual address. - - Raises ``NotImplementedError`` if the address family specified is not - implemented. - - Returns a ``binary``. 
- """ - - if family == AF_INET: - return dns.ipv4.inet_aton(text) - elif family == AF_INET6: - return dns.ipv6.inet_aton(text) - else: - raise NotImplementedError - - -def inet_ntop(family, address): - """Convert the binary form of a network address into its textual form. - - *family* is an ``int``, the address family. - - *address* is a ``binary``, the network address in binary form. - - Raises ``NotImplementedError`` if the address family specified is not - implemented. - - Returns a ``text``. - """ - - if family == AF_INET: - return dns.ipv4.inet_ntoa(address) - elif family == AF_INET6: - return dns.ipv6.inet_ntoa(address) - else: - raise NotImplementedError - - -def af_for_address(text): - """Determine the address family of a textual-form network address. - - *text*, a ``text``, the textual address. - - Raises ``ValueError`` if the address family cannot be determined - from the input. - - Returns an ``int``. - """ - - try: - dns.ipv4.inet_aton(text) - return AF_INET - except Exception: - try: - dns.ipv6.inet_aton(text) - return AF_INET6 - except: - raise ValueError - - -def is_multicast(text): - """Is the textual-form network address a multicast address? - - *text*, a ``text``, the textual address. - - Raises ``ValueError`` if the address family cannot be determined - from the input. - - Returns a ``bool``. - """ - - try: - first = maybe_ord(dns.ipv4.inet_aton(text)[0]) - return first >= 224 and first <= 239 - except Exception: - try: - first = maybe_ord(dns.ipv6.inet_aton(text)[0]) - return first == 255 - except Exception: - raise ValueError diff --git a/venv/lib/python3.6/site-packages/dns/ipv4.py b/venv/lib/python3.6/site-packages/dns/ipv4.py deleted file mode 100644 index 8fc4f7d..0000000 --- a/venv/lib/python3.6/site-packages/dns/ipv4.py +++ /dev/null @@ -1,63 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -"""IPv4 helper functions.""" - -import struct - -import dns.exception -from ._compat import binary_type - -def inet_ntoa(address): - """Convert an IPv4 address in binary form to text form. - - *address*, a ``binary``, the IPv4 address in binary form. - - Returns a ``text``. - """ - - if len(address) != 4: - raise dns.exception.SyntaxError - if not isinstance(address, bytearray): - address = bytearray(address) - return ('%u.%u.%u.%u' % (address[0], address[1], - address[2], address[3])) - -def inet_aton(text): - """Convert an IPv4 address in text form to binary form. - - *text*, a ``text``, the IPv4 address in textual form. - - Returns a ``binary``. 
- """ - - if not isinstance(text, binary_type): - text = text.encode() - parts = text.split(b'.') - if len(parts) != 4: - raise dns.exception.SyntaxError - for part in parts: - if not part.isdigit(): - raise dns.exception.SyntaxError - if len(part) > 1 and part[0] == '0': - # No leading zeros - raise dns.exception.SyntaxError - try: - bytes = [int(part) for part in parts] - return struct.pack('BBBB', *bytes) - except: - raise dns.exception.SyntaxError diff --git a/venv/lib/python3.6/site-packages/dns/ipv6.py b/venv/lib/python3.6/site-packages/dns/ipv6.py deleted file mode 100644 index 128e56c..0000000 --- a/venv/lib/python3.6/site-packages/dns/ipv6.py +++ /dev/null @@ -1,181 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -"""IPv6 helper functions.""" - -import re -import binascii - -import dns.exception -import dns.ipv4 -from ._compat import xrange, binary_type, maybe_decode - -_leading_zero = re.compile(r'0+([0-9a-f]+)') - -def inet_ntoa(address): - """Convert an IPv6 address in binary form to text form. - - *address*, a ``binary``, the IPv6 address in binary form. - - Raises ``ValueError`` if the address isn't 16 bytes long. - Returns a ``text``. - """ - - if len(address) != 16: - raise ValueError("IPv6 addresses are 16 bytes long") - hex = binascii.hexlify(address) - chunks = [] - i = 0 - l = len(hex) - while i < l: - chunk = maybe_decode(hex[i : i + 4]) - # strip leading zeros. 
we do this with an re instead of - # with lstrip() because lstrip() didn't support chars until - # python 2.2.2 - m = _leading_zero.match(chunk) - if not m is None: - chunk = m.group(1) - chunks.append(chunk) - i += 4 - # - # Compress the longest subsequence of 0-value chunks to :: - # - best_start = 0 - best_len = 0 - start = -1 - last_was_zero = False - for i in xrange(8): - if chunks[i] != '0': - if last_was_zero: - end = i - current_len = end - start - if current_len > best_len: - best_start = start - best_len = current_len - last_was_zero = False - elif not last_was_zero: - start = i - last_was_zero = True - if last_was_zero: - end = 8 - current_len = end - start - if current_len > best_len: - best_start = start - best_len = current_len - if best_len > 1: - if best_start == 0 and \ - (best_len == 6 or - best_len == 5 and chunks[5] == 'ffff'): - # We have an embedded IPv4 address - if best_len == 6: - prefix = '::' - else: - prefix = '::ffff:' - hex = prefix + dns.ipv4.inet_ntoa(address[12:]) - else: - hex = ':'.join(chunks[:best_start]) + '::' + \ - ':'.join(chunks[best_start + best_len:]) - else: - hex = ':'.join(chunks) - return hex - -_v4_ending = re.compile(br'(.*):(\d+\.\d+\.\d+\.\d+)$') -_colon_colon_start = re.compile(br'::.*') -_colon_colon_end = re.compile(br'.*::$') - -def inet_aton(text): - """Convert an IPv6 address in text form to binary form. - - *text*, a ``text``, the IPv6 address in textual form. - - Returns a ``binary``. - """ - - # - # Our aim here is not something fast; we just want something that works. - # - if not isinstance(text, binary_type): - text = text.encode() - - if text == b'::': - text = b'0::' - # - # Get rid of the icky dot-quad syntax if we have it. - # - m = _v4_ending.match(text) - if not m is None: - b = bytearray(dns.ipv4.inet_aton(m.group(2))) - text = (u"{}:{:02x}{:02x}:{:02x}{:02x}".format(m.group(1).decode(), - b[0], b[1], b[2], - b[3])).encode() - # - # Try to turn '::' into ':'; if no match try to - # turn '::' into ':' - # - m = _colon_colon_start.match(text) - if not m is None: - text = text[1:] - else: - m = _colon_colon_end.match(text) - if not m is None: - text = text[:-1] - # - # Now canonicalize into 8 chunks of 4 hex digits each - # - chunks = text.split(b':') - l = len(chunks) - if l > 8: - raise dns.exception.SyntaxError - seen_empty = False - canonical = [] - for c in chunks: - if c == b'': - if seen_empty: - raise dns.exception.SyntaxError - seen_empty = True - for i in xrange(0, 8 - l + 1): - canonical.append(b'0000') - else: - lc = len(c) - if lc > 4: - raise dns.exception.SyntaxError - if lc != 4: - c = (b'0' * (4 - lc)) + c - canonical.append(c) - if l < 8 and not seen_empty: - raise dns.exception.SyntaxError - text = b''.join(canonical) - - # - # Finally we can go to binary. - # - try: - return binascii.unhexlify(text) - except (binascii.Error, TypeError): - raise dns.exception.SyntaxError - -_mapped_prefix = b'\x00' * 10 + b'\xff\xff' - -def is_mapped(address): - """Is the specified address a mapped IPv4 address? - - *address*, a ``binary`` is an IPv6 address in binary form. - - Returns a ``bool``. - """ - - return address.startswith(_mapped_prefix) diff --git a/venv/lib/python3.6/site-packages/dns/message.py b/venv/lib/python3.6/site-packages/dns/message.py deleted file mode 100644 index 9d2b2f4..0000000 --- a/venv/lib/python3.6/site-packages/dns/message.py +++ /dev/null @@ -1,1175 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2001-2017 Nominum, Inc. 
-# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -"""DNS Messages""" - -from __future__ import absolute_import - -from io import StringIO -import struct -import time - -import dns.edns -import dns.exception -import dns.flags -import dns.name -import dns.opcode -import dns.entropy -import dns.rcode -import dns.rdata -import dns.rdataclass -import dns.rdatatype -import dns.rrset -import dns.renderer -import dns.tsig -import dns.wiredata - -from ._compat import long, xrange, string_types - - -class ShortHeader(dns.exception.FormError): - """The DNS packet passed to from_wire() is too short.""" - - -class TrailingJunk(dns.exception.FormError): - """The DNS packet passed to from_wire() has extra junk at the end of it.""" - - -class UnknownHeaderField(dns.exception.DNSException): - """The header field name was not recognized when converting from text - into a message.""" - - -class BadEDNS(dns.exception.FormError): - """An OPT record occurred somewhere other than the start of - the additional data section.""" - - -class BadTSIG(dns.exception.FormError): - """A TSIG record occurred somewhere other than the end of - the additional data section.""" - - -class UnknownTSIGKey(dns.exception.DNSException): - """A TSIG with an unknown key was received.""" - - -#: The question section number -QUESTION = 0 - -#: The answer section number -ANSWER = 1 - -#: The authority section number -AUTHORITY = 2 - -#: The additional section number -ADDITIONAL = 3 - -class Message(object): - """A DNS message.""" - - def __init__(self, id=None): - if id is None: - self.id = dns.entropy.random_16() - else: - self.id = id - self.flags = 0 - self.question = [] - self.answer = [] - self.authority = [] - self.additional = [] - self.edns = -1 - self.ednsflags = 0 - self.payload = 0 - self.options = [] - self.request_payload = 0 - self.keyring = None - self.keyname = None - self.keyalgorithm = dns.tsig.default_algorithm - self.request_mac = b'' - self.other_data = b'' - self.tsig_error = 0 - self.fudge = 300 - self.original_id = self.id - self.mac = b'' - self.xfr = False - self.origin = None - self.tsig_ctx = None - self.had_tsig = False - self.multi = False - self.first = True - self.index = {} - - def __repr__(self): - return '' - - def __str__(self): - return self.to_text() - - def to_text(self, origin=None, relativize=True, **kw): - """Convert the message to text. - - The *origin*, *relativize*, and any other keyword - arguments are passed to the RRset ``to_wire()`` method. - - Returns a ``text``. 
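For context, dns.message.Message is the ordinary dnspython entry point the application keeps using after this deletion; a small sketch of building one and rendering it, assuming the installed distribution:

    import dns.message
    import dns.rcode

    msg = dns.message.Message()           # random 16-bit id, all four sections empty
    msg.set_rcode(dns.rcode.REFUSED)      # updates the low flag bits (and EDNS bits if needed)
    assert msg.rcode() == dns.rcode.REFUSED
    print(msg.to_text())                  # header lines followed by the empty sections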
- """ - - s = StringIO() - s.write(u'id %d\n' % self.id) - s.write(u'opcode %s\n' % - dns.opcode.to_text(dns.opcode.from_flags(self.flags))) - rc = dns.rcode.from_flags(self.flags, self.ednsflags) - s.write(u'rcode %s\n' % dns.rcode.to_text(rc)) - s.write(u'flags %s\n' % dns.flags.to_text(self.flags)) - if self.edns >= 0: - s.write(u'edns %s\n' % self.edns) - if self.ednsflags != 0: - s.write(u'eflags %s\n' % - dns.flags.edns_to_text(self.ednsflags)) - s.write(u'payload %d\n' % self.payload) - for opt in self.options: - s.write(u'option %s\n' % opt.to_text()) - is_update = dns.opcode.is_update(self.flags) - if is_update: - s.write(u';ZONE\n') - else: - s.write(u';QUESTION\n') - for rrset in self.question: - s.write(rrset.to_text(origin, relativize, **kw)) - s.write(u'\n') - if is_update: - s.write(u';PREREQ\n') - else: - s.write(u';ANSWER\n') - for rrset in self.answer: - s.write(rrset.to_text(origin, relativize, **kw)) - s.write(u'\n') - if is_update: - s.write(u';UPDATE\n') - else: - s.write(u';AUTHORITY\n') - for rrset in self.authority: - s.write(rrset.to_text(origin, relativize, **kw)) - s.write(u'\n') - s.write(u';ADDITIONAL\n') - for rrset in self.additional: - s.write(rrset.to_text(origin, relativize, **kw)) - s.write(u'\n') - # - # We strip off the final \n so the caller can print the result without - # doing weird things to get around eccentricities in Python print - # formatting - # - return s.getvalue()[:-1] - - def __eq__(self, other): - """Two messages are equal if they have the same content in the - header, question, answer, and authority sections. - - Returns a ``bool``. - """ - - if not isinstance(other, Message): - return False - if self.id != other.id: - return False - if self.flags != other.flags: - return False - for n in self.question: - if n not in other.question: - return False - for n in other.question: - if n not in self.question: - return False - for n in self.answer: - if n not in other.answer: - return False - for n in other.answer: - if n not in self.answer: - return False - for n in self.authority: - if n not in other.authority: - return False - for n in other.authority: - if n not in self.authority: - return False - return True - - def __ne__(self, other): - return not self.__eq__(other) - - def is_response(self, other): - """Is this message a response to *other*? - - Returns a ``bool``. - """ - - if other.flags & dns.flags.QR == 0 or \ - self.id != other.id or \ - dns.opcode.from_flags(self.flags) != \ - dns.opcode.from_flags(other.flags): - return False - if dns.rcode.from_flags(other.flags, other.ednsflags) != \ - dns.rcode.NOERROR: - return True - if dns.opcode.is_update(self.flags): - return True - for n in self.question: - if n not in other.question: - return False - for n in other.question: - if n not in self.question: - return False - return True - - def section_number(self, section): - """Return the "section number" of the specified section for use - in indexing. The question section is 0, the answer section is 1, - the authority section is 2, and the additional section is 3. - - *section* is one of the section attributes of this message. - - Raises ``ValueError`` if the section isn't known. - - Returns an ``int``. 
- """ - - if section is self.question: - return QUESTION - elif section is self.answer: - return ANSWER - elif section is self.authority: - return AUTHORITY - elif section is self.additional: - return ADDITIONAL - else: - raise ValueError('unknown section') - - def section_from_number(self, number): - """Return the "section number" of the specified section for use - in indexing. The question section is 0, the answer section is 1, - the authority section is 2, and the additional section is 3. - - *section* is one of the section attributes of this message. - - Raises ``ValueError`` if the section isn't known. - - Returns an ``int``. - """ - - if number == QUESTION: - return self.question - elif number == ANSWER: - return self.answer - elif number == AUTHORITY: - return self.authority - elif number == ADDITIONAL: - return self.additional - else: - raise ValueError('unknown section') - - def find_rrset(self, section, name, rdclass, rdtype, - covers=dns.rdatatype.NONE, deleting=None, create=False, - force_unique=False): - """Find the RRset with the given attributes in the specified section. - - *section*, an ``int`` section number, or one of the section - attributes of this message. This specifies the - the section of the message to search. For example:: - - my_message.find_rrset(my_message.answer, name, rdclass, rdtype) - my_message.find_rrset(dns.message.ANSWER, name, rdclass, rdtype) - - *name*, a ``dns.name.Name``, the name of the RRset. - - *rdclass*, an ``int``, the class of the RRset. - - *rdtype*, an ``int``, the type of the RRset. - - *covers*, an ``int`` or ``None``, the covers value of the RRset. - The default is ``None``. - - *deleting*, an ``int`` or ``None``, the deleting value of the RRset. - The default is ``None``. - - *create*, a ``bool``. If ``True``, create the RRset if it is not found. - The created RRset is appended to *section*. - - *force_unique*, a ``bool``. If ``True`` and *create* is also ``True``, - create a new RRset regardless of whether a matching RRset exists - already. The default is ``False``. This is useful when creating - DDNS Update messages, as order matters for them. - - Raises ``KeyError`` if the RRset was not found and create was - ``False``. - - Returns a ``dns.rrset.RRset object``. - """ - - if isinstance(section, int): - section_number = section - section = self.section_from_number(section_number) - else: - section_number = self.section_number(section) - key = (section_number, name, rdclass, rdtype, covers, deleting) - if not force_unique: - if self.index is not None: - rrset = self.index.get(key) - if rrset is not None: - return rrset - else: - for rrset in section: - if rrset.match(name, rdclass, rdtype, covers, deleting): - return rrset - if not create: - raise KeyError - rrset = dns.rrset.RRset(name, rdclass, rdtype, covers, deleting) - section.append(rrset) - if self.index is not None: - self.index[key] = rrset - return rrset - - def get_rrset(self, section, name, rdclass, rdtype, - covers=dns.rdatatype.NONE, deleting=None, create=False, - force_unique=False): - """Get the RRset with the given attributes in the specified section. - - If the RRset is not found, None is returned. - - *section*, an ``int`` section number, or one of the section - attributes of this message. This specifies the - the section of the message to search. For example:: - - my_message.get_rrset(my_message.answer, name, rdclass, rdtype) - my_message.get_rrset(dns.message.ANSWER, name, rdclass, rdtype) - - *name*, a ``dns.name.Name``, the name of the RRset. 
- - *rdclass*, an ``int``, the class of the RRset. - - *rdtype*, an ``int``, the type of the RRset. - - *covers*, an ``int`` or ``None``, the covers value of the RRset. - The default is ``None``. - - *deleting*, an ``int`` or ``None``, the deleting value of the RRset. - The default is ``None``. - - *create*, a ``bool``. If ``True``, create the RRset if it is not found. - The created RRset is appended to *section*. - - *force_unique*, a ``bool``. If ``True`` and *create* is also ``True``, - create a new RRset regardless of whether a matching RRset exists - already. The default is ``False``. This is useful when creating - DDNS Update messages, as order matters for them. - - Returns a ``dns.rrset.RRset object`` or ``None``. - """ - - try: - rrset = self.find_rrset(section, name, rdclass, rdtype, covers, - deleting, create, force_unique) - except KeyError: - rrset = None - return rrset - - def to_wire(self, origin=None, max_size=0, **kw): - """Return a string containing the message in DNS compressed wire - format. - - Additional keyword arguments are passed to the RRset ``to_wire()`` - method. - - *origin*, a ``dns.name.Name`` or ``None``, the origin to be appended - to any relative names. - - *max_size*, an ``int``, the maximum size of the wire format - output; default is 0, which means "the message's request - payload, if nonzero, or 65535". - - Raises ``dns.exception.TooBig`` if *max_size* was exceeded. - - Returns a ``binary``. - """ - - if max_size == 0: - if self.request_payload != 0: - max_size = self.request_payload - else: - max_size = 65535 - if max_size < 512: - max_size = 512 - elif max_size > 65535: - max_size = 65535 - r = dns.renderer.Renderer(self.id, self.flags, max_size, origin) - for rrset in self.question: - r.add_question(rrset.name, rrset.rdtype, rrset.rdclass) - for rrset in self.answer: - r.add_rrset(dns.renderer.ANSWER, rrset, **kw) - for rrset in self.authority: - r.add_rrset(dns.renderer.AUTHORITY, rrset, **kw) - if self.edns >= 0: - r.add_edns(self.edns, self.ednsflags, self.payload, self.options) - for rrset in self.additional: - r.add_rrset(dns.renderer.ADDITIONAL, rrset, **kw) - r.write_header() - if self.keyname is not None: - r.add_tsig(self.keyname, self.keyring[self.keyname], - self.fudge, self.original_id, self.tsig_error, - self.other_data, self.request_mac, - self.keyalgorithm) - self.mac = r.mac - return r.get_wire() - - def use_tsig(self, keyring, keyname=None, fudge=300, - original_id=None, tsig_error=0, other_data=b'', - algorithm=dns.tsig.default_algorithm): - """When sending, a TSIG signature using the specified keyring - and keyname should be added. - - See the documentation of the Message class for a complete - description of the keyring dictionary. - - *keyring*, a ``dict``, the TSIG keyring to use. If a - *keyring* is specified but a *keyname* is not, then the key - used will be the first key in the *keyring*. Note that the - order of keys in a dictionary is not defined, so applications - should supply a keyname when a keyring is used, unless they - know the keyring contains only one key. - - *keyname*, a ``dns.name.Name`` or ``None``, the name of the TSIG key - to use; defaults to ``None``. The key must be defined in the keyring. - - *fudge*, an ``int``, the TSIG time fudge. - - *original_id*, an ``int``, the TSIG original id. If ``None``, - the message's id is used. - - *tsig_error*, an ``int``, the TSIG error code. - - *other_data*, a ``binary``, the TSIG other data. - - *algorithm*, a ``dns.name.Name``, the TSIG algorithm to use. 
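find_rrset() and get_rrset() differ only in whether a miss raises KeyError or returns None; a hedged sketch of that behaviour with placeholder names:

    import dns.message
    import dns.name
    import dns.rdataclass
    import dns.rdatatype

    msg = dns.message.Message()
    owner = dns.name.from_text('example.com.')    # placeholder owner name
    # create=True appends an empty RRset to the answer section and indexes it
    rrset = msg.find_rrset(msg.answer, owner, dns.rdataclass.IN, dns.rdatatype.A, create=True)
    # the same lookup through get_rrset() simply returns None on a miss
    assert msg.get_rrset(dns.message.ANSWER, owner, dns.rdataclass.IN, dns.rdatatype.AAAA) is None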
- """ - - self.keyring = keyring - if keyname is None: - self.keyname = list(self.keyring.keys())[0] - else: - if isinstance(keyname, string_types): - keyname = dns.name.from_text(keyname) - self.keyname = keyname - self.keyalgorithm = algorithm - self.fudge = fudge - if original_id is None: - self.original_id = self.id - else: - self.original_id = original_id - self.tsig_error = tsig_error - self.other_data = other_data - - def use_edns(self, edns=0, ednsflags=0, payload=1280, request_payload=None, - options=None): - """Configure EDNS behavior. - - *edns*, an ``int``, is the EDNS level to use. Specifying - ``None``, ``False``, or ``-1`` means "do not use EDNS", and in this case - the other parameters are ignored. Specifying ``True`` is - equivalent to specifying 0, i.e. "use EDNS0". - - *ednsflags*, an ``int``, the EDNS flag values. - - *payload*, an ``int``, is the EDNS sender's payload field, which is the - maximum size of UDP datagram the sender can handle. I.e. how big - a response to this message can be. - - *request_payload*, an ``int``, is the EDNS payload size to use when - sending this message. If not specified, defaults to the value of - *payload*. - - *options*, a list of ``dns.edns.Option`` objects or ``None``, the EDNS - options. - """ - - if edns is None or edns is False: - edns = -1 - if edns is True: - edns = 0 - if request_payload is None: - request_payload = payload - if edns < 0: - ednsflags = 0 - payload = 0 - request_payload = 0 - options = [] - else: - # make sure the EDNS version in ednsflags agrees with edns - ednsflags &= long(0xFF00FFFF) - ednsflags |= (edns << 16) - if options is None: - options = [] - self.edns = edns - self.ednsflags = ednsflags - self.payload = payload - self.options = options - self.request_payload = request_payload - - def want_dnssec(self, wanted=True): - """Enable or disable 'DNSSEC desired' flag in requests. - - *wanted*, a ``bool``. If ``True``, then DNSSEC data is - desired in the response, EDNS is enabled if required, and then - the DO bit is set. If ``False``, the DO bit is cleared if - EDNS is enabled. - """ - - if wanted: - if self.edns < 0: - self.use_edns() - self.ednsflags |= dns.flags.DO - elif self.edns >= 0: - self.ednsflags &= ~dns.flags.DO - - def rcode(self): - """Return the rcode. - - Returns an ``int``. - """ - return dns.rcode.from_flags(self.flags, self.ednsflags) - - def set_rcode(self, rcode): - """Set the rcode. - - *rcode*, an ``int``, is the rcode to set. - """ - (value, evalue) = dns.rcode.to_flags(rcode) - self.flags &= 0xFFF0 - self.flags |= value - self.ednsflags &= long(0x00FFFFFF) - self.ednsflags |= evalue - if self.ednsflags != 0 and self.edns < 0: - self.edns = 0 - - def opcode(self): - """Return the opcode. - - Returns an ``int``. - """ - return dns.opcode.from_flags(self.flags) - - def set_opcode(self, opcode): - """Set the opcode. - - *opcode*, an ``int``, is the opcode to set. - """ - self.flags &= 0x87FF - self.flags |= dns.opcode.to_flags(opcode) - - -class _WireReader(object): - - """Wire format reader. - - wire: a binary, is the wire-format message. - message: The message object being built - current: When building a message object from wire format, this - variable contains the offset from the beginning of wire of the next octet - to be read. - updating: Is the message a dynamic update? - one_rr_per_rrset: Put each RR into its own RRset? - ignore_trailing: Ignore trailing junk at end of request? - zone_rdclass: The class of the zone in messages which are - DNS dynamic updates. 
- """ - - def __init__(self, wire, message, question_only=False, - one_rr_per_rrset=False, ignore_trailing=False): - self.wire = dns.wiredata.maybe_wrap(wire) - self.message = message - self.current = 0 - self.updating = False - self.zone_rdclass = dns.rdataclass.IN - self.question_only = question_only - self.one_rr_per_rrset = one_rr_per_rrset - self.ignore_trailing = ignore_trailing - - def _get_question(self, qcount): - """Read the next *qcount* records from the wire data and add them to - the question section. - """ - - if self.updating and qcount > 1: - raise dns.exception.FormError - - for i in xrange(0, qcount): - (qname, used) = dns.name.from_wire(self.wire, self.current) - if self.message.origin is not None: - qname = qname.relativize(self.message.origin) - self.current = self.current + used - (rdtype, rdclass) = \ - struct.unpack('!HH', - self.wire[self.current:self.current + 4]) - self.current = self.current + 4 - self.message.find_rrset(self.message.question, qname, - rdclass, rdtype, create=True, - force_unique=True) - if self.updating: - self.zone_rdclass = rdclass - - def _get_section(self, section, count): - """Read the next I{count} records from the wire data and add them to - the specified section. - - section: the section of the message to which to add records - count: the number of records to read - """ - - if self.updating or self.one_rr_per_rrset: - force_unique = True - else: - force_unique = False - seen_opt = False - for i in xrange(0, count): - rr_start = self.current - (name, used) = dns.name.from_wire(self.wire, self.current) - absolute_name = name - if self.message.origin is not None: - name = name.relativize(self.message.origin) - self.current = self.current + used - (rdtype, rdclass, ttl, rdlen) = \ - struct.unpack('!HHIH', - self.wire[self.current:self.current + 10]) - self.current = self.current + 10 - if rdtype == dns.rdatatype.OPT: - if section is not self.message.additional or seen_opt: - raise BadEDNS - self.message.payload = rdclass - self.message.ednsflags = ttl - self.message.edns = (ttl & 0xff0000) >> 16 - self.message.options = [] - current = self.current - optslen = rdlen - while optslen > 0: - (otype, olen) = \ - struct.unpack('!HH', - self.wire[current:current + 4]) - current = current + 4 - opt = dns.edns.option_from_wire( - otype, self.wire, current, olen) - self.message.options.append(opt) - current = current + olen - optslen = optslen - 4 - olen - seen_opt = True - elif rdtype == dns.rdatatype.TSIG: - if not (section is self.message.additional and - i == (count - 1)): - raise BadTSIG - if self.message.keyring is None: - raise UnknownTSIGKey('got signed message without keyring') - secret = self.message.keyring.get(absolute_name) - if secret is None: - raise UnknownTSIGKey("key '%s' unknown" % name) - self.message.keyname = absolute_name - (self.message.keyalgorithm, self.message.mac) = \ - dns.tsig.get_algorithm_and_mac(self.wire, self.current, - rdlen) - self.message.tsig_ctx = \ - dns.tsig.validate(self.wire, - absolute_name, - secret, - int(time.time()), - self.message.request_mac, - rr_start, - self.current, - rdlen, - self.message.tsig_ctx, - self.message.multi, - self.message.first) - self.message.had_tsig = True - else: - if ttl < 0: - ttl = 0 - if self.updating and \ - (rdclass == dns.rdataclass.ANY or - rdclass == dns.rdataclass.NONE): - deleting = rdclass - rdclass = self.zone_rdclass - else: - deleting = None - if deleting == dns.rdataclass.ANY or \ - (deleting == dns.rdataclass.NONE and - section is self.message.answer): - covers 
= dns.rdatatype.NONE - rd = None - else: - rd = dns.rdata.from_wire(rdclass, rdtype, self.wire, - self.current, rdlen, - self.message.origin) - covers = rd.covers() - if self.message.xfr and rdtype == dns.rdatatype.SOA: - force_unique = True - rrset = self.message.find_rrset(section, name, - rdclass, rdtype, covers, - deleting, True, force_unique) - if rd is not None: - rrset.add(rd, ttl) - self.current = self.current + rdlen - - def read(self): - """Read a wire format DNS message and build a dns.message.Message - object.""" - - l = len(self.wire) - if l < 12: - raise ShortHeader - (self.message.id, self.message.flags, qcount, ancount, - aucount, adcount) = struct.unpack('!HHHHHH', self.wire[:12]) - self.current = 12 - if dns.opcode.is_update(self.message.flags): - self.updating = True - self._get_question(qcount) - if self.question_only: - return - self._get_section(self.message.answer, ancount) - self._get_section(self.message.authority, aucount) - self._get_section(self.message.additional, adcount) - if not self.ignore_trailing and self.current != l: - raise TrailingJunk - if self.message.multi and self.message.tsig_ctx and \ - not self.message.had_tsig: - self.message.tsig_ctx.update(self.wire) - - -def from_wire(wire, keyring=None, request_mac=b'', xfr=False, origin=None, - tsig_ctx=None, multi=False, first=True, - question_only=False, one_rr_per_rrset=False, - ignore_trailing=False): - """Convert a DNS wire format message into a message - object. - - *keyring*, a ``dict``, the keyring to use if the message is signed. - - *request_mac*, a ``binary``. If the message is a response to a - TSIG-signed request, *request_mac* should be set to the MAC of - that request. - - *xfr*, a ``bool``, should be set to ``True`` if this message is part of - a zone transfer. - - *origin*, a ``dns.name.Name`` or ``None``. If the message is part - of a zone transfer, *origin* should be the origin name of the - zone. - - *tsig_ctx*, a ``hmac.HMAC`` objext, the ongoing TSIG context, used - when validating zone transfers. - - *multi*, a ``bool``, should be set to ``True`` if this message - part of a multiple message sequence. - - *first*, a ``bool``, should be set to ``True`` if this message is - stand-alone, or the first message in a multi-message sequence. - - *question_only*, a ``bool``. If ``True``, read only up to - the end of the question section. - - *one_rr_per_rrset*, a ``bool``. If ``True``, put each RR into its - own RRset. - - *ignore_trailing*, a ``bool``. If ``True``, ignore trailing - junk at end of the message. - - Raises ``dns.message.ShortHeader`` if the message is less than 12 octets - long. - - Raises ``dns.messaage.TrailingJunk`` if there were octets in the message - past the end of the proper DNS message, and *ignore_trailing* is ``False``. - - Raises ``dns.message.BadEDNS`` if an OPT record was in the - wrong section, or occurred more than once. - - Raises ``dns.message.BadTSIG`` if a TSIG record was not the last - record of the additional data section. - - Returns a ``dns.message.Message``. - """ - - m = Message(id=0) - m.keyring = keyring - m.request_mac = request_mac - m.xfr = xfr - m.origin = origin - m.tsig_ctx = tsig_ctx - m.multi = multi - m.first = first - - reader = _WireReader(wire, m, question_only, one_rr_per_rrset, - ignore_trailing) - reader.read() - - return m - - -class _TextReader(object): - - """Text format reader. - - tok: the tokenizer. - message: The message object being built. - updating: Is the message a dynamic update? 
- zone_rdclass: The class of the zone in messages which are - DNS dynamic updates. - last_name: The most recently read name when building a message object. - """ - - def __init__(self, text, message): - self.message = message - self.tok = dns.tokenizer.Tokenizer(text) - self.last_name = None - self.zone_rdclass = dns.rdataclass.IN - self.updating = False - - def _header_line(self, section): - """Process one line from the text format header section.""" - - token = self.tok.get() - what = token.value - if what == 'id': - self.message.id = self.tok.get_int() - elif what == 'flags': - while True: - token = self.tok.get() - if not token.is_identifier(): - self.tok.unget(token) - break - self.message.flags = self.message.flags | \ - dns.flags.from_text(token.value) - if dns.opcode.is_update(self.message.flags): - self.updating = True - elif what == 'edns': - self.message.edns = self.tok.get_int() - self.message.ednsflags = self.message.ednsflags | \ - (self.message.edns << 16) - elif what == 'eflags': - if self.message.edns < 0: - self.message.edns = 0 - while True: - token = self.tok.get() - if not token.is_identifier(): - self.tok.unget(token) - break - self.message.ednsflags = self.message.ednsflags | \ - dns.flags.edns_from_text(token.value) - elif what == 'payload': - self.message.payload = self.tok.get_int() - if self.message.edns < 0: - self.message.edns = 0 - elif what == 'opcode': - text = self.tok.get_string() - self.message.flags = self.message.flags | \ - dns.opcode.to_flags(dns.opcode.from_text(text)) - elif what == 'rcode': - text = self.tok.get_string() - self.message.set_rcode(dns.rcode.from_text(text)) - else: - raise UnknownHeaderField - self.tok.get_eol() - - def _question_line(self, section): - """Process one line from the text format question section.""" - - token = self.tok.get(want_leading=True) - if not token.is_whitespace(): - self.last_name = dns.name.from_text(token.value, None) - name = self.last_name - token = self.tok.get() - if not token.is_identifier(): - raise dns.exception.SyntaxError - # Class - try: - rdclass = dns.rdataclass.from_text(token.value) - token = self.tok.get() - if not token.is_identifier(): - raise dns.exception.SyntaxError - except dns.exception.SyntaxError: - raise dns.exception.SyntaxError - except Exception: - rdclass = dns.rdataclass.IN - # Type - rdtype = dns.rdatatype.from_text(token.value) - self.message.find_rrset(self.message.question, name, - rdclass, rdtype, create=True, - force_unique=True) - if self.updating: - self.zone_rdclass = rdclass - self.tok.get_eol() - - def _rr_line(self, section): - """Process one line from the text format answer, authority, or - additional data sections. 
- """ - - deleting = None - # Name - token = self.tok.get(want_leading=True) - if not token.is_whitespace(): - self.last_name = dns.name.from_text(token.value, None) - name = self.last_name - token = self.tok.get() - if not token.is_identifier(): - raise dns.exception.SyntaxError - # TTL - try: - ttl = int(token.value, 0) - token = self.tok.get() - if not token.is_identifier(): - raise dns.exception.SyntaxError - except dns.exception.SyntaxError: - raise dns.exception.SyntaxError - except Exception: - ttl = 0 - # Class - try: - rdclass = dns.rdataclass.from_text(token.value) - token = self.tok.get() - if not token.is_identifier(): - raise dns.exception.SyntaxError - if rdclass == dns.rdataclass.ANY or rdclass == dns.rdataclass.NONE: - deleting = rdclass - rdclass = self.zone_rdclass - except dns.exception.SyntaxError: - raise dns.exception.SyntaxError - except Exception: - rdclass = dns.rdataclass.IN - # Type - rdtype = dns.rdatatype.from_text(token.value) - token = self.tok.get() - if not token.is_eol_or_eof(): - self.tok.unget(token) - rd = dns.rdata.from_text(rdclass, rdtype, self.tok, None) - covers = rd.covers() - else: - rd = None - covers = dns.rdatatype.NONE - rrset = self.message.find_rrset(section, name, - rdclass, rdtype, covers, - deleting, True, self.updating) - if rd is not None: - rrset.add(rd, ttl) - - def read(self): - """Read a text format DNS message and build a dns.message.Message - object.""" - - line_method = self._header_line - section = None - while 1: - token = self.tok.get(True, True) - if token.is_eol_or_eof(): - break - if token.is_comment(): - u = token.value.upper() - if u == 'HEADER': - line_method = self._header_line - elif u == 'QUESTION' or u == 'ZONE': - line_method = self._question_line - section = self.message.question - elif u == 'ANSWER' or u == 'PREREQ': - line_method = self._rr_line - section = self.message.answer - elif u == 'AUTHORITY' or u == 'UPDATE': - line_method = self._rr_line - section = self.message.authority - elif u == 'ADDITIONAL': - line_method = self._rr_line - section = self.message.additional - self.tok.get_eol() - continue - self.tok.unget(token) - line_method(section) - - -def from_text(text): - """Convert the text format message into a message object. - - *text*, a ``text``, the text format message. - - Raises ``dns.message.UnknownHeaderField`` if a header is unknown. - - Raises ``dns.exception.SyntaxError`` if the text is badly formed. - - Returns a ``dns.message.Message object`` - """ - - # 'text' can also be a file, but we don't publish that fact - # since it's an implementation detail. The official file - # interface is from_file(). - - m = Message() - - reader = _TextReader(text, m) - reader.read() - - return m - - -def from_file(f): - """Read the next text format message from the specified file. - - *f*, a ``file`` or ``text``. If *f* is text, it is treated as the - pathname of a file to open. - - Raises ``dns.message.UnknownHeaderField`` if a header is unknown. - - Raises ``dns.exception.SyntaxError`` if the text is badly formed. - - Returns a ``dns.message.Message object`` - """ - - str_type = string_types - opts = 'rU' - - if isinstance(f, str_type): - f = open(f, opts) - want_close = True - else: - want_close = False - - try: - m = from_text(f) - finally: - if want_close: - f.close() - return m - - -def make_query(qname, rdtype, rdclass=dns.rdataclass.IN, use_edns=None, - want_dnssec=False, ednsflags=None, payload=None, - request_payload=None, options=None): - """Make a query message. 
- - The query name, type, and class may all be specified either - as objects of the appropriate type, or as strings. - - The query will have a randomly chosen query id, and its DNS flags - will be set to dns.flags.RD. - - qname, a ``dns.name.Name`` or ``text``, the query name. - - *rdtype*, an ``int`` or ``text``, the desired rdata type. - - *rdclass*, an ``int`` or ``text``, the desired rdata class; the default - is class IN. - - *use_edns*, an ``int``, ``bool`` or ``None``. The EDNS level to use; the - default is None (no EDNS). - See the description of dns.message.Message.use_edns() for the possible - values for use_edns and their meanings. - - *want_dnssec*, a ``bool``. If ``True``, DNSSEC data is desired. - - *ednsflags*, an ``int``, the EDNS flag values. - - *payload*, an ``int``, is the EDNS sender's payload field, which is the - maximum size of UDP datagram the sender can handle. I.e. how big - a response to this message can be. - - *request_payload*, an ``int``, is the EDNS payload size to use when - sending this message. If not specified, defaults to the value of - *payload*. - - *options*, a list of ``dns.edns.Option`` objects or ``None``, the EDNS - options. - - Returns a ``dns.message.Message`` - """ - - if isinstance(qname, string_types): - qname = dns.name.from_text(qname) - if isinstance(rdtype, string_types): - rdtype = dns.rdatatype.from_text(rdtype) - if isinstance(rdclass, string_types): - rdclass = dns.rdataclass.from_text(rdclass) - m = Message() - m.flags |= dns.flags.RD - m.find_rrset(m.question, qname, rdclass, rdtype, create=True, - force_unique=True) - # only pass keywords on to use_edns if they have been set to a - # non-None value. Setting a field will turn EDNS on if it hasn't - # been configured. - kwargs = {} - if ednsflags is not None: - kwargs['ednsflags'] = ednsflags - if use_edns is None: - use_edns = 0 - if payload is not None: - kwargs['payload'] = payload - if use_edns is None: - use_edns = 0 - if request_payload is not None: - kwargs['request_payload'] = request_payload - if use_edns is None: - use_edns = 0 - if options is not None: - kwargs['options'] = options - if use_edns is None: - use_edns = 0 - kwargs['edns'] = use_edns - m.use_edns(**kwargs) - m.want_dnssec(want_dnssec) - return m - - -def make_response(query, recursion_available=False, our_payload=8192, - fudge=300): - """Make a message which is a response for the specified query. - The message returned is really a response skeleton; it has all - of the infrastructure required of a response, but none of the - content. - - The response's question section is a shallow copy of the query's - question section, so the query's question RRsets should not be - changed. - - *query*, a ``dns.message.Message``, the query to respond to. - - *recursion_available*, a ``bool``, should RA be set in the response? - - *our_payload*, an ``int``, the payload size to advertise in EDNS - responses. - - *fudge*, an ``int``, the TSIG time fudge. - - Returns a ``dns.message.Message`` object. 
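make_query() and make_response() pair up as sketched below; 'example.com.' is a placeholder and the asserts only restate what the docstrings above promise:

    import dns.flags
    import dns.message
    import dns.rdatatype

    query = dns.message.make_query('example.com.', dns.rdatatype.MX, want_dnssec=True)
    assert query.flags & dns.flags.RD         # RD is always set on queries built this way
    assert query.ednsflags & dns.flags.DO     # want_dnssec enabled EDNS0 and set the DO bit

    # round-trip through wire format, then build a response skeleton
    parsed = dns.message.from_wire(query.to_wire())
    assert parsed.id == query.id

    response = dns.message.make_response(query, recursion_available=True)
    assert response.flags & dns.flags.QR and response.flags & dns.flags.RA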
- """ - - if query.flags & dns.flags.QR: - raise dns.exception.FormError('specified query message is not a query') - response = dns.message.Message(query.id) - response.flags = dns.flags.QR | (query.flags & dns.flags.RD) - if recursion_available: - response.flags |= dns.flags.RA - response.set_opcode(query.opcode()) - response.question = list(query.question) - if query.edns >= 0: - response.use_edns(0, 0, our_payload, query.payload) - if query.had_tsig: - response.use_tsig(query.keyring, query.keyname, fudge, None, 0, b'', - query.keyalgorithm) - response.request_mac = query.mac - return response diff --git a/venv/lib/python3.6/site-packages/dns/name.py b/venv/lib/python3.6/site-packages/dns/name.py deleted file mode 100644 index 0bcfd83..0000000 --- a/venv/lib/python3.6/site-packages/dns/name.py +++ /dev/null @@ -1,994 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2001-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -"""DNS Names. -""" - -from io import BytesIO -import struct -import sys -import copy -import encodings.idna -try: - import idna - have_idna_2008 = True -except ImportError: - have_idna_2008 = False - -import dns.exception -import dns.wiredata - -from ._compat import long, binary_type, text_type, unichr, maybe_decode - -try: - maxint = sys.maxint # pylint: disable=sys-max-int -except AttributeError: - maxint = (1 << (8 * struct.calcsize("P"))) // 2 - 1 - - -# fullcompare() result values - -#: The compared names have no relationship to each other. -NAMERELN_NONE = 0 -#: the first name is a superdomain of the second. -NAMERELN_SUPERDOMAIN = 1 -#: The first name is a subdomain of the second. -NAMERELN_SUBDOMAIN = 2 -#: The compared names are equal. -NAMERELN_EQUAL = 3 -#: The compared names have a common ancestor. 
-NAMERELN_COMMONANCESTOR = 4 - - -class EmptyLabel(dns.exception.SyntaxError): - """A DNS label is empty.""" - - -class BadEscape(dns.exception.SyntaxError): - """An escaped code in a text format of DNS name is invalid.""" - - -class BadPointer(dns.exception.FormError): - """A DNS compression pointer points forward instead of backward.""" - - -class BadLabelType(dns.exception.FormError): - """The label type in DNS name wire format is unknown.""" - - -class NeedAbsoluteNameOrOrigin(dns.exception.DNSException): - """An attempt was made to convert a non-absolute name to - wire when there was also a non-absolute (or missing) origin.""" - - -class NameTooLong(dns.exception.FormError): - """A DNS name is > 255 octets long.""" - - -class LabelTooLong(dns.exception.SyntaxError): - """A DNS label is > 63 octets long.""" - - -class AbsoluteConcatenation(dns.exception.DNSException): - """An attempt was made to append anything other than the - empty name to an absolute DNS name.""" - - -class NoParent(dns.exception.DNSException): - """An attempt was made to get the parent of the root name - or the empty name.""" - -class NoIDNA2008(dns.exception.DNSException): - """IDNA 2008 processing was requested but the idna module is not - available.""" - - -class IDNAException(dns.exception.DNSException): - """IDNA processing raised an exception.""" - - supp_kwargs = {'idna_exception'} - fmt = "IDNA processing exception: {idna_exception}" - - -class IDNACodec(object): - """Abstract base class for IDNA encoder/decoders.""" - - def __init__(self): - pass - - def encode(self, label): - raise NotImplementedError - - def decode(self, label): - # We do not apply any IDNA policy on decode; we just - downcased = label.lower() - if downcased.startswith(b'xn--'): - try: - label = downcased[4:].decode('punycode') - except Exception as e: - raise IDNAException(idna_exception=e) - else: - label = maybe_decode(label) - return _escapify(label, True) - - -class IDNA2003Codec(IDNACodec): - """IDNA 2003 encoder/decoder.""" - - def __init__(self, strict_decode=False): - """Initialize the IDNA 2003 encoder/decoder. - - *strict_decode* is a ``bool``. If `True`, then IDNA2003 checking - is done when decoding. This can cause failures if the name - was encoded with IDNA2008. The default is `False`. - """ - - super(IDNA2003Codec, self).__init__() - self.strict_decode = strict_decode - - def encode(self, label): - """Encode *label*.""" - - if label == '': - return b'' - try: - return encodings.idna.ToASCII(label) - except UnicodeError: - raise LabelTooLong - - def decode(self, label): - """Decode *label*.""" - if not self.strict_decode: - return super(IDNA2003Codec, self).decode(label) - if label == b'': - return u'' - try: - return _escapify(encodings.idna.ToUnicode(label), True) - except Exception as e: - raise IDNAException(idna_exception=e) - - -class IDNA2008Codec(IDNACodec): - """IDNA 2008 encoder/decoder. - - *uts_46* is a ``bool``. If True, apply Unicode IDNA - compatibility processing as described in Unicode Technical - Standard #46 (http://unicode.org/reports/tr46/). - If False, do not apply the mapping. The default is False. - - *transitional* is a ``bool``: If True, use the - "transitional" mode described in Unicode Technical Standard - #46. The default is False. - - *allow_pure_ascii* is a ``bool``. If True, then a label which - consists of only ASCII characters is allowed. This is less - strict than regular IDNA 2008, but is also necessary for mixed - names, e.g. a name with starting with "_sip._tcp." 
and ending - in an IDN suffix which would otherwise be disallowed. The - default is False. - - *strict_decode* is a ``bool``: If True, then IDNA2008 checking - is done when decoding. This can cause failures if the name - was encoded with IDNA2003. The default is False. - """ - - def __init__(self, uts_46=False, transitional=False, - allow_pure_ascii=False, strict_decode=False): - """Initialize the IDNA 2008 encoder/decoder.""" - super(IDNA2008Codec, self).__init__() - self.uts_46 = uts_46 - self.transitional = transitional - self.allow_pure_ascii = allow_pure_ascii - self.strict_decode = strict_decode - - def is_all_ascii(self, label): - for c in label: - if ord(c) > 0x7f: - return False - return True - - def encode(self, label): - if label == '': - return b'' - if self.allow_pure_ascii and self.is_all_ascii(label): - return label.encode('ascii') - if not have_idna_2008: - raise NoIDNA2008 - try: - if self.uts_46: - label = idna.uts46_remap(label, False, self.transitional) - return idna.alabel(label) - except idna.IDNAError as e: - raise IDNAException(idna_exception=e) - - def decode(self, label): - if not self.strict_decode: - return super(IDNA2008Codec, self).decode(label) - if label == b'': - return u'' - if not have_idna_2008: - raise NoIDNA2008 - try: - if self.uts_46: - label = idna.uts46_remap(label, False, False) - return _escapify(idna.ulabel(label), True) - except idna.IDNAError as e: - raise IDNAException(idna_exception=e) - -_escaped = bytearray(b'"().;\\@$') - -IDNA_2003_Practical = IDNA2003Codec(False) -IDNA_2003_Strict = IDNA2003Codec(True) -IDNA_2003 = IDNA_2003_Practical -IDNA_2008_Practical = IDNA2008Codec(True, False, True, False) -IDNA_2008_UTS_46 = IDNA2008Codec(True, False, False, False) -IDNA_2008_Strict = IDNA2008Codec(False, False, False, True) -IDNA_2008_Transitional = IDNA2008Codec(True, True, False, False) -IDNA_2008 = IDNA_2008_Practical - -def _escapify(label, unicode_mode=False): - """Escape the characters in label which need it. - @param unicode_mode: escapify only special and whitespace (<= 0x20) - characters - @returns: the escaped string - @rtype: string""" - if not unicode_mode: - text = '' - if isinstance(label, text_type): - label = label.encode() - for c in bytearray(label): - if c in _escaped: - text += '\\' + chr(c) - elif c > 0x20 and c < 0x7F: - text += chr(c) - else: - text += '\\%03d' % c - return text.encode() - - text = u'' - if isinstance(label, binary_type): - label = label.decode() - for c in label: - if c > u'\x20' and c < u'\x7f': - text += c - else: - if c >= u'\x7f': - text += c - else: - text += u'\\%03d' % ord(c) - return text - -def _validate_labels(labels): - """Check for empty labels in the middle of a label sequence, - labels that are too long, and for too many labels. - - Raises ``dns.name.NameTooLong`` if the name as a whole is too long. - - Raises ``dns.name.EmptyLabel`` if a label is empty (i.e. the root - label) and appears in a position other than the end of the label - sequence - - """ - - l = len(labels) - total = 0 - i = -1 - j = 0 - for label in labels: - ll = len(label) - total += ll + 1 - if ll > 63: - raise LabelTooLong - if i < 0 and label == b'': - i = j - j += 1 - if total > 255: - raise NameTooLong - if i >= 0 and i != l - 1: - raise EmptyLabel - - -def _maybe_convert_to_binary(label): - """If label is ``text``, convert it to ``binary``. If it is already - ``binary`` just return it. 
- - """ - - if isinstance(label, binary_type): - return label - if isinstance(label, text_type): - return label.encode() - raise ValueError - - -class Name(object): - - """A DNS name. - - The dns.name.Name class represents a DNS name as a tuple of - labels. Each label is a `binary` in DNS wire format. Instances - of the class are immutable. - """ - - __slots__ = ['labels'] - - def __init__(self, labels): - """*labels* is any iterable whose values are ``text`` or ``binary``. - """ - - labels = [_maybe_convert_to_binary(x) for x in labels] - super(Name, self).__setattr__('labels', tuple(labels)) - _validate_labels(self.labels) - - def __setattr__(self, name, value): - # Names are immutable - raise TypeError("object doesn't support attribute assignment") - - def __copy__(self): - return Name(self.labels) - - def __deepcopy__(self, memo): - return Name(copy.deepcopy(self.labels, memo)) - - def __getstate__(self): - # Names can be pickled - return {'labels': self.labels} - - def __setstate__(self, state): - super(Name, self).__setattr__('labels', state['labels']) - _validate_labels(self.labels) - - def is_absolute(self): - """Is the most significant label of this name the root label? - - Returns a ``bool``. - """ - - return len(self.labels) > 0 and self.labels[-1] == b'' - - def is_wild(self): - """Is this name wild? (I.e. Is the least significant label '*'?) - - Returns a ``bool``. - """ - - return len(self.labels) > 0 and self.labels[0] == b'*' - - def __hash__(self): - """Return a case-insensitive hash of the name. - - Returns an ``int``. - """ - - h = long(0) - for label in self.labels: - for c in bytearray(label.lower()): - h += (h << 3) + c - return int(h % maxint) - - def fullcompare(self, other): - """Compare two names, returning a 3-tuple - ``(relation, order, nlabels)``. - - *relation* describes the relation ship between the names, - and is one of: ``dns.name.NAMERELN_NONE``, - ``dns.name.NAMERELN_SUPERDOMAIN``, ``dns.name.NAMERELN_SUBDOMAIN``, - ``dns.name.NAMERELN_EQUAL``, or ``dns.name.NAMERELN_COMMONANCESTOR``. - - *order* is < 0 if *self* < *other*, > 0 if *self* > *other*, and == - 0 if *self* == *other*. A relative name is always less than an - absolute name. If both names have the same relativity, then - the DNSSEC order relation is used to order them. - - *nlabels* is the number of significant labels that the two names - have in common. - - Here are some examples. Names ending in "." are absolute names, - those not ending in "." are relative names. - - ============= ============= =========== ===== ======= - self other relation order nlabels - ============= ============= =========== ===== ======= - www.example. www.example. equal 0 3 - www.example. example. subdomain > 0 2 - example. www.example. superdomain < 0 2 - example1.com. example2.com. common anc. < 0 2 - example1 example2. none < 0 0 - example1. 
example2 none > 0 0 - ============= ============= =========== ===== ======= - """ - - sabs = self.is_absolute() - oabs = other.is_absolute() - if sabs != oabs: - if sabs: - return (NAMERELN_NONE, 1, 0) - else: - return (NAMERELN_NONE, -1, 0) - l1 = len(self.labels) - l2 = len(other.labels) - ldiff = l1 - l2 - if ldiff < 0: - l = l1 - else: - l = l2 - - order = 0 - nlabels = 0 - namereln = NAMERELN_NONE - while l > 0: - l -= 1 - l1 -= 1 - l2 -= 1 - label1 = self.labels[l1].lower() - label2 = other.labels[l2].lower() - if label1 < label2: - order = -1 - if nlabels > 0: - namereln = NAMERELN_COMMONANCESTOR - return (namereln, order, nlabels) - elif label1 > label2: - order = 1 - if nlabels > 0: - namereln = NAMERELN_COMMONANCESTOR - return (namereln, order, nlabels) - nlabels += 1 - order = ldiff - if ldiff < 0: - namereln = NAMERELN_SUPERDOMAIN - elif ldiff > 0: - namereln = NAMERELN_SUBDOMAIN - else: - namereln = NAMERELN_EQUAL - return (namereln, order, nlabels) - - def is_subdomain(self, other): - """Is self a subdomain of other? - - Note that the notion of subdomain includes equality, e.g. - "dnpython.org" is a subdomain of itself. - - Returns a ``bool``. - """ - - (nr, o, nl) = self.fullcompare(other) - if nr == NAMERELN_SUBDOMAIN or nr == NAMERELN_EQUAL: - return True - return False - - def is_superdomain(self, other): - """Is self a superdomain of other? - - Note that the notion of superdomain includes equality, e.g. - "dnpython.org" is a superdomain of itself. - - Returns a ``bool``. - """ - - (nr, o, nl) = self.fullcompare(other) - if nr == NAMERELN_SUPERDOMAIN or nr == NAMERELN_EQUAL: - return True - return False - - def canonicalize(self): - """Return a name which is equal to the current name, but is in - DNSSEC canonical form. - """ - - return Name([x.lower() for x in self.labels]) - - def __eq__(self, other): - if isinstance(other, Name): - return self.fullcompare(other)[1] == 0 - else: - return False - - def __ne__(self, other): - if isinstance(other, Name): - return self.fullcompare(other)[1] != 0 - else: - return True - - def __lt__(self, other): - if isinstance(other, Name): - return self.fullcompare(other)[1] < 0 - else: - return NotImplemented - - def __le__(self, other): - if isinstance(other, Name): - return self.fullcompare(other)[1] <= 0 - else: - return NotImplemented - - def __ge__(self, other): - if isinstance(other, Name): - return self.fullcompare(other)[1] >= 0 - else: - return NotImplemented - - def __gt__(self, other): - if isinstance(other, Name): - return self.fullcompare(other)[1] > 0 - else: - return NotImplemented - - def __repr__(self): - return '' - - def __str__(self): - return self.to_text(False) - - def to_text(self, omit_final_dot=False): - """Convert name to DNS text format. - - *omit_final_dot* is a ``bool``. If True, don't emit the final - dot (denoting the root label) for absolute names. The default - is False. - - Returns a ``text``. - """ - - if len(self.labels) == 0: - return maybe_decode(b'@') - if len(self.labels) == 1 and self.labels[0] == b'': - return maybe_decode(b'.') - if omit_final_dot and self.is_absolute(): - l = self.labels[:-1] - else: - l = self.labels - s = b'.'.join(map(_escapify, l)) - return maybe_decode(s) - - def to_unicode(self, omit_final_dot=False, idna_codec=None): - """Convert name to Unicode text format. - - IDN ACE labels are converted to Unicode. - - *omit_final_dot* is a ``bool``. If True, don't emit the final - dot (denoting the root label) for absolute names. The default - is False. 
- *idna_codec* specifies the IDNA encoder/decoder. If None, the - dns.name.IDNA_2003_Practical encoder/decoder is used. - The IDNA_2003_Practical decoder does - not impose any policy, it just decodes punycode, so if you - don't want checking for compliance, you can use this decoder - for IDNA2008 as well. - - Returns a ``text``. - """ - - if len(self.labels) == 0: - return u'@' - if len(self.labels) == 1 and self.labels[0] == b'': - return u'.' - if omit_final_dot and self.is_absolute(): - l = self.labels[:-1] - else: - l = self.labels - if idna_codec is None: - idna_codec = IDNA_2003_Practical - return u'.'.join([idna_codec.decode(x) for x in l]) - - def to_digestable(self, origin=None): - """Convert name to a format suitable for digesting in hashes. - - The name is canonicalized and converted to uncompressed wire - format. All names in wire format are absolute. If the name - is a relative name, then an origin must be supplied. - - *origin* is a ``dns.name.Name`` or ``None``. If the name is - relative and origin is not ``None``, then origin will be appended - to the name. - - Raises ``dns.name.NeedAbsoluteNameOrOrigin`` if the name is - relative and no origin was provided. - - Returns a ``binary``. - """ - - if not self.is_absolute(): - if origin is None or not origin.is_absolute(): - raise NeedAbsoluteNameOrOrigin - labels = list(self.labels) - labels.extend(list(origin.labels)) - else: - labels = self.labels - dlabels = [struct.pack('!B%ds' % len(x), len(x), x.lower()) - for x in labels] - return b''.join(dlabels) - - def to_wire(self, file=None, compress=None, origin=None): - """Convert name to wire format, possibly compressing it. - - *file* is the file where the name is emitted (typically a - BytesIO file). If ``None`` (the default), a ``binary`` - containing the wire name will be returned. - - *compress*, a ``dict``, is the compression table to use. If - ``None`` (the default), names will not be compressed. - - *origin* is a ``dns.name.Name`` or ``None``. If the name is - relative and origin is not ``None``, then *origin* will be appended - to it. - - Raises ``dns.name.NeedAbsoluteNameOrOrigin`` if the name is - relative and no origin was provided. - - Returns a ``binary`` or ``None``. - """ - - if file is None: - file = BytesIO() - want_return = True - else: - want_return = False - - if not self.is_absolute(): - if origin is None or not origin.is_absolute(): - raise NeedAbsoluteNameOrOrigin - labels = list(self.labels) - labels.extend(list(origin.labels)) - else: - labels = self.labels - i = 0 - for label in labels: - n = Name(labels[i:]) - i += 1 - if compress is not None: - pos = compress.get(n) - else: - pos = None - if pos is not None: - value = 0xc000 + pos - s = struct.pack('!H', value) - file.write(s) - break - else: - if compress is not None and len(n) > 1: - pos = file.tell() - if pos <= 0x3fff: - compress[n] = pos - l = len(label) - file.write(struct.pack('!B', l)) - if l > 0: - file.write(label) - if want_return: - return file.getvalue() - - def __len__(self): - """The length of the name (in labels). - - Returns an ``int``. - """ - - return len(self.labels) - - def __getitem__(self, index): - return self.labels[index] - - def __add__(self, other): - return self.concatenate(other) - - def __sub__(self, other): - return self.relativize(other) - - def split(self, depth): - """Split a name into a prefix and suffix names at the specified depth. 
- - *depth* is an ``int`` specifying the number of labels in the suffix - - Raises ``ValueError`` if *depth* was not >= 0 and <= the length of the - name. - - Returns the tuple ``(prefix, suffix)``. - """ - - l = len(self.labels) - if depth == 0: - return (self, dns.name.empty) - elif depth == l: - return (dns.name.empty, self) - elif depth < 0 or depth > l: - raise ValueError( - 'depth must be >= 0 and <= the length of the name') - return (Name(self[: -depth]), Name(self[-depth:])) - - def concatenate(self, other): - """Return a new name which is the concatenation of self and other. - - Raises ``dns.name.AbsoluteConcatenation`` if the name is - absolute and *other* is not the empty name. - - Returns a ``dns.name.Name``. - """ - - if self.is_absolute() and len(other) > 0: - raise AbsoluteConcatenation - labels = list(self.labels) - labels.extend(list(other.labels)) - return Name(labels) - - def relativize(self, origin): - """If the name is a subdomain of *origin*, return a new name which is - the name relative to origin. Otherwise return the name. - - For example, relativizing ``www.dnspython.org.`` to origin - ``dnspython.org.`` returns the name ``www``. Relativizing ``example.`` - to origin ``dnspython.org.`` returns ``example.``. - - Returns a ``dns.name.Name``. - """ - - if origin is not None and self.is_subdomain(origin): - return Name(self[: -len(origin)]) - else: - return self - - def derelativize(self, origin): - """If the name is a relative name, return a new name which is the - concatenation of the name and origin. Otherwise return the name. - - For example, derelativizing ``www`` to origin ``dnspython.org.`` - returns the name ``www.dnspython.org.``. Derelativizing ``example.`` - to origin ``dnspython.org.`` returns ``example.``. - - Returns a ``dns.name.Name``. - """ - - if not self.is_absolute(): - return self.concatenate(origin) - else: - return self - - def choose_relativity(self, origin=None, relativize=True): - """Return a name with the relativity desired by the caller. - - If *origin* is ``None``, then the name is returned. - Otherwise, if *relativize* is ``True`` the name is - relativized, and if *relativize* is ``False`` the name is - derelativized. - - Returns a ``dns.name.Name``. - """ - - if origin: - if relativize: - return self.relativize(origin) - else: - return self.derelativize(origin) - else: - return self - - def parent(self): - """Return the parent of the name. - - For example, the parent of ``www.dnspython.org.`` is ``dnspython.org``. - - Raises ``dns.name.NoParent`` if the name is either the root name or the - empty name, and thus has no parent. - - Returns a ``dns.name.Name``. - """ - - if self == root or self == empty: - raise NoParent - return Name(self.labels[1:]) - -#: The root name, '.' -root = Name([b'']) - -#: The empty name. -empty = Name([]) - -def from_unicode(text, origin=root, idna_codec=None): - """Convert unicode text into a Name object. - - Labels are encoded in IDN ACE form according to rules specified by - the IDNA codec. - - *text*, a ``text``, is the text to convert into a name. - - *origin*, a ``dns.name.Name``, specifies the origin to - append to non-absolute names. The default is the root name. - - *idna_codec*, a ``dns.name.IDNACodec``, specifies the IDNA - encoder/decoder. If ``None``, the default IDNA 2003 encoder/decoder - is used. - - Returns a ``dns.name.Name``. 
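To make the relativize/derelativize/split behaviour above concrete, a small sketch under the same assumptions (dnspython importable, names illustrative):

```python
import dns.name

origin = dns.name.Name([b'dnspython', b'org', b''])
full = dns.name.Name([b'www', b'dnspython', b'org', b''])

rel = full.relativize(origin)      # 'www' (relative)
back = rel.derelativize(origin)    # 'www.dnspython.org.' (absolute again)
prefix, suffix = full.split(3)     # 'www' and 'dnspython.org.'
print(rel, back, prefix, suffix)
print(full.parent())               # 'dnspython.org.'
```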
- """ - - if not isinstance(text, text_type): - raise ValueError("input to from_unicode() must be a unicode string") - if not (origin is None or isinstance(origin, Name)): - raise ValueError("origin must be a Name or None") - labels = [] - label = u'' - escaping = False - edigits = 0 - total = 0 - if idna_codec is None: - idna_codec = IDNA_2003 - if text == u'@': - text = u'' - if text: - if text == u'.': - return Name([b'']) # no Unicode "u" on this constant! - for c in text: - if escaping: - if edigits == 0: - if c.isdigit(): - total = int(c) - edigits += 1 - else: - label += c - escaping = False - else: - if not c.isdigit(): - raise BadEscape - total *= 10 - total += int(c) - edigits += 1 - if edigits == 3: - escaping = False - label += unichr(total) - elif c in [u'.', u'\u3002', u'\uff0e', u'\uff61']: - if len(label) == 0: - raise EmptyLabel - labels.append(idna_codec.encode(label)) - label = u'' - elif c == u'\\': - escaping = True - edigits = 0 - total = 0 - else: - label += c - if escaping: - raise BadEscape - if len(label) > 0: - labels.append(idna_codec.encode(label)) - else: - labels.append(b'') - - if (len(labels) == 0 or labels[-1] != b'') and origin is not None: - labels.extend(list(origin.labels)) - return Name(labels) - - -def from_text(text, origin=root, idna_codec=None): - """Convert text into a Name object. - - *text*, a ``text``, is the text to convert into a name. - - *origin*, a ``dns.name.Name``, specifies the origin to - append to non-absolute names. The default is the root name. - - *idna_codec*, a ``dns.name.IDNACodec``, specifies the IDNA - encoder/decoder. If ``None``, the default IDNA 2003 encoder/decoder - is used. - - Returns a ``dns.name.Name``. - """ - - if isinstance(text, text_type): - return from_unicode(text, origin, idna_codec) - if not isinstance(text, binary_type): - raise ValueError("input to from_text() must be a string") - if not (origin is None or isinstance(origin, Name)): - raise ValueError("origin must be a Name or None") - labels = [] - label = b'' - escaping = False - edigits = 0 - total = 0 - if text == b'@': - text = b'' - if text: - if text == b'.': - return Name([b'']) - for c in bytearray(text): - byte_ = struct.pack('!B', c) - if escaping: - if edigits == 0: - if byte_.isdigit(): - total = int(byte_) - edigits += 1 - else: - label += byte_ - escaping = False - else: - if not byte_.isdigit(): - raise BadEscape - total *= 10 - total += int(byte_) - edigits += 1 - if edigits == 3: - escaping = False - label += struct.pack('!B', total) - elif byte_ == b'.': - if len(label) == 0: - raise EmptyLabel - labels.append(label) - label = b'' - elif byte_ == b'\\': - escaping = True - edigits = 0 - total = 0 - else: - label += byte_ - if escaping: - raise BadEscape - if len(label) > 0: - labels.append(label) - else: - labels.append(b'') - if (len(labels) == 0 or labels[-1] != b'') and origin is not None: - labels.extend(list(origin.labels)) - return Name(labels) - - -def from_wire(message, current): - """Convert possibly compressed wire format into a Name. - - *message* is a ``binary`` containing an entire DNS message in DNS - wire form. - - *current*, an ``int``, is the offset of the beginning of the name - from the start of the message - - Raises ``dns.name.BadPointer`` if a compression pointer did not - point backwards in the message. - - Raises ``dns.name.BadLabelType`` if an invalid label type was encountered. 
- - Returns a ``(dns.name.Name, int)`` tuple consisting of the name - that was read and the number of bytes of the wire format message - which were consumed reading it. - """ - - if not isinstance(message, binary_type): - raise ValueError("input to from_wire() must be a byte string") - message = dns.wiredata.maybe_wrap(message) - labels = [] - biggest_pointer = current - hops = 0 - count = message[current] - current += 1 - cused = 1 - while count != 0: - if count < 64: - labels.append(message[current: current + count].unwrap()) - current += count - if hops == 0: - cused += count - elif count >= 192: - current = (count & 0x3f) * 256 + message[current] - if hops == 0: - cused += 1 - if current >= biggest_pointer: - raise BadPointer - biggest_pointer = current - hops += 1 - else: - raise BadLabelType - count = message[current] - current += 1 - if hops == 0: - cused += 1 - labels.append('') - return (Name(labels), cused) diff --git a/venv/lib/python3.6/site-packages/dns/namedict.py b/venv/lib/python3.6/site-packages/dns/namedict.py deleted file mode 100644 index 37a1310..0000000 --- a/venv/lib/python3.6/site-packages/dns/namedict.py +++ /dev/null @@ -1,108 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2017 Nominum, Inc. -# Copyright (C) 2016 Coresec Systems AB -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND CORESEC SYSTEMS AB DISCLAIMS ALL -# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED -# WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL CORESEC -# SYSTEMS AB BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR -# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS -# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, -# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION -# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -"""DNS name dictionary""" - -import collections -import dns.name -from ._compat import xrange - - -class NameDict(collections.MutableMapping): - """A dictionary whose keys are dns.name.Name objects. - - In addition to being like a regular Python dictionary, this - dictionary can also get the deepest match for a given key. 
- """ - - __slots__ = ["max_depth", "max_depth_items", "__store"] - - def __init__(self, *args, **kwargs): - super(NameDict, self).__init__() - self.__store = dict() - #: the maximum depth of the keys that have ever been added - self.max_depth = 0 - #: the number of items of maximum depth - self.max_depth_items = 0 - self.update(dict(*args, **kwargs)) - - def __update_max_depth(self, key): - if len(key) == self.max_depth: - self.max_depth_items = self.max_depth_items + 1 - elif len(key) > self.max_depth: - self.max_depth = len(key) - self.max_depth_items = 1 - - def __getitem__(self, key): - return self.__store[key] - - def __setitem__(self, key, value): - if not isinstance(key, dns.name.Name): - raise ValueError('NameDict key must be a name') - self.__store[key] = value - self.__update_max_depth(key) - - def __delitem__(self, key): - value = self.__store.pop(key) - if len(value) == self.max_depth: - self.max_depth_items = self.max_depth_items - 1 - if self.max_depth_items == 0: - self.max_depth = 0 - for k in self.__store: - self.__update_max_depth(k) - - def __iter__(self): - return iter(self.__store) - - def __len__(self): - return len(self.__store) - - def has_key(self, key): - return key in self.__store - - def get_deepest_match(self, name): - """Find the deepest match to *fname* in the dictionary. - - The deepest match is the longest name in the dictionary which is - a superdomain of *name*. Note that *superdomain* includes matching - *name* itself. - - *name*, a ``dns.name.Name``, the name to find. - - Returns a ``(key, value)`` where *key* is the deepest - ``dns.name.Name``, and *value* is the value associated with *key*. - """ - - depth = len(name) - if depth > self.max_depth: - depth = self.max_depth - for i in xrange(-depth, 0): - n = dns.name.Name(name[i:]) - if n in self: - return (n, self[n]) - v = self[dns.name.empty] - return (dns.name.empty, v) diff --git a/venv/lib/python3.6/site-packages/dns/node.py b/venv/lib/python3.6/site-packages/dns/node.py deleted file mode 100644 index 8a7f19f..0000000 --- a/venv/lib/python3.6/site-packages/dns/node.py +++ /dev/null @@ -1,182 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2001-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -"""DNS nodes. A node is a set of rdatasets.""" - -from io import StringIO - -import dns.rdataset -import dns.rdatatype -import dns.renderer - - -class Node(object): - - """A Node is a set of rdatasets.""" - - __slots__ = ['rdatasets'] - - def __init__(self): - #: the set of rdatsets, represented as a list. - self.rdatasets = [] - - def to_text(self, name, **kw): - """Convert a node to text format. - - Each rdataset at the node is printed. Any keyword arguments - to this method are passed on to the rdataset's to_text() method. 
- - *name*, a ``dns.name.Name`` or ``text``, the owner name of the rdatasets. - - Returns a ``text``. - """ - - s = StringIO() - for rds in self.rdatasets: - if len(rds) > 0: - s.write(rds.to_text(name, **kw)) - s.write(u'\n') - return s.getvalue()[:-1] - - def __repr__(self): - return '' - - def __eq__(self, other): - # - # This is inefficient. Good thing we don't need to do it much. - # - for rd in self.rdatasets: - if rd not in other.rdatasets: - return False - for rd in other.rdatasets: - if rd not in self.rdatasets: - return False - return True - - def __ne__(self, other): - return not self.__eq__(other) - - def __len__(self): - return len(self.rdatasets) - - def __iter__(self): - return iter(self.rdatasets) - - def find_rdataset(self, rdclass, rdtype, covers=dns.rdatatype.NONE, - create=False): - """Find an rdataset matching the specified properties in the - current node. - - *rdclass*, an ``int``, the class of the rdataset. - - *rdtype*, an ``int``, the type of the rdataset. - - *covers*, an ``int``, the covered type. Usually this value is - dns.rdatatype.NONE, but if the rdtype is dns.rdatatype.SIG or - dns.rdatatype.RRSIG, then the covers value will be the rdata - type the SIG/RRSIG covers. The library treats the SIG and RRSIG - types as if they were a family of - types, e.g. RRSIG(A), RRSIG(NS), RRSIG(SOA). This makes RRSIGs much - easier to work with than if RRSIGs covering different rdata - types were aggregated into a single RRSIG rdataset. - - *create*, a ``bool``. If True, create the rdataset if it is not found. - - Raises ``KeyError`` if an rdataset of the desired type and class does - not exist and *create* is not ``True``. - - Returns a ``dns.rdataset.Rdataset``. - """ - - for rds in self.rdatasets: - if rds.match(rdclass, rdtype, covers): - return rds - if not create: - raise KeyError - rds = dns.rdataset.Rdataset(rdclass, rdtype) - self.rdatasets.append(rds) - return rds - - def get_rdataset(self, rdclass, rdtype, covers=dns.rdatatype.NONE, - create=False): - """Get an rdataset matching the specified properties in the - current node. - - None is returned if an rdataset of the specified type and - class does not exist and *create* is not ``True``. - - *rdclass*, an ``int``, the class of the rdataset. - - *rdtype*, an ``int``, the type of the rdataset. - - *covers*, an ``int``, the covered type. Usually this value is - dns.rdatatype.NONE, but if the rdtype is dns.rdatatype.SIG or - dns.rdatatype.RRSIG, then the covers value will be the rdata - type the SIG/RRSIG covers. The library treats the SIG and RRSIG - types as if they were a family of - types, e.g. RRSIG(A), RRSIG(NS), RRSIG(SOA). This makes RRSIGs much - easier to work with than if RRSIGs covering different rdata - types were aggregated into a single RRSIG rdataset. - - *create*, a ``bool``. If True, create the rdataset if it is not found. - - Returns a ``dns.rdataset.Rdataset`` or ``None``. - """ - - try: - rds = self.find_rdataset(rdclass, rdtype, covers, create) - except KeyError: - rds = None - return rds - - def delete_rdataset(self, rdclass, rdtype, covers=dns.rdatatype.NONE): - """Delete the rdataset matching the specified properties in the - current node. - - If a matching rdataset does not exist, it is not an error. - - *rdclass*, an ``int``, the class of the rdataset. - - *rdtype*, an ``int``, the type of the rdataset. - - *covers*, an ``int``, the covered type. 
- """ - - rds = self.get_rdataset(rdclass, rdtype, covers) - if rds is not None: - self.rdatasets.remove(rds) - - def replace_rdataset(self, replacement): - """Replace an rdataset. - - It is not an error if there is no rdataset matching *replacement*. - - Ownership of the *replacement* object is transferred to the node; - in other words, this method does not store a copy of *replacement* - at the node, it stores *replacement* itself. - - *replacement*, a ``dns.rdataset.Rdataset``. - - Raises ``ValueError`` if *replacement* is not a - ``dns.rdataset.Rdataset``. - """ - - if not isinstance(replacement, dns.rdataset.Rdataset): - raise ValueError('replacement is not an rdataset') - self.delete_rdataset(replacement.rdclass, replacement.rdtype, - replacement.covers) - self.rdatasets.append(replacement) diff --git a/venv/lib/python3.6/site-packages/dns/opcode.py b/venv/lib/python3.6/site-packages/dns/opcode.py deleted file mode 100644 index c0735ba..0000000 --- a/venv/lib/python3.6/site-packages/dns/opcode.py +++ /dev/null @@ -1,119 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2001-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -"""DNS Opcodes.""" - -import dns.exception - -#: Query -QUERY = 0 -#: Inverse Query (historical) -IQUERY = 1 -#: Server Status (unspecified and unimplemented anywhere) -STATUS = 2 -#: Notify -NOTIFY = 4 -#: Dynamic Update -UPDATE = 5 - -_by_text = { - 'QUERY': QUERY, - 'IQUERY': IQUERY, - 'STATUS': STATUS, - 'NOTIFY': NOTIFY, - 'UPDATE': UPDATE -} - -# We construct the inverse mapping programmatically to ensure that we -# cannot make any mistakes (e.g. omissions, cut-and-paste errors) that -# would cause the mapping not to be true inverse. - -_by_value = {y: x for x, y in _by_text.items()} - - -class UnknownOpcode(dns.exception.DNSException): - """An DNS opcode is unknown.""" - - -def from_text(text): - """Convert text into an opcode. - - *text*, a ``text``, the textual opcode - - Raises ``dns.opcode.UnknownOpcode`` if the opcode is unknown. - - Returns an ``int``. - """ - - if text.isdigit(): - value = int(text) - if value >= 0 and value <= 15: - return value - value = _by_text.get(text.upper()) - if value is None: - raise UnknownOpcode - return value - - -def from_flags(flags): - """Extract an opcode from DNS message flags. - - *flags*, an ``int``, the DNS flags. - - Returns an ``int``. - """ - - return (flags & 0x7800) >> 11 - - -def to_flags(value): - """Convert an opcode to a value suitable for ORing into DNS message - flags. - - *value*, an ``int``, the DNS opcode value. - - Returns an ``int``. - """ - - return (value << 11) & 0x7800 - - -def to_text(value): - """Convert an opcode to text. - - *value*, an ``int`` the opcode value, - - Raises ``dns.opcode.UnknownOpcode`` if the opcode is unknown. 
- - Returns a ``text``. - """ - - text = _by_value.get(value) - if text is None: - text = str(value) - return text - - -def is_update(flags): - """Is the opcode in flags UPDATE? - - *flags*, an ``int``, the DNS message flags. - - Returns a ``bool``. - """ - - return from_flags(flags) == UPDATE diff --git a/venv/lib/python3.6/site-packages/dns/py.typed b/venv/lib/python3.6/site-packages/dns/py.typed deleted file mode 100644 index e69de29..0000000 diff --git a/venv/lib/python3.6/site-packages/dns/query.py b/venv/lib/python3.6/site-packages/dns/query.py deleted file mode 100644 index c0c517c..0000000 --- a/venv/lib/python3.6/site-packages/dns/query.py +++ /dev/null @@ -1,683 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -"""Talk to a DNS server.""" - -from __future__ import generators - -import errno -import select -import socket -import struct -import sys -import time - -import dns.exception -import dns.inet -import dns.name -import dns.message -import dns.rcode -import dns.rdataclass -import dns.rdatatype -from ._compat import long, string_types, PY3 - -if PY3: - select_error = OSError -else: - select_error = select.error - -# Function used to create a socket. Can be overridden if needed in special -# situations. -socket_factory = socket.socket - -class UnexpectedSource(dns.exception.DNSException): - """A DNS query response came from an unexpected address or port.""" - - -class BadResponse(dns.exception.FormError): - """A DNS query response does not respond to the question asked.""" - - -class TransferError(dns.exception.DNSException): - """A zone transfer response got a non-zero rcode.""" - - def __init__(self, rcode): - message = 'Zone transfer error: %s' % dns.rcode.to_text(rcode) - super(TransferError, self).__init__(message) - self.rcode = rcode - - -def _compute_expiration(timeout): - if timeout is None: - return None - else: - return time.time() + timeout - -# This module can use either poll() or select() as the "polling backend". -# -# A backend function takes an fd, bools for readability, writablity, and -# error detection, and a timeout. 
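Before the query module below, a quick sketch of how the opcode helpers above round-trip between mnemonics and the DNS flags word (assuming the vendored dnspython):

```python
import dns.opcode

print(dns.opcode.from_text('UPDATE'))        # 5
flags = dns.opcode.to_flags(dns.opcode.UPDATE)
print(hex(flags))                            # 0x2800 (opcode occupies bits 11-14)
print(dns.opcode.from_flags(flags))          # 5
print(dns.opcode.is_update(flags))           # True
```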
- -def _poll_for(fd, readable, writable, error, timeout): - """Poll polling backend.""" - - event_mask = 0 - if readable: - event_mask |= select.POLLIN - if writable: - event_mask |= select.POLLOUT - if error: - event_mask |= select.POLLERR - - pollable = select.poll() - pollable.register(fd, event_mask) - - if timeout: - event_list = pollable.poll(long(timeout * 1000)) - else: - event_list = pollable.poll() - - return bool(event_list) - - -def _select_for(fd, readable, writable, error, timeout): - """Select polling backend.""" - - rset, wset, xset = [], [], [] - - if readable: - rset = [fd] - if writable: - wset = [fd] - if error: - xset = [fd] - - if timeout is None: - (rcount, wcount, xcount) = select.select(rset, wset, xset) - else: - (rcount, wcount, xcount) = select.select(rset, wset, xset, timeout) - - return bool((rcount or wcount or xcount)) - - -def _wait_for(fd, readable, writable, error, expiration): - # Use the selected polling backend to wait for any of the specified - # events. An "expiration" absolute time is converted into a relative - # timeout. - - done = False - while not done: - if expiration is None: - timeout = None - else: - timeout = expiration - time.time() - if timeout <= 0.0: - raise dns.exception.Timeout - try: - if not _polling_backend(fd, readable, writable, error, timeout): - raise dns.exception.Timeout - except select_error as e: - if e.args[0] != errno.EINTR: - raise e - done = True - - -def _set_polling_backend(fn): - # Internal API. Do not use. - - global _polling_backend - - _polling_backend = fn - -if hasattr(select, 'poll'): - # Prefer poll() on platforms that support it because it has no - # limits on the maximum value of a file descriptor (plus it will - # be more efficient for high values). - _polling_backend = _poll_for -else: - _polling_backend = _select_for - - -def _wait_for_readable(s, expiration): - _wait_for(s, True, False, True, expiration) - - -def _wait_for_writable(s, expiration): - _wait_for(s, False, True, True, expiration) - - -def _addresses_equal(af, a1, a2): - # Convert the first value of the tuple, which is a textual format - # address into binary form, so that we are not confused by different - # textual representations of the same address - try: - n1 = dns.inet.inet_pton(af, a1[0]) - n2 = dns.inet.inet_pton(af, a2[0]) - except dns.exception.SyntaxError: - return False - return n1 == n2 and a1[1:] == a2[1:] - - -def _destination_and_source(af, where, port, source, source_port): - # Apply defaults and compute destination and source tuples - # suitable for use in connect(), sendto(), or bind(). - if af is None: - try: - af = dns.inet.af_for_address(where) - except Exception: - af = dns.inet.AF_INET - if af == dns.inet.AF_INET: - destination = (where, port) - if source is not None or source_port != 0: - if source is None: - source = '0.0.0.0' - source = (source, source_port) - elif af == dns.inet.AF_INET6: - destination = (where, port, 0, 0) - if source is not None or source_port != 0: - if source is None: - source = '::' - source = (source, source_port, 0, 0) - return (af, destination, source) - - -def send_udp(sock, what, destination, expiration=None): - """Send a DNS message to the specified UDP socket. - - *sock*, a ``socket``. - - *what*, a ``binary`` or ``dns.message.Message``, the message to send. - - *destination*, a destination tuple appropriate for the address family - of the socket, specifying where to send the query. 
- - *expiration*, a ``float`` or ``None``, the absolute time at which - a timeout exception should be raised. If ``None``, no timeout will - occur. - - Returns an ``(int, float)`` tuple of bytes sent and the sent time. - """ - - if isinstance(what, dns.message.Message): - what = what.to_wire() - _wait_for_writable(sock, expiration) - sent_time = time.time() - n = sock.sendto(what, destination) - return (n, sent_time) - - -def receive_udp(sock, destination, expiration=None, - ignore_unexpected=False, one_rr_per_rrset=False, - keyring=None, request_mac=b'', ignore_trailing=False): - """Read a DNS message from a UDP socket. - - *sock*, a ``socket``. - - *destination*, a destination tuple appropriate for the address family - of the socket, specifying where the associated query was sent. - - *expiration*, a ``float`` or ``None``, the absolute time at which - a timeout exception should be raised. If ``None``, no timeout will - occur. - - *ignore_unexpected*, a ``bool``. If ``True``, ignore responses from - unexpected sources. - - *one_rr_per_rrset*, a ``bool``. If ``True``, put each RR into its own - RRset. - - *keyring*, a ``dict``, the keyring to use for TSIG. - - *request_mac*, a ``binary``, the MAC of the request (for TSIG). - - *ignore_trailing*, a ``bool``. If ``True``, ignore trailing - junk at end of the received message. - - Raises if the message is malformed, if network errors occur, of if - there is a timeout. - - Returns a ``dns.message.Message`` object. - """ - - wire = b'' - while 1: - _wait_for_readable(sock, expiration) - (wire, from_address) = sock.recvfrom(65535) - if _addresses_equal(sock.family, from_address, destination) or \ - (dns.inet.is_multicast(destination[0]) and - from_address[1:] == destination[1:]): - break - if not ignore_unexpected: - raise UnexpectedSource('got a response from ' - '%s instead of %s' % (from_address, - destination)) - received_time = time.time() - r = dns.message.from_wire(wire, keyring=keyring, request_mac=request_mac, - one_rr_per_rrset=one_rr_per_rrset, - ignore_trailing=ignore_trailing) - return (r, received_time) - -def udp(q, where, timeout=None, port=53, af=None, source=None, source_port=0, - ignore_unexpected=False, one_rr_per_rrset=False, ignore_trailing=False): - """Return the response obtained after sending a query via UDP. - - *q*, a ``dns.message.Message``, the query to send - - *where*, a ``text`` containing an IPv4 or IPv6 address, where - to send the message. - - *timeout*, a ``float`` or ``None``, the number of seconds to wait before the - query times out. If ``None``, the default, wait forever. - - *port*, an ``int``, the port send the message to. The default is 53. - - *af*, an ``int``, the address family to use. The default is ``None``, - which causes the address family to use to be inferred from the form of - *where*. If the inference attempt fails, AF_INET is used. This - parameter is historical; you need never set it. - - *source*, a ``text`` containing an IPv4 or IPv6 address, specifying - the source address. The default is the wildcard address. - - *source_port*, an ``int``, the port from which to send the message. - The default is 0. - - *ignore_unexpected*, a ``bool``. If ``True``, ignore responses from - unexpected sources. - - *one_rr_per_rrset*, a ``bool``. If ``True``, put each RR into its own - RRset. - - *ignore_trailing*, a ``bool``. If ``True``, ignore trailing - junk at end of the received message. - - Returns a ``dns.message.Message``. 
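Typical use of the udp() helper documented above; a sketch that needs network access, with 192.0.2.1 as a documentation address standing in for a reachable resolver:

```python
import dns.message
import dns.query

# Build a query and send it once over UDP, as described above.
q = dns.message.make_query('dnspython.org', 'A')
r = dns.query.udp(q, '192.0.2.1', timeout=2.0)   # replace with a real resolver address
print(r.rcode(), len(r.answer))
```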
- """ - - wire = q.to_wire() - (af, destination, source) = _destination_and_source(af, where, port, - source, source_port) - s = socket_factory(af, socket.SOCK_DGRAM, 0) - received_time = None - sent_time = None - try: - expiration = _compute_expiration(timeout) - s.setblocking(0) - if source is not None: - s.bind(source) - (_, sent_time) = send_udp(s, wire, destination, expiration) - (r, received_time) = receive_udp(s, destination, expiration, - ignore_unexpected, one_rr_per_rrset, - q.keyring, q.mac, ignore_trailing) - finally: - if sent_time is None or received_time is None: - response_time = 0 - else: - response_time = received_time - sent_time - s.close() - r.time = response_time - if not q.is_response(r): - raise BadResponse - return r - - -def _net_read(sock, count, expiration): - """Read the specified number of bytes from sock. Keep trying until we - either get the desired amount, or we hit EOF. - A Timeout exception will be raised if the operation is not completed - by the expiration time. - """ - s = b'' - while count > 0: - _wait_for_readable(sock, expiration) - n = sock.recv(count) - if n == b'': - raise EOFError - count = count - len(n) - s = s + n - return s - - -def _net_write(sock, data, expiration): - """Write the specified data to the socket. - A Timeout exception will be raised if the operation is not completed - by the expiration time. - """ - current = 0 - l = len(data) - while current < l: - _wait_for_writable(sock, expiration) - current += sock.send(data[current:]) - - -def send_tcp(sock, what, expiration=None): - """Send a DNS message to the specified TCP socket. - - *sock*, a ``socket``. - - *what*, a ``binary`` or ``dns.message.Message``, the message to send. - - *expiration*, a ``float`` or ``None``, the absolute time at which - a timeout exception should be raised. If ``None``, no timeout will - occur. - - Returns an ``(int, float)`` tuple of bytes sent and the sent time. - """ - - if isinstance(what, dns.message.Message): - what = what.to_wire() - l = len(what) - # copying the wire into tcpmsg is inefficient, but lets us - # avoid writev() or doing a short write that would get pushed - # onto the net - tcpmsg = struct.pack("!H", l) + what - _wait_for_writable(sock, expiration) - sent_time = time.time() - _net_write(sock, tcpmsg, expiration) - return (len(tcpmsg), sent_time) - -def receive_tcp(sock, expiration=None, one_rr_per_rrset=False, - keyring=None, request_mac=b'', ignore_trailing=False): - """Read a DNS message from a TCP socket. - - *sock*, a ``socket``. - - *expiration*, a ``float`` or ``None``, the absolute time at which - a timeout exception should be raised. If ``None``, no timeout will - occur. - - *one_rr_per_rrset*, a ``bool``. If ``True``, put each RR into its own - RRset. - - *keyring*, a ``dict``, the keyring to use for TSIG. - - *request_mac*, a ``binary``, the MAC of the request (for TSIG). - - *ignore_trailing*, a ``bool``. If ``True``, ignore trailing - junk at end of the received message. - - Raises if the message is malformed, if network errors occur, of if - there is a timeout. - - Returns a ``dns.message.Message`` object. 
- """ - - ldata = _net_read(sock, 2, expiration) - (l,) = struct.unpack("!H", ldata) - wire = _net_read(sock, l, expiration) - received_time = time.time() - r = dns.message.from_wire(wire, keyring=keyring, request_mac=request_mac, - one_rr_per_rrset=one_rr_per_rrset, - ignore_trailing=ignore_trailing) - return (r, received_time) - -def _connect(s, address): - try: - s.connect(address) - except socket.error: - (ty, v) = sys.exc_info()[:2] - - if hasattr(v, 'errno'): - v_err = v.errno - else: - v_err = v[0] - if v_err not in [errno.EINPROGRESS, errno.EWOULDBLOCK, errno.EALREADY]: - raise v - - -def tcp(q, where, timeout=None, port=53, af=None, source=None, source_port=0, - one_rr_per_rrset=False, ignore_trailing=False): - """Return the response obtained after sending a query via TCP. - - *q*, a ``dns.message.Message``, the query to send - - *where*, a ``text`` containing an IPv4 or IPv6 address, where - to send the message. - - *timeout*, a ``float`` or ``None``, the number of seconds to wait before the - query times out. If ``None``, the default, wait forever. - - *port*, an ``int``, the port send the message to. The default is 53. - - *af*, an ``int``, the address family to use. The default is ``None``, - which causes the address family to use to be inferred from the form of - *where*. If the inference attempt fails, AF_INET is used. This - parameter is historical; you need never set it. - - *source*, a ``text`` containing an IPv4 or IPv6 address, specifying - the source address. The default is the wildcard address. - - *source_port*, an ``int``, the port from which to send the message. - The default is 0. - - *one_rr_per_rrset*, a ``bool``. If ``True``, put each RR into its own - RRset. - - *ignore_trailing*, a ``bool``. If ``True``, ignore trailing - junk at end of the received message. - - Returns a ``dns.message.Message``. - """ - - wire = q.to_wire() - (af, destination, source) = _destination_and_source(af, where, port, - source, source_port) - s = socket_factory(af, socket.SOCK_STREAM, 0) - begin_time = None - received_time = None - try: - expiration = _compute_expiration(timeout) - s.setblocking(0) - begin_time = time.time() - if source is not None: - s.bind(source) - _connect(s, destination) - send_tcp(s, wire, expiration) - (r, received_time) = receive_tcp(s, expiration, one_rr_per_rrset, - q.keyring, q.mac, ignore_trailing) - finally: - if begin_time is None or received_time is None: - response_time = 0 - else: - response_time = received_time - begin_time - s.close() - r.time = response_time - if not q.is_response(r): - raise BadResponse - return r - - -def xfr(where, zone, rdtype=dns.rdatatype.AXFR, rdclass=dns.rdataclass.IN, - timeout=None, port=53, keyring=None, keyname=None, relativize=True, - af=None, lifetime=None, source=None, source_port=0, serial=0, - use_udp=False, keyalgorithm=dns.tsig.default_algorithm): - """Return a generator for the responses to a zone transfer. - - *where*. If the inference attempt fails, AF_INET is used. This - parameter is historical; you need never set it. - - *zone*, a ``dns.name.Name`` or ``text``, the name of the zone to transfer. - - *rdtype*, an ``int`` or ``text``, the type of zone transfer. The - default is ``dns.rdatatype.AXFR``. ``dns.rdatatype.IXFR`` can be - used to do an incremental transfer instead. - - *rdclass*, an ``int`` or ``text``, the class of the zone transfer. - The default is ``dns.rdataclass.IN``. - - *timeout*, a ``float``, the number of seconds to wait for each - response message. If None, the default, wait forever. 
- - *port*, an ``int``, the port send the message to. The default is 53. - - *keyring*, a ``dict``, the keyring to use for TSIG. - - *keyname*, a ``dns.name.Name`` or ``text``, the name of the TSIG - key to use. - - *relativize*, a ``bool``. If ``True``, all names in the zone will be - relativized to the zone origin. It is essential that the - relativize setting matches the one specified to - ``dns.zone.from_xfr()`` if using this generator to make a zone. - - *af*, an ``int``, the address family to use. The default is ``None``, - which causes the address family to use to be inferred from the form of - *where*. If the inference attempt fails, AF_INET is used. This - parameter is historical; you need never set it. - - *lifetime*, a ``float``, the total number of seconds to spend - doing the transfer. If ``None``, the default, then there is no - limit on the time the transfer may take. - - *source*, a ``text`` containing an IPv4 or IPv6 address, specifying - the source address. The default is the wildcard address. - - *source_port*, an ``int``, the port from which to send the message. - The default is 0. - - *serial*, an ``int``, the SOA serial number to use as the base for - an IXFR diff sequence (only meaningful if *rdtype* is - ``dns.rdatatype.IXFR``). - - *use_udp*, a ``bool``. If ``True``, use UDP (only meaningful for IXFR). - - *keyalgorithm*, a ``dns.name.Name`` or ``text``, the TSIG algorithm to use. - - Raises on errors, and so does the generator. - - Returns a generator of ``dns.message.Message`` objects. - """ - - if isinstance(zone, string_types): - zone = dns.name.from_text(zone) - if isinstance(rdtype, string_types): - rdtype = dns.rdatatype.from_text(rdtype) - q = dns.message.make_query(zone, rdtype, rdclass) - if rdtype == dns.rdatatype.IXFR: - rrset = dns.rrset.from_text(zone, 0, 'IN', 'SOA', - '. . 
%u 0 0 0 0' % serial) - q.authority.append(rrset) - if keyring is not None: - q.use_tsig(keyring, keyname, algorithm=keyalgorithm) - wire = q.to_wire() - (af, destination, source) = _destination_and_source(af, where, port, - source, source_port) - if use_udp: - if rdtype != dns.rdatatype.IXFR: - raise ValueError('cannot do a UDP AXFR') - s = socket_factory(af, socket.SOCK_DGRAM, 0) - else: - s = socket_factory(af, socket.SOCK_STREAM, 0) - s.setblocking(0) - if source is not None: - s.bind(source) - expiration = _compute_expiration(lifetime) - _connect(s, destination) - l = len(wire) - if use_udp: - _wait_for_writable(s, expiration) - s.send(wire) - else: - tcpmsg = struct.pack("!H", l) + wire - _net_write(s, tcpmsg, expiration) - done = False - delete_mode = True - expecting_SOA = False - soa_rrset = None - if relativize: - origin = zone - oname = dns.name.empty - else: - origin = None - oname = zone - tsig_ctx = None - first = True - while not done: - mexpiration = _compute_expiration(timeout) - if mexpiration is None or mexpiration > expiration: - mexpiration = expiration - if use_udp: - _wait_for_readable(s, expiration) - (wire, from_address) = s.recvfrom(65535) - else: - ldata = _net_read(s, 2, mexpiration) - (l,) = struct.unpack("!H", ldata) - wire = _net_read(s, l, mexpiration) - is_ixfr = (rdtype == dns.rdatatype.IXFR) - r = dns.message.from_wire(wire, keyring=q.keyring, request_mac=q.mac, - xfr=True, origin=origin, tsig_ctx=tsig_ctx, - multi=True, first=first, - one_rr_per_rrset=is_ixfr) - rcode = r.rcode() - if rcode != dns.rcode.NOERROR: - raise TransferError(rcode) - tsig_ctx = r.tsig_ctx - first = False - answer_index = 0 - if soa_rrset is None: - if not r.answer or r.answer[0].name != oname: - raise dns.exception.FormError( - "No answer or RRset not for qname") - rrset = r.answer[0] - if rrset.rdtype != dns.rdatatype.SOA: - raise dns.exception.FormError("first RRset is not an SOA") - answer_index = 1 - soa_rrset = rrset.copy() - if rdtype == dns.rdatatype.IXFR: - if soa_rrset[0].serial <= serial: - # - # We're already up-to-date. - # - done = True - else: - expecting_SOA = True - # - # Process SOAs in the answer section (other than the initial - # SOA in the first message). - # - for rrset in r.answer[answer_index:]: - if done: - raise dns.exception.FormError("answers after final SOA") - if rrset.rdtype == dns.rdatatype.SOA and rrset.name == oname: - if expecting_SOA: - if rrset[0].serial != serial: - raise dns.exception.FormError( - "IXFR base serial mismatch") - expecting_SOA = False - elif rdtype == dns.rdatatype.IXFR: - delete_mode = not delete_mode - # - # If this SOA RRset is equal to the first we saw then we're - # finished. If this is an IXFR we also check that we're seeing - # the record in the expected part of the response. - # - if rrset == soa_rrset and \ - (rdtype == dns.rdatatype.AXFR or - (rdtype == dns.rdatatype.IXFR and delete_mode)): - done = True - elif expecting_SOA: - # - # We made an IXFR request and are expecting another - # SOA RR, but saw something else, so this must be an - # AXFR response. 
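The xfr() generator above is normally consumed by dns.zone.from_xfr(), which the docstring references; a sketch with an illustrative server address and zone, assuming the server permits AXFR:

```python
import dns.query
import dns.zone

# Drive the zone-transfer generator to build a zone object.
xfr_gen = dns.query.xfr('192.0.2.53', 'example.com')
zone = dns.zone.from_xfr(xfr_gen)
print(zone.origin, len(zone.nodes))
```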
- # - rdtype = dns.rdatatype.AXFR - expecting_SOA = False - if done and q.keyring and not r.had_tsig: - raise dns.exception.FormError("missing TSIG") - yield r - s.close() diff --git a/venv/lib/python3.6/site-packages/dns/rcode.py b/venv/lib/python3.6/site-packages/dns/rcode.py deleted file mode 100644 index 5191e1b..0000000 --- a/venv/lib/python3.6/site-packages/dns/rcode.py +++ /dev/null @@ -1,144 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2001-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -"""DNS Result Codes.""" - -import dns.exception -from ._compat import long - -#: No error -NOERROR = 0 -#: Form error -FORMERR = 1 -#: Server failure -SERVFAIL = 2 -#: Name does not exist ("Name Error" in RFC 1025 terminology). -NXDOMAIN = 3 -#: Not implemented -NOTIMP = 4 -#: Refused -REFUSED = 5 -#: Name exists. -YXDOMAIN = 6 -#: RRset exists. -YXRRSET = 7 -#: RRset does not exist. -NXRRSET = 8 -#: Not authoritative. -NOTAUTH = 9 -#: Name not in zone. -NOTZONE = 10 -#: Bad EDNS version. -BADVERS = 16 - -_by_text = { - 'NOERROR': NOERROR, - 'FORMERR': FORMERR, - 'SERVFAIL': SERVFAIL, - 'NXDOMAIN': NXDOMAIN, - 'NOTIMP': NOTIMP, - 'REFUSED': REFUSED, - 'YXDOMAIN': YXDOMAIN, - 'YXRRSET': YXRRSET, - 'NXRRSET': NXRRSET, - 'NOTAUTH': NOTAUTH, - 'NOTZONE': NOTZONE, - 'BADVERS': BADVERS -} - -# We construct the inverse mapping programmatically to ensure that we -# cannot make any mistakes (e.g. omissions, cut-and-paste errors) that -# would cause the mapping not to be a true inverse. - -_by_value = {y: x for x, y in _by_text.items()} - - -class UnknownRcode(dns.exception.DNSException): - """A DNS rcode is unknown.""" - - -def from_text(text): - """Convert text into an rcode. - - *text*, a ``text``, the textual rcode or an integer in textual form. - - Raises ``dns.rcode.UnknownRcode`` if the rcode mnemonic is unknown. - - Returns an ``int``. - """ - - if text.isdigit(): - v = int(text) - if v >= 0 and v <= 4095: - return v - v = _by_text.get(text.upper()) - if v is None: - raise UnknownRcode - return v - - -def from_flags(flags, ednsflags): - """Return the rcode value encoded by flags and ednsflags. - - *flags*, an ``int``, the DNS flags field. - - *ednsflags*, an ``int``, the EDNS flags field. - - Raises ``ValueError`` if rcode is < 0 or > 4095 - - Returns an ``int``. - """ - - value = (flags & 0x000f) | ((ednsflags >> 20) & 0xff0) - if value < 0 or value > 4095: - raise ValueError('rcode must be >= 0 and <= 4095') - return value - - -def to_flags(value): - """Return a (flags, ednsflags) tuple which encodes the rcode. - - *value*, an ``int``, the rcode. - - Raises ``ValueError`` if rcode is < 0 or > 4095. - - Returns an ``(int, int)`` tuple. 
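The rcode helpers above (together with the to_flags() body that follows) split extended rcodes between the DNS header flags and the EDNS flags; a sketch assuming the vendored dnspython:

```python
import dns.rcode

print(dns.rcode.from_text('NXDOMAIN'))          # 3
(flags, ednsflags) = dns.rcode.to_flags(dns.rcode.BADVERS)
print(hex(flags), hex(ednsflags))               # 0x0 0x1000000
print(dns.rcode.from_flags(flags, ednsflags))   # 16
print(dns.rcode.to_text(16))                    # 'BADVERS'
```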
- """ - - if value < 0 or value > 4095: - raise ValueError('rcode must be >= 0 and <= 4095') - v = value & 0xf - ev = long(value & 0xff0) << 20 - return (v, ev) - - -def to_text(value): - """Convert rcode into text. - - *value*, and ``int``, the rcode. - - Raises ``ValueError`` if rcode is < 0 or > 4095. - - Returns a ``text``. - """ - - if value < 0 or value > 4095: - raise ValueError('rcode must be >= 0 and <= 4095') - text = _by_value.get(value) - if text is None: - text = str(value) - return text diff --git a/venv/lib/python3.6/site-packages/dns/rdata.py b/venv/lib/python3.6/site-packages/dns/rdata.py deleted file mode 100644 index ea1971d..0000000 --- a/venv/lib/python3.6/site-packages/dns/rdata.py +++ /dev/null @@ -1,456 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2001-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -"""DNS rdata.""" - -from io import BytesIO -import base64 -import binascii - -import dns.exception -import dns.name -import dns.rdataclass -import dns.rdatatype -import dns.tokenizer -import dns.wiredata -from ._compat import xrange, string_types, text_type - -try: - import threading as _threading -except ImportError: - import dummy_threading as _threading - -_hex_chunksize = 32 - - -def _hexify(data, chunksize=_hex_chunksize): - """Convert a binary string into its hex encoding, broken up into chunks - of chunksize characters separated by a space. - """ - - line = binascii.hexlify(data) - return b' '.join([line[i:i + chunksize] - for i - in range(0, len(line), chunksize)]).decode() - -_base64_chunksize = 32 - - -def _base64ify(data, chunksize=_base64_chunksize): - """Convert a binary string into its base64 encoding, broken up into chunks - of chunksize characters separated by a space. - """ - - line = base64.b64encode(data) - return b' '.join([line[i:i + chunksize] - for i - in range(0, len(line), chunksize)]).decode() - -__escaped = bytearray(b'"\\') - -def _escapify(qstring): - """Escape the characters in a quoted string which need it.""" - - if isinstance(qstring, text_type): - qstring = qstring.encode() - if not isinstance(qstring, bytearray): - qstring = bytearray(qstring) - - text = '' - for c in qstring: - if c in __escaped: - text += '\\' + chr(c) - elif c >= 0x20 and c < 0x7F: - text += chr(c) - else: - text += '\\%03d' % c - return text - - -def _truncate_bitmap(what): - """Determine the index of greatest byte that isn't all zeros, and - return the bitmap that contains all the bytes less than that index. - """ - - for i in xrange(len(what) - 1, -1, -1): - if what[i] != 0: - return what[0: i + 1] - return what[0:1] - - -class Rdata(object): - """Base class for all DNS rdata types.""" - - __slots__ = ['rdclass', 'rdtype'] - - def __init__(self, rdclass, rdtype): - """Initialize an rdata. 
- - *rdclass*, an ``int`` is the rdataclass of the Rdata. - *rdtype*, an ``int`` is the rdatatype of the Rdata. - """ - - self.rdclass = rdclass - self.rdtype = rdtype - - def covers(self): - """Return the type a Rdata covers. - - DNS SIG/RRSIG rdatas apply to a specific type; this type is - returned by the covers() function. If the rdata type is not - SIG or RRSIG, dns.rdatatype.NONE is returned. This is useful when - creating rdatasets, allowing the rdataset to contain only RRSIGs - of a particular type, e.g. RRSIG(NS). - - Returns an ``int``. - """ - - return dns.rdatatype.NONE - - def extended_rdatatype(self): - """Return a 32-bit type value, the least significant 16 bits of - which are the ordinary DNS type, and the upper 16 bits of which are - the "covered" type, if any. - - Returns an ``int``. - """ - - return self.covers() << 16 | self.rdtype - - def to_text(self, origin=None, relativize=True, **kw): - """Convert an rdata to text format. - - Returns a ``text``. - """ - - raise NotImplementedError - - def to_wire(self, file, compress=None, origin=None): - """Convert an rdata to wire format. - - Returns a ``binary``. - """ - - raise NotImplementedError - - def to_digestable(self, origin=None): - """Convert rdata to a format suitable for digesting in hashes. This - is also the DNSSEC canonical form. - - Returns a ``binary``. - """ - - f = BytesIO() - self.to_wire(f, None, origin) - return f.getvalue() - - def validate(self): - """Check that the current contents of the rdata's fields are - valid. - - If you change an rdata by assigning to its fields, - it is a good idea to call validate() when you are done making - changes. - - Raises various exceptions if there are problems. - - Returns ``None``. - """ - - dns.rdata.from_text(self.rdclass, self.rdtype, self.to_text()) - - def __repr__(self): - covers = self.covers() - if covers == dns.rdatatype.NONE: - ctext = '' - else: - ctext = '(' + dns.rdatatype.to_text(covers) + ')' - return '' - - def __str__(self): - return self.to_text() - - def _cmp(self, other): - """Compare an rdata with another rdata of the same rdtype and - rdclass. - - Return < 0 if self < other in the DNSSEC ordering, 0 if self - == other, and > 0 if self > other. 
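The comparison contract in _cmp() above is what the rich-comparison methods that follow delegate to, and it works on the DNSSEC canonical (to_digestable) form; a sketch using the module-level from_text() parser defined later in this file:

```python
import dns.rdata
import dns.rdataclass
import dns.rdatatype

# Equality, ordering and hashing of Rdata all use the digestable wire form.
a1 = dns.rdata.from_text(dns.rdataclass.IN, dns.rdatatype.A, '192.0.2.1')
a2 = dns.rdata.from_text(dns.rdataclass.IN, dns.rdatatype.A, '192.0.2.2')
print(a1 < a2, a1 == a1)      # True True
print(a1.to_digestable())     # b'\xc0\x00\x02\x01'
```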
- - """ - our = self.to_digestable(dns.name.root) - their = other.to_digestable(dns.name.root) - if our == their: - return 0 - elif our > their: - return 1 - else: - return -1 - - def __eq__(self, other): - if not isinstance(other, Rdata): - return False - if self.rdclass != other.rdclass or self.rdtype != other.rdtype: - return False - return self._cmp(other) == 0 - - def __ne__(self, other): - if not isinstance(other, Rdata): - return True - if self.rdclass != other.rdclass or self.rdtype != other.rdtype: - return True - return self._cmp(other) != 0 - - def __lt__(self, other): - if not isinstance(other, Rdata) or \ - self.rdclass != other.rdclass or self.rdtype != other.rdtype: - - return NotImplemented - return self._cmp(other) < 0 - - def __le__(self, other): - if not isinstance(other, Rdata) or \ - self.rdclass != other.rdclass or self.rdtype != other.rdtype: - return NotImplemented - return self._cmp(other) <= 0 - - def __ge__(self, other): - if not isinstance(other, Rdata) or \ - self.rdclass != other.rdclass or self.rdtype != other.rdtype: - return NotImplemented - return self._cmp(other) >= 0 - - def __gt__(self, other): - if not isinstance(other, Rdata) or \ - self.rdclass != other.rdclass or self.rdtype != other.rdtype: - return NotImplemented - return self._cmp(other) > 0 - - def __hash__(self): - return hash(self.to_digestable(dns.name.root)) - - @classmethod - def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True): - raise NotImplementedError - - @classmethod - def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None): - raise NotImplementedError - - def choose_relativity(self, origin=None, relativize=True): - """Convert any domain names in the rdata to the specified - relativization. - """ - -class GenericRdata(Rdata): - - """Generic Rdata Class - - This class is used for rdata types for which we have no better - implementation. It implements the DNS "unknown RRs" scheme. 
- """ - - __slots__ = ['data'] - - def __init__(self, rdclass, rdtype, data): - super(GenericRdata, self).__init__(rdclass, rdtype) - self.data = data - - def to_text(self, origin=None, relativize=True, **kw): - return r'\# %d ' % len(self.data) + _hexify(self.data) - - @classmethod - def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True): - token = tok.get() - if not token.is_identifier() or token.value != r'\#': - raise dns.exception.SyntaxError( - r'generic rdata does not start with \#') - length = tok.get_int() - chunks = [] - while 1: - token = tok.get() - if token.is_eol_or_eof(): - break - chunks.append(token.value.encode()) - hex = b''.join(chunks) - data = binascii.unhexlify(hex) - if len(data) != length: - raise dns.exception.SyntaxError( - 'generic rdata hex data has wrong length') - return cls(rdclass, rdtype, data) - - def to_wire(self, file, compress=None, origin=None): - file.write(self.data) - - @classmethod - def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None): - return cls(rdclass, rdtype, wire[current: current + rdlen]) - -_rdata_modules = {} -_module_prefix = 'dns.rdtypes' -_import_lock = _threading.Lock() - -def get_rdata_class(rdclass, rdtype): - - def import_module(name): - with _import_lock: - mod = __import__(name) - components = name.split('.') - for comp in components[1:]: - mod = getattr(mod, comp) - return mod - - mod = _rdata_modules.get((rdclass, rdtype)) - rdclass_text = dns.rdataclass.to_text(rdclass) - rdtype_text = dns.rdatatype.to_text(rdtype) - rdtype_text = rdtype_text.replace('-', '_') - if not mod: - mod = _rdata_modules.get((dns.rdatatype.ANY, rdtype)) - if not mod: - try: - mod = import_module('.'.join([_module_prefix, - rdclass_text, rdtype_text])) - _rdata_modules[(rdclass, rdtype)] = mod - except ImportError: - try: - mod = import_module('.'.join([_module_prefix, - 'ANY', rdtype_text])) - _rdata_modules[(dns.rdataclass.ANY, rdtype)] = mod - except ImportError: - mod = None - if mod: - cls = getattr(mod, rdtype_text) - else: - cls = GenericRdata - return cls - - -def from_text(rdclass, rdtype, tok, origin=None, relativize=True): - """Build an rdata object from text format. - - This function attempts to dynamically load a class which - implements the specified rdata class and type. If there is no - class-and-type-specific implementation, the GenericRdata class - is used. - - Once a class is chosen, its from_text() class method is called - with the parameters to this function. - - If *tok* is a ``text``, then a tokenizer is created and the string - is used as its input. - - *rdclass*, an ``int``, the rdataclass. - - *rdtype*, an ``int``, the rdatatype. - - *tok*, a ``dns.tokenizer.Tokenizer`` or a ``text``. - - *origin*, a ``dns.name.Name`` (or ``None``), the - origin to use for relative names. - - *relativize*, a ``bool``. If true, name will be relativized to - the specified origin. - - Returns an instance of the chosen Rdata subclass. - """ - - if isinstance(tok, string_types): - tok = dns.tokenizer.Tokenizer(tok) - cls = get_rdata_class(rdclass, rdtype) - if cls != GenericRdata: - # peek at first token - token = tok.get() - tok.unget(token) - if token.is_identifier() and \ - token.value == r'\#': - # - # Known type using the generic syntax. Extract the - # wire form from the generic syntax, and then run - # from_wire on it. 
- # - rdata = GenericRdata.from_text(rdclass, rdtype, tok, origin, - relativize) - return from_wire(rdclass, rdtype, rdata.data, 0, len(rdata.data), - origin) - return cls.from_text(rdclass, rdtype, tok, origin, relativize) - - -def from_wire(rdclass, rdtype, wire, current, rdlen, origin=None): - """Build an rdata object from wire format - - This function attempts to dynamically load a class which - implements the specified rdata class and type. If there is no - class-and-type-specific implementation, the GenericRdata class - is used. - - Once a class is chosen, its from_wire() class method is called - with the parameters to this function. - - *rdclass*, an ``int``, the rdataclass. - - *rdtype*, an ``int``, the rdatatype. - - *wire*, a ``binary``, the wire-format message. - - *current*, an ``int``, the offset in wire of the beginning of - the rdata. - - *rdlen*, an ``int``, the length of the wire-format rdata - - *origin*, a ``dns.name.Name`` (or ``None``). If not ``None``, - then names will be relativized to this origin. - - Returns an instance of the chosen Rdata subclass. - """ - - wire = dns.wiredata.maybe_wrap(wire) - cls = get_rdata_class(rdclass, rdtype) - return cls.from_wire(rdclass, rdtype, wire, current, rdlen, origin) - - -class RdatatypeExists(dns.exception.DNSException): - """DNS rdatatype already exists.""" - supp_kwargs = {'rdclass', 'rdtype'} - fmt = "The rdata type with class {rdclass} and rdtype {rdtype} " + \ - "already exists." - - -def register_type(implementation, rdtype, rdtype_text, is_singleton=False, - rdclass=dns.rdataclass.IN): - """Dynamically register a module to handle an rdatatype. - - *implementation*, a module implementing the type in the usual dnspython - way. - - *rdtype*, an ``int``, the rdatatype to register. - - *rdtype_text*, a ``text``, the textual form of the rdatatype. - - *is_singleton*, a ``bool``, indicating if the type is a singleton (i.e. - RRsets of the type can have only one member.) - - *rdclass*, the rdataclass of the type, or ``dns.rdataclass.ANY`` if - it applies to all classes. - """ - - existing_cls = get_rdata_class(rdclass, rdtype) - if existing_cls != GenericRdata: - raise RdatatypeExists(rdclass=rdclass, rdtype=rdtype) - _rdata_modules[(rdclass, rdtype)] = implementation - dns.rdatatype.register_type(rdtype, rdtype_text, is_singleton) diff --git a/venv/lib/python3.6/site-packages/dns/rdataclass.py b/venv/lib/python3.6/site-packages/dns/rdataclass.py deleted file mode 100644 index b88aa85..0000000 --- a/venv/lib/python3.6/site-packages/dns/rdataclass.py +++ /dev/null @@ -1,122 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2001-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
- -"""DNS Rdata Classes.""" - -import re - -import dns.exception - -RESERVED0 = 0 -IN = 1 -CH = 3 -HS = 4 -NONE = 254 -ANY = 255 - -_by_text = { - 'RESERVED0': RESERVED0, - 'IN': IN, - 'CH': CH, - 'HS': HS, - 'NONE': NONE, - 'ANY': ANY -} - -# We construct the inverse mapping programmatically to ensure that we -# cannot make any mistakes (e.g. omissions, cut-and-paste errors) that -# would cause the mapping not to be true inverse. - -_by_value = {y: x for x, y in _by_text.items()} - -# Now that we've built the inverse map, we can add class aliases to -# the _by_text mapping. - -_by_text.update({ - 'INTERNET': IN, - 'CHAOS': CH, - 'HESIOD': HS -}) - -_metaclasses = { - NONE: True, - ANY: True -} - -_unknown_class_pattern = re.compile('CLASS([0-9]+)$', re.I) - - -class UnknownRdataclass(dns.exception.DNSException): - """A DNS class is unknown.""" - - -def from_text(text): - """Convert text into a DNS rdata class value. - - The input text can be a defined DNS RR class mnemonic or - instance of the DNS generic class syntax. - - For example, "IN" and "CLASS1" will both result in a value of 1. - - Raises ``dns.rdatatype.UnknownRdataclass`` if the class is unknown. - - Raises ``ValueError`` if the rdata class value is not >= 0 and <= 65535. - - Returns an ``int``. - """ - - value = _by_text.get(text.upper()) - if value is None: - match = _unknown_class_pattern.match(text) - if match is None: - raise UnknownRdataclass - value = int(match.group(1)) - if value < 0 or value > 65535: - raise ValueError("class must be between >= 0 and <= 65535") - return value - - -def to_text(value): - """Convert a DNS rdata type value to text. - - If the value has a known mnemonic, it will be used, otherwise the - DNS generic class syntax will be used. - - Raises ``ValueError`` if the rdata class value is not >= 0 and <= 65535. - - Returns a ``str``. - """ - - if value < 0 or value > 65535: - raise ValueError("class must be between >= 0 and <= 65535") - text = _by_value.get(value) - if text is None: - text = 'CLASS' + repr(value) - return text - - -def is_metaclass(rdclass): - """True if the specified class is a metaclass. - - The currently defined metaclasses are ANY and NONE. - - *rdclass* is an ``int``. - """ - - if rdclass in _metaclasses: - return True - return False diff --git a/venv/lib/python3.6/site-packages/dns/rdataset.py b/venv/lib/python3.6/site-packages/dns/rdataset.py deleted file mode 100644 index f1afe24..0000000 --- a/venv/lib/python3.6/site-packages/dns/rdataset.py +++ /dev/null @@ -1,347 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2001-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
- -"""DNS rdatasets (an rdataset is a set of rdatas of a given type and class)""" - -import random -from io import StringIO -import struct - -import dns.exception -import dns.rdatatype -import dns.rdataclass -import dns.rdata -import dns.set -from ._compat import string_types - -# define SimpleSet here for backwards compatibility -SimpleSet = dns.set.Set - - -class DifferingCovers(dns.exception.DNSException): - """An attempt was made to add a DNS SIG/RRSIG whose covered type - is not the same as that of the other rdatas in the rdataset.""" - - -class IncompatibleTypes(dns.exception.DNSException): - """An attempt was made to add DNS RR data of an incompatible type.""" - - -class Rdataset(dns.set.Set): - - """A DNS rdataset.""" - - __slots__ = ['rdclass', 'rdtype', 'covers', 'ttl'] - - def __init__(self, rdclass, rdtype, covers=dns.rdatatype.NONE, ttl=0): - """Create a new rdataset of the specified class and type. - - *rdclass*, an ``int``, the rdataclass. - - *rdtype*, an ``int``, the rdatatype. - - *covers*, an ``int``, the covered rdatatype. - - *ttl*, an ``int``, the TTL. - """ - - super(Rdataset, self).__init__() - self.rdclass = rdclass - self.rdtype = rdtype - self.covers = covers - self.ttl = ttl - - def _clone(self): - obj = super(Rdataset, self)._clone() - obj.rdclass = self.rdclass - obj.rdtype = self.rdtype - obj.covers = self.covers - obj.ttl = self.ttl - return obj - - def update_ttl(self, ttl): - """Perform TTL minimization. - - Set the TTL of the rdataset to be the lesser of the set's current - TTL or the specified TTL. If the set contains no rdatas, set the TTL - to the specified TTL. - - *ttl*, an ``int``. - """ - - if len(self) == 0: - self.ttl = ttl - elif ttl < self.ttl: - self.ttl = ttl - - def add(self, rd, ttl=None): - """Add the specified rdata to the rdataset. - - If the optional *ttl* parameter is supplied, then - ``self.update_ttl(ttl)`` will be called prior to adding the rdata. - - *rd*, a ``dns.rdata.Rdata``, the rdata - - *ttl*, an ``int``, the TTL. - - Raises ``dns.rdataset.IncompatibleTypes`` if the type and class - do not match the type and class of the rdataset. - - Raises ``dns.rdataset.DifferingCovers`` if the type is a signature - type and the covered type does not match that of the rdataset. - """ - - # - # If we're adding a signature, do some special handling to - # check that the signature covers the same type as the - # other rdatas in this rdataset. If this is the first rdata - # in the set, initialize the covers field. - # - if self.rdclass != rd.rdclass or self.rdtype != rd.rdtype: - raise IncompatibleTypes - if ttl is not None: - self.update_ttl(ttl) - if self.rdtype == dns.rdatatype.RRSIG or \ - self.rdtype == dns.rdatatype.SIG: - covers = rd.covers() - if len(self) == 0 and self.covers == dns.rdatatype.NONE: - self.covers = covers - elif self.covers != covers: - raise DifferingCovers - if dns.rdatatype.is_singleton(rd.rdtype) and len(self) > 0: - self.clear() - super(Rdataset, self).add(rd) - - def union_update(self, other): - self.update_ttl(other.ttl) - super(Rdataset, self).union_update(other) - - def intersection_update(self, other): - self.update_ttl(other.ttl) - super(Rdataset, self).intersection_update(other) - - def update(self, other): - """Add all rdatas in other to self. - - *other*, a ``dns.rdataset.Rdataset``, the rdataset from which - to update. 
- """ - - self.update_ttl(other.ttl) - super(Rdataset, self).update(other) - - def __repr__(self): - if self.covers == 0: - ctext = '' - else: - ctext = '(' + dns.rdatatype.to_text(self.covers) + ')' - return '' - - def __str__(self): - return self.to_text() - - def __eq__(self, other): - if not isinstance(other, Rdataset): - return False - if self.rdclass != other.rdclass or \ - self.rdtype != other.rdtype or \ - self.covers != other.covers: - return False - return super(Rdataset, self).__eq__(other) - - def __ne__(self, other): - return not self.__eq__(other) - - def to_text(self, name=None, origin=None, relativize=True, - override_rdclass=None, **kw): - """Convert the rdataset into DNS master file format. - - See ``dns.name.Name.choose_relativity`` for more information - on how *origin* and *relativize* determine the way names - are emitted. - - Any additional keyword arguments are passed on to the rdata - ``to_text()`` method. - - *name*, a ``dns.name.Name``. If name is not ``None``, emit RRs with - *name* as the owner name. - - *origin*, a ``dns.name.Name`` or ``None``, the origin for relative - names. - - *relativize*, a ``bool``. If ``True``, names will be relativized - to *origin*. - """ - - if name is not None: - name = name.choose_relativity(origin, relativize) - ntext = str(name) - pad = ' ' - else: - ntext = '' - pad = '' - s = StringIO() - if override_rdclass is not None: - rdclass = override_rdclass - else: - rdclass = self.rdclass - if len(self) == 0: - # - # Empty rdatasets are used for the question section, and in - # some dynamic updates, so we don't need to print out the TTL - # (which is meaningless anyway). - # - s.write(u'{}{}{} {}\n'.format(ntext, pad, - dns.rdataclass.to_text(rdclass), - dns.rdatatype.to_text(self.rdtype))) - else: - for rd in self: - s.write(u'%s%s%d %s %s %s\n' % - (ntext, pad, self.ttl, dns.rdataclass.to_text(rdclass), - dns.rdatatype.to_text(self.rdtype), - rd.to_text(origin=origin, relativize=relativize, - **kw))) - # - # We strip off the final \n for the caller's convenience in printing - # - return s.getvalue()[:-1] - - def to_wire(self, name, file, compress=None, origin=None, - override_rdclass=None, want_shuffle=True): - """Convert the rdataset to wire format. - - *name*, a ``dns.name.Name`` is the owner name to use. - - *file* is the file where the name is emitted (typically a - BytesIO file). - - *compress*, a ``dict``, is the compression table to use. If - ``None`` (the default), names will not be compressed. - - *origin* is a ``dns.name.Name`` or ``None``. If the name is - relative and origin is not ``None``, then *origin* will be appended - to it. - - *override_rdclass*, an ``int``, is used as the class instead of the - class of the rdataset. This is useful when rendering rdatasets - associated with dynamic updates. - - *want_shuffle*, a ``bool``. If ``True``, then the order of the - Rdatas within the Rdataset will be shuffled before rendering. - - Returns an ``int``, the number of records emitted. 
- """ - - if override_rdclass is not None: - rdclass = override_rdclass - want_shuffle = False - else: - rdclass = self.rdclass - file.seek(0, 2) - if len(self) == 0: - name.to_wire(file, compress, origin) - stuff = struct.pack("!HHIH", self.rdtype, rdclass, 0, 0) - file.write(stuff) - return 1 - else: - if want_shuffle: - l = list(self) - random.shuffle(l) - else: - l = self - for rd in l: - name.to_wire(file, compress, origin) - stuff = struct.pack("!HHIH", self.rdtype, rdclass, - self.ttl, 0) - file.write(stuff) - start = file.tell() - rd.to_wire(file, compress, origin) - end = file.tell() - assert end - start < 65536 - file.seek(start - 2) - stuff = struct.pack("!H", end - start) - file.write(stuff) - file.seek(0, 2) - return len(self) - - def match(self, rdclass, rdtype, covers): - """Returns ``True`` if this rdataset matches the specified class, - type, and covers. - """ - if self.rdclass == rdclass and \ - self.rdtype == rdtype and \ - self.covers == covers: - return True - return False - - -def from_text_list(rdclass, rdtype, ttl, text_rdatas): - """Create an rdataset with the specified class, type, and TTL, and with - the specified list of rdatas in text format. - - Returns a ``dns.rdataset.Rdataset`` object. - """ - - if isinstance(rdclass, string_types): - rdclass = dns.rdataclass.from_text(rdclass) - if isinstance(rdtype, string_types): - rdtype = dns.rdatatype.from_text(rdtype) - r = Rdataset(rdclass, rdtype) - r.update_ttl(ttl) - for t in text_rdatas: - rd = dns.rdata.from_text(r.rdclass, r.rdtype, t) - r.add(rd) - return r - - -def from_text(rdclass, rdtype, ttl, *text_rdatas): - """Create an rdataset with the specified class, type, and TTL, and with - the specified rdatas in text format. - - Returns a ``dns.rdataset.Rdataset`` object. - """ - - return from_text_list(rdclass, rdtype, ttl, text_rdatas) - - -def from_rdata_list(ttl, rdatas): - """Create an rdataset with the specified TTL, and with - the specified list of rdata objects. - - Returns a ``dns.rdataset.Rdataset`` object. - """ - - if len(rdatas) == 0: - raise ValueError("rdata list must not be empty") - r = None - for rd in rdatas: - if r is None: - r = Rdataset(rd.rdclass, rd.rdtype) - r.update_ttl(ttl) - r.add(rd) - return r - - -def from_rdata(ttl, *rdatas): - """Create an rdataset with the specified TTL, and with - the specified rdata objects. - - Returns a ``dns.rdataset.Rdataset`` object. - """ - - return from_rdata_list(ttl, rdatas) diff --git a/venv/lib/python3.6/site-packages/dns/rdatatype.py b/venv/lib/python3.6/site-packages/dns/rdatatype.py deleted file mode 100644 index b247bc9..0000000 --- a/venv/lib/python3.6/site-packages/dns/rdatatype.py +++ /dev/null @@ -1,287 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2001-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -"""DNS Rdata Types.""" - -import re - -import dns.exception - -NONE = 0 -A = 1 -NS = 2 -MD = 3 -MF = 4 -CNAME = 5 -SOA = 6 -MB = 7 -MG = 8 -MR = 9 -NULL = 10 -WKS = 11 -PTR = 12 -HINFO = 13 -MINFO = 14 -MX = 15 -TXT = 16 -RP = 17 -AFSDB = 18 -X25 = 19 -ISDN = 20 -RT = 21 -NSAP = 22 -NSAP_PTR = 23 -SIG = 24 -KEY = 25 -PX = 26 -GPOS = 27 -AAAA = 28 -LOC = 29 -NXT = 30 -SRV = 33 -NAPTR = 35 -KX = 36 -CERT = 37 -A6 = 38 -DNAME = 39 -OPT = 41 -APL = 42 -DS = 43 -SSHFP = 44 -IPSECKEY = 45 -RRSIG = 46 -NSEC = 47 -DNSKEY = 48 -DHCID = 49 -NSEC3 = 50 -NSEC3PARAM = 51 -TLSA = 52 -HIP = 55 -CDS = 59 -CDNSKEY = 60 -OPENPGPKEY = 61 -CSYNC = 62 -SPF = 99 -UNSPEC = 103 -EUI48 = 108 -EUI64 = 109 -TKEY = 249 -TSIG = 250 -IXFR = 251 -AXFR = 252 -MAILB = 253 -MAILA = 254 -ANY = 255 -URI = 256 -CAA = 257 -AVC = 258 -TA = 32768 -DLV = 32769 - -_by_text = { - 'NONE': NONE, - 'A': A, - 'NS': NS, - 'MD': MD, - 'MF': MF, - 'CNAME': CNAME, - 'SOA': SOA, - 'MB': MB, - 'MG': MG, - 'MR': MR, - 'NULL': NULL, - 'WKS': WKS, - 'PTR': PTR, - 'HINFO': HINFO, - 'MINFO': MINFO, - 'MX': MX, - 'TXT': TXT, - 'RP': RP, - 'AFSDB': AFSDB, - 'X25': X25, - 'ISDN': ISDN, - 'RT': RT, - 'NSAP': NSAP, - 'NSAP-PTR': NSAP_PTR, - 'SIG': SIG, - 'KEY': KEY, - 'PX': PX, - 'GPOS': GPOS, - 'AAAA': AAAA, - 'LOC': LOC, - 'NXT': NXT, - 'SRV': SRV, - 'NAPTR': NAPTR, - 'KX': KX, - 'CERT': CERT, - 'A6': A6, - 'DNAME': DNAME, - 'OPT': OPT, - 'APL': APL, - 'DS': DS, - 'SSHFP': SSHFP, - 'IPSECKEY': IPSECKEY, - 'RRSIG': RRSIG, - 'NSEC': NSEC, - 'DNSKEY': DNSKEY, - 'DHCID': DHCID, - 'NSEC3': NSEC3, - 'NSEC3PARAM': NSEC3PARAM, - 'TLSA': TLSA, - 'HIP': HIP, - 'CDS': CDS, - 'CDNSKEY': CDNSKEY, - 'OPENPGPKEY': OPENPGPKEY, - 'CSYNC': CSYNC, - 'SPF': SPF, - 'UNSPEC': UNSPEC, - 'EUI48': EUI48, - 'EUI64': EUI64, - 'TKEY': TKEY, - 'TSIG': TSIG, - 'IXFR': IXFR, - 'AXFR': AXFR, - 'MAILB': MAILB, - 'MAILA': MAILA, - 'ANY': ANY, - 'URI': URI, - 'CAA': CAA, - 'AVC': AVC, - 'TA': TA, - 'DLV': DLV, -} - -# We construct the inverse mapping programmatically to ensure that we -# cannot make any mistakes (e.g. omissions, cut-and-paste errors) that -# would cause the mapping not to be true inverse. - -_by_value = {y: x for x, y in _by_text.items()} - -_metatypes = { - OPT: True -} - -_singletons = { - SOA: True, - NXT: True, - DNAME: True, - NSEC: True, - CNAME: True, -} - -_unknown_type_pattern = re.compile('TYPE([0-9]+)$', re.I) - - -class UnknownRdatatype(dns.exception.DNSException): - """DNS resource record type is unknown.""" - - -def from_text(text): - """Convert text into a DNS rdata type value. - - The input text can be a defined DNS RR type mnemonic or - instance of the DNS generic type syntax. - - For example, "NS" and "TYPE2" will both result in a value of 2. - - Raises ``dns.rdatatype.UnknownRdatatype`` if the type is unknown. - - Raises ``ValueError`` if the rdata type value is not >= 0 and <= 65535. - - Returns an ``int``. 
- """ - - value = _by_text.get(text.upper()) - if value is None: - match = _unknown_type_pattern.match(text) - if match is None: - raise UnknownRdatatype - value = int(match.group(1)) - if value < 0 or value > 65535: - raise ValueError("type must be between >= 0 and <= 65535") - return value - - -def to_text(value): - """Convert a DNS rdata type value to text. - - If the value has a known mnemonic, it will be used, otherwise the - DNS generic type syntax will be used. - - Raises ``ValueError`` if the rdata type value is not >= 0 and <= 65535. - - Returns a ``str``. - """ - - if value < 0 or value > 65535: - raise ValueError("type must be between >= 0 and <= 65535") - text = _by_value.get(value) - if text is None: - text = 'TYPE' + repr(value) - return text - - -def is_metatype(rdtype): - """True if the specified type is a metatype. - - *rdtype* is an ``int``. - - The currently defined metatypes are TKEY, TSIG, IXFR, AXFR, MAILA, - MAILB, ANY, and OPT. - - Returns a ``bool``. - """ - - if rdtype >= TKEY and rdtype <= ANY or rdtype in _metatypes: - return True - return False - - -def is_singleton(rdtype): - """Is the specified type a singleton type? - - Singleton types can only have a single rdata in an rdataset, or a single - RR in an RRset. - - The currently defined singleton types are CNAME, DNAME, NSEC, NXT, and - SOA. - - *rdtype* is an ``int``. - - Returns a ``bool``. - """ - - if rdtype in _singletons: - return True - return False - - -def register_type(rdtype, rdtype_text, is_singleton=False): # pylint: disable=redefined-outer-name - """Dynamically register an rdatatype. - - *rdtype*, an ``int``, the rdatatype to register. - - *rdtype_text*, a ``text``, the textual form of the rdatatype. - - *is_singleton*, a ``bool``, indicating if the type is a singleton (i.e. - RRsets of the type can have only one member.) - """ - - _by_text[rdtype_text] = rdtype - _by_value[rdtype] = rdtype_text - if is_singleton: - _singletons[rdtype] = True diff --git a/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/AFSDB.py b/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/AFSDB.py deleted file mode 100644 index c6a700c..0000000 --- a/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/AFSDB.py +++ /dev/null @@ -1,55 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import dns.rdtypes.mxbase - - -class AFSDB(dns.rdtypes.mxbase.UncompressedDowncasingMX): - - """AFSDB record - - @ivar subtype: the subtype value - @type subtype: int - @ivar hostname: the hostname name - @type hostname: dns.name.Name object""" - - # Use the property mechanism to make "subtype" an alias for the - # "preference" attribute, and "hostname" an alias for the "exchange" - # attribute. 
- # - # This lets us inherit the UncompressedMX implementation but lets - # the caller use appropriate attribute names for the rdata type. - # - # We probably lose some performance vs. a cut-and-paste - # implementation, but this way we don't copy code, and that's - # good. - - def get_subtype(self): - return self.preference - - def set_subtype(self, subtype): - self.preference = subtype - - subtype = property(get_subtype, set_subtype) - - def get_hostname(self): - return self.exchange - - def set_hostname(self, hostname): - self.exchange = hostname - - hostname = property(get_hostname, set_hostname) diff --git a/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/AVC.py b/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/AVC.py deleted file mode 100644 index 7f340b3..0000000 --- a/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/AVC.py +++ /dev/null @@ -1,25 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2016 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import dns.rdtypes.txtbase - - -class AVC(dns.rdtypes.txtbase.TXTBase): - - """AVC record - - @see: U{http://www.iana.org/assignments/dns-parameters/AVC/avc-completed-template}""" diff --git a/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/CAA.py b/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/CAA.py deleted file mode 100644 index 0acf201..0000000 --- a/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/CAA.py +++ /dev/null @@ -1,75 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
- -import struct - -import dns.exception -import dns.rdata -import dns.tokenizer - - -class CAA(dns.rdata.Rdata): - - """CAA (Certification Authority Authorization) record - - @ivar flags: the flags - @type flags: int - @ivar tag: the tag - @type tag: string - @ivar value: the value - @type value: string - @see: RFC 6844""" - - __slots__ = ['flags', 'tag', 'value'] - - def __init__(self, rdclass, rdtype, flags, tag, value): - super(CAA, self).__init__(rdclass, rdtype) - self.flags = flags - self.tag = tag - self.value = value - - def to_text(self, origin=None, relativize=True, **kw): - return '%u %s "%s"' % (self.flags, - dns.rdata._escapify(self.tag), - dns.rdata._escapify(self.value)) - - @classmethod - def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True): - flags = tok.get_uint8() - tag = tok.get_string().encode() - if len(tag) > 255: - raise dns.exception.SyntaxError("tag too long") - if not tag.isalnum(): - raise dns.exception.SyntaxError("tag is not alphanumeric") - value = tok.get_string().encode() - return cls(rdclass, rdtype, flags, tag, value) - - def to_wire(self, file, compress=None, origin=None): - file.write(struct.pack('!B', self.flags)) - l = len(self.tag) - assert l < 256 - file.write(struct.pack('!B', l)) - file.write(self.tag) - file.write(self.value) - - @classmethod - def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None): - (flags, l) = struct.unpack('!BB', wire[current: current + 2]) - current += 2 - tag = wire[current: current + l] - value = wire[current + l:current + rdlen - 2] - return cls(rdclass, rdtype, flags, tag, value) diff --git a/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/CDNSKEY.py b/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/CDNSKEY.py deleted file mode 100644 index 653ae1b..0000000 --- a/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/CDNSKEY.py +++ /dev/null @@ -1,27 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2004-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import dns.rdtypes.dnskeybase -from dns.rdtypes.dnskeybase import flags_to_text_set, flags_from_text_set - - -__all__ = ['flags_to_text_set', 'flags_from_text_set'] - - -class CDNSKEY(dns.rdtypes.dnskeybase.DNSKEYBase): - - """CDNSKEY record""" diff --git a/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/CDS.py b/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/CDS.py deleted file mode 100644 index a63041d..0000000 --- a/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/CDS.py +++ /dev/null @@ -1,23 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. 
-# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import dns.rdtypes.dsbase - - -class CDS(dns.rdtypes.dsbase.DSBase): - - """CDS record""" diff --git a/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/CERT.py b/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/CERT.py deleted file mode 100644 index eea27b5..0000000 --- a/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/CERT.py +++ /dev/null @@ -1,123 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
- -import struct -import base64 - -import dns.exception -import dns.dnssec -import dns.rdata -import dns.tokenizer - -_ctype_by_value = { - 1: 'PKIX', - 2: 'SPKI', - 3: 'PGP', - 253: 'URI', - 254: 'OID', -} - -_ctype_by_name = { - 'PKIX': 1, - 'SPKI': 2, - 'PGP': 3, - 'URI': 253, - 'OID': 254, -} - - -def _ctype_from_text(what): - v = _ctype_by_name.get(what) - if v is not None: - return v - return int(what) - - -def _ctype_to_text(what): - v = _ctype_by_value.get(what) - if v is not None: - return v - return str(what) - - -class CERT(dns.rdata.Rdata): - - """CERT record - - @ivar certificate_type: certificate type - @type certificate_type: int - @ivar key_tag: key tag - @type key_tag: int - @ivar algorithm: algorithm - @type algorithm: int - @ivar certificate: the certificate or CRL - @type certificate: string - @see: RFC 2538""" - - __slots__ = ['certificate_type', 'key_tag', 'algorithm', 'certificate'] - - def __init__(self, rdclass, rdtype, certificate_type, key_tag, algorithm, - certificate): - super(CERT, self).__init__(rdclass, rdtype) - self.certificate_type = certificate_type - self.key_tag = key_tag - self.algorithm = algorithm - self.certificate = certificate - - def to_text(self, origin=None, relativize=True, **kw): - certificate_type = _ctype_to_text(self.certificate_type) - return "%s %d %s %s" % (certificate_type, self.key_tag, - dns.dnssec.algorithm_to_text(self.algorithm), - dns.rdata._base64ify(self.certificate)) - - @classmethod - def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True): - certificate_type = _ctype_from_text(tok.get_string()) - key_tag = tok.get_uint16() - algorithm = dns.dnssec.algorithm_from_text(tok.get_string()) - if algorithm < 0 or algorithm > 255: - raise dns.exception.SyntaxError("bad algorithm type") - chunks = [] - while 1: - t = tok.get().unescape() - if t.is_eol_or_eof(): - break - if not t.is_identifier(): - raise dns.exception.SyntaxError - chunks.append(t.value.encode()) - b64 = b''.join(chunks) - certificate = base64.b64decode(b64) - return cls(rdclass, rdtype, certificate_type, key_tag, - algorithm, certificate) - - def to_wire(self, file, compress=None, origin=None): - prefix = struct.pack("!HHB", self.certificate_type, self.key_tag, - self.algorithm) - file.write(prefix) - file.write(self.certificate) - - @classmethod - def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None): - prefix = wire[current: current + 5].unwrap() - current += 5 - rdlen -= 5 - if rdlen < 0: - raise dns.exception.FormError - (certificate_type, key_tag, algorithm) = struct.unpack("!HHB", prefix) - certificate = wire[current: current + rdlen].unwrap() - return cls(rdclass, rdtype, certificate_type, key_tag, algorithm, - certificate) diff --git a/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/CNAME.py b/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/CNAME.py deleted file mode 100644 index 11d42aa..0000000 --- a/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/CNAME.py +++ /dev/null @@ -1,27 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. 
-# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import dns.rdtypes.nsbase - - -class CNAME(dns.rdtypes.nsbase.NSBase): - - """CNAME record - - Note: although CNAME is officially a singleton type, dnspython allows - non-singleton CNAME rdatasets because such sets have been commonly - used by BIND and other nameservers for load balancing.""" diff --git a/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/CSYNC.py b/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/CSYNC.py deleted file mode 100644 index 06292fb..0000000 --- a/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/CSYNC.py +++ /dev/null @@ -1,126 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2004-2007, 2009-2011, 2016 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
- -import struct - -import dns.exception -import dns.rdata -import dns.rdatatype -import dns.name -from dns._compat import xrange - -class CSYNC(dns.rdata.Rdata): - - """CSYNC record - - @ivar serial: the SOA serial number - @type serial: int - @ivar flags: the CSYNC flags - @type flags: int - @ivar windows: the windowed bitmap list - @type windows: list of (window number, string) tuples""" - - __slots__ = ['serial', 'flags', 'windows'] - - def __init__(self, rdclass, rdtype, serial, flags, windows): - super(CSYNC, self).__init__(rdclass, rdtype) - self.serial = serial - self.flags = flags - self.windows = windows - - def to_text(self, origin=None, relativize=True, **kw): - text = '' - for (window, bitmap) in self.windows: - bits = [] - for i in xrange(0, len(bitmap)): - byte = bitmap[i] - for j in xrange(0, 8): - if byte & (0x80 >> j): - bits.append(dns.rdatatype.to_text(window * 256 + - i * 8 + j)) - text += (' ' + ' '.join(bits)) - return '%d %d%s' % (self.serial, self.flags, text) - - @classmethod - def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True): - serial = tok.get_uint32() - flags = tok.get_uint16() - rdtypes = [] - while 1: - token = tok.get().unescape() - if token.is_eol_or_eof(): - break - nrdtype = dns.rdatatype.from_text(token.value) - if nrdtype == 0: - raise dns.exception.SyntaxError("CSYNC with bit 0") - if nrdtype > 65535: - raise dns.exception.SyntaxError("CSYNC with bit > 65535") - rdtypes.append(nrdtype) - rdtypes.sort() - window = 0 - octets = 0 - prior_rdtype = 0 - bitmap = bytearray(b'\0' * 32) - windows = [] - for nrdtype in rdtypes: - if nrdtype == prior_rdtype: - continue - prior_rdtype = nrdtype - new_window = nrdtype // 256 - if new_window != window: - windows.append((window, bitmap[0:octets])) - bitmap = bytearray(b'\0' * 32) - window = new_window - offset = nrdtype % 256 - byte = offset // 8 - bit = offset % 8 - octets = byte + 1 - bitmap[byte] = bitmap[byte] | (0x80 >> bit) - - windows.append((window, bitmap[0:octets])) - return cls(rdclass, rdtype, serial, flags, windows) - - def to_wire(self, file, compress=None, origin=None): - file.write(struct.pack('!IH', self.serial, self.flags)) - for (window, bitmap) in self.windows: - file.write(struct.pack('!BB', window, len(bitmap))) - file.write(bitmap) - - @classmethod - def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None): - if rdlen < 6: - raise dns.exception.FormError("CSYNC too short") - (serial, flags) = struct.unpack("!IH", wire[current: current + 6]) - current += 6 - rdlen -= 6 - windows = [] - while rdlen > 0: - if rdlen < 3: - raise dns.exception.FormError("CSYNC too short") - window = wire[current] - octets = wire[current + 1] - if octets == 0 or octets > 32: - raise dns.exception.FormError("bad CSYNC octets") - current += 2 - rdlen -= 2 - if rdlen < octets: - raise dns.exception.FormError("bad CSYNC bitmap length") - bitmap = bytearray(wire[current: current + octets].unwrap()) - current += octets - rdlen -= octets - windows.append((window, bitmap)) - return cls(rdclass, rdtype, serial, flags, windows) diff --git a/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/DLV.py b/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/DLV.py deleted file mode 100644 index 1635212..0000000 --- a/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/DLV.py +++ /dev/null @@ -1,23 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2009-2011 Nominum, Inc. 
-# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import dns.rdtypes.dsbase - - -class DLV(dns.rdtypes.dsbase.DSBase): - - """DLV record""" diff --git a/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/DNAME.py b/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/DNAME.py deleted file mode 100644 index 2499283..0000000 --- a/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/DNAME.py +++ /dev/null @@ -1,26 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import dns.rdtypes.nsbase - - -class DNAME(dns.rdtypes.nsbase.UncompressedNS): - - """DNAME record""" - - def to_digestable(self, origin=None): - return self.target.to_digestable(origin) diff --git a/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/DNSKEY.py b/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/DNSKEY.py deleted file mode 100644 index e36f7bc..0000000 --- a/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/DNSKEY.py +++ /dev/null @@ -1,27 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2004-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
- -import dns.rdtypes.dnskeybase -from dns.rdtypes.dnskeybase import flags_to_text_set, flags_from_text_set - - -__all__ = ['flags_to_text_set', 'flags_from_text_set'] - - -class DNSKEY(dns.rdtypes.dnskeybase.DNSKEYBase): - - """DNSKEY record""" diff --git a/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/DS.py b/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/DS.py deleted file mode 100644 index 7d457b2..0000000 --- a/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/DS.py +++ /dev/null @@ -1,23 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import dns.rdtypes.dsbase - - -class DS(dns.rdtypes.dsbase.DSBase): - - """DS record""" diff --git a/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/EUI48.py b/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/EUI48.py deleted file mode 100644 index aa260e2..0000000 --- a/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/EUI48.py +++ /dev/null @@ -1,29 +0,0 @@ -# Copyright (C) 2015 Red Hat, Inc. -# Author: Petr Spacek -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED 'AS IS' AND RED HAT DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import dns.rdtypes.euibase - - -class EUI48(dns.rdtypes.euibase.EUIBase): - - """EUI48 record - - @ivar fingerprint: 48-bit Extended Unique Identifier (EUI-48) - @type fingerprint: string - @see: rfc7043.txt""" - - byte_len = 6 # 0123456789ab (in hex) - text_len = byte_len * 3 - 1 # 01-23-45-67-89-ab diff --git a/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/EUI64.py b/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/EUI64.py deleted file mode 100644 index 5eba350..0000000 --- a/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/EUI64.py +++ /dev/null @@ -1,29 +0,0 @@ -# Copyright (C) 2015 Red Hat, Inc. -# Author: Petr Spacek -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. 
-# -# THE SOFTWARE IS PROVIDED 'AS IS' AND RED HAT DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import dns.rdtypes.euibase - - -class EUI64(dns.rdtypes.euibase.EUIBase): - - """EUI64 record - - @ivar fingerprint: 64-bit Extended Unique Identifier (EUI-64) - @type fingerprint: string - @see: rfc7043.txt""" - - byte_len = 8 # 0123456789abcdef (in hex) - text_len = byte_len * 3 - 1 # 01-23-45-67-89-ab-cd-ef diff --git a/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/GPOS.py b/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/GPOS.py deleted file mode 100644 index 422822f..0000000 --- a/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/GPOS.py +++ /dev/null @@ -1,162 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
- -import struct - -import dns.exception -import dns.rdata -import dns.tokenizer -from dns._compat import long, text_type - - -def _validate_float_string(what): - if what[0] == b'-'[0] or what[0] == b'+'[0]: - what = what[1:] - if what.isdigit(): - return - (left, right) = what.split(b'.') - if left == b'' and right == b'': - raise dns.exception.FormError - if not left == b'' and not left.decode().isdigit(): - raise dns.exception.FormError - if not right == b'' and not right.decode().isdigit(): - raise dns.exception.FormError - - -def _sanitize(value): - if isinstance(value, text_type): - return value.encode() - return value - - -class GPOS(dns.rdata.Rdata): - - """GPOS record - - @ivar latitude: latitude - @type latitude: string - @ivar longitude: longitude - @type longitude: string - @ivar altitude: altitude - @type altitude: string - @see: RFC 1712""" - - __slots__ = ['latitude', 'longitude', 'altitude'] - - def __init__(self, rdclass, rdtype, latitude, longitude, altitude): - super(GPOS, self).__init__(rdclass, rdtype) - if isinstance(latitude, float) or \ - isinstance(latitude, int) or \ - isinstance(latitude, long): - latitude = str(latitude) - if isinstance(longitude, float) or \ - isinstance(longitude, int) or \ - isinstance(longitude, long): - longitude = str(longitude) - if isinstance(altitude, float) or \ - isinstance(altitude, int) or \ - isinstance(altitude, long): - altitude = str(altitude) - latitude = _sanitize(latitude) - longitude = _sanitize(longitude) - altitude = _sanitize(altitude) - _validate_float_string(latitude) - _validate_float_string(longitude) - _validate_float_string(altitude) - self.latitude = latitude - self.longitude = longitude - self.altitude = altitude - - def to_text(self, origin=None, relativize=True, **kw): - return '{} {} {}'.format(self.latitude.decode(), - self.longitude.decode(), - self.altitude.decode()) - - @classmethod - def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True): - latitude = tok.get_string() - longitude = tok.get_string() - altitude = tok.get_string() - tok.get_eol() - return cls(rdclass, rdtype, latitude, longitude, altitude) - - def to_wire(self, file, compress=None, origin=None): - l = len(self.latitude) - assert l < 256 - file.write(struct.pack('!B', l)) - file.write(self.latitude) - l = len(self.longitude) - assert l < 256 - file.write(struct.pack('!B', l)) - file.write(self.longitude) - l = len(self.altitude) - assert l < 256 - file.write(struct.pack('!B', l)) - file.write(self.altitude) - - @classmethod - def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None): - l = wire[current] - current += 1 - rdlen -= 1 - if l > rdlen: - raise dns.exception.FormError - latitude = wire[current: current + l].unwrap() - current += l - rdlen -= l - l = wire[current] - current += 1 - rdlen -= 1 - if l > rdlen: - raise dns.exception.FormError - longitude = wire[current: current + l].unwrap() - current += l - rdlen -= l - l = wire[current] - current += 1 - rdlen -= 1 - if l != rdlen: - raise dns.exception.FormError - altitude = wire[current: current + l].unwrap() - return cls(rdclass, rdtype, latitude, longitude, altitude) - - def _get_float_latitude(self): - return float(self.latitude) - - def _set_float_latitude(self, value): - self.latitude = str(value) - - float_latitude = property(_get_float_latitude, _set_float_latitude, - doc="latitude as a floating point value") - - def _get_float_longitude(self): - return float(self.longitude) - - def _set_float_longitude(self, value): - self.longitude = str(value) - - 
float_longitude = property(_get_float_longitude, _set_float_longitude, - doc="longitude as a floating point value") - - def _get_float_altitude(self): - return float(self.altitude) - - def _set_float_altitude(self, value): - self.altitude = str(value) - - float_altitude = property(_get_float_altitude, _set_float_altitude, - doc="altitude as a floating point value") diff --git a/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/HINFO.py b/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/HINFO.py deleted file mode 100644 index e4e0b34..0000000 --- a/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/HINFO.py +++ /dev/null @@ -1,86 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import struct - -import dns.exception -import dns.rdata -import dns.tokenizer -from dns._compat import text_type - - -class HINFO(dns.rdata.Rdata): - - """HINFO record - - @ivar cpu: the CPU type - @type cpu: string - @ivar os: the OS type - @type os: string - @see: RFC 1035""" - - __slots__ = ['cpu', 'os'] - - def __init__(self, rdclass, rdtype, cpu, os): - super(HINFO, self).__init__(rdclass, rdtype) - if isinstance(cpu, text_type): - self.cpu = cpu.encode() - else: - self.cpu = cpu - if isinstance(os, text_type): - self.os = os.encode() - else: - self.os = os - - def to_text(self, origin=None, relativize=True, **kw): - return '"{}" "{}"'.format(dns.rdata._escapify(self.cpu), - dns.rdata._escapify(self.os)) - - @classmethod - def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True): - cpu = tok.get_string() - os = tok.get_string() - tok.get_eol() - return cls(rdclass, rdtype, cpu, os) - - def to_wire(self, file, compress=None, origin=None): - l = len(self.cpu) - assert l < 256 - file.write(struct.pack('!B', l)) - file.write(self.cpu) - l = len(self.os) - assert l < 256 - file.write(struct.pack('!B', l)) - file.write(self.os) - - @classmethod - def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None): - l = wire[current] - current += 1 - rdlen -= 1 - if l > rdlen: - raise dns.exception.FormError - cpu = wire[current:current + l].unwrap() - current += l - rdlen -= l - l = wire[current] - current += 1 - rdlen -= 1 - if l != rdlen: - raise dns.exception.FormError - os = wire[current: current + l].unwrap() - return cls(rdclass, rdtype, cpu, os) diff --git a/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/HIP.py b/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/HIP.py deleted file mode 100644 index 7c876b2..0000000 --- a/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/HIP.py +++ /dev/null @@ -1,115 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2010, 2011 Nominum, Inc. 
-# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import struct -import base64 -import binascii - -import dns.exception -import dns.rdata -import dns.rdatatype - - -class HIP(dns.rdata.Rdata): - - """HIP record - - @ivar hit: the host identity tag - @type hit: string - @ivar algorithm: the public key cryptographic algorithm - @type algorithm: int - @ivar key: the public key - @type key: string - @ivar servers: the rendezvous servers - @type servers: list of dns.name.Name objects - @see: RFC 5205""" - - __slots__ = ['hit', 'algorithm', 'key', 'servers'] - - def __init__(self, rdclass, rdtype, hit, algorithm, key, servers): - super(HIP, self).__init__(rdclass, rdtype) - self.hit = hit - self.algorithm = algorithm - self.key = key - self.servers = servers - - def to_text(self, origin=None, relativize=True, **kw): - hit = binascii.hexlify(self.hit).decode() - key = base64.b64encode(self.key).replace(b'\n', b'').decode() - text = u'' - servers = [] - for server in self.servers: - servers.append(server.choose_relativity(origin, relativize)) - if len(servers) > 0: - text += (u' ' + u' '.join((x.to_unicode() for x in servers))) - return u'%u %s %s%s' % (self.algorithm, hit, key, text) - - @classmethod - def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True): - algorithm = tok.get_uint8() - hit = binascii.unhexlify(tok.get_string().encode()) - if len(hit) > 255: - raise dns.exception.SyntaxError("HIT too long") - key = base64.b64decode(tok.get_string().encode()) - servers = [] - while 1: - token = tok.get() - if token.is_eol_or_eof(): - break - server = dns.name.from_text(token.value, origin) - server.choose_relativity(origin, relativize) - servers.append(server) - return cls(rdclass, rdtype, hit, algorithm, key, servers) - - def to_wire(self, file, compress=None, origin=None): - lh = len(self.hit) - lk = len(self.key) - file.write(struct.pack("!BBH", lh, self.algorithm, lk)) - file.write(self.hit) - file.write(self.key) - for server in self.servers: - server.to_wire(file, None, origin) - - @classmethod - def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None): - (lh, algorithm, lk) = struct.unpack('!BBH', - wire[current: current + 4]) - current += 4 - rdlen -= 4 - hit = wire[current: current + lh].unwrap() - current += lh - rdlen -= lh - key = wire[current: current + lk].unwrap() - current += lk - rdlen -= lk - servers = [] - while rdlen > 0: - (server, cused) = dns.name.from_wire(wire[: current + rdlen], - current) - current += cused - rdlen -= cused - if origin is not None: - server = server.relativize(origin) - servers.append(server) - return cls(rdclass, rdtype, hit, algorithm, key, servers) - - def choose_relativity(self, origin=None, relativize=True): - servers = [] - for server in self.servers: - server = server.choose_relativity(origin, relativize) - 
servers.append(server) - self.servers = servers diff --git a/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/ISDN.py b/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/ISDN.py deleted file mode 100644 index f5f5f8b..0000000 --- a/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/ISDN.py +++ /dev/null @@ -1,99 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import struct - -import dns.exception -import dns.rdata -import dns.tokenizer -from dns._compat import text_type - - -class ISDN(dns.rdata.Rdata): - - """ISDN record - - @ivar address: the ISDN address - @type address: string - @ivar subaddress: the ISDN subaddress (or '' if not present) - @type subaddress: string - @see: RFC 1183""" - - __slots__ = ['address', 'subaddress'] - - def __init__(self, rdclass, rdtype, address, subaddress): - super(ISDN, self).__init__(rdclass, rdtype) - if isinstance(address, text_type): - self.address = address.encode() - else: - self.address = address - if isinstance(address, text_type): - self.subaddress = subaddress.encode() - else: - self.subaddress = subaddress - - def to_text(self, origin=None, relativize=True, **kw): - if self.subaddress: - return '"{}" "{}"'.format(dns.rdata._escapify(self.address), - dns.rdata._escapify(self.subaddress)) - else: - return '"%s"' % dns.rdata._escapify(self.address) - - @classmethod - def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True): - address = tok.get_string() - t = tok.get() - if not t.is_eol_or_eof(): - tok.unget(t) - subaddress = tok.get_string() - else: - tok.unget(t) - subaddress = '' - tok.get_eol() - return cls(rdclass, rdtype, address, subaddress) - - def to_wire(self, file, compress=None, origin=None): - l = len(self.address) - assert l < 256 - file.write(struct.pack('!B', l)) - file.write(self.address) - l = len(self.subaddress) - if l > 0: - assert l < 256 - file.write(struct.pack('!B', l)) - file.write(self.subaddress) - - @classmethod - def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None): - l = wire[current] - current += 1 - rdlen -= 1 - if l > rdlen: - raise dns.exception.FormError - address = wire[current: current + l].unwrap() - current += l - rdlen -= l - if rdlen > 0: - l = wire[current] - current += 1 - rdlen -= 1 - if l != rdlen: - raise dns.exception.FormError - subaddress = wire[current: current + l].unwrap() - else: - subaddress = '' - return cls(rdclass, rdtype, address, subaddress) diff --git a/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/LOC.py b/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/LOC.py deleted file mode 100644 index da9bb03..0000000 --- a/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/LOC.py +++ /dev/null @@ -1,327 +0,0 @@ -# Copyright (C) Dnspython 
Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -from __future__ import division - -import struct - -import dns.exception -import dns.rdata -from dns._compat import long, xrange, round_py2_compat - - -_pows = tuple(long(10**i) for i in range(0, 11)) - -# default values are in centimeters -_default_size = 100.0 -_default_hprec = 1000000.0 -_default_vprec = 1000.0 - - -def _exponent_of(what, desc): - if what == 0: - return 0 - exp = None - for i in xrange(len(_pows)): - if what // _pows[i] == long(0): - exp = i - 1 - break - if exp is None or exp < 0: - raise dns.exception.SyntaxError("%s value out of bounds" % desc) - return exp - - -def _float_to_tuple(what): - if what < 0: - sign = -1 - what *= -1 - else: - sign = 1 - what = round_py2_compat(what * 3600000) - degrees = int(what // 3600000) - what -= degrees * 3600000 - minutes = int(what // 60000) - what -= minutes * 60000 - seconds = int(what // 1000) - what -= int(seconds * 1000) - what = int(what) - return (degrees, minutes, seconds, what, sign) - - -def _tuple_to_float(what): - value = float(what[0]) - value += float(what[1]) / 60.0 - value += float(what[2]) / 3600.0 - value += float(what[3]) / 3600000.0 - return float(what[4]) * value - - -def _encode_size(what, desc): - what = long(what) - exponent = _exponent_of(what, desc) & 0xF - base = what // pow(10, exponent) & 0xF - return base * 16 + exponent - - -def _decode_size(what, desc): - exponent = what & 0x0F - if exponent > 9: - raise dns.exception.SyntaxError("bad %s exponent" % desc) - base = (what & 0xF0) >> 4 - if base > 9: - raise dns.exception.SyntaxError("bad %s base" % desc) - return long(base) * pow(10, exponent) - - -class LOC(dns.rdata.Rdata): - - """LOC record - - @ivar latitude: latitude - @type latitude: (int, int, int, int, sign) tuple specifying the degrees, minutes, - seconds, milliseconds, and sign of the coordinate. - @ivar longitude: longitude - @type longitude: (int, int, int, int, sign) tuple specifying the degrees, - minutes, seconds, milliseconds, and sign of the coordinate. - @ivar altitude: altitude - @type altitude: float - @ivar size: size of the sphere - @type size: float - @ivar horizontal_precision: horizontal precision - @type horizontal_precision: float - @ivar vertical_precision: vertical precision - @type vertical_precision: float - @see: RFC 1876""" - - __slots__ = ['latitude', 'longitude', 'altitude', 'size', - 'horizontal_precision', 'vertical_precision'] - - def __init__(self, rdclass, rdtype, latitude, longitude, altitude, - size=_default_size, hprec=_default_hprec, - vprec=_default_vprec): - """Initialize a LOC record instance. 
- - The parameters I{latitude} and I{longitude} may be either a 4-tuple - of integers specifying (degrees, minutes, seconds, milliseconds), - or they may be floating point values specifying the number of - degrees. The other parameters are floats. Size, horizontal precision, - and vertical precision are specified in centimeters.""" - - super(LOC, self).__init__(rdclass, rdtype) - if isinstance(latitude, int) or isinstance(latitude, long): - latitude = float(latitude) - if isinstance(latitude, float): - latitude = _float_to_tuple(latitude) - self.latitude = latitude - if isinstance(longitude, int) or isinstance(longitude, long): - longitude = float(longitude) - if isinstance(longitude, float): - longitude = _float_to_tuple(longitude) - self.longitude = longitude - self.altitude = float(altitude) - self.size = float(size) - self.horizontal_precision = float(hprec) - self.vertical_precision = float(vprec) - - def to_text(self, origin=None, relativize=True, **kw): - if self.latitude[4] > 0: - lat_hemisphere = 'N' - else: - lat_hemisphere = 'S' - if self.longitude[4] > 0: - long_hemisphere = 'E' - else: - long_hemisphere = 'W' - text = "%d %d %d.%03d %s %d %d %d.%03d %s %0.2fm" % ( - self.latitude[0], self.latitude[1], - self.latitude[2], self.latitude[3], lat_hemisphere, - self.longitude[0], self.longitude[1], self.longitude[2], - self.longitude[3], long_hemisphere, - self.altitude / 100.0 - ) - - # do not print default values - if self.size != _default_size or \ - self.horizontal_precision != _default_hprec or \ - self.vertical_precision != _default_vprec: - text += " {:0.2f}m {:0.2f}m {:0.2f}m".format( - self.size / 100.0, self.horizontal_precision / 100.0, - self.vertical_precision / 100.0 - ) - return text - - @classmethod - def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True): - latitude = [0, 0, 0, 0, 1] - longitude = [0, 0, 0, 0, 1] - size = _default_size - hprec = _default_hprec - vprec = _default_vprec - - latitude[0] = tok.get_int() - t = tok.get_string() - if t.isdigit(): - latitude[1] = int(t) - t = tok.get_string() - if '.' in t: - (seconds, milliseconds) = t.split('.') - if not seconds.isdigit(): - raise dns.exception.SyntaxError( - 'bad latitude seconds value') - latitude[2] = int(seconds) - if latitude[2] >= 60: - raise dns.exception.SyntaxError('latitude seconds >= 60') - l = len(milliseconds) - if l == 0 or l > 3 or not milliseconds.isdigit(): - raise dns.exception.SyntaxError( - 'bad latitude milliseconds value') - if l == 1: - m = 100 - elif l == 2: - m = 10 - else: - m = 1 - latitude[3] = m * int(milliseconds) - t = tok.get_string() - elif t.isdigit(): - latitude[2] = int(t) - t = tok.get_string() - if t == 'S': - latitude[4] = -1 - elif t != 'N': - raise dns.exception.SyntaxError('bad latitude hemisphere value') - - longitude[0] = tok.get_int() - t = tok.get_string() - if t.isdigit(): - longitude[1] = int(t) - t = tok.get_string() - if '.' 
in t: - (seconds, milliseconds) = t.split('.') - if not seconds.isdigit(): - raise dns.exception.SyntaxError( - 'bad longitude seconds value') - longitude[2] = int(seconds) - if longitude[2] >= 60: - raise dns.exception.SyntaxError('longitude seconds >= 60') - l = len(milliseconds) - if l == 0 or l > 3 or not milliseconds.isdigit(): - raise dns.exception.SyntaxError( - 'bad longitude milliseconds value') - if l == 1: - m = 100 - elif l == 2: - m = 10 - else: - m = 1 - longitude[3] = m * int(milliseconds) - t = tok.get_string() - elif t.isdigit(): - longitude[2] = int(t) - t = tok.get_string() - if t == 'W': - longitude[4] = -1 - elif t != 'E': - raise dns.exception.SyntaxError('bad longitude hemisphere value') - - t = tok.get_string() - if t[-1] == 'm': - t = t[0: -1] - altitude = float(t) * 100.0 # m -> cm - - token = tok.get().unescape() - if not token.is_eol_or_eof(): - value = token.value - if value[-1] == 'm': - value = value[0: -1] - size = float(value) * 100.0 # m -> cm - token = tok.get().unescape() - if not token.is_eol_or_eof(): - value = token.value - if value[-1] == 'm': - value = value[0: -1] - hprec = float(value) * 100.0 # m -> cm - token = tok.get().unescape() - if not token.is_eol_or_eof(): - value = token.value - if value[-1] == 'm': - value = value[0: -1] - vprec = float(value) * 100.0 # m -> cm - tok.get_eol() - - return cls(rdclass, rdtype, latitude, longitude, altitude, - size, hprec, vprec) - - def to_wire(self, file, compress=None, origin=None): - milliseconds = (self.latitude[0] * 3600000 + - self.latitude[1] * 60000 + - self.latitude[2] * 1000 + - self.latitude[3]) * self.latitude[4] - latitude = long(0x80000000) + milliseconds - milliseconds = (self.longitude[0] * 3600000 + - self.longitude[1] * 60000 + - self.longitude[2] * 1000 + - self.longitude[3]) * self.longitude[4] - longitude = long(0x80000000) + milliseconds - altitude = long(self.altitude) + long(10000000) - size = _encode_size(self.size, "size") - hprec = _encode_size(self.horizontal_precision, "horizontal precision") - vprec = _encode_size(self.vertical_precision, "vertical precision") - wire = struct.pack("!BBBBIII", 0, size, hprec, vprec, latitude, - longitude, altitude) - file.write(wire) - - @classmethod - def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None): - (version, size, hprec, vprec, latitude, longitude, altitude) = \ - struct.unpack("!BBBBIII", wire[current: current + rdlen]) - if latitude > long(0x80000000): - latitude = float(latitude - long(0x80000000)) / 3600000 - else: - latitude = -1 * float(long(0x80000000) - latitude) / 3600000 - if latitude < -90.0 or latitude > 90.0: - raise dns.exception.FormError("bad latitude") - if longitude > long(0x80000000): - longitude = float(longitude - long(0x80000000)) / 3600000 - else: - longitude = -1 * float(long(0x80000000) - longitude) / 3600000 - if longitude < -180.0 or longitude > 180.0: - raise dns.exception.FormError("bad longitude") - altitude = float(altitude) - 10000000.0 - size = _decode_size(size, "size") - hprec = _decode_size(hprec, "horizontal precision") - vprec = _decode_size(vprec, "vertical precision") - return cls(rdclass, rdtype, latitude, longitude, altitude, - size, hprec, vprec) - - def _get_float_latitude(self): - return _tuple_to_float(self.latitude) - - def _set_float_latitude(self, value): - self.latitude = _float_to_tuple(value) - - float_latitude = property(_get_float_latitude, _set_float_latitude, - doc="latitude as a floating point value") - - def _get_float_longitude(self): - return 
_tuple_to_float(self.longitude) - - def _set_float_longitude(self, value): - self.longitude = _float_to_tuple(value) - - float_longitude = property(_get_float_longitude, _set_float_longitude, - doc="longitude as a floating point value") diff --git a/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/MX.py b/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/MX.py deleted file mode 100644 index 0a06494..0000000 --- a/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/MX.py +++ /dev/null @@ -1,23 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import dns.rdtypes.mxbase - - -class MX(dns.rdtypes.mxbase.MXBase): - - """MX record""" diff --git a/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/NS.py b/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/NS.py deleted file mode 100644 index f9fcf63..0000000 --- a/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/NS.py +++ /dev/null @@ -1,23 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import dns.rdtypes.nsbase - - -class NS(dns.rdtypes.nsbase.NSBase): - - """NS record""" diff --git a/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/NSEC.py b/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/NSEC.py deleted file mode 100644 index 4e3da72..0000000 --- a/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/NSEC.py +++ /dev/null @@ -1,128 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2004-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import struct - -import dns.exception -import dns.rdata -import dns.rdatatype -import dns.name -from dns._compat import xrange - - -class NSEC(dns.rdata.Rdata): - - """NSEC record - - @ivar next: the next name - @type next: dns.name.Name object - @ivar windows: the windowed bitmap list - @type windows: list of (window number, string) tuples""" - - __slots__ = ['next', 'windows'] - - def __init__(self, rdclass, rdtype, next, windows): - super(NSEC, self).__init__(rdclass, rdtype) - self.next = next - self.windows = windows - - def to_text(self, origin=None, relativize=True, **kw): - next = self.next.choose_relativity(origin, relativize) - text = '' - for (window, bitmap) in self.windows: - bits = [] - for i in xrange(0, len(bitmap)): - byte = bitmap[i] - for j in xrange(0, 8): - if byte & (0x80 >> j): - bits.append(dns.rdatatype.to_text(window * 256 + - i * 8 + j)) - text += (' ' + ' '.join(bits)) - return '{}{}'.format(next, text) - - @classmethod - def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True): - next = tok.get_name() - next = next.choose_relativity(origin, relativize) - rdtypes = [] - while 1: - token = tok.get().unescape() - if token.is_eol_or_eof(): - break - nrdtype = dns.rdatatype.from_text(token.value) - if nrdtype == 0: - raise dns.exception.SyntaxError("NSEC with bit 0") - if nrdtype > 65535: - raise dns.exception.SyntaxError("NSEC with bit > 65535") - rdtypes.append(nrdtype) - rdtypes.sort() - window = 0 - octets = 0 - prior_rdtype = 0 - bitmap = bytearray(b'\0' * 32) - windows = [] - for nrdtype in rdtypes: - if nrdtype == prior_rdtype: - continue - prior_rdtype = nrdtype - new_window = nrdtype // 256 - if new_window != window: - windows.append((window, bitmap[0:octets])) - bitmap = bytearray(b'\0' * 32) - window = new_window - offset = nrdtype % 256 - byte = offset // 8 - bit = offset % 8 - octets = byte + 1 - bitmap[byte] = bitmap[byte] | (0x80 >> bit) - - windows.append((window, bitmap[0:octets])) - return cls(rdclass, rdtype, next, windows) - - def to_wire(self, file, compress=None, origin=None): - self.next.to_wire(file, None, origin) - for (window, bitmap) in self.windows: - file.write(struct.pack('!BB', window, len(bitmap))) - file.write(bitmap) - - @classmethod - def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None): - (next, cused) = dns.name.from_wire(wire[: current + rdlen], current) - current += cused - rdlen -= cused - windows = [] - while rdlen > 0: - if rdlen < 3: - raise dns.exception.FormError("NSEC too short") - window = wire[current] - octets = wire[current + 1] - if octets == 0 or octets > 32: - raise dns.exception.FormError("bad NSEC octets") - current += 2 - rdlen -= 2 - if rdlen < octets: - raise dns.exception.FormError("bad NSEC bitmap length") - bitmap = bytearray(wire[current: current + octets].unwrap()) - current += octets - rdlen -= octets - windows.append((window, bitmap)) - if origin is not None: - next = next.relativize(origin) - return cls(rdclass, rdtype, next, windows) - - def choose_relativity(self, origin=None, relativize=True): - self.next = self.next.choose_relativity(origin, relativize) diff --git a/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/NSEC3.py 
b/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/NSEC3.py deleted file mode 100644 index 1c281c4..0000000 --- a/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/NSEC3.py +++ /dev/null @@ -1,196 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2004-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import base64 -import binascii -import string -import struct - -import dns.exception -import dns.rdata -import dns.rdatatype -from dns._compat import xrange, text_type, PY3 - -# pylint: disable=deprecated-string-function -if PY3: - b32_hex_to_normal = bytes.maketrans(b'0123456789ABCDEFGHIJKLMNOPQRSTUV', - b'ABCDEFGHIJKLMNOPQRSTUVWXYZ234567') - b32_normal_to_hex = bytes.maketrans(b'ABCDEFGHIJKLMNOPQRSTUVWXYZ234567', - b'0123456789ABCDEFGHIJKLMNOPQRSTUV') -else: - b32_hex_to_normal = string.maketrans('0123456789ABCDEFGHIJKLMNOPQRSTUV', - 'ABCDEFGHIJKLMNOPQRSTUVWXYZ234567') - b32_normal_to_hex = string.maketrans('ABCDEFGHIJKLMNOPQRSTUVWXYZ234567', - '0123456789ABCDEFGHIJKLMNOPQRSTUV') -# pylint: enable=deprecated-string-function - - -# hash algorithm constants -SHA1 = 1 - -# flag constants -OPTOUT = 1 - - -class NSEC3(dns.rdata.Rdata): - - """NSEC3 record - - @ivar algorithm: the hash algorithm number - @type algorithm: int - @ivar flags: the flags - @type flags: int - @ivar iterations: the number of iterations - @type iterations: int - @ivar salt: the salt - @type salt: string - @ivar next: the next name hash - @type next: string - @ivar windows: the windowed bitmap list - @type windows: list of (window number, string) tuples""" - - __slots__ = ['algorithm', 'flags', 'iterations', 'salt', 'next', 'windows'] - - def __init__(self, rdclass, rdtype, algorithm, flags, iterations, salt, - next, windows): - super(NSEC3, self).__init__(rdclass, rdtype) - self.algorithm = algorithm - self.flags = flags - self.iterations = iterations - if isinstance(salt, text_type): - self.salt = salt.encode() - else: - self.salt = salt - self.next = next - self.windows = windows - - def to_text(self, origin=None, relativize=True, **kw): - next = base64.b32encode(self.next).translate( - b32_normal_to_hex).lower().decode() - if self.salt == b'': - salt = '-' - else: - salt = binascii.hexlify(self.salt).decode() - text = u'' - for (window, bitmap) in self.windows: - bits = [] - for i in xrange(0, len(bitmap)): - byte = bitmap[i] - for j in xrange(0, 8): - if byte & (0x80 >> j): - bits.append(dns.rdatatype.to_text(window * 256 + - i * 8 + j)) - text += (u' ' + u' '.join(bits)) - return u'%u %u %u %s %s%s' % (self.algorithm, self.flags, - self.iterations, salt, next, text) - - @classmethod - def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True): - algorithm = tok.get_uint8() - flags = tok.get_uint8() - iterations = tok.get_uint16() - salt = 
tok.get_string() - if salt == u'-': - salt = b'' - else: - salt = binascii.unhexlify(salt.encode('ascii')) - next = tok.get_string().encode( - 'ascii').upper().translate(b32_hex_to_normal) - next = base64.b32decode(next) - rdtypes = [] - while 1: - token = tok.get().unescape() - if token.is_eol_or_eof(): - break - nrdtype = dns.rdatatype.from_text(token.value) - if nrdtype == 0: - raise dns.exception.SyntaxError("NSEC3 with bit 0") - if nrdtype > 65535: - raise dns.exception.SyntaxError("NSEC3 with bit > 65535") - rdtypes.append(nrdtype) - rdtypes.sort() - window = 0 - octets = 0 - prior_rdtype = 0 - bitmap = bytearray(b'\0' * 32) - windows = [] - for nrdtype in rdtypes: - if nrdtype == prior_rdtype: - continue - prior_rdtype = nrdtype - new_window = nrdtype // 256 - if new_window != window: - if octets != 0: - windows.append((window, bitmap[0:octets])) - bitmap = bytearray(b'\0' * 32) - window = new_window - offset = nrdtype % 256 - byte = offset // 8 - bit = offset % 8 - octets = byte + 1 - bitmap[byte] = bitmap[byte] | (0x80 >> bit) - if octets != 0: - windows.append((window, bitmap[0:octets])) - return cls(rdclass, rdtype, algorithm, flags, iterations, salt, next, - windows) - - def to_wire(self, file, compress=None, origin=None): - l = len(self.salt) - file.write(struct.pack("!BBHB", self.algorithm, self.flags, - self.iterations, l)) - file.write(self.salt) - l = len(self.next) - file.write(struct.pack("!B", l)) - file.write(self.next) - for (window, bitmap) in self.windows: - file.write(struct.pack("!BB", window, len(bitmap))) - file.write(bitmap) - - @classmethod - def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None): - (algorithm, flags, iterations, slen) = \ - struct.unpack('!BBHB', wire[current: current + 5]) - - current += 5 - rdlen -= 5 - salt = wire[current: current + slen].unwrap() - current += slen - rdlen -= slen - nlen = wire[current] - current += 1 - rdlen -= 1 - next = wire[current: current + nlen].unwrap() - current += nlen - rdlen -= nlen - windows = [] - while rdlen > 0: - if rdlen < 3: - raise dns.exception.FormError("NSEC3 too short") - window = wire[current] - octets = wire[current + 1] - if octets == 0 or octets > 32: - raise dns.exception.FormError("bad NSEC3 octets") - current += 2 - rdlen -= 2 - if rdlen < octets: - raise dns.exception.FormError("bad NSEC3 bitmap length") - bitmap = bytearray(wire[current: current + octets].unwrap()) - current += octets - rdlen -= octets - windows.append((window, bitmap)) - return cls(rdclass, rdtype, algorithm, flags, iterations, salt, next, - windows) diff --git a/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/NSEC3PARAM.py b/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/NSEC3PARAM.py deleted file mode 100644 index 87c36e5..0000000 --- a/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/NSEC3PARAM.py +++ /dev/null @@ -1,90 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2004-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import struct -import binascii - -import dns.exception -import dns.rdata -from dns._compat import text_type - - -class NSEC3PARAM(dns.rdata.Rdata): - - """NSEC3PARAM record - - @ivar algorithm: the hash algorithm number - @type algorithm: int - @ivar flags: the flags - @type flags: int - @ivar iterations: the number of iterations - @type iterations: int - @ivar salt: the salt - @type salt: string""" - - __slots__ = ['algorithm', 'flags', 'iterations', 'salt'] - - def __init__(self, rdclass, rdtype, algorithm, flags, iterations, salt): - super(NSEC3PARAM, self).__init__(rdclass, rdtype) - self.algorithm = algorithm - self.flags = flags - self.iterations = iterations - if isinstance(salt, text_type): - self.salt = salt.encode() - else: - self.salt = salt - - def to_text(self, origin=None, relativize=True, **kw): - if self.salt == b'': - salt = '-' - else: - salt = binascii.hexlify(self.salt).decode() - return '%u %u %u %s' % (self.algorithm, self.flags, self.iterations, - salt) - - @classmethod - def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True): - algorithm = tok.get_uint8() - flags = tok.get_uint8() - iterations = tok.get_uint16() - salt = tok.get_string() - if salt == '-': - salt = '' - else: - salt = binascii.unhexlify(salt.encode()) - tok.get_eol() - return cls(rdclass, rdtype, algorithm, flags, iterations, salt) - - def to_wire(self, file, compress=None, origin=None): - l = len(self.salt) - file.write(struct.pack("!BBHB", self.algorithm, self.flags, - self.iterations, l)) - file.write(self.salt) - - @classmethod - def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None): - (algorithm, flags, iterations, slen) = \ - struct.unpack('!BBHB', - wire[current: current + 5]) - current += 5 - rdlen -= 5 - salt = wire[current: current + slen].unwrap() - current += slen - rdlen -= slen - if rdlen != 0: - raise dns.exception.FormError - return cls(rdclass, rdtype, algorithm, flags, iterations, salt) diff --git a/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/OPENPGPKEY.py b/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/OPENPGPKEY.py deleted file mode 100644 index a066cf9..0000000 --- a/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/OPENPGPKEY.py +++ /dev/null @@ -1,60 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2016 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
- -import base64 - -import dns.exception -import dns.rdata -import dns.tokenizer - -class OPENPGPKEY(dns.rdata.Rdata): - - """OPENPGPKEY record - - @ivar key: the key - @type key: bytes - @see: RFC 7929 - """ - - def __init__(self, rdclass, rdtype, key): - super(OPENPGPKEY, self).__init__(rdclass, rdtype) - self.key = key - - def to_text(self, origin=None, relativize=True, **kw): - return dns.rdata._base64ify(self.key) - - @classmethod - def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True): - chunks = [] - while 1: - t = tok.get().unescape() - if t.is_eol_or_eof(): - break - if not t.is_identifier(): - raise dns.exception.SyntaxError - chunks.append(t.value.encode()) - b64 = b''.join(chunks) - key = base64.b64decode(b64) - return cls(rdclass, rdtype, key) - - def to_wire(self, file, compress=None, origin=None): - file.write(self.key) - - @classmethod - def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None): - key = wire[current: current + rdlen].unwrap() - return cls(rdclass, rdtype, key) diff --git a/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/PTR.py b/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/PTR.py deleted file mode 100644 index 20cd507..0000000 --- a/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/PTR.py +++ /dev/null @@ -1,23 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import dns.rdtypes.nsbase - - -class PTR(dns.rdtypes.nsbase.NSBase): - - """PTR record""" diff --git a/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/RP.py b/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/RP.py deleted file mode 100644 index 8f07be9..0000000 --- a/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/RP.py +++ /dev/null @@ -1,82 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
- -import dns.exception -import dns.rdata -import dns.name - - -class RP(dns.rdata.Rdata): - - """RP record - - @ivar mbox: The responsible person's mailbox - @type mbox: dns.name.Name object - @ivar txt: The owner name of a node with TXT records, or the root name - if no TXT records are associated with this RP. - @type txt: dns.name.Name object - @see: RFC 1183""" - - __slots__ = ['mbox', 'txt'] - - def __init__(self, rdclass, rdtype, mbox, txt): - super(RP, self).__init__(rdclass, rdtype) - self.mbox = mbox - self.txt = txt - - def to_text(self, origin=None, relativize=True, **kw): - mbox = self.mbox.choose_relativity(origin, relativize) - txt = self.txt.choose_relativity(origin, relativize) - return "{} {}".format(str(mbox), str(txt)) - - @classmethod - def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True): - mbox = tok.get_name() - txt = tok.get_name() - mbox = mbox.choose_relativity(origin, relativize) - txt = txt.choose_relativity(origin, relativize) - tok.get_eol() - return cls(rdclass, rdtype, mbox, txt) - - def to_wire(self, file, compress=None, origin=None): - self.mbox.to_wire(file, None, origin) - self.txt.to_wire(file, None, origin) - - def to_digestable(self, origin=None): - return self.mbox.to_digestable(origin) + \ - self.txt.to_digestable(origin) - - @classmethod - def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None): - (mbox, cused) = dns.name.from_wire(wire[: current + rdlen], - current) - current += cused - rdlen -= cused - if rdlen <= 0: - raise dns.exception.FormError - (txt, cused) = dns.name.from_wire(wire[: current + rdlen], - current) - if cused != rdlen: - raise dns.exception.FormError - if origin is not None: - mbox = mbox.relativize(origin) - txt = txt.relativize(origin) - return cls(rdclass, rdtype, mbox, txt) - - def choose_relativity(self, origin=None, relativize=True): - self.mbox = self.mbox.choose_relativity(origin, relativize) - self.txt = self.txt.choose_relativity(origin, relativize) diff --git a/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/RRSIG.py b/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/RRSIG.py deleted file mode 100644 index d3756ec..0000000 --- a/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/RRSIG.py +++ /dev/null @@ -1,158 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2004-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
- -import base64 -import calendar -import struct -import time - -import dns.dnssec -import dns.exception -import dns.rdata -import dns.rdatatype - - -class BadSigTime(dns.exception.DNSException): - - """Time in DNS SIG or RRSIG resource record cannot be parsed.""" - - -def sigtime_to_posixtime(what): - if len(what) != 14: - raise BadSigTime - year = int(what[0:4]) - month = int(what[4:6]) - day = int(what[6:8]) - hour = int(what[8:10]) - minute = int(what[10:12]) - second = int(what[12:14]) - return calendar.timegm((year, month, day, hour, minute, second, - 0, 0, 0)) - - -def posixtime_to_sigtime(what): - return time.strftime('%Y%m%d%H%M%S', time.gmtime(what)) - - -class RRSIG(dns.rdata.Rdata): - - """RRSIG record - - @ivar type_covered: the rdata type this signature covers - @type type_covered: int - @ivar algorithm: the algorithm used for the sig - @type algorithm: int - @ivar labels: number of labels - @type labels: int - @ivar original_ttl: the original TTL - @type original_ttl: long - @ivar expiration: signature expiration time - @type expiration: long - @ivar inception: signature inception time - @type inception: long - @ivar key_tag: the key tag - @type key_tag: int - @ivar signer: the signer - @type signer: dns.name.Name object - @ivar signature: the signature - @type signature: string""" - - __slots__ = ['type_covered', 'algorithm', 'labels', 'original_ttl', - 'expiration', 'inception', 'key_tag', 'signer', - 'signature'] - - def __init__(self, rdclass, rdtype, type_covered, algorithm, labels, - original_ttl, expiration, inception, key_tag, signer, - signature): - super(RRSIG, self).__init__(rdclass, rdtype) - self.type_covered = type_covered - self.algorithm = algorithm - self.labels = labels - self.original_ttl = original_ttl - self.expiration = expiration - self.inception = inception - self.key_tag = key_tag - self.signer = signer - self.signature = signature - - def covers(self): - return self.type_covered - - def to_text(self, origin=None, relativize=True, **kw): - return '%s %d %d %d %s %s %d %s %s' % ( - dns.rdatatype.to_text(self.type_covered), - self.algorithm, - self.labels, - self.original_ttl, - posixtime_to_sigtime(self.expiration), - posixtime_to_sigtime(self.inception), - self.key_tag, - self.signer.choose_relativity(origin, relativize), - dns.rdata._base64ify(self.signature) - ) - - @classmethod - def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True): - type_covered = dns.rdatatype.from_text(tok.get_string()) - algorithm = dns.dnssec.algorithm_from_text(tok.get_string()) - labels = tok.get_int() - original_ttl = tok.get_ttl() - expiration = sigtime_to_posixtime(tok.get_string()) - inception = sigtime_to_posixtime(tok.get_string()) - key_tag = tok.get_int() - signer = tok.get_name() - signer = signer.choose_relativity(origin, relativize) - chunks = [] - while 1: - t = tok.get().unescape() - if t.is_eol_or_eof(): - break - if not t.is_identifier(): - raise dns.exception.SyntaxError - chunks.append(t.value.encode()) - b64 = b''.join(chunks) - signature = base64.b64decode(b64) - return cls(rdclass, rdtype, type_covered, algorithm, labels, - original_ttl, expiration, inception, key_tag, signer, - signature) - - def to_wire(self, file, compress=None, origin=None): - header = struct.pack('!HBBIIIH', self.type_covered, - self.algorithm, self.labels, - self.original_ttl, self.expiration, - self.inception, self.key_tag) - file.write(header) - self.signer.to_wire(file, None, origin) - file.write(self.signature) - - @classmethod - def from_wire(cls, rdclass, 
rdtype, wire, current, rdlen, origin=None): - header = struct.unpack('!HBBIIIH', wire[current: current + 18]) - current += 18 - rdlen -= 18 - (signer, cused) = dns.name.from_wire(wire[: current + rdlen], current) - current += cused - rdlen -= cused - if origin is not None: - signer = signer.relativize(origin) - signature = wire[current: current + rdlen].unwrap() - return cls(rdclass, rdtype, header[0], header[1], header[2], - header[3], header[4], header[5], header[6], signer, - signature) - - def choose_relativity(self, origin=None, relativize=True): - self.signer = self.signer.choose_relativity(origin, relativize) diff --git a/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/RT.py b/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/RT.py deleted file mode 100644 index d0feb79..0000000 --- a/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/RT.py +++ /dev/null @@ -1,23 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import dns.rdtypes.mxbase - - -class RT(dns.rdtypes.mxbase.UncompressedDowncasingMX): - - """RT record""" diff --git a/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/SOA.py b/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/SOA.py deleted file mode 100644 index aec81ca..0000000 --- a/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/SOA.py +++ /dev/null @@ -1,116 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
- -import struct - -import dns.exception -import dns.rdata -import dns.name - - -class SOA(dns.rdata.Rdata): - - """SOA record - - @ivar mname: the SOA MNAME (master name) field - @type mname: dns.name.Name object - @ivar rname: the SOA RNAME (responsible name) field - @type rname: dns.name.Name object - @ivar serial: The zone's serial number - @type serial: int - @ivar refresh: The zone's refresh value (in seconds) - @type refresh: int - @ivar retry: The zone's retry value (in seconds) - @type retry: int - @ivar expire: The zone's expiration value (in seconds) - @type expire: int - @ivar minimum: The zone's negative caching time (in seconds, called - "minimum" for historical reasons) - @type minimum: int - @see: RFC 1035""" - - __slots__ = ['mname', 'rname', 'serial', 'refresh', 'retry', 'expire', - 'minimum'] - - def __init__(self, rdclass, rdtype, mname, rname, serial, refresh, retry, - expire, minimum): - super(SOA, self).__init__(rdclass, rdtype) - self.mname = mname - self.rname = rname - self.serial = serial - self.refresh = refresh - self.retry = retry - self.expire = expire - self.minimum = minimum - - def to_text(self, origin=None, relativize=True, **kw): - mname = self.mname.choose_relativity(origin, relativize) - rname = self.rname.choose_relativity(origin, relativize) - return '%s %s %d %d %d %d %d' % ( - mname, rname, self.serial, self.refresh, self.retry, - self.expire, self.minimum) - - @classmethod - def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True): - mname = tok.get_name() - rname = tok.get_name() - mname = mname.choose_relativity(origin, relativize) - rname = rname.choose_relativity(origin, relativize) - serial = tok.get_uint32() - refresh = tok.get_ttl() - retry = tok.get_ttl() - expire = tok.get_ttl() - minimum = tok.get_ttl() - tok.get_eol() - return cls(rdclass, rdtype, mname, rname, serial, refresh, retry, - expire, minimum) - - def to_wire(self, file, compress=None, origin=None): - self.mname.to_wire(file, compress, origin) - self.rname.to_wire(file, compress, origin) - five_ints = struct.pack('!IIIII', self.serial, self.refresh, - self.retry, self.expire, self.minimum) - file.write(five_ints) - - def to_digestable(self, origin=None): - return self.mname.to_digestable(origin) + \ - self.rname.to_digestable(origin) + \ - struct.pack('!IIIII', self.serial, self.refresh, - self.retry, self.expire, self.minimum) - - @classmethod - def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None): - (mname, cused) = dns.name.from_wire(wire[: current + rdlen], current) - current += cused - rdlen -= cused - (rname, cused) = dns.name.from_wire(wire[: current + rdlen], current) - current += cused - rdlen -= cused - if rdlen != 20: - raise dns.exception.FormError - five_ints = struct.unpack('!IIIII', - wire[current: current + rdlen]) - if origin is not None: - mname = mname.relativize(origin) - rname = rname.relativize(origin) - return cls(rdclass, rdtype, mname, rname, - five_ints[0], five_ints[1], five_ints[2], five_ints[3], - five_ints[4]) - - def choose_relativity(self, origin=None, relativize=True): - self.mname = self.mname.choose_relativity(origin, relativize) - self.rname = self.rname.choose_relativity(origin, relativize) diff --git a/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/SPF.py b/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/SPF.py deleted file mode 100644 index 41dee62..0000000 --- a/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/SPF.py +++ /dev/null @@ -1,25 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for 
text of ISC license - -# Copyright (C) 2006, 2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import dns.rdtypes.txtbase - - -class SPF(dns.rdtypes.txtbase.TXTBase): - - """SPF record - - @see: RFC 4408""" diff --git a/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/SSHFP.py b/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/SSHFP.py deleted file mode 100644 index c18311e..0000000 --- a/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/SSHFP.py +++ /dev/null @@ -1,79 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2005-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
- -import struct -import binascii - -import dns.rdata -import dns.rdatatype - - -class SSHFP(dns.rdata.Rdata): - - """SSHFP record - - @ivar algorithm: the algorithm - @type algorithm: int - @ivar fp_type: the digest type - @type fp_type: int - @ivar fingerprint: the fingerprint - @type fingerprint: string - @see: draft-ietf-secsh-dns-05.txt""" - - __slots__ = ['algorithm', 'fp_type', 'fingerprint'] - - def __init__(self, rdclass, rdtype, algorithm, fp_type, - fingerprint): - super(SSHFP, self).__init__(rdclass, rdtype) - self.algorithm = algorithm - self.fp_type = fp_type - self.fingerprint = fingerprint - - def to_text(self, origin=None, relativize=True, **kw): - return '%d %d %s' % (self.algorithm, - self.fp_type, - dns.rdata._hexify(self.fingerprint, - chunksize=128)) - - @classmethod - def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True): - algorithm = tok.get_uint8() - fp_type = tok.get_uint8() - chunks = [] - while 1: - t = tok.get().unescape() - if t.is_eol_or_eof(): - break - if not t.is_identifier(): - raise dns.exception.SyntaxError - chunks.append(t.value.encode()) - fingerprint = b''.join(chunks) - fingerprint = binascii.unhexlify(fingerprint) - return cls(rdclass, rdtype, algorithm, fp_type, fingerprint) - - def to_wire(self, file, compress=None, origin=None): - header = struct.pack("!BB", self.algorithm, self.fp_type) - file.write(header) - file.write(self.fingerprint) - - @classmethod - def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None): - header = struct.unpack("!BB", wire[current: current + 2]) - current += 2 - rdlen -= 2 - fingerprint = wire[current: current + rdlen].unwrap() - return cls(rdclass, rdtype, header[0], header[1], fingerprint) diff --git a/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/TLSA.py b/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/TLSA.py deleted file mode 100644 index a135c2b..0000000 --- a/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/TLSA.py +++ /dev/null @@ -1,84 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2005-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
- -import struct -import binascii - -import dns.rdata -import dns.rdatatype - - -class TLSA(dns.rdata.Rdata): - - """TLSA record - - @ivar usage: The certificate usage - @type usage: int - @ivar selector: The selector field - @type selector: int - @ivar mtype: The 'matching type' field - @type mtype: int - @ivar cert: The 'Certificate Association Data' field - @type cert: string - @see: RFC 6698""" - - __slots__ = ['usage', 'selector', 'mtype', 'cert'] - - def __init__(self, rdclass, rdtype, usage, selector, - mtype, cert): - super(TLSA, self).__init__(rdclass, rdtype) - self.usage = usage - self.selector = selector - self.mtype = mtype - self.cert = cert - - def to_text(self, origin=None, relativize=True, **kw): - return '%d %d %d %s' % (self.usage, - self.selector, - self.mtype, - dns.rdata._hexify(self.cert, - chunksize=128)) - - @classmethod - def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True): - usage = tok.get_uint8() - selector = tok.get_uint8() - mtype = tok.get_uint8() - cert_chunks = [] - while 1: - t = tok.get().unescape() - if t.is_eol_or_eof(): - break - if not t.is_identifier(): - raise dns.exception.SyntaxError - cert_chunks.append(t.value.encode()) - cert = b''.join(cert_chunks) - cert = binascii.unhexlify(cert) - return cls(rdclass, rdtype, usage, selector, mtype, cert) - - def to_wire(self, file, compress=None, origin=None): - header = struct.pack("!BBB", self.usage, self.selector, self.mtype) - file.write(header) - file.write(self.cert) - - @classmethod - def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None): - header = struct.unpack("!BBB", wire[current: current + 3]) - current += 3 - rdlen -= 3 - cert = wire[current: current + rdlen].unwrap() - return cls(rdclass, rdtype, header[0], header[1], header[2], cert) diff --git a/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/TXT.py b/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/TXT.py deleted file mode 100644 index c5ae919..0000000 --- a/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/TXT.py +++ /dev/null @@ -1,23 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import dns.rdtypes.txtbase - - -class TXT(dns.rdtypes.txtbase.TXTBase): - - """TXT record""" diff --git a/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/URI.py b/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/URI.py deleted file mode 100644 index f5b65ed..0000000 --- a/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/URI.py +++ /dev/null @@ -1,82 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# Copyright (C) 2015 Red Hat, Inc. 
-# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import struct - -import dns.exception -import dns.rdata -import dns.name -from dns._compat import text_type - - -class URI(dns.rdata.Rdata): - - """URI record - - @ivar priority: the priority - @type priority: int - @ivar weight: the weight - @type weight: int - @ivar target: the target host - @type target: dns.name.Name object - @see: draft-faltstrom-uri-13""" - - __slots__ = ['priority', 'weight', 'target'] - - def __init__(self, rdclass, rdtype, priority, weight, target): - super(URI, self).__init__(rdclass, rdtype) - self.priority = priority - self.weight = weight - if len(target) < 1: - raise dns.exception.SyntaxError("URI target cannot be empty") - if isinstance(target, text_type): - self.target = target.encode() - else: - self.target = target - - def to_text(self, origin=None, relativize=True, **kw): - return '%d %d "%s"' % (self.priority, self.weight, - self.target.decode()) - - @classmethod - def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True): - priority = tok.get_uint16() - weight = tok.get_uint16() - target = tok.get().unescape() - if not (target.is_quoted_string() or target.is_identifier()): - raise dns.exception.SyntaxError("URI target must be a string") - tok.get_eol() - return cls(rdclass, rdtype, priority, weight, target.value) - - def to_wire(self, file, compress=None, origin=None): - two_ints = struct.pack("!HH", self.priority, self.weight) - file.write(two_ints) - file.write(self.target) - - @classmethod - def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None): - if rdlen < 5: - raise dns.exception.FormError('URI RR is shorter than 5 octets') - - (priority, weight) = struct.unpack('!HH', wire[current: current + 4]) - current += 4 - rdlen -= 4 - target = wire[current: current + rdlen] - current += rdlen - - return cls(rdclass, rdtype, priority, weight, target) diff --git a/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/X25.py b/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/X25.py deleted file mode 100644 index e530a2c..0000000 --- a/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/X25.py +++ /dev/null @@ -1,66 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import struct - -import dns.exception -import dns.rdata -import dns.tokenizer -from dns._compat import text_type - - -class X25(dns.rdata.Rdata): - - """X25 record - - @ivar address: the PSDN address - @type address: string - @see: RFC 1183""" - - __slots__ = ['address'] - - def __init__(self, rdclass, rdtype, address): - super(X25, self).__init__(rdclass, rdtype) - if isinstance(address, text_type): - self.address = address.encode() - else: - self.address = address - - def to_text(self, origin=None, relativize=True, **kw): - return '"%s"' % dns.rdata._escapify(self.address) - - @classmethod - def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True): - address = tok.get_string() - tok.get_eol() - return cls(rdclass, rdtype, address) - - def to_wire(self, file, compress=None, origin=None): - l = len(self.address) - assert l < 256 - file.write(struct.pack('!B', l)) - file.write(self.address) - - @classmethod - def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None): - l = wire[current] - current += 1 - rdlen -= 1 - if l != rdlen: - raise dns.exception.FormError - address = wire[current: current + l].unwrap() - return cls(rdclass, rdtype, address) diff --git a/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/__init__.py b/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/__init__.py deleted file mode 100644 index ca41ef8..0000000 --- a/venv/lib/python3.6/site-packages/dns/rdtypes/ANY/__init__.py +++ /dev/null @@ -1,57 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -"""Class ANY (generic) rdata type classes.""" - -__all__ = [ - 'AFSDB', - 'AVC', - 'CAA', - 'CDNSKEY', - 'CDS', - 'CERT', - 'CNAME', - 'CSYNC', - 'DLV', - 'DNAME', - 'DNSKEY', - 'DS', - 'EUI48', - 'EUI64', - 'GPOS', - 'HINFO', - 'HIP', - 'ISDN', - 'LOC', - 'MX', - 'NS', - 'NSEC', - 'NSEC3', - 'NSEC3PARAM', - 'OPENPGPKEY', - 'PTR', - 'RP', - 'RRSIG', - 'RT', - 'SOA', - 'SPF', - 'SSHFP', - 'TLSA', - 'TXT', - 'URI', - 'X25', -] diff --git a/venv/lib/python3.6/site-packages/dns/rdtypes/CH/A.py b/venv/lib/python3.6/site-packages/dns/rdtypes/CH/A.py deleted file mode 100644 index e65d192..0000000 --- a/venv/lib/python3.6/site-packages/dns/rdtypes/CH/A.py +++ /dev/null @@ -1,70 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. 
-# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import dns.rdtypes.mxbase -import struct - -class A(dns.rdtypes.mxbase.MXBase): - - """A record for Chaosnet - @ivar domain: the domain of the address - @type domain: dns.name.Name object - @ivar address: the 16-bit address - @type address: int""" - - __slots__ = ['domain', 'address'] - - def __init__(self, rdclass, rdtype, address, domain): - super(A, self).__init__(rdclass, rdtype, address, domain) - self.domain = domain - self.address = address - - def to_text(self, origin=None, relativize=True, **kw): - domain = self.domain.choose_relativity(origin, relativize) - return '%s %o' % (domain, self.address) - - @classmethod - def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True): - domain = tok.get_name() - address = tok.get_uint16(base=8) - domain = domain.choose_relativity(origin, relativize) - tok.get_eol() - return cls(rdclass, rdtype, address, domain) - - def to_wire(self, file, compress=None, origin=None): - self.domain.to_wire(file, compress, origin) - pref = struct.pack("!H", self.address) - file.write(pref) - - def to_digestable(self, origin=None): - return self.domain.to_digestable(origin) + \ - struct.pack("!H", self.address) - - @classmethod - def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None): - (domain, cused) = dns.name.from_wire(wire[: current + rdlen-2], - current) - current += cused - (address,) = struct.unpack('!H', wire[current: current + 2]) - if cused+2 != rdlen: - raise dns.exception.FormError - if origin is not None: - domain = domain.relativize(origin) - return cls(rdclass, rdtype, address, domain) - - def choose_relativity(self, origin=None, relativize=True): - self.domain = self.domain.choose_relativity(origin, relativize) diff --git a/venv/lib/python3.6/site-packages/dns/rdtypes/CH/__init__.py b/venv/lib/python3.6/site-packages/dns/rdtypes/CH/__init__.py deleted file mode 100644 index 7184a73..0000000 --- a/venv/lib/python3.6/site-packages/dns/rdtypes/CH/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -"""Class CH rdata type classes.""" - -__all__ = [ - 'A', -] diff --git a/venv/lib/python3.6/site-packages/dns/rdtypes/IN/A.py b/venv/lib/python3.6/site-packages/dns/rdtypes/IN/A.py deleted file mode 100644 index 8998982..0000000 --- a/venv/lib/python3.6/site-packages/dns/rdtypes/IN/A.py +++ /dev/null @@ -1,54 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import dns.exception -import dns.ipv4 -import dns.rdata -import dns.tokenizer - - -class A(dns.rdata.Rdata): - - """A record. - - @ivar address: an IPv4 address - @type address: string (in the standard "dotted quad" format)""" - - __slots__ = ['address'] - - def __init__(self, rdclass, rdtype, address): - super(A, self).__init__(rdclass, rdtype) - # check that it's OK - dns.ipv4.inet_aton(address) - self.address = address - - def to_text(self, origin=None, relativize=True, **kw): - return self.address - - @classmethod - def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True): - address = tok.get_identifier() - tok.get_eol() - return cls(rdclass, rdtype, address) - - def to_wire(self, file, compress=None, origin=None): - file.write(dns.ipv4.inet_aton(self.address)) - - @classmethod - def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None): - address = dns.ipv4.inet_ntoa(wire[current: current + rdlen]) - return cls(rdclass, rdtype, address) diff --git a/venv/lib/python3.6/site-packages/dns/rdtypes/IN/AAAA.py b/venv/lib/python3.6/site-packages/dns/rdtypes/IN/AAAA.py deleted file mode 100644 index a77c5bf..0000000 --- a/venv/lib/python3.6/site-packages/dns/rdtypes/IN/AAAA.py +++ /dev/null @@ -1,55 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import dns.exception -import dns.inet -import dns.rdata -import dns.tokenizer - - -class AAAA(dns.rdata.Rdata): - - """AAAA record. - - @ivar address: an IPv6 address - @type address: string (in the standard IPv6 format)""" - - __slots__ = ['address'] - - def __init__(self, rdclass, rdtype, address): - super(AAAA, self).__init__(rdclass, rdtype) - # check that it's OK - dns.inet.inet_pton(dns.inet.AF_INET6, address) - self.address = address - - def to_text(self, origin=None, relativize=True, **kw): - return self.address - - @classmethod - def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True): - address = tok.get_identifier() - tok.get_eol() - return cls(rdclass, rdtype, address) - - def to_wire(self, file, compress=None, origin=None): - file.write(dns.inet.inet_pton(dns.inet.AF_INET6, self.address)) - - @classmethod - def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None): - address = dns.inet.inet_ntop(dns.inet.AF_INET6, - wire[current: current + rdlen]) - return cls(rdclass, rdtype, address) diff --git a/venv/lib/python3.6/site-packages/dns/rdtypes/IN/APL.py b/venv/lib/python3.6/site-packages/dns/rdtypes/IN/APL.py deleted file mode 100644 index 48faf88..0000000 --- a/venv/lib/python3.6/site-packages/dns/rdtypes/IN/APL.py +++ /dev/null @@ -1,165 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import binascii -import codecs -import struct - -import dns.exception -import dns.inet -import dns.rdata -import dns.tokenizer -from dns._compat import xrange, maybe_chr - - -class APLItem(object): - - """An APL list item. - - @ivar family: the address family (IANA address family registry) - @type family: int - @ivar negation: is this item negated? 
- @type negation: bool - @ivar address: the address - @type address: string - @ivar prefix: the prefix length - @type prefix: int - """ - - __slots__ = ['family', 'negation', 'address', 'prefix'] - - def __init__(self, family, negation, address, prefix): - self.family = family - self.negation = negation - self.address = address - self.prefix = prefix - - def __str__(self): - if self.negation: - return "!%d:%s/%s" % (self.family, self.address, self.prefix) - else: - return "%d:%s/%s" % (self.family, self.address, self.prefix) - - def to_wire(self, file): - if self.family == 1: - address = dns.inet.inet_pton(dns.inet.AF_INET, self.address) - elif self.family == 2: - address = dns.inet.inet_pton(dns.inet.AF_INET6, self.address) - else: - address = binascii.unhexlify(self.address) - # - # Truncate least significant zero bytes. - # - last = 0 - for i in xrange(len(address) - 1, -1, -1): - if address[i] != maybe_chr(0): - last = i + 1 - break - address = address[0: last] - l = len(address) - assert l < 128 - if self.negation: - l |= 0x80 - header = struct.pack('!HBB', self.family, self.prefix, l) - file.write(header) - file.write(address) - - -class APL(dns.rdata.Rdata): - - """APL record. - - @ivar items: a list of APL items - @type items: list of APL_Item - @see: RFC 3123""" - - __slots__ = ['items'] - - def __init__(self, rdclass, rdtype, items): - super(APL, self).__init__(rdclass, rdtype) - self.items = items - - def to_text(self, origin=None, relativize=True, **kw): - return ' '.join(map(str, self.items)) - - @classmethod - def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True): - items = [] - while 1: - token = tok.get().unescape() - if token.is_eol_or_eof(): - break - item = token.value - if item[0] == '!': - negation = True - item = item[1:] - else: - negation = False - (family, rest) = item.split(':', 1) - family = int(family) - (address, prefix) = rest.split('/', 1) - prefix = int(prefix) - item = APLItem(family, negation, address, prefix) - items.append(item) - - return cls(rdclass, rdtype, items) - - def to_wire(self, file, compress=None, origin=None): - for item in self.items: - item.to_wire(file) - - @classmethod - def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None): - - items = [] - while 1: - if rdlen == 0: - break - if rdlen < 4: - raise dns.exception.FormError - header = struct.unpack('!HBB', wire[current: current + 4]) - afdlen = header[2] - if afdlen > 127: - negation = True - afdlen -= 128 - else: - negation = False - current += 4 - rdlen -= 4 - if rdlen < afdlen: - raise dns.exception.FormError - address = wire[current: current + afdlen].unwrap() - l = len(address) - if header[0] == 1: - if l < 4: - address += b'\x00' * (4 - l) - address = dns.inet.inet_ntop(dns.inet.AF_INET, address) - elif header[0] == 2: - if l < 16: - address += b'\x00' * (16 - l) - address = dns.inet.inet_ntop(dns.inet.AF_INET6, address) - else: - # - # This isn't really right according to the RFC, but it - # seems better than throwing an exception - # - address = codecs.encode(address, 'hex_codec') - current += afdlen - rdlen -= afdlen - item = APLItem(header[0], negation, address, header[1]) - items.append(item) - return cls(rdclass, rdtype, items) diff --git a/venv/lib/python3.6/site-packages/dns/rdtypes/IN/DHCID.py b/venv/lib/python3.6/site-packages/dns/rdtypes/IN/DHCID.py deleted file mode 100644 index cec6459..0000000 --- a/venv/lib/python3.6/site-packages/dns/rdtypes/IN/DHCID.py +++ /dev/null @@ -1,61 +0,0 @@ -# Copyright (C) Dnspython Contributors, see 
LICENSE for text of ISC license - -# Copyright (C) 2006, 2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import base64 - -import dns.exception - - -class DHCID(dns.rdata.Rdata): - - """DHCID record - - @ivar data: the data (the content of the RR is opaque as far as the - DNS is concerned) - @type data: string - @see: RFC 4701""" - - __slots__ = ['data'] - - def __init__(self, rdclass, rdtype, data): - super(DHCID, self).__init__(rdclass, rdtype) - self.data = data - - def to_text(self, origin=None, relativize=True, **kw): - return dns.rdata._base64ify(self.data) - - @classmethod - def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True): - chunks = [] - while 1: - t = tok.get().unescape() - if t.is_eol_or_eof(): - break - if not t.is_identifier(): - raise dns.exception.SyntaxError - chunks.append(t.value.encode()) - b64 = b''.join(chunks) - data = base64.b64decode(b64) - return cls(rdclass, rdtype, data) - - def to_wire(self, file, compress=None, origin=None): - file.write(self.data) - - @classmethod - def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None): - data = wire[current: current + rdlen].unwrap() - return cls(rdclass, rdtype, data) diff --git a/venv/lib/python3.6/site-packages/dns/rdtypes/IN/IPSECKEY.py b/venv/lib/python3.6/site-packages/dns/rdtypes/IN/IPSECKEY.py deleted file mode 100644 index 8f49ba1..0000000 --- a/venv/lib/python3.6/site-packages/dns/rdtypes/IN/IPSECKEY.py +++ /dev/null @@ -1,150 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2006, 2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
- -import struct -import base64 - -import dns.exception -import dns.inet -import dns.name - - -class IPSECKEY(dns.rdata.Rdata): - - """IPSECKEY record - - @ivar precedence: the precedence for this key data - @type precedence: int - @ivar gateway_type: the gateway type - @type gateway_type: int - @ivar algorithm: the algorithm to use - @type algorithm: int - @ivar gateway: the public key - @type gateway: None, IPv4 address, IPV6 address, or domain name - @ivar key: the public key - @type key: string - @see: RFC 4025""" - - __slots__ = ['precedence', 'gateway_type', 'algorithm', 'gateway', 'key'] - - def __init__(self, rdclass, rdtype, precedence, gateway_type, algorithm, - gateway, key): - super(IPSECKEY, self).__init__(rdclass, rdtype) - if gateway_type == 0: - if gateway != '.' and gateway is not None: - raise SyntaxError('invalid gateway for gateway type 0') - gateway = None - elif gateway_type == 1: - # check that it's OK - dns.inet.inet_pton(dns.inet.AF_INET, gateway) - elif gateway_type == 2: - # check that it's OK - dns.inet.inet_pton(dns.inet.AF_INET6, gateway) - elif gateway_type == 3: - pass - else: - raise SyntaxError( - 'invalid IPSECKEY gateway type: %d' % gateway_type) - self.precedence = precedence - self.gateway_type = gateway_type - self.algorithm = algorithm - self.gateway = gateway - self.key = key - - def to_text(self, origin=None, relativize=True, **kw): - if self.gateway_type == 0: - gateway = '.' - elif self.gateway_type == 1: - gateway = self.gateway - elif self.gateway_type == 2: - gateway = self.gateway - elif self.gateway_type == 3: - gateway = str(self.gateway.choose_relativity(origin, relativize)) - else: - raise ValueError('invalid gateway type') - return '%d %d %d %s %s' % (self.precedence, self.gateway_type, - self.algorithm, gateway, - dns.rdata._base64ify(self.key)) - - @classmethod - def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True): - precedence = tok.get_uint8() - gateway_type = tok.get_uint8() - algorithm = tok.get_uint8() - if gateway_type == 3: - gateway = tok.get_name().choose_relativity(origin, relativize) - else: - gateway = tok.get_string() - chunks = [] - while 1: - t = tok.get().unescape() - if t.is_eol_or_eof(): - break - if not t.is_identifier(): - raise dns.exception.SyntaxError - chunks.append(t.value.encode()) - b64 = b''.join(chunks) - key = base64.b64decode(b64) - return cls(rdclass, rdtype, precedence, gateway_type, algorithm, - gateway, key) - - def to_wire(self, file, compress=None, origin=None): - header = struct.pack("!BBB", self.precedence, self.gateway_type, - self.algorithm) - file.write(header) - if self.gateway_type == 0: - pass - elif self.gateway_type == 1: - file.write(dns.inet.inet_pton(dns.inet.AF_INET, self.gateway)) - elif self.gateway_type == 2: - file.write(dns.inet.inet_pton(dns.inet.AF_INET6, self.gateway)) - elif self.gateway_type == 3: - self.gateway.to_wire(file, None, origin) - else: - raise ValueError('invalid gateway type') - file.write(self.key) - - @classmethod - def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None): - if rdlen < 3: - raise dns.exception.FormError - header = struct.unpack('!BBB', wire[current: current + 3]) - gateway_type = header[1] - current += 3 - rdlen -= 3 - if gateway_type == 0: - gateway = None - elif gateway_type == 1: - gateway = dns.inet.inet_ntop(dns.inet.AF_INET, - wire[current: current + 4]) - current += 4 - rdlen -= 4 - elif gateway_type == 2: - gateway = dns.inet.inet_ntop(dns.inet.AF_INET6, - wire[current: current + 16]) - current += 16 - 
rdlen -= 16 - elif gateway_type == 3: - (gateway, cused) = dns.name.from_wire(wire[: current + rdlen], - current) - current += cused - rdlen -= cused - else: - raise dns.exception.FormError('invalid IPSECKEY gateway type') - key = wire[current: current + rdlen].unwrap() - return cls(rdclass, rdtype, header[0], gateway_type, header[2], - gateway, key) diff --git a/venv/lib/python3.6/site-packages/dns/rdtypes/IN/KX.py b/venv/lib/python3.6/site-packages/dns/rdtypes/IN/KX.py deleted file mode 100644 index 1318a58..0000000 --- a/venv/lib/python3.6/site-packages/dns/rdtypes/IN/KX.py +++ /dev/null @@ -1,23 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import dns.rdtypes.mxbase - - -class KX(dns.rdtypes.mxbase.UncompressedMX): - - """KX record""" diff --git a/venv/lib/python3.6/site-packages/dns/rdtypes/IN/NAPTR.py b/venv/lib/python3.6/site-packages/dns/rdtypes/IN/NAPTR.py deleted file mode 100644 index 32fa474..0000000 --- a/venv/lib/python3.6/site-packages/dns/rdtypes/IN/NAPTR.py +++ /dev/null @@ -1,127 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
- -import struct - -import dns.exception -import dns.name -import dns.rdata -from dns._compat import xrange, text_type - - -def _write_string(file, s): - l = len(s) - assert l < 256 - file.write(struct.pack('!B', l)) - file.write(s) - - -def _sanitize(value): - if isinstance(value, text_type): - return value.encode() - return value - - -class NAPTR(dns.rdata.Rdata): - - """NAPTR record - - @ivar order: order - @type order: int - @ivar preference: preference - @type preference: int - @ivar flags: flags - @type flags: string - @ivar service: service - @type service: string - @ivar regexp: regular expression - @type regexp: string - @ivar replacement: replacement name - @type replacement: dns.name.Name object - @see: RFC 3403""" - - __slots__ = ['order', 'preference', 'flags', 'service', 'regexp', - 'replacement'] - - def __init__(self, rdclass, rdtype, order, preference, flags, service, - regexp, replacement): - super(NAPTR, self).__init__(rdclass, rdtype) - self.flags = _sanitize(flags) - self.service = _sanitize(service) - self.regexp = _sanitize(regexp) - self.order = order - self.preference = preference - self.replacement = replacement - - def to_text(self, origin=None, relativize=True, **kw): - replacement = self.replacement.choose_relativity(origin, relativize) - return '%d %d "%s" "%s" "%s" %s' % \ - (self.order, self.preference, - dns.rdata._escapify(self.flags), - dns.rdata._escapify(self.service), - dns.rdata._escapify(self.regexp), - replacement) - - @classmethod - def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True): - order = tok.get_uint16() - preference = tok.get_uint16() - flags = tok.get_string() - service = tok.get_string() - regexp = tok.get_string() - replacement = tok.get_name() - replacement = replacement.choose_relativity(origin, relativize) - tok.get_eol() - return cls(rdclass, rdtype, order, preference, flags, service, - regexp, replacement) - - def to_wire(self, file, compress=None, origin=None): - two_ints = struct.pack("!HH", self.order, self.preference) - file.write(two_ints) - _write_string(file, self.flags) - _write_string(file, self.service) - _write_string(file, self.regexp) - self.replacement.to_wire(file, compress, origin) - - @classmethod - def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None): - (order, preference) = struct.unpack('!HH', wire[current: current + 4]) - current += 4 - rdlen -= 4 - strings = [] - for i in xrange(3): - l = wire[current] - current += 1 - rdlen -= 1 - if l > rdlen or rdlen < 0: - raise dns.exception.FormError - s = wire[current: current + l].unwrap() - current += l - rdlen -= l - strings.append(s) - (replacement, cused) = dns.name.from_wire(wire[: current + rdlen], - current) - if cused != rdlen: - raise dns.exception.FormError - if origin is not None: - replacement = replacement.relativize(origin) - return cls(rdclass, rdtype, order, preference, strings[0], strings[1], - strings[2], replacement) - - def choose_relativity(self, origin=None, relativize=True): - self.replacement = self.replacement.choose_relativity(origin, - relativize) diff --git a/venv/lib/python3.6/site-packages/dns/rdtypes/IN/NSAP.py b/venv/lib/python3.6/site-packages/dns/rdtypes/IN/NSAP.py deleted file mode 100644 index 336befc..0000000 --- a/venv/lib/python3.6/site-packages/dns/rdtypes/IN/NSAP.py +++ /dev/null @@ -1,60 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. 
-# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import binascii - -import dns.exception -import dns.rdata -import dns.tokenizer - - -class NSAP(dns.rdata.Rdata): - - """NSAP record. - - @ivar address: a NASP - @type address: string - @see: RFC 1706""" - - __slots__ = ['address'] - - def __init__(self, rdclass, rdtype, address): - super(NSAP, self).__init__(rdclass, rdtype) - self.address = address - - def to_text(self, origin=None, relativize=True, **kw): - return "0x%s" % binascii.hexlify(self.address).decode() - - @classmethod - def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True): - address = tok.get_string() - tok.get_eol() - if address[0:2] != '0x': - raise dns.exception.SyntaxError('string does not start with 0x') - address = address[2:].replace('.', '') - if len(address) % 2 != 0: - raise dns.exception.SyntaxError('hexstring has odd length') - address = binascii.unhexlify(address.encode()) - return cls(rdclass, rdtype, address) - - def to_wire(self, file, compress=None, origin=None): - file.write(self.address) - - @classmethod - def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None): - address = wire[current: current + rdlen].unwrap() - return cls(rdclass, rdtype, address) diff --git a/venv/lib/python3.6/site-packages/dns/rdtypes/IN/NSAP_PTR.py b/venv/lib/python3.6/site-packages/dns/rdtypes/IN/NSAP_PTR.py deleted file mode 100644 index a5b66c8..0000000 --- a/venv/lib/python3.6/site-packages/dns/rdtypes/IN/NSAP_PTR.py +++ /dev/null @@ -1,23 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
- -import dns.rdtypes.nsbase - - -class NSAP_PTR(dns.rdtypes.nsbase.UncompressedNS): - - """NSAP-PTR record""" diff --git a/venv/lib/python3.6/site-packages/dns/rdtypes/IN/PX.py b/venv/lib/python3.6/site-packages/dns/rdtypes/IN/PX.py deleted file mode 100644 index 2dbaee6..0000000 --- a/venv/lib/python3.6/site-packages/dns/rdtypes/IN/PX.py +++ /dev/null @@ -1,89 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import struct - -import dns.exception -import dns.rdata -import dns.name - - -class PX(dns.rdata.Rdata): - - """PX record. - - @ivar preference: the preference value - @type preference: int - @ivar map822: the map822 name - @type map822: dns.name.Name object - @ivar mapx400: the mapx400 name - @type mapx400: dns.name.Name object - @see: RFC 2163""" - - __slots__ = ['preference', 'map822', 'mapx400'] - - def __init__(self, rdclass, rdtype, preference, map822, mapx400): - super(PX, self).__init__(rdclass, rdtype) - self.preference = preference - self.map822 = map822 - self.mapx400 = mapx400 - - def to_text(self, origin=None, relativize=True, **kw): - map822 = self.map822.choose_relativity(origin, relativize) - mapx400 = self.mapx400.choose_relativity(origin, relativize) - return '%d %s %s' % (self.preference, map822, mapx400) - - @classmethod - def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True): - preference = tok.get_uint16() - map822 = tok.get_name() - map822 = map822.choose_relativity(origin, relativize) - mapx400 = tok.get_name(None) - mapx400 = mapx400.choose_relativity(origin, relativize) - tok.get_eol() - return cls(rdclass, rdtype, preference, map822, mapx400) - - def to_wire(self, file, compress=None, origin=None): - pref = struct.pack("!H", self.preference) - file.write(pref) - self.map822.to_wire(file, None, origin) - self.mapx400.to_wire(file, None, origin) - - @classmethod - def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None): - (preference, ) = struct.unpack('!H', wire[current: current + 2]) - current += 2 - rdlen -= 2 - (map822, cused) = dns.name.from_wire(wire[: current + rdlen], - current) - if cused > rdlen: - raise dns.exception.FormError - current += cused - rdlen -= cused - if origin is not None: - map822 = map822.relativize(origin) - (mapx400, cused) = dns.name.from_wire(wire[: current + rdlen], - current) - if cused != rdlen: - raise dns.exception.FormError - if origin is not None: - mapx400 = mapx400.relativize(origin) - return cls(rdclass, rdtype, preference, map822, mapx400) - - def choose_relativity(self, origin=None, relativize=True): - self.map822 = self.map822.choose_relativity(origin, relativize) - self.mapx400 = self.mapx400.choose_relativity(origin, relativize) diff --git 
a/venv/lib/python3.6/site-packages/dns/rdtypes/IN/SRV.py b/venv/lib/python3.6/site-packages/dns/rdtypes/IN/SRV.py deleted file mode 100644 index b2c1bc9..0000000 --- a/venv/lib/python3.6/site-packages/dns/rdtypes/IN/SRV.py +++ /dev/null @@ -1,83 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import struct - -import dns.exception -import dns.rdata -import dns.name - - -class SRV(dns.rdata.Rdata): - - """SRV record - - @ivar priority: the priority - @type priority: int - @ivar weight: the weight - @type weight: int - @ivar port: the port of the service - @type port: int - @ivar target: the target host - @type target: dns.name.Name object - @see: RFC 2782""" - - __slots__ = ['priority', 'weight', 'port', 'target'] - - def __init__(self, rdclass, rdtype, priority, weight, port, target): - super(SRV, self).__init__(rdclass, rdtype) - self.priority = priority - self.weight = weight - self.port = port - self.target = target - - def to_text(self, origin=None, relativize=True, **kw): - target = self.target.choose_relativity(origin, relativize) - return '%d %d %d %s' % (self.priority, self.weight, self.port, - target) - - @classmethod - def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True): - priority = tok.get_uint16() - weight = tok.get_uint16() - port = tok.get_uint16() - target = tok.get_name(None) - target = target.choose_relativity(origin, relativize) - tok.get_eol() - return cls(rdclass, rdtype, priority, weight, port, target) - - def to_wire(self, file, compress=None, origin=None): - three_ints = struct.pack("!HHH", self.priority, self.weight, self.port) - file.write(three_ints) - self.target.to_wire(file, compress, origin) - - @classmethod - def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None): - (priority, weight, port) = struct.unpack('!HHH', - wire[current: current + 6]) - current += 6 - rdlen -= 6 - (target, cused) = dns.name.from_wire(wire[: current + rdlen], - current) - if cused != rdlen: - raise dns.exception.FormError - if origin is not None: - target = target.relativize(origin) - return cls(rdclass, rdtype, priority, weight, port, target) - - def choose_relativity(self, origin=None, relativize=True): - self.target = self.target.choose_relativity(origin, relativize) diff --git a/venv/lib/python3.6/site-packages/dns/rdtypes/IN/WKS.py b/venv/lib/python3.6/site-packages/dns/rdtypes/IN/WKS.py deleted file mode 100644 index 96f98ad..0000000 --- a/venv/lib/python3.6/site-packages/dns/rdtypes/IN/WKS.py +++ /dev/null @@ -1,107 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. 
-# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import socket -import struct - -import dns.ipv4 -import dns.rdata -from dns._compat import xrange - -_proto_tcp = socket.getprotobyname('tcp') -_proto_udp = socket.getprotobyname('udp') - - -class WKS(dns.rdata.Rdata): - - """WKS record - - @ivar address: the address - @type address: string - @ivar protocol: the protocol - @type protocol: int - @ivar bitmap: the bitmap - @type bitmap: string - @see: RFC 1035""" - - __slots__ = ['address', 'protocol', 'bitmap'] - - def __init__(self, rdclass, rdtype, address, protocol, bitmap): - super(WKS, self).__init__(rdclass, rdtype) - self.address = address - self.protocol = protocol - if not isinstance(bitmap, bytearray): - self.bitmap = bytearray(bitmap) - else: - self.bitmap = bitmap - - def to_text(self, origin=None, relativize=True, **kw): - bits = [] - for i in xrange(0, len(self.bitmap)): - byte = self.bitmap[i] - for j in xrange(0, 8): - if byte & (0x80 >> j): - bits.append(str(i * 8 + j)) - text = ' '.join(bits) - return '%s %d %s' % (self.address, self.protocol, text) - - @classmethod - def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True): - address = tok.get_string() - protocol = tok.get_string() - if protocol.isdigit(): - protocol = int(protocol) - else: - protocol = socket.getprotobyname(protocol) - bitmap = bytearray() - while 1: - token = tok.get().unescape() - if token.is_eol_or_eof(): - break - if token.value.isdigit(): - serv = int(token.value) - else: - if protocol != _proto_udp and protocol != _proto_tcp: - raise NotImplementedError("protocol must be TCP or UDP") - if protocol == _proto_udp: - protocol_text = "udp" - else: - protocol_text = "tcp" - serv = socket.getservbyname(token.value, protocol_text) - i = serv // 8 - l = len(bitmap) - if l < i + 1: - for j in xrange(l, i + 1): - bitmap.append(0) - bitmap[i] = bitmap[i] | (0x80 >> (serv % 8)) - bitmap = dns.rdata._truncate_bitmap(bitmap) - return cls(rdclass, rdtype, address, protocol, bitmap) - - def to_wire(self, file, compress=None, origin=None): - file.write(dns.ipv4.inet_aton(self.address)) - protocol = struct.pack('!B', self.protocol) - file.write(protocol) - file.write(self.bitmap) - - @classmethod - def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None): - address = dns.ipv4.inet_ntoa(wire[current: current + 4]) - protocol, = struct.unpack('!B', wire[current + 4: current + 5]) - current += 5 - rdlen -= 5 - bitmap = wire[current: current + rdlen].unwrap() - return cls(rdclass, rdtype, address, protocol, bitmap) diff --git a/venv/lib/python3.6/site-packages/dns/rdtypes/IN/__init__.py b/venv/lib/python3.6/site-packages/dns/rdtypes/IN/__init__.py deleted file mode 100644 index d7e69c9..0000000 --- a/venv/lib/python3.6/site-packages/dns/rdtypes/IN/__init__.py +++ /dev/null @@ -1,33 +0,0 @@ -# Copyright (C) 
Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -"""Class IN rdata type classes.""" - -__all__ = [ - 'A', - 'AAAA', - 'APL', - 'DHCID', - 'IPSECKEY', - 'KX', - 'NAPTR', - 'NSAP', - 'NSAP_PTR', - 'PX', - 'SRV', - 'WKS', -] diff --git a/venv/lib/python3.6/site-packages/dns/rdtypes/__init__.py b/venv/lib/python3.6/site-packages/dns/rdtypes/__init__.py deleted file mode 100644 index 1ac137f..0000000 --- a/venv/lib/python3.6/site-packages/dns/rdtypes/__init__.py +++ /dev/null @@ -1,27 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -"""DNS rdata type classes""" - -__all__ = [ - 'ANY', - 'IN', - 'CH', - 'euibase', - 'mxbase', - 'nsbase', -] diff --git a/venv/lib/python3.6/site-packages/dns/rdtypes/dnskeybase.py b/venv/lib/python3.6/site-packages/dns/rdtypes/dnskeybase.py deleted file mode 100644 index 3e7e87e..0000000 --- a/venv/lib/python3.6/site-packages/dns/rdtypes/dnskeybase.py +++ /dev/null @@ -1,138 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2004-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
- -import base64 -import struct - -import dns.exception -import dns.dnssec -import dns.rdata - -# wildcard import -__all__ = ["SEP", "REVOKE", "ZONE", - "flags_to_text_set", "flags_from_text_set"] - -# flag constants -SEP = 0x0001 -REVOKE = 0x0080 -ZONE = 0x0100 - -_flag_by_text = { - 'SEP': SEP, - 'REVOKE': REVOKE, - 'ZONE': ZONE -} - -# We construct the inverse mapping programmatically to ensure that we -# cannot make any mistakes (e.g. omissions, cut-and-paste errors) that -# would cause the mapping not to be true inverse. -_flag_by_value = {y: x for x, y in _flag_by_text.items()} - - -def flags_to_text_set(flags): - """Convert a DNSKEY flags value to set texts - @rtype: set([string])""" - - flags_set = set() - mask = 0x1 - while mask <= 0x8000: - if flags & mask: - text = _flag_by_value.get(mask) - if not text: - text = hex(mask) - flags_set.add(text) - mask <<= 1 - return flags_set - - -def flags_from_text_set(texts_set): - """Convert set of DNSKEY flag mnemonic texts to DNSKEY flag value - @rtype: int""" - - flags = 0 - for text in texts_set: - try: - flags += _flag_by_text[text] - except KeyError: - raise NotImplementedError( - "DNSKEY flag '%s' is not supported" % text) - return flags - - -class DNSKEYBase(dns.rdata.Rdata): - - """Base class for rdata that is like a DNSKEY record - - @ivar flags: the key flags - @type flags: int - @ivar protocol: the protocol for which this key may be used - @type protocol: int - @ivar algorithm: the algorithm used for the key - @type algorithm: int - @ivar key: the public key - @type key: string""" - - __slots__ = ['flags', 'protocol', 'algorithm', 'key'] - - def __init__(self, rdclass, rdtype, flags, protocol, algorithm, key): - super(DNSKEYBase, self).__init__(rdclass, rdtype) - self.flags = flags - self.protocol = protocol - self.algorithm = algorithm - self.key = key - - def to_text(self, origin=None, relativize=True, **kw): - return '%d %d %d %s' % (self.flags, self.protocol, self.algorithm, - dns.rdata._base64ify(self.key)) - - @classmethod - def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True): - flags = tok.get_uint16() - protocol = tok.get_uint8() - algorithm = dns.dnssec.algorithm_from_text(tok.get_string()) - chunks = [] - while 1: - t = tok.get().unescape() - if t.is_eol_or_eof(): - break - if not t.is_identifier(): - raise dns.exception.SyntaxError - chunks.append(t.value.encode()) - b64 = b''.join(chunks) - key = base64.b64decode(b64) - return cls(rdclass, rdtype, flags, protocol, algorithm, key) - - def to_wire(self, file, compress=None, origin=None): - header = struct.pack("!HBB", self.flags, self.protocol, self.algorithm) - file.write(header) - file.write(self.key) - - @classmethod - def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None): - if rdlen < 4: - raise dns.exception.FormError - header = struct.unpack('!HBB', wire[current: current + 4]) - current += 4 - rdlen -= 4 - key = wire[current: current + rdlen].unwrap() - return cls(rdclass, rdtype, header[0], header[1], header[2], - key) - - def flags_to_text_set(self): - """Convert a DNSKEY flags value to set texts - @rtype: set([string])""" - return flags_to_text_set(self.flags) diff --git a/venv/lib/python3.6/site-packages/dns/rdtypes/dsbase.py b/venv/lib/python3.6/site-packages/dns/rdtypes/dsbase.py deleted file mode 100644 index 26ae9d5..0000000 --- a/venv/lib/python3.6/site-packages/dns/rdtypes/dsbase.py +++ /dev/null @@ -1,85 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2010, 2011 
Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import struct -import binascii - -import dns.rdata -import dns.rdatatype - - -class DSBase(dns.rdata.Rdata): - - """Base class for rdata that is like a DS record - - @ivar key_tag: the key tag - @type key_tag: int - @ivar algorithm: the algorithm - @type algorithm: int - @ivar digest_type: the digest type - @type digest_type: int - @ivar digest: the digest - @type digest: int - @see: draft-ietf-dnsext-delegation-signer-14.txt""" - - __slots__ = ['key_tag', 'algorithm', 'digest_type', 'digest'] - - def __init__(self, rdclass, rdtype, key_tag, algorithm, digest_type, - digest): - super(DSBase, self).__init__(rdclass, rdtype) - self.key_tag = key_tag - self.algorithm = algorithm - self.digest_type = digest_type - self.digest = digest - - def to_text(self, origin=None, relativize=True, **kw): - return '%d %d %d %s' % (self.key_tag, self.algorithm, - self.digest_type, - dns.rdata._hexify(self.digest, - chunksize=128)) - - @classmethod - def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True): - key_tag = tok.get_uint16() - algorithm = tok.get_uint8() - digest_type = tok.get_uint8() - chunks = [] - while 1: - t = tok.get().unescape() - if t.is_eol_or_eof(): - break - if not t.is_identifier(): - raise dns.exception.SyntaxError - chunks.append(t.value.encode()) - digest = b''.join(chunks) - digest = binascii.unhexlify(digest) - return cls(rdclass, rdtype, key_tag, algorithm, digest_type, - digest) - - def to_wire(self, file, compress=None, origin=None): - header = struct.pack("!HBB", self.key_tag, self.algorithm, - self.digest_type) - file.write(header) - file.write(self.digest) - - @classmethod - def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None): - header = struct.unpack("!HBB", wire[current: current + 4]) - current += 4 - rdlen -= 4 - digest = wire[current: current + rdlen].unwrap() - return cls(rdclass, rdtype, header[0], header[1], header[2], digest) diff --git a/venv/lib/python3.6/site-packages/dns/rdtypes/euibase.py b/venv/lib/python3.6/site-packages/dns/rdtypes/euibase.py deleted file mode 100644 index cc5fdaa..0000000 --- a/venv/lib/python3.6/site-packages/dns/rdtypes/euibase.py +++ /dev/null @@ -1,71 +0,0 @@ -# Copyright (C) 2015 Red Hat, Inc. -# Author: Petr Spacek -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED 'AS IS' AND RED HAT DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import binascii - -import dns.rdata -from dns._compat import xrange - - -class EUIBase(dns.rdata.Rdata): - - """EUIxx record - - @ivar fingerprint: xx-bit Extended Unique Identifier (EUI-xx) - @type fingerprint: string - @see: rfc7043.txt""" - - __slots__ = ['eui'] - # define these in subclasses - # byte_len = 6 # 0123456789ab (in hex) - # text_len = byte_len * 3 - 1 # 01-23-45-67-89-ab - - def __init__(self, rdclass, rdtype, eui): - super(EUIBase, self).__init__(rdclass, rdtype) - if len(eui) != self.byte_len: - raise dns.exception.FormError('EUI%s rdata has to have %s bytes' - % (self.byte_len * 8, self.byte_len)) - self.eui = eui - - def to_text(self, origin=None, relativize=True, **kw): - return dns.rdata._hexify(self.eui, chunksize=2).replace(' ', '-') - - @classmethod - def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True): - text = tok.get_string() - tok.get_eol() - if len(text) != cls.text_len: - raise dns.exception.SyntaxError( - 'Input text must have %s characters' % cls.text_len) - expected_dash_idxs = xrange(2, cls.byte_len * 3 - 1, 3) - for i in expected_dash_idxs: - if text[i] != '-': - raise dns.exception.SyntaxError('Dash expected at position %s' - % i) - text = text.replace('-', '') - try: - data = binascii.unhexlify(text.encode()) - except (ValueError, TypeError) as ex: - raise dns.exception.SyntaxError('Hex decoding error: %s' % str(ex)) - return cls(rdclass, rdtype, data) - - def to_wire(self, file, compress=None, origin=None): - file.write(self.eui) - - @classmethod - def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None): - eui = wire[current:current + rdlen].unwrap() - return cls(rdclass, rdtype, eui) diff --git a/venv/lib/python3.6/site-packages/dns/rdtypes/mxbase.py b/venv/lib/python3.6/site-packages/dns/rdtypes/mxbase.py deleted file mode 100644 index 9a3fa62..0000000 --- a/venv/lib/python3.6/site-packages/dns/rdtypes/mxbase.py +++ /dev/null @@ -1,103 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -"""MX-like base classes.""" - -from io import BytesIO -import struct - -import dns.exception -import dns.rdata -import dns.name - - -class MXBase(dns.rdata.Rdata): - - """Base class for rdata that is like an MX record. 
- - @ivar preference: the preference value - @type preference: int - @ivar exchange: the exchange name - @type exchange: dns.name.Name object""" - - __slots__ = ['preference', 'exchange'] - - def __init__(self, rdclass, rdtype, preference, exchange): - super(MXBase, self).__init__(rdclass, rdtype) - self.preference = preference - self.exchange = exchange - - def to_text(self, origin=None, relativize=True, **kw): - exchange = self.exchange.choose_relativity(origin, relativize) - return '%d %s' % (self.preference, exchange) - - @classmethod - def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True): - preference = tok.get_uint16() - exchange = tok.get_name() - exchange = exchange.choose_relativity(origin, relativize) - tok.get_eol() - return cls(rdclass, rdtype, preference, exchange) - - def to_wire(self, file, compress=None, origin=None): - pref = struct.pack("!H", self.preference) - file.write(pref) - self.exchange.to_wire(file, compress, origin) - - def to_digestable(self, origin=None): - return struct.pack("!H", self.preference) + \ - self.exchange.to_digestable(origin) - - @classmethod - def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None): - (preference, ) = struct.unpack('!H', wire[current: current + 2]) - current += 2 - rdlen -= 2 - (exchange, cused) = dns.name.from_wire(wire[: current + rdlen], - current) - if cused != rdlen: - raise dns.exception.FormError - if origin is not None: - exchange = exchange.relativize(origin) - return cls(rdclass, rdtype, preference, exchange) - - def choose_relativity(self, origin=None, relativize=True): - self.exchange = self.exchange.choose_relativity(origin, relativize) - - -class UncompressedMX(MXBase): - - """Base class for rdata that is like an MX record, but whose name - is not compressed when converted to DNS wire format, and whose - digestable form is not downcased.""" - - def to_wire(self, file, compress=None, origin=None): - super(UncompressedMX, self).to_wire(file, None, origin) - - def to_digestable(self, origin=None): - f = BytesIO() - self.to_wire(f, None, origin) - return f.getvalue() - - -class UncompressedDowncasingMX(MXBase): - - """Base class for rdata that is like an MX record, but whose name - is not compressed when convert to DNS wire format.""" - - def to_wire(self, file, compress=None, origin=None): - super(UncompressedDowncasingMX, self).to_wire(file, None, origin) diff --git a/venv/lib/python3.6/site-packages/dns/rdtypes/nsbase.py b/venv/lib/python3.6/site-packages/dns/rdtypes/nsbase.py deleted file mode 100644 index 97a2232..0000000 --- a/venv/lib/python3.6/site-packages/dns/rdtypes/nsbase.py +++ /dev/null @@ -1,83 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
- -"""NS-like base classes.""" - -from io import BytesIO - -import dns.exception -import dns.rdata -import dns.name - - -class NSBase(dns.rdata.Rdata): - - """Base class for rdata that is like an NS record. - - @ivar target: the target name of the rdata - @type target: dns.name.Name object""" - - __slots__ = ['target'] - - def __init__(self, rdclass, rdtype, target): - super(NSBase, self).__init__(rdclass, rdtype) - self.target = target - - def to_text(self, origin=None, relativize=True, **kw): - target = self.target.choose_relativity(origin, relativize) - return str(target) - - @classmethod - def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True): - target = tok.get_name() - target = target.choose_relativity(origin, relativize) - tok.get_eol() - return cls(rdclass, rdtype, target) - - def to_wire(self, file, compress=None, origin=None): - self.target.to_wire(file, compress, origin) - - def to_digestable(self, origin=None): - return self.target.to_digestable(origin) - - @classmethod - def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None): - (target, cused) = dns.name.from_wire(wire[: current + rdlen], - current) - if cused != rdlen: - raise dns.exception.FormError - if origin is not None: - target = target.relativize(origin) - return cls(rdclass, rdtype, target) - - def choose_relativity(self, origin=None, relativize=True): - self.target = self.target.choose_relativity(origin, relativize) - - -class UncompressedNS(NSBase): - - """Base class for rdata that is like an NS record, but whose name - is not compressed when convert to DNS wire format, and whose - digestable form is not downcased.""" - - def to_wire(self, file, compress=None, origin=None): - super(UncompressedNS, self).to_wire(file, None, origin) - - def to_digestable(self, origin=None): - f = BytesIO() - self.to_wire(f, None, origin) - return f.getvalue() diff --git a/venv/lib/python3.6/site-packages/dns/rdtypes/txtbase.py b/venv/lib/python3.6/site-packages/dns/rdtypes/txtbase.py deleted file mode 100644 index 645a57e..0000000 --- a/venv/lib/python3.6/site-packages/dns/rdtypes/txtbase.py +++ /dev/null @@ -1,97 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2006-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
- -"""TXT-like base class.""" - -import struct - -import dns.exception -import dns.rdata -import dns.tokenizer -from dns._compat import binary_type, string_types - - -class TXTBase(dns.rdata.Rdata): - - """Base class for rdata that is like a TXT record - - @ivar strings: the strings - @type strings: list of binary - @see: RFC 1035""" - - __slots__ = ['strings'] - - def __init__(self, rdclass, rdtype, strings): - super(TXTBase, self).__init__(rdclass, rdtype) - if isinstance(strings, binary_type) or \ - isinstance(strings, string_types): - strings = [strings] - self.strings = [] - for string in strings: - if isinstance(string, string_types): - string = string.encode() - self.strings.append(string) - - def to_text(self, origin=None, relativize=True, **kw): - txt = '' - prefix = '' - for s in self.strings: - txt += '{}"{}"'.format(prefix, dns.rdata._escapify(s)) - prefix = ' ' - return txt - - @classmethod - def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True): - strings = [] - while 1: - token = tok.get().unescape() - if token.is_eol_or_eof(): - break - if not (token.is_quoted_string() or token.is_identifier()): - raise dns.exception.SyntaxError("expected a string") - if len(token.value) > 255: - raise dns.exception.SyntaxError("string too long") - value = token.value - if isinstance(value, binary_type): - strings.append(value) - else: - strings.append(value.encode()) - if len(strings) == 0: - raise dns.exception.UnexpectedEnd - return cls(rdclass, rdtype, strings) - - def to_wire(self, file, compress=None, origin=None): - for s in self.strings: - l = len(s) - assert l < 256 - file.write(struct.pack('!B', l)) - file.write(s) - - @classmethod - def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None): - strings = [] - while rdlen > 0: - l = wire[current] - current += 1 - rdlen -= 1 - if l > rdlen: - raise dns.exception.FormError - s = wire[current: current + l].unwrap() - current += l - rdlen -= l - strings.append(s) - return cls(rdclass, rdtype, strings) diff --git a/venv/lib/python3.6/site-packages/dns/renderer.py b/venv/lib/python3.6/site-packages/dns/renderer.py deleted file mode 100644 index d7ef8c7..0000000 --- a/venv/lib/python3.6/site-packages/dns/renderer.py +++ /dev/null @@ -1,291 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2001-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -"""Help for building DNS wire format messages""" - -from io import BytesIO -import struct -import random -import time - -import dns.exception -import dns.tsig -from ._compat import long - - -QUESTION = 0 -ANSWER = 1 -AUTHORITY = 2 -ADDITIONAL = 3 - - -class Renderer(object): - """Helper class for building DNS wire-format messages. 
- - Most applications can use the higher-level L{dns.message.Message} - class and its to_wire() method to generate wire-format messages. - This class is for those applications which need finer control - over the generation of messages. - - Typical use:: - - r = dns.renderer.Renderer(id=1, flags=0x80, max_size=512) - r.add_question(qname, qtype, qclass) - r.add_rrset(dns.renderer.ANSWER, rrset_1) - r.add_rrset(dns.renderer.ANSWER, rrset_2) - r.add_rrset(dns.renderer.AUTHORITY, ns_rrset) - r.add_edns(0, 0, 4096) - r.add_rrset(dns.renderer.ADDTIONAL, ad_rrset_1) - r.add_rrset(dns.renderer.ADDTIONAL, ad_rrset_2) - r.write_header() - r.add_tsig(keyname, secret, 300, 1, 0, '', request_mac) - wire = r.get_wire() - - output, a BytesIO, where rendering is written - - id: the message id - - flags: the message flags - - max_size: the maximum size of the message - - origin: the origin to use when rendering relative names - - compress: the compression table - - section: an int, the section currently being rendered - - counts: list of the number of RRs in each section - - mac: the MAC of the rendered message (if TSIG was used) - """ - - def __init__(self, id=None, flags=0, max_size=65535, origin=None): - """Initialize a new renderer.""" - - self.output = BytesIO() - if id is None: - self.id = random.randint(0, 65535) - else: - self.id = id - self.flags = flags - self.max_size = max_size - self.origin = origin - self.compress = {} - self.section = QUESTION - self.counts = [0, 0, 0, 0] - self.output.write(b'\x00' * 12) - self.mac = '' - - def _rollback(self, where): - """Truncate the output buffer at offset *where*, and remove any - compression table entries that pointed beyond the truncation - point. - """ - - self.output.seek(where) - self.output.truncate() - keys_to_delete = [] - for k, v in self.compress.items(): - if v >= where: - keys_to_delete.append(k) - for k in keys_to_delete: - del self.compress[k] - - def _set_section(self, section): - """Set the renderer's current section. - - Sections must be rendered order: QUESTION, ANSWER, AUTHORITY, - ADDITIONAL. Sections may be empty. - - Raises dns.exception.FormError if an attempt was made to set - a section value less than the current section. - """ - - if self.section != section: - if self.section > section: - raise dns.exception.FormError - self.section = section - - def add_question(self, qname, rdtype, rdclass=dns.rdataclass.IN): - """Add a question to the message.""" - - self._set_section(QUESTION) - before = self.output.tell() - qname.to_wire(self.output, self.compress, self.origin) - self.output.write(struct.pack("!HH", rdtype, rdclass)) - after = self.output.tell() - if after >= self.max_size: - self._rollback(before) - raise dns.exception.TooBig - self.counts[QUESTION] += 1 - - def add_rrset(self, section, rrset, **kw): - """Add the rrset to the specified section. - - Any keyword arguments are passed on to the rdataset's to_wire() - routine. - """ - - self._set_section(section) - before = self.output.tell() - n = rrset.to_wire(self.output, self.compress, self.origin, **kw) - after = self.output.tell() - if after >= self.max_size: - self._rollback(before) - raise dns.exception.TooBig - self.counts[section] += n - - def add_rdataset(self, section, name, rdataset, **kw): - """Add the rdataset to the specified section, using the specified - name as the owner name. - - Any keyword arguments are passed on to the rdataset's to_wire() - routine. 
- """ - - self._set_section(section) - before = self.output.tell() - n = rdataset.to_wire(name, self.output, self.compress, self.origin, - **kw) - after = self.output.tell() - if after >= self.max_size: - self._rollback(before) - raise dns.exception.TooBig - self.counts[section] += n - - def add_edns(self, edns, ednsflags, payload, options=None): - """Add an EDNS OPT record to the message.""" - - # make sure the EDNS version in ednsflags agrees with edns - ednsflags &= long(0xFF00FFFF) - ednsflags |= (edns << 16) - self._set_section(ADDITIONAL) - before = self.output.tell() - self.output.write(struct.pack('!BHHIH', 0, dns.rdatatype.OPT, payload, - ednsflags, 0)) - if options is not None: - lstart = self.output.tell() - for opt in options: - stuff = struct.pack("!HH", opt.otype, 0) - self.output.write(stuff) - start = self.output.tell() - opt.to_wire(self.output) - end = self.output.tell() - assert end - start < 65536 - self.output.seek(start - 2) - stuff = struct.pack("!H", end - start) - self.output.write(stuff) - self.output.seek(0, 2) - lend = self.output.tell() - assert lend - lstart < 65536 - self.output.seek(lstart - 2) - stuff = struct.pack("!H", lend - lstart) - self.output.write(stuff) - self.output.seek(0, 2) - after = self.output.tell() - if after >= self.max_size: - self._rollback(before) - raise dns.exception.TooBig - self.counts[ADDITIONAL] += 1 - - def add_tsig(self, keyname, secret, fudge, id, tsig_error, other_data, - request_mac, algorithm=dns.tsig.default_algorithm): - """Add a TSIG signature to the message.""" - - s = self.output.getvalue() - (tsig_rdata, self.mac, ctx) = dns.tsig.sign(s, - keyname, - secret, - int(time.time()), - fudge, - id, - tsig_error, - other_data, - request_mac, - algorithm=algorithm) - self._write_tsig(tsig_rdata, keyname) - - def add_multi_tsig(self, ctx, keyname, secret, fudge, id, tsig_error, - other_data, request_mac, - algorithm=dns.tsig.default_algorithm): - """Add a TSIG signature to the message. Unlike add_tsig(), this can be - used for a series of consecutive DNS envelopes, e.g. for a zone - transfer over TCP [RFC2845, 4.4]. - - For the first message in the sequence, give ctx=None. For each - subsequent message, give the ctx that was returned from the - add_multi_tsig() call for the previous message.""" - - s = self.output.getvalue() - (tsig_rdata, self.mac, ctx) = dns.tsig.sign(s, - keyname, - secret, - int(time.time()), - fudge, - id, - tsig_error, - other_data, - request_mac, - ctx=ctx, - first=ctx is None, - multi=True, - algorithm=algorithm) - self._write_tsig(tsig_rdata, keyname) - return ctx - - def _write_tsig(self, tsig_rdata, keyname): - self._set_section(ADDITIONAL) - before = self.output.tell() - - keyname.to_wire(self.output, self.compress, self.origin) - self.output.write(struct.pack('!HHIH', dns.rdatatype.TSIG, - dns.rdataclass.ANY, 0, 0)) - rdata_start = self.output.tell() - self.output.write(tsig_rdata) - - after = self.output.tell() - assert after - rdata_start < 65536 - if after >= self.max_size: - self._rollback(before) - raise dns.exception.TooBig - - self.output.seek(rdata_start - 2) - self.output.write(struct.pack('!H', after - rdata_start)) - self.counts[ADDITIONAL] += 1 - self.output.seek(10) - self.output.write(struct.pack('!H', self.counts[ADDITIONAL])) - self.output.seek(0, 2) - - def write_header(self): - """Write the DNS message header. - - Writing the DNS message header is done after all sections - have been rendered, but before the optional TSIG signature - is added. 
- """ - - self.output.seek(0) - self.output.write(struct.pack('!HHHHHH', self.id, self.flags, - self.counts[0], self.counts[1], - self.counts[2], self.counts[3])) - self.output.seek(0, 2) - - def get_wire(self): - """Return the wire format message.""" - - return self.output.getvalue() diff --git a/venv/lib/python3.6/site-packages/dns/resolver.py b/venv/lib/python3.6/site-packages/dns/resolver.py deleted file mode 100644 index 806e5b2..0000000 --- a/venv/lib/python3.6/site-packages/dns/resolver.py +++ /dev/null @@ -1,1383 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -"""DNS stub resolver.""" - -import socket -import sys -import time -import random - -try: - import threading as _threading -except ImportError: - import dummy_threading as _threading - -import dns.exception -import dns.flags -import dns.ipv4 -import dns.ipv6 -import dns.message -import dns.name -import dns.query -import dns.rcode -import dns.rdataclass -import dns.rdatatype -import dns.reversename -import dns.tsig -from ._compat import xrange, string_types - -if sys.platform == 'win32': - try: - import winreg as _winreg - except ImportError: - import _winreg # pylint: disable=import-error - -class NXDOMAIN(dns.exception.DNSException): - """The DNS query name does not exist.""" - supp_kwargs = {'qnames', 'responses'} - fmt = None # we have our own __str__ implementation - - def _check_kwargs(self, qnames, responses=None): - if not isinstance(qnames, (list, tuple, set)): - raise AttributeError("qnames must be a list, tuple or set") - if len(qnames) == 0: - raise AttributeError("qnames must contain at least one element") - if responses is None: - responses = {} - elif not isinstance(responses, dict): - raise AttributeError("responses must be a dict(qname=response)") - kwargs = dict(qnames=qnames, responses=responses) - return kwargs - - def __str__(self): - if 'qnames' not in self.kwargs: - return super(NXDOMAIN, self).__str__() - qnames = self.kwargs['qnames'] - if len(qnames) > 1: - msg = 'None of DNS query names exist' - else: - msg = 'The DNS query name does not exist' - qnames = ', '.join(map(str, qnames)) - return "{}: {}".format(msg, qnames) - - def canonical_name(self): - if not 'qnames' in self.kwargs: - raise TypeError("parametrized exception required") - IN = dns.rdataclass.IN - CNAME = dns.rdatatype.CNAME - cname = None - for qname in self.kwargs['qnames']: - response = self.kwargs['responses'][qname] - for answer in response.answer: - if answer.rdtype != CNAME or answer.rdclass != IN: - continue - cname = answer.items[0].target.to_text() - if cname is not None: - return dns.name.from_text(cname) - return self.kwargs['qnames'][0] - canonical_name = property(canonical_name, doc=( - "Return the 
unresolved canonical name.")) - - def __add__(self, e_nx): - """Augment by results from another NXDOMAIN exception.""" - qnames0 = list(self.kwargs.get('qnames', [])) - responses0 = dict(self.kwargs.get('responses', {})) - responses1 = e_nx.kwargs.get('responses', {}) - for qname1 in e_nx.kwargs.get('qnames', []): - if qname1 not in qnames0: - qnames0.append(qname1) - if qname1 in responses1: - responses0[qname1] = responses1[qname1] - return NXDOMAIN(qnames=qnames0, responses=responses0) - - def qnames(self): - """All of the names that were tried. - - Returns a list of ``dns.name.Name``. - """ - return self.kwargs['qnames'] - - def responses(self): - """A map from queried names to their NXDOMAIN responses. - - Returns a dict mapping a ``dns.name.Name`` to a - ``dns.message.Message``. - """ - return self.kwargs['responses'] - - def response(self, qname): - """The response for query *qname*. - - Returns a ``dns.message.Message``. - """ - return self.kwargs['responses'][qname] - - -class YXDOMAIN(dns.exception.DNSException): - """The DNS query name is too long after DNAME substitution.""" - -# The definition of the Timeout exception has moved from here to the -# dns.exception module. We keep dns.resolver.Timeout defined for -# backwards compatibility. - -Timeout = dns.exception.Timeout - - -class NoAnswer(dns.exception.DNSException): - """The DNS response does not contain an answer to the question.""" - fmt = 'The DNS response does not contain an answer ' + \ - 'to the question: {query}' - supp_kwargs = {'response'} - - def _fmt_kwargs(self, **kwargs): - return super(NoAnswer, self)._fmt_kwargs( - query=kwargs['response'].question) - - -class NoNameservers(dns.exception.DNSException): - """All nameservers failed to answer the query. - - errors: list of servers and respective errors - The type of errors is - [(server IP address, any object convertible to string)]. - Non-empty errors list will add explanatory message () - """ - - msg = "All nameservers failed to answer the query." - fmt = "%s {query}: {errors}" % msg[:-1] - supp_kwargs = {'request', 'errors'} - - def _fmt_kwargs(self, **kwargs): - srv_msgs = [] - for err in kwargs['errors']: - srv_msgs.append('Server {} {} port {} answered {}'.format(err[0], - 'TCP' if err[1] else 'UDP', err[2], err[3])) - return super(NoNameservers, self)._fmt_kwargs( - query=kwargs['request'].question, errors='; '.join(srv_msgs)) - - -class NotAbsolute(dns.exception.DNSException): - """An absolute domain name is required but a relative name was provided.""" - - -class NoRootSOA(dns.exception.DNSException): - """There is no SOA RR at the DNS root name. This should never happen!""" - - -class NoMetaqueries(dns.exception.DNSException): - """DNS metaqueries are not allowed.""" - - -class Answer(object): - """DNS stub resolver answer. - - Instances of this class bundle up the result of a successful DNS - resolution. - - For convenience, the answer object implements much of the sequence - protocol, forwarding to its ``rrset`` attribute. E.g. - ``for a in answer`` is equivalent to ``for a in answer.rrset``. - ``answer[i]`` is equivalent to ``answer.rrset[i]``, and - ``answer[i:j]`` is equivalent to ``answer.rrset[i:j]``. - - Note that CNAMEs or DNAMEs in the response may mean that answer - RRset's name might not be the query name. 
- """ - - def __init__(self, qname, rdtype, rdclass, response, - raise_on_no_answer=True): - self.qname = qname - self.rdtype = rdtype - self.rdclass = rdclass - self.response = response - min_ttl = -1 - rrset = None - for count in xrange(0, 15): - try: - rrset = response.find_rrset(response.answer, qname, - rdclass, rdtype) - if min_ttl == -1 or rrset.ttl < min_ttl: - min_ttl = rrset.ttl - break - except KeyError: - if rdtype != dns.rdatatype.CNAME: - try: - crrset = response.find_rrset(response.answer, - qname, - rdclass, - dns.rdatatype.CNAME) - if min_ttl == -1 or crrset.ttl < min_ttl: - min_ttl = crrset.ttl - for rd in crrset: - qname = rd.target - break - continue - except KeyError: - if raise_on_no_answer: - raise NoAnswer(response=response) - if raise_on_no_answer: - raise NoAnswer(response=response) - if rrset is None and raise_on_no_answer: - raise NoAnswer(response=response) - self.canonical_name = qname - self.rrset = rrset - if rrset is None: - while 1: - # Look for a SOA RR whose owner name is a superdomain - # of qname. - try: - srrset = response.find_rrset(response.authority, qname, - rdclass, dns.rdatatype.SOA) - if min_ttl == -1 or srrset.ttl < min_ttl: - min_ttl = srrset.ttl - if srrset[0].minimum < min_ttl: - min_ttl = srrset[0].minimum - break - except KeyError: - try: - qname = qname.parent() - except dns.name.NoParent: - break - self.expiration = time.time() + min_ttl - - def __getattr__(self, attr): - if attr == 'name': - return self.rrset.name - elif attr == 'ttl': - return self.rrset.ttl - elif attr == 'covers': - return self.rrset.covers - elif attr == 'rdclass': - return self.rrset.rdclass - elif attr == 'rdtype': - return self.rrset.rdtype - else: - raise AttributeError(attr) - - def __len__(self): - return self.rrset and len(self.rrset) or 0 - - def __iter__(self): - return self.rrset and iter(self.rrset) or iter(tuple()) - - def __getitem__(self, i): - if self.rrset is None: - raise IndexError - return self.rrset[i] - - def __delitem__(self, i): - if self.rrset is None: - raise IndexError - del self.rrset[i] - - -class Cache(object): - """Simple thread-safe DNS answer cache.""" - - def __init__(self, cleaning_interval=300.0): - """*cleaning_interval*, a ``float`` is the number of seconds between - periodic cleanings. - """ - - self.data = {} - self.cleaning_interval = cleaning_interval - self.next_cleaning = time.time() + self.cleaning_interval - self.lock = _threading.Lock() - - def _maybe_clean(self): - """Clean the cache if it's time to do so.""" - - now = time.time() - if self.next_cleaning <= now: - keys_to_delete = [] - for (k, v) in self.data.items(): - if v.expiration <= now: - keys_to_delete.append(k) - for k in keys_to_delete: - del self.data[k] - now = time.time() - self.next_cleaning = now + self.cleaning_interval - - def get(self, key): - """Get the answer associated with *key*. - - Returns None if no answer is cached for the key. - - *key*, a ``(dns.name.Name, int, int)`` tuple whose values are the - query name, rdtype, and rdclass respectively. - - Returns a ``dns.resolver.Answer`` or ``None``. - """ - - try: - self.lock.acquire() - self._maybe_clean() - v = self.data.get(key) - if v is None or v.expiration <= time.time(): - return None - return v - finally: - self.lock.release() - - def put(self, key, value): - """Associate key and value in the cache. - - *key*, a ``(dns.name.Name, int, int)`` tuple whose values are the - query name, rdtype, and rdclass respectively. - - *value*, a ``dns.resolver.Answer``, the answer. 
- """ - - try: - self.lock.acquire() - self._maybe_clean() - self.data[key] = value - finally: - self.lock.release() - - def flush(self, key=None): - """Flush the cache. - - If *key* is not ``None``, only that item is flushed. Otherwise - the entire cache is flushed. - - *key*, a ``(dns.name.Name, int, int)`` tuple whose values are the - query name, rdtype, and rdclass respectively. - """ - - try: - self.lock.acquire() - if key is not None: - if key in self.data: - del self.data[key] - else: - self.data = {} - self.next_cleaning = time.time() + self.cleaning_interval - finally: - self.lock.release() - - -class LRUCacheNode(object): - """LRUCache node.""" - - def __init__(self, key, value): - self.key = key - self.value = value - self.prev = self - self.next = self - - def link_before(self, node): - self.prev = node.prev - self.next = node - node.prev.next = self - node.prev = self - - def link_after(self, node): - self.prev = node - self.next = node.next - node.next.prev = self - node.next = self - - def unlink(self): - self.next.prev = self.prev - self.prev.next = self.next - - -class LRUCache(object): - """Thread-safe, bounded, least-recently-used DNS answer cache. - - This cache is better than the simple cache (above) if you're - running a web crawler or other process that does a lot of - resolutions. The LRUCache has a maximum number of nodes, and when - it is full, the least-recently used node is removed to make space - for a new one. - """ - - def __init__(self, max_size=100000): - """*max_size*, an ``int``, is the maximum number of nodes to cache; - it must be greater than 0. - """ - - self.data = {} - self.set_max_size(max_size) - self.sentinel = LRUCacheNode(None, None) - self.lock = _threading.Lock() - - def set_max_size(self, max_size): - if max_size < 1: - max_size = 1 - self.max_size = max_size - - def get(self, key): - """Get the answer associated with *key*. - - Returns None if no answer is cached for the key. - - *key*, a ``(dns.name.Name, int, int)`` tuple whose values are the - query name, rdtype, and rdclass respectively. - - Returns a ``dns.resolver.Answer`` or ``None``. - """ - - try: - self.lock.acquire() - node = self.data.get(key) - if node is None: - return None - # Unlink because we're either going to move the node to the front - # of the LRU list or we're going to free it. - node.unlink() - if node.value.expiration <= time.time(): - del self.data[node.key] - return None - node.link_after(self.sentinel) - return node.value - finally: - self.lock.release() - - def put(self, key, value): - """Associate key and value in the cache. - - *key*, a ``(dns.name.Name, int, int)`` tuple whose values are the - query name, rdtype, and rdclass respectively. - - *value*, a ``dns.resolver.Answer``, the answer. - """ - - try: - self.lock.acquire() - node = self.data.get(key) - if node is not None: - node.unlink() - del self.data[node.key] - while len(self.data) >= self.max_size: - node = self.sentinel.prev - node.unlink() - del self.data[node.key] - node = LRUCacheNode(key, value) - node.link_after(self.sentinel) - self.data[key] = node - finally: - self.lock.release() - - def flush(self, key=None): - """Flush the cache. - - If *key* is not ``None``, only that item is flushed. Otherwise - the entire cache is flushed. - - *key*, a ``(dns.name.Name, int, int)`` tuple whose values are the - query name, rdtype, and rdclass respectively. 
- """ - - try: - self.lock.acquire() - if key is not None: - node = self.data.get(key) - if node is not None: - node.unlink() - del self.data[node.key] - else: - node = self.sentinel.next - while node != self.sentinel: - next = node.next - node.prev = None - node.next = None - node = next - self.data = {} - finally: - self.lock.release() - - -class Resolver(object): - """DNS stub resolver.""" - - def __init__(self, filename='/etc/resolv.conf', configure=True): - """*filename*, a ``text`` or file object, specifying a file - in standard /etc/resolv.conf format. This parameter is meaningful - only when *configure* is true and the platform is POSIX. - - *configure*, a ``bool``. If True (the default), the resolver - instance is configured in the normal fashion for the operating - system the resolver is running on. (I.e. by reading a - /etc/resolv.conf file on POSIX systems and from the registry - on Windows systems.) - """ - - self.domain = None - self.nameservers = None - self.nameserver_ports = None - self.port = None - self.search = None - self.timeout = None - self.lifetime = None - self.keyring = None - self.keyname = None - self.keyalgorithm = None - self.edns = None - self.ednsflags = None - self.payload = None - self.cache = None - self.flags = None - self.retry_servfail = False - self.rotate = False - - self.reset() - if configure: - if sys.platform == 'win32': - self.read_registry() - elif filename: - self.read_resolv_conf(filename) - - def reset(self): - """Reset all resolver configuration to the defaults.""" - - self.domain = \ - dns.name.Name(dns.name.from_text(socket.gethostname())[1:]) - if len(self.domain) == 0: - self.domain = dns.name.root - self.nameservers = [] - self.nameserver_ports = {} - self.port = 53 - self.search = [] - self.timeout = 2.0 - self.lifetime = 30.0 - self.keyring = None - self.keyname = None - self.keyalgorithm = dns.tsig.default_algorithm - self.edns = -1 - self.ednsflags = 0 - self.payload = 0 - self.cache = None - self.flags = None - self.retry_servfail = False - self.rotate = False - - def read_resolv_conf(self, f): - """Process *f* as a file in the /etc/resolv.conf format. If f is - a ``text``, it is used as the name of the file to open; otherwise it - is treated as the file itself.""" - - if isinstance(f, string_types): - try: - f = open(f, 'r') - except IOError: - # /etc/resolv.conf doesn't exist, can't be read, etc. - # We'll just use the default resolver configuration. - self.nameservers = ['127.0.0.1'] - return - want_close = True - else: - want_close = False - try: - for l in f: - if len(l) == 0 or l[0] == '#' or l[0] == ';': - continue - tokens = l.split() - - # Any line containing less than 2 tokens is malformed - if len(tokens) < 2: - continue - - if tokens[0] == 'nameserver': - self.nameservers.append(tokens[1]) - elif tokens[0] == 'domain': - self.domain = dns.name.from_text(tokens[1]) - elif tokens[0] == 'search': - for suffix in tokens[1:]: - self.search.append(dns.name.from_text(suffix)) - elif tokens[0] == 'options': - if 'rotate' in tokens[1:]: - self.rotate = True - finally: - if want_close: - f.close() - if len(self.nameservers) == 0: - self.nameservers.append('127.0.0.1') - - def _determine_split_char(self, entry): - # - # The windows registry irritatingly changes the list element - # delimiter in between ' ' and ',' (and vice-versa) in various - # versions of windows. - # - if entry.find(' ') >= 0: - split_char = ' ' - elif entry.find(',') >= 0: - split_char = ',' - else: - # probably a singleton; treat as a space-separated list. 
- split_char = ' ' - return split_char - - def _config_win32_nameservers(self, nameservers): - # we call str() on nameservers to convert it from unicode to ascii - nameservers = str(nameservers) - split_char = self._determine_split_char(nameservers) - ns_list = nameservers.split(split_char) - for ns in ns_list: - if ns not in self.nameservers: - self.nameservers.append(ns) - - def _config_win32_domain(self, domain): - # we call str() on domain to convert it from unicode to ascii - self.domain = dns.name.from_text(str(domain)) - - def _config_win32_search(self, search): - # we call str() on search to convert it from unicode to ascii - search = str(search) - split_char = self._determine_split_char(search) - search_list = search.split(split_char) - for s in search_list: - if s not in self.search: - self.search.append(dns.name.from_text(s)) - - def _config_win32_fromkey(self, key, always_try_domain): - try: - servers, rtype = _winreg.QueryValueEx(key, 'NameServer') - except WindowsError: # pylint: disable=undefined-variable - servers = None - if servers: - self._config_win32_nameservers(servers) - if servers or always_try_domain: - try: - dom, rtype = _winreg.QueryValueEx(key, 'Domain') - if dom: - self._config_win32_domain(dom) - except WindowsError: # pylint: disable=undefined-variable - pass - else: - try: - servers, rtype = _winreg.QueryValueEx(key, 'DhcpNameServer') - except WindowsError: # pylint: disable=undefined-variable - servers = None - if servers: - self._config_win32_nameservers(servers) - try: - dom, rtype = _winreg.QueryValueEx(key, 'DhcpDomain') - if dom: - self._config_win32_domain(dom) - except WindowsError: # pylint: disable=undefined-variable - pass - try: - search, rtype = _winreg.QueryValueEx(key, 'SearchList') - except WindowsError: # pylint: disable=undefined-variable - search = None - if search: - self._config_win32_search(search) - - def read_registry(self): - """Extract resolver configuration from the Windows registry.""" - - lm = _winreg.ConnectRegistry(None, _winreg.HKEY_LOCAL_MACHINE) - want_scan = False - try: - try: - # XP, 2000 - tcp_params = _winreg.OpenKey(lm, - r'SYSTEM\CurrentControlSet' - r'\Services\Tcpip\Parameters') - want_scan = True - except EnvironmentError: - # ME - tcp_params = _winreg.OpenKey(lm, - r'SYSTEM\CurrentControlSet' - r'\Services\VxD\MSTCP') - try: - self._config_win32_fromkey(tcp_params, True) - finally: - tcp_params.Close() - if want_scan: - interfaces = _winreg.OpenKey(lm, - r'SYSTEM\CurrentControlSet' - r'\Services\Tcpip\Parameters' - r'\Interfaces') - try: - i = 0 - while True: - try: - guid = _winreg.EnumKey(interfaces, i) - i += 1 - key = _winreg.OpenKey(interfaces, guid) - if not self._win32_is_nic_enabled(lm, guid, key): - continue - try: - self._config_win32_fromkey(key, False) - finally: - key.Close() - except EnvironmentError: - break - finally: - interfaces.Close() - finally: - lm.Close() - - def _win32_is_nic_enabled(self, lm, guid, interface_key): - # Look in the Windows Registry to determine whether the network - # interface corresponding to the given guid is enabled. - # - # (Code contributed by Paul Marks, thanks!) - # - try: - # This hard-coded location seems to be consistent, at least - # from Windows 2000 through Vista. 
- connection_key = _winreg.OpenKey( - lm, - r'SYSTEM\CurrentControlSet\Control\Network' - r'\{4D36E972-E325-11CE-BFC1-08002BE10318}' - r'\%s\Connection' % guid) - - try: - # The PnpInstanceID points to a key inside Enum - (pnp_id, ttype) = _winreg.QueryValueEx( - connection_key, 'PnpInstanceID') - - if ttype != _winreg.REG_SZ: - raise ValueError - - device_key = _winreg.OpenKey( - lm, r'SYSTEM\CurrentControlSet\Enum\%s' % pnp_id) - - try: - # Get ConfigFlags for this device - (flags, ttype) = _winreg.QueryValueEx( - device_key, 'ConfigFlags') - - if ttype != _winreg.REG_DWORD: - raise ValueError - - # Based on experimentation, bit 0x1 indicates that the - # device is disabled. - return not flags & 0x1 - - finally: - device_key.Close() - finally: - connection_key.Close() - except (EnvironmentError, ValueError): - # Pre-vista, enabled interfaces seem to have a non-empty - # NTEContextList; this was how dnspython detected enabled - # nics before the code above was contributed. We've retained - # the old method since we don't know if the code above works - # on Windows 95/98/ME. - try: - (nte, ttype) = _winreg.QueryValueEx(interface_key, - 'NTEContextList') - return nte is not None - except WindowsError: # pylint: disable=undefined-variable - return False - - def _compute_timeout(self, start, lifetime=None): - lifetime = self.lifetime if lifetime is None else lifetime - now = time.time() - duration = now - start - if duration < 0: - if duration < -1: - # Time going backwards is bad. Just give up. - raise Timeout(timeout=duration) - else: - # Time went backwards, but only a little. This can - # happen, e.g. under vmware with older linux kernels. - # Pretend it didn't happen. - now = start - if duration >= lifetime: - raise Timeout(timeout=duration) - return min(lifetime - duration, self.timeout) - - def query(self, qname, rdtype=dns.rdatatype.A, rdclass=dns.rdataclass.IN, - tcp=False, source=None, raise_on_no_answer=True, source_port=0, - lifetime=None): - """Query nameservers to find the answer to the question. - - The *qname*, *rdtype*, and *rdclass* parameters may be objects - of the appropriate type, or strings that can be converted into objects - of the appropriate type. - - *qname*, a ``dns.name.Name`` or ``text``, the query name. - - *rdtype*, an ``int`` or ``text``, the query type. - - *rdclass*, an ``int`` or ``text``, the query class. - - *tcp*, a ``bool``. If ``True``, use TCP to make the query. - - *source*, a ``text`` or ``None``. If not ``None``, bind to this IP - address when making queries. - - *raise_on_no_answer*, a ``bool``. If ``True``, raise - ``dns.resolver.NoAnswer`` if there's no answer to the question. - - *source_port*, an ``int``, the port from which to send the message. - - *lifetime*, a ``float``, how long query should run before timing out. - - Raises ``dns.exception.Timeout`` if no answers could be found - in the specified lifetime. - - Raises ``dns.resolver.NXDOMAIN`` if the query name does not exist. - - Raises ``dns.resolver.YXDOMAIN`` if the query name is too long after - DNAME substitution. - - Raises ``dns.resolver.NoAnswer`` if *raise_on_no_answer* is - ``True`` and the query name exists but has no RRset of the - desired type and class. - - Raises ``dns.resolver.NoNameservers`` if no non-broken - nameservers are available to answer the question. - - Returns a ``dns.resolver.Answer`` instance. 
- """ - - if isinstance(qname, string_types): - qname = dns.name.from_text(qname, None) - if isinstance(rdtype, string_types): - rdtype = dns.rdatatype.from_text(rdtype) - if dns.rdatatype.is_metatype(rdtype): - raise NoMetaqueries - if isinstance(rdclass, string_types): - rdclass = dns.rdataclass.from_text(rdclass) - if dns.rdataclass.is_metaclass(rdclass): - raise NoMetaqueries - qnames_to_try = [] - if qname.is_absolute(): - qnames_to_try.append(qname) - else: - if len(qname) > 1: - qnames_to_try.append(qname.concatenate(dns.name.root)) - if self.search: - for suffix in self.search: - qnames_to_try.append(qname.concatenate(suffix)) - else: - qnames_to_try.append(qname.concatenate(self.domain)) - all_nxdomain = True - nxdomain_responses = {} - start = time.time() - _qname = None # make pylint happy - for _qname in qnames_to_try: - if self.cache: - answer = self.cache.get((_qname, rdtype, rdclass)) - if answer is not None: - if answer.rrset is None and raise_on_no_answer: - raise NoAnswer(response=answer.response) - else: - return answer - request = dns.message.make_query(_qname, rdtype, rdclass) - if self.keyname is not None: - request.use_tsig(self.keyring, self.keyname, - algorithm=self.keyalgorithm) - request.use_edns(self.edns, self.ednsflags, self.payload) - if self.flags is not None: - request.flags = self.flags - response = None - # - # make a copy of the servers list so we can alter it later. - # - nameservers = self.nameservers[:] - errors = [] - if self.rotate: - random.shuffle(nameservers) - backoff = 0.10 - while response is None: - if len(nameservers) == 0: - raise NoNameservers(request=request, errors=errors) - for nameserver in nameservers[:]: - timeout = self._compute_timeout(start, lifetime) - port = self.nameserver_ports.get(nameserver, self.port) - try: - tcp_attempt = tcp - if tcp: - response = dns.query.tcp(request, nameserver, - timeout, port, - source=source, - source_port=source_port) - else: - response = dns.query.udp(request, nameserver, - timeout, port, - source=source, - source_port=source_port) - if response.flags & dns.flags.TC: - # Response truncated; retry with TCP. - tcp_attempt = True - timeout = self._compute_timeout(start, lifetime) - response = \ - dns.query.tcp(request, nameserver, - timeout, port, - source=source, - source_port=source_port) - except (socket.error, dns.exception.Timeout) as ex: - # - # Communication failure or timeout. Go to the - # next server - # - errors.append((nameserver, tcp_attempt, port, ex, - response)) - response = None - continue - except dns.query.UnexpectedSource as ex: - # - # Who knows? Keep going. - # - errors.append((nameserver, tcp_attempt, port, ex, - response)) - response = None - continue - except dns.exception.FormError as ex: - # - # We don't understand what this server is - # saying. Take it out of the mix and - # continue. - # - nameservers.remove(nameserver) - errors.append((nameserver, tcp_attempt, port, ex, - response)) - response = None - continue - except EOFError as ex: - # - # We're using TCP and they hung up on us. - # Probably they don't support TCP (though - # they're supposed to!). Take it out of the - # mix and continue. 
- # - nameservers.remove(nameserver) - errors.append((nameserver, tcp_attempt, port, ex, - response)) - response = None - continue - rcode = response.rcode() - if rcode == dns.rcode.YXDOMAIN: - ex = YXDOMAIN() - errors.append((nameserver, tcp_attempt, port, ex, - response)) - raise ex - if rcode == dns.rcode.NOERROR or \ - rcode == dns.rcode.NXDOMAIN: - break - # - # We got a response, but we're not happy with the - # rcode in it. Remove the server from the mix if - # the rcode isn't SERVFAIL. - # - if rcode != dns.rcode.SERVFAIL or not self.retry_servfail: - nameservers.remove(nameserver) - errors.append((nameserver, tcp_attempt, port, - dns.rcode.to_text(rcode), response)) - response = None - if response is not None: - break - # - # All nameservers failed! - # - if len(nameservers) > 0: - # - # But we still have servers to try. Sleep a bit - # so we don't pound them! - # - timeout = self._compute_timeout(start, lifetime) - sleep_time = min(timeout, backoff) - backoff *= 2 - time.sleep(sleep_time) - if response.rcode() == dns.rcode.NXDOMAIN: - nxdomain_responses[_qname] = response - continue - all_nxdomain = False - break - if all_nxdomain: - raise NXDOMAIN(qnames=qnames_to_try, responses=nxdomain_responses) - answer = Answer(_qname, rdtype, rdclass, response, - raise_on_no_answer) - if self.cache: - self.cache.put((_qname, rdtype, rdclass), answer) - return answer - - def use_tsig(self, keyring, keyname=None, - algorithm=dns.tsig.default_algorithm): - """Add a TSIG signature to the query. - - See the documentation of the Message class for a complete - description of the keyring dictionary. - - *keyring*, a ``dict``, the TSIG keyring to use. If a - *keyring* is specified but a *keyname* is not, then the key - used will be the first key in the *keyring*. Note that the - order of keys in a dictionary is not defined, so applications - should supply a keyname when a keyring is used, unless they - know the keyring contains only one key. - - *keyname*, a ``dns.name.Name`` or ``None``, the name of the TSIG key - to use; defaults to ``None``. The key must be defined in the keyring. - - *algorithm*, a ``dns.name.Name``, the TSIG algorithm to use. - """ - - self.keyring = keyring - if keyname is None: - self.keyname = list(self.keyring.keys())[0] - else: - self.keyname = keyname - self.keyalgorithm = algorithm - - def use_edns(self, edns, ednsflags, payload): - """Configure EDNS behavior. - - *edns*, an ``int``, is the EDNS level to use. Specifying - ``None``, ``False``, or ``-1`` means "do not use EDNS", and in this case - the other parameters are ignored. Specifying ``True`` is - equivalent to specifying 0, i.e. "use EDNS0". - - *ednsflags*, an ``int``, the EDNS flag values. - - *payload*, an ``int``, is the EDNS sender's payload field, which is the - maximum size of UDP datagram the sender can handle. I.e. how big - a response to this message can be. - """ - - if edns is None: - edns = -1 - self.edns = edns - self.ednsflags = ednsflags - self.payload = payload - - def set_flags(self, flags): - """Overrides the default flags with your own. - - *flags*, an ``int``, the message flags to use. - """ - - self.flags = flags - - -#: The default resolver. -default_resolver = None - - -def get_default_resolver(): - """Get the default resolver, initializing it if necessary.""" - if default_resolver is None: - reset_default_resolver() - return default_resolver - - -def reset_default_resolver(): - """Re-initialize default resolver. - - Note that the resolver configuration (i.e. 
/etc/resolv.conf on UNIX - systems) will be re-read immediately. - """ - - global default_resolver - default_resolver = Resolver() - - -def query(qname, rdtype=dns.rdatatype.A, rdclass=dns.rdataclass.IN, - tcp=False, source=None, raise_on_no_answer=True, - source_port=0, lifetime=None): - """Query nameservers to find the answer to the question. - - This is a convenience function that uses the default resolver - object to make the query. - - See ``dns.resolver.Resolver.query`` for more information on the - parameters. - """ - - return get_default_resolver().query(qname, rdtype, rdclass, tcp, source, - raise_on_no_answer, source_port, - lifetime) - - -def zone_for_name(name, rdclass=dns.rdataclass.IN, tcp=False, resolver=None): - """Find the name of the zone which contains the specified name. - - *name*, an absolute ``dns.name.Name`` or ``text``, the query name. - - *rdclass*, an ``int``, the query class. - - *tcp*, a ``bool``. If ``True``, use TCP to make the query. - - *resolver*, a ``dns.resolver.Resolver`` or ``None``, the resolver to use. - If ``None``, the default resolver is used. - - Raises ``dns.resolver.NoRootSOA`` if there is no SOA RR at the DNS - root. (This is only likely to happen if you're using non-default - root servers in your network and they are misconfigured.) - - Returns a ``dns.name.Name``. - """ - - if isinstance(name, string_types): - name = dns.name.from_text(name, dns.name.root) - if resolver is None: - resolver = get_default_resolver() - if not name.is_absolute(): - raise NotAbsolute(name) - while 1: - try: - answer = resolver.query(name, dns.rdatatype.SOA, rdclass, tcp) - if answer.rrset.name == name: - return name - # otherwise we were CNAMEd or DNAMEd and need to look higher - except (dns.resolver.NXDOMAIN, dns.resolver.NoAnswer): - pass - try: - name = name.parent() - except dns.name.NoParent: - raise NoRootSOA - -# -# Support for overriding the system resolver for all python code in the -# running process. -# - -_protocols_for_socktype = { - socket.SOCK_DGRAM: [socket.SOL_UDP], - socket.SOCK_STREAM: [socket.SOL_TCP], -} - -_resolver = None -_original_getaddrinfo = socket.getaddrinfo -_original_getnameinfo = socket.getnameinfo -_original_getfqdn = socket.getfqdn -_original_gethostbyname = socket.gethostbyname -_original_gethostbyname_ex = socket.gethostbyname_ex -_original_gethostbyaddr = socket.gethostbyaddr - - -def _getaddrinfo(host=None, service=None, family=socket.AF_UNSPEC, socktype=0, - proto=0, flags=0): - if flags & (socket.AI_ADDRCONFIG | socket.AI_V4MAPPED) != 0: - raise NotImplementedError - if host is None and service is None: - raise socket.gaierror(socket.EAI_NONAME) - v6addrs = [] - v4addrs = [] - canonical_name = None - try: - # Is host None or a V6 address literal? - if host is None: - canonical_name = 'localhost' - if flags & socket.AI_PASSIVE != 0: - v6addrs.append('::') - v4addrs.append('0.0.0.0') - else: - v6addrs.append('::1') - v4addrs.append('127.0.0.1') - else: - parts = host.split('%') - if len(parts) == 2: - ahost = parts[0] - else: - ahost = host - addr = dns.ipv6.inet_aton(ahost) - v6addrs.append(host) - canonical_name = host - except Exception: - try: - # Is it a V4 address literal? 
- addr = dns.ipv4.inet_aton(host) - v4addrs.append(host) - canonical_name = host - except Exception: - if flags & socket.AI_NUMERICHOST == 0: - try: - if family == socket.AF_INET6 or family == socket.AF_UNSPEC: - v6 = _resolver.query(host, dns.rdatatype.AAAA, - raise_on_no_answer=False) - # Note that setting host ensures we query the same name - # for A as we did for AAAA. - host = v6.qname - canonical_name = v6.canonical_name.to_text(True) - if v6.rrset is not None: - for rdata in v6.rrset: - v6addrs.append(rdata.address) - if family == socket.AF_INET or family == socket.AF_UNSPEC: - v4 = _resolver.query(host, dns.rdatatype.A, - raise_on_no_answer=False) - host = v4.qname - canonical_name = v4.canonical_name.to_text(True) - if v4.rrset is not None: - for rdata in v4.rrset: - v4addrs.append(rdata.address) - except dns.resolver.NXDOMAIN: - raise socket.gaierror(socket.EAI_NONAME) - except Exception: - raise socket.gaierror(socket.EAI_SYSTEM) - port = None - try: - # Is it a port literal? - if service is None: - port = 0 - else: - port = int(service) - except Exception: - if flags & socket.AI_NUMERICSERV == 0: - try: - port = socket.getservbyname(service) - except Exception: - pass - if port is None: - raise socket.gaierror(socket.EAI_NONAME) - tuples = [] - if socktype == 0: - socktypes = [socket.SOCK_DGRAM, socket.SOCK_STREAM] - else: - socktypes = [socktype] - if flags & socket.AI_CANONNAME != 0: - cname = canonical_name - else: - cname = '' - if family == socket.AF_INET6 or family == socket.AF_UNSPEC: - for addr in v6addrs: - for socktype in socktypes: - for proto in _protocols_for_socktype[socktype]: - tuples.append((socket.AF_INET6, socktype, proto, - cname, (addr, port, 0, 0))) - if family == socket.AF_INET or family == socket.AF_UNSPEC: - for addr in v4addrs: - for socktype in socktypes: - for proto in _protocols_for_socktype[socktype]: - tuples.append((socket.AF_INET, socktype, proto, - cname, (addr, port))) - if len(tuples) == 0: - raise socket.gaierror(socket.EAI_NONAME) - return tuples - - -def _getnameinfo(sockaddr, flags=0): - host = sockaddr[0] - port = sockaddr[1] - if len(sockaddr) == 4: - scope = sockaddr[3] - family = socket.AF_INET6 - else: - scope = None - family = socket.AF_INET - tuples = _getaddrinfo(host, port, family, socket.SOCK_STREAM, - socket.SOL_TCP, 0) - if len(tuples) > 1: - raise socket.error('sockaddr resolved to multiple addresses') - addr = tuples[0][4][0] - if flags & socket.NI_DGRAM: - pname = 'udp' - else: - pname = 'tcp' - qname = dns.reversename.from_address(addr) - if flags & socket.NI_NUMERICHOST == 0: - try: - answer = _resolver.query(qname, 'PTR') - hostname = answer.rrset[0].target.to_text(True) - except (dns.resolver.NXDOMAIN, dns.resolver.NoAnswer): - if flags & socket.NI_NAMEREQD: - raise socket.gaierror(socket.EAI_NONAME) - hostname = addr - if scope is not None: - hostname += '%' + str(scope) - else: - hostname = addr - if scope is not None: - hostname += '%' + str(scope) - if flags & socket.NI_NUMERICSERV: - service = str(port) - else: - service = socket.getservbyport(port, pname) - return (hostname, service) - - -def _getfqdn(name=None): - if name is None: - name = socket.gethostname() - try: - return _getnameinfo(_getaddrinfo(name, 80)[0][4])[0] - except Exception: - return name - - -def _gethostbyname(name): - return _gethostbyname_ex(name)[2][0] - - -def _gethostbyname_ex(name): - aliases = [] - addresses = [] - tuples = _getaddrinfo(name, 0, socket.AF_INET, socket.SOCK_STREAM, - socket.SOL_TCP, socket.AI_CANONNAME) - canonical = 
tuples[0][3] - for item in tuples: - addresses.append(item[4][0]) - # XXX we just ignore aliases - return (canonical, aliases, addresses) - - -def _gethostbyaddr(ip): - try: - dns.ipv6.inet_aton(ip) - sockaddr = (ip, 80, 0, 0) - family = socket.AF_INET6 - except Exception: - sockaddr = (ip, 80) - family = socket.AF_INET - (name, port) = _getnameinfo(sockaddr, socket.NI_NAMEREQD) - aliases = [] - addresses = [] - tuples = _getaddrinfo(name, 0, family, socket.SOCK_STREAM, socket.SOL_TCP, - socket.AI_CANONNAME) - canonical = tuples[0][3] - for item in tuples: - addresses.append(item[4][0]) - # XXX we just ignore aliases - return (canonical, aliases, addresses) - - -def override_system_resolver(resolver=None): - """Override the system resolver routines in the socket module with - versions which use dnspython's resolver. - - This can be useful in testing situations where you want to control - the resolution behavior of python code without having to change - the system's resolver settings (e.g. /etc/resolv.conf). - - The resolver to use may be specified; if it's not, the default - resolver will be used. - - resolver, a ``dns.resolver.Resolver`` or ``None``, the resolver to use. - """ - - if resolver is None: - resolver = get_default_resolver() - global _resolver - _resolver = resolver - socket.getaddrinfo = _getaddrinfo - socket.getnameinfo = _getnameinfo - socket.getfqdn = _getfqdn - socket.gethostbyname = _gethostbyname - socket.gethostbyname_ex = _gethostbyname_ex - socket.gethostbyaddr = _gethostbyaddr - - -def restore_system_resolver(): - """Undo the effects of prior override_system_resolver().""" - - global _resolver - _resolver = None - socket.getaddrinfo = _original_getaddrinfo - socket.getnameinfo = _original_getnameinfo - socket.getfqdn = _original_getfqdn - socket.gethostbyname = _original_gethostbyname - socket.gethostbyname_ex = _original_gethostbyname_ex - socket.gethostbyaddr = _original_gethostbyaddr diff --git a/venv/lib/python3.6/site-packages/dns/reversename.py b/venv/lib/python3.6/site-packages/dns/reversename.py deleted file mode 100644 index 8f095fa..0000000 --- a/venv/lib/python3.6/site-packages/dns/reversename.py +++ /dev/null @@ -1,96 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2006-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -"""DNS Reverse Map Names.""" - -import binascii - -import dns.name -import dns.ipv6 -import dns.ipv4 - -from dns._compat import PY3 - -ipv4_reverse_domain = dns.name.from_text('in-addr.arpa.') -ipv6_reverse_domain = dns.name.from_text('ip6.arpa.') - - -def from_address(text): - """Convert an IPv4 or IPv6 address in textual form into a Name object whose - value is the reverse-map domain name of the address. 
- - *text*, a ``text``, is an IPv4 or IPv6 address in textual form - (e.g. '127.0.0.1', '::1') - - Raises ``dns.exception.SyntaxError`` if the address is badly formed. - - Returns a ``dns.name.Name``. - """ - - try: - v6 = dns.ipv6.inet_aton(text) - if dns.ipv6.is_mapped(v6): - if PY3: - parts = ['%d' % byte for byte in v6[12:]] - else: - parts = ['%d' % ord(byte) for byte in v6[12:]] - origin = ipv4_reverse_domain - else: - parts = [x for x in str(binascii.hexlify(v6).decode())] - origin = ipv6_reverse_domain - except Exception: - parts = ['%d' % - byte for byte in bytearray(dns.ipv4.inet_aton(text))] - origin = ipv4_reverse_domain - parts.reverse() - return dns.name.from_text('.'.join(parts), origin=origin) - - -def to_address(name): - """Convert a reverse map domain name into textual address form. - - *name*, a ``dns.name.Name``, an IPv4 or IPv6 address in reverse-map name - form. - - Raises ``dns.exception.SyntaxError`` if the name does not have a - reverse-map form. - - Returns a ``text``. - """ - - if name.is_subdomain(ipv4_reverse_domain): - name = name.relativize(ipv4_reverse_domain) - labels = list(name.labels) - labels.reverse() - text = b'.'.join(labels) - # run through inet_aton() to check syntax and make pretty. - return dns.ipv4.inet_ntoa(dns.ipv4.inet_aton(text)) - elif name.is_subdomain(ipv6_reverse_domain): - name = name.relativize(ipv6_reverse_domain) - labels = list(name.labels) - labels.reverse() - parts = [] - i = 0 - l = len(labels) - while i < l: - parts.append(b''.join(labels[i:i + 4])) - i += 4 - text = b':'.join(parts) - # run through inet_aton() to check syntax and make pretty. - return dns.ipv6.inet_ntoa(dns.ipv6.inet_aton(text)) - else: - raise dns.exception.SyntaxError('unknown reverse-map address family') diff --git a/venv/lib/python3.6/site-packages/dns/rrset.py b/venv/lib/python3.6/site-packages/dns/rrset.py deleted file mode 100644 index a53ec32..0000000 --- a/venv/lib/python3.6/site-packages/dns/rrset.py +++ /dev/null @@ -1,189 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -"""DNS RRsets (an RRset is a named rdataset)""" - - -import dns.name -import dns.rdataset -import dns.rdataclass -import dns.renderer -from ._compat import string_types - - -class RRset(dns.rdataset.Rdataset): - - """A DNS RRset (named rdataset). - - RRset inherits from Rdataset, and RRsets can be treated as - Rdatasets in most cases. There are, however, a few notable - exceptions. RRsets have different to_wire() and to_text() method - arguments, reflecting the fact that RRsets always have an owner - name. 
- """ - - __slots__ = ['name', 'deleting'] - - def __init__(self, name, rdclass, rdtype, covers=dns.rdatatype.NONE, - deleting=None): - """Create a new RRset.""" - - super(RRset, self).__init__(rdclass, rdtype, covers) - self.name = name - self.deleting = deleting - - def _clone(self): - obj = super(RRset, self)._clone() - obj.name = self.name - obj.deleting = self.deleting - return obj - - def __repr__(self): - if self.covers == 0: - ctext = '' - else: - ctext = '(' + dns.rdatatype.to_text(self.covers) + ')' - if self.deleting is not None: - dtext = ' delete=' + dns.rdataclass.to_text(self.deleting) - else: - dtext = '' - return '' - - def __str__(self): - return self.to_text() - - def __eq__(self, other): - if not isinstance(other, RRset): - return False - if self.name != other.name: - return False - return super(RRset, self).__eq__(other) - - def match(self, name, rdclass, rdtype, covers, deleting=None): - """Returns ``True`` if this rrset matches the specified class, type, - covers, and deletion state. - """ - - if not super(RRset, self).match(rdclass, rdtype, covers): - return False - if self.name != name or self.deleting != deleting: - return False - return True - - def to_text(self, origin=None, relativize=True, **kw): - """Convert the RRset into DNS master file format. - - See ``dns.name.Name.choose_relativity`` for more information - on how *origin* and *relativize* determine the way names - are emitted. - - Any additional keyword arguments are passed on to the rdata - ``to_text()`` method. - - *origin*, a ``dns.name.Name`` or ``None``, the origin for relative - names. - - *relativize*, a ``bool``. If ``True``, names will be relativized - to *origin*. - """ - - return super(RRset, self).to_text(self.name, origin, relativize, - self.deleting, **kw) - - def to_wire(self, file, compress=None, origin=None, **kw): - """Convert the RRset to wire format. - - All keyword arguments are passed to ``dns.rdataset.to_wire()``; see - that function for details. - - Returns an ``int``, the number of records emitted. - """ - - return super(RRset, self).to_wire(self.name, file, compress, origin, - self.deleting, **kw) - - def to_rdataset(self): - """Convert an RRset into an Rdataset. - - Returns a ``dns.rdataset.Rdataset``. - """ - return dns.rdataset.from_rdata_list(self.ttl, list(self)) - - -def from_text_list(name, ttl, rdclass, rdtype, text_rdatas, - idna_codec=None): - """Create an RRset with the specified name, TTL, class, and type, and with - the specified list of rdatas in text format. - - Returns a ``dns.rrset.RRset`` object. - """ - - if isinstance(name, string_types): - name = dns.name.from_text(name, None, idna_codec=idna_codec) - if isinstance(rdclass, string_types): - rdclass = dns.rdataclass.from_text(rdclass) - if isinstance(rdtype, string_types): - rdtype = dns.rdatatype.from_text(rdtype) - r = RRset(name, rdclass, rdtype) - r.update_ttl(ttl) - for t in text_rdatas: - rd = dns.rdata.from_text(r.rdclass, r.rdtype, t) - r.add(rd) - return r - - -def from_text(name, ttl, rdclass, rdtype, *text_rdatas): - """Create an RRset with the specified name, TTL, class, and type and with - the specified rdatas in text format. - - Returns a ``dns.rrset.RRset`` object. - """ - - return from_text_list(name, ttl, rdclass, rdtype, text_rdatas) - - -def from_rdata_list(name, ttl, rdatas, idna_codec=None): - """Create an RRset with the specified name and TTL, and with - the specified list of rdata objects. - - Returns a ``dns.rrset.RRset`` object. 
- """ - - if isinstance(name, string_types): - name = dns.name.from_text(name, None, idna_codec=idna_codec) - - if len(rdatas) == 0: - raise ValueError("rdata list must not be empty") - r = None - for rd in rdatas: - if r is None: - r = RRset(name, rd.rdclass, rd.rdtype) - r.update_ttl(ttl) - r.add(rd) - return r - - -def from_rdata(name, ttl, *rdatas): - """Create an RRset with the specified name and TTL, and with - the specified rdata objects. - - Returns a ``dns.rrset.RRset`` object. - """ - - return from_rdata_list(name, ttl, rdatas) diff --git a/venv/lib/python3.6/site-packages/dns/set.py b/venv/lib/python3.6/site-packages/dns/set.py deleted file mode 100644 index 81329bf..0000000 --- a/venv/lib/python3.6/site-packages/dns/set.py +++ /dev/null @@ -1,261 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -class Set(object): - - """A simple set class. - - This class was originally used to deal with sets being missing in - ancient versions of python, but dnspython will continue to use it - as these sets are based on lists and are thus indexable, and this - ability is widely used in dnspython applications. - """ - - __slots__ = ['items'] - - def __init__(self, items=None): - """Initialize the set. - - *items*, an iterable or ``None``, the initial set of items. - """ - - self.items = [] - if items is not None: - for item in items: - self.add(item) - - def __repr__(self): - return "dns.simpleset.Set(%s)" % repr(self.items) - - def add(self, item): - """Add an item to the set. - """ - - if item not in self.items: - self.items.append(item) - - def remove(self, item): - """Remove an item from the set. - """ - - self.items.remove(item) - - def discard(self, item): - """Remove an item from the set if present. - """ - - try: - self.items.remove(item) - except ValueError: - pass - - def _clone(self): - """Make a (shallow) copy of the set. - - There is a 'clone protocol' that subclasses of this class - should use. To make a copy, first call your super's _clone() - method, and use the object returned as the new instance. Then - make shallow copies of the attributes defined in the subclass. - - This protocol allows us to write the set algorithms that - return new instances (e.g. union) once, and keep using them in - subclasses. - """ - - cls = self.__class__ - obj = cls.__new__(cls) - obj.items = list(self.items) - return obj - - def __copy__(self): - """Make a (shallow) copy of the set. - """ - - return self._clone() - - def copy(self): - """Make a (shallow) copy of the set. - """ - - return self._clone() - - def union_update(self, other): - """Update the set, adding any elements from other which are not - already in the set. 
- """ - - if not isinstance(other, Set): - raise ValueError('other must be a Set instance') - if self is other: - return - for item in other.items: - self.add(item) - - def intersection_update(self, other): - """Update the set, removing any elements from other which are not - in both sets. - """ - - if not isinstance(other, Set): - raise ValueError('other must be a Set instance') - if self is other: - return - # we make a copy of the list so that we can remove items from - # the list without breaking the iterator. - for item in list(self.items): - if item not in other.items: - self.items.remove(item) - - def difference_update(self, other): - """Update the set, removing any elements from other which are in - the set. - """ - - if not isinstance(other, Set): - raise ValueError('other must be a Set instance') - if self is other: - self.items = [] - else: - for item in other.items: - self.discard(item) - - def union(self, other): - """Return a new set which is the union of ``self`` and ``other``. - - Returns the same Set type as this set. - """ - - obj = self._clone() - obj.union_update(other) - return obj - - def intersection(self, other): - """Return a new set which is the intersection of ``self`` and - ``other``. - - Returns the same Set type as this set. - """ - - obj = self._clone() - obj.intersection_update(other) - return obj - - def difference(self, other): - """Return a new set which ``self`` - ``other``, i.e. the items - in ``self`` which are not also in ``other``. - - Returns the same Set type as this set. - """ - - obj = self._clone() - obj.difference_update(other) - return obj - - def __or__(self, other): - return self.union(other) - - def __and__(self, other): - return self.intersection(other) - - def __add__(self, other): - return self.union(other) - - def __sub__(self, other): - return self.difference(other) - - def __ior__(self, other): - self.union_update(other) - return self - - def __iand__(self, other): - self.intersection_update(other) - return self - - def __iadd__(self, other): - self.union_update(other) - return self - - def __isub__(self, other): - self.difference_update(other) - return self - - def update(self, other): - """Update the set, adding any elements from other which are not - already in the set. - - *other*, the collection of items with which to update the set, which - may be any iterable type. - """ - - for item in other: - self.add(item) - - def clear(self): - """Make the set empty.""" - self.items = [] - - def __eq__(self, other): - # Yes, this is inefficient but the sets we're dealing with are - # usually quite small, so it shouldn't hurt too much. - for item in self.items: - if item not in other.items: - return False - for item in other.items: - if item not in self.items: - return False - return True - - def __ne__(self, other): - return not self.__eq__(other) - - def __len__(self): - return len(self.items) - - def __iter__(self): - return iter(self.items) - - def __getitem__(self, i): - return self.items[i] - - def __delitem__(self, i): - del self.items[i] - - def issubset(self, other): - """Is this set a subset of *other*? - - Returns a ``bool``. - """ - - if not isinstance(other, Set): - raise ValueError('other must be a Set instance') - for item in self.items: - if item not in other.items: - return False - return True - - def issuperset(self, other): - """Is this set a superset of *other*? - - Returns a ``bool``. 
- """ - - if not isinstance(other, Set): - raise ValueError('other must be a Set instance') - for item in other.items: - if item not in self.items: - return False - return True diff --git a/venv/lib/python3.6/site-packages/dns/tokenizer.py b/venv/lib/python3.6/site-packages/dns/tokenizer.py deleted file mode 100644 index 880b71c..0000000 --- a/venv/lib/python3.6/site-packages/dns/tokenizer.py +++ /dev/null @@ -1,571 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -"""Tokenize DNS master file format""" - -from io import StringIO -import sys - -import dns.exception -import dns.name -import dns.ttl -from ._compat import long, text_type, binary_type - -_DELIMITERS = { - ' ': True, - '\t': True, - '\n': True, - ';': True, - '(': True, - ')': True, - '"': True} - -_QUOTING_DELIMITERS = {'"': True} - -EOF = 0 -EOL = 1 -WHITESPACE = 2 -IDENTIFIER = 3 -QUOTED_STRING = 4 -COMMENT = 5 -DELIMITER = 6 - - -class UngetBufferFull(dns.exception.DNSException): - """An attempt was made to unget a token when the unget buffer was full.""" - - -class Token(object): - """A DNS master file format token. - - ttype: The token type - value: The token value - has_escape: Does the token value contain escapes? 
- """ - - def __init__(self, ttype, value='', has_escape=False): - """Initialize a token instance.""" - - self.ttype = ttype - self.value = value - self.has_escape = has_escape - - def is_eof(self): - return self.ttype == EOF - - def is_eol(self): - return self.ttype == EOL - - def is_whitespace(self): - return self.ttype == WHITESPACE - - def is_identifier(self): - return self.ttype == IDENTIFIER - - def is_quoted_string(self): - return self.ttype == QUOTED_STRING - - def is_comment(self): - return self.ttype == COMMENT - - def is_delimiter(self): - return self.ttype == DELIMITER - - def is_eol_or_eof(self): - return self.ttype == EOL or self.ttype == EOF - - def __eq__(self, other): - if not isinstance(other, Token): - return False - return (self.ttype == other.ttype and - self.value == other.value) - - def __ne__(self, other): - if not isinstance(other, Token): - return True - return (self.ttype != other.ttype or - self.value != other.value) - - def __str__(self): - return '%d "%s"' % (self.ttype, self.value) - - def unescape(self): - if not self.has_escape: - return self - unescaped = '' - l = len(self.value) - i = 0 - while i < l: - c = self.value[i] - i += 1 - if c == '\\': - if i >= l: - raise dns.exception.UnexpectedEnd - c = self.value[i] - i += 1 - if c.isdigit(): - if i >= l: - raise dns.exception.UnexpectedEnd - c2 = self.value[i] - i += 1 - if i >= l: - raise dns.exception.UnexpectedEnd - c3 = self.value[i] - i += 1 - if not (c2.isdigit() and c3.isdigit()): - raise dns.exception.SyntaxError - c = chr(int(c) * 100 + int(c2) * 10 + int(c3)) - unescaped += c - return Token(self.ttype, unescaped) - - # compatibility for old-style tuple tokens - - def __len__(self): - return 2 - - def __iter__(self): - return iter((self.ttype, self.value)) - - def __getitem__(self, i): - if i == 0: - return self.ttype - elif i == 1: - return self.value - else: - raise IndexError - - -class Tokenizer(object): - """A DNS master file format tokenizer. - - A token object is basically a (type, value) tuple. The valid - types are EOF, EOL, WHITESPACE, IDENTIFIER, QUOTED_STRING, - COMMENT, and DELIMITER. - - file: The file to tokenize - - ungotten_char: The most recently ungotten character, or None. - - ungotten_token: The most recently ungotten token, or None. - - multiline: The current multiline level. This value is increased - by one every time a '(' delimiter is read, and decreased by one every time - a ')' delimiter is read. - - quoting: This variable is true if the tokenizer is currently - reading a quoted string. - - eof: This variable is true if the tokenizer has encountered EOF. - - delimiters: The current delimiter dictionary. - - line_number: The current line number - - filename: A filename that will be returned by the where() method. - """ - - def __init__(self, f=sys.stdin, filename=None): - """Initialize a tokenizer instance. - - f: The file to tokenize. The default is sys.stdin. - This parameter may also be a string, in which case the tokenizer - will take its input from the contents of the string. - - filename: the name of the filename that the where() method - will return. 
- """ - - if isinstance(f, text_type): - f = StringIO(f) - if filename is None: - filename = '' - elif isinstance(f, binary_type): - f = StringIO(f.decode()) - if filename is None: - filename = '' - else: - if filename is None: - if f is sys.stdin: - filename = '' - else: - filename = '' - self.file = f - self.ungotten_char = None - self.ungotten_token = None - self.multiline = 0 - self.quoting = False - self.eof = False - self.delimiters = _DELIMITERS - self.line_number = 1 - self.filename = filename - - def _get_char(self): - """Read a character from input. - """ - - if self.ungotten_char is None: - if self.eof: - c = '' - else: - c = self.file.read(1) - if c == '': - self.eof = True - elif c == '\n': - self.line_number += 1 - else: - c = self.ungotten_char - self.ungotten_char = None - return c - - def where(self): - """Return the current location in the input. - - Returns a (string, int) tuple. The first item is the filename of - the input, the second is the current line number. - """ - - return (self.filename, self.line_number) - - def _unget_char(self, c): - """Unget a character. - - The unget buffer for characters is only one character large; it is - an error to try to unget a character when the unget buffer is not - empty. - - c: the character to unget - raises UngetBufferFull: there is already an ungotten char - """ - - if self.ungotten_char is not None: - raise UngetBufferFull - self.ungotten_char = c - - def skip_whitespace(self): - """Consume input until a non-whitespace character is encountered. - - The non-whitespace character is then ungotten, and the number of - whitespace characters consumed is returned. - - If the tokenizer is in multiline mode, then newlines are whitespace. - - Returns the number of characters skipped. - """ - - skipped = 0 - while True: - c = self._get_char() - if c != ' ' and c != '\t': - if (c != '\n') or not self.multiline: - self._unget_char(c) - return skipped - skipped += 1 - - def get(self, want_leading=False, want_comment=False): - """Get the next token. - - want_leading: If True, return a WHITESPACE token if the - first character read is whitespace. The default is False. - - want_comment: If True, return a COMMENT token if the - first token read is a comment. The default is False. - - Raises dns.exception.UnexpectedEnd: input ended prematurely - - Raises dns.exception.SyntaxError: input was badly formed - - Returns a Token. 
- """ - - if self.ungotten_token is not None: - token = self.ungotten_token - self.ungotten_token = None - if token.is_whitespace(): - if want_leading: - return token - elif token.is_comment(): - if want_comment: - return token - else: - return token - skipped = self.skip_whitespace() - if want_leading and skipped > 0: - return Token(WHITESPACE, ' ') - token = '' - ttype = IDENTIFIER - has_escape = False - while True: - c = self._get_char() - if c == '' or c in self.delimiters: - if c == '' and self.quoting: - raise dns.exception.UnexpectedEnd - if token == '' and ttype != QUOTED_STRING: - if c == '(': - self.multiline += 1 - self.skip_whitespace() - continue - elif c == ')': - if self.multiline <= 0: - raise dns.exception.SyntaxError - self.multiline -= 1 - self.skip_whitespace() - continue - elif c == '"': - if not self.quoting: - self.quoting = True - self.delimiters = _QUOTING_DELIMITERS - ttype = QUOTED_STRING - continue - else: - self.quoting = False - self.delimiters = _DELIMITERS - self.skip_whitespace() - continue - elif c == '\n': - return Token(EOL, '\n') - elif c == ';': - while 1: - c = self._get_char() - if c == '\n' or c == '': - break - token += c - if want_comment: - self._unget_char(c) - return Token(COMMENT, token) - elif c == '': - if self.multiline: - raise dns.exception.SyntaxError( - 'unbalanced parentheses') - return Token(EOF) - elif self.multiline: - self.skip_whitespace() - token = '' - continue - else: - return Token(EOL, '\n') - else: - # This code exists in case we ever want a - # delimiter to be returned. It never produces - # a token currently. - token = c - ttype = DELIMITER - else: - self._unget_char(c) - break - elif self.quoting: - if c == '\\': - c = self._get_char() - if c == '': - raise dns.exception.UnexpectedEnd - if c.isdigit(): - c2 = self._get_char() - if c2 == '': - raise dns.exception.UnexpectedEnd - c3 = self._get_char() - if c == '': - raise dns.exception.UnexpectedEnd - if not (c2.isdigit() and c3.isdigit()): - raise dns.exception.SyntaxError - c = chr(int(c) * 100 + int(c2) * 10 + int(c3)) - elif c == '\n': - raise dns.exception.SyntaxError('newline in quoted string') - elif c == '\\': - # - # It's an escape. Put it and the next character into - # the token; it will be checked later for goodness. - # - token += c - has_escape = True - c = self._get_char() - if c == '' or c == '\n': - raise dns.exception.UnexpectedEnd - token += c - if token == '' and ttype != QUOTED_STRING: - if self.multiline: - raise dns.exception.SyntaxError('unbalanced parentheses') - ttype = EOF - return Token(ttype, token, has_escape) - - def unget(self, token): - """Unget a token. - - The unget buffer for tokens is only one token large; it is - an error to try to unget a token when the unget buffer is not - empty. - - token: the token to unget - - Raises UngetBufferFull: there is already an ungotten token - """ - - if self.ungotten_token is not None: - raise UngetBufferFull - self.ungotten_token = token - - def next(self): - """Return the next item in an iteration. - - Returns a Token. - """ - - token = self.get() - if token.is_eof(): - raise StopIteration - return token - - __next__ = next - - def __iter__(self): - return self - - # Helpers - - def get_int(self, base=10): - """Read the next token and interpret it as an integer. - - Raises dns.exception.SyntaxError if not an integer. - - Returns an int. 
- """ - - token = self.get().unescape() - if not token.is_identifier(): - raise dns.exception.SyntaxError('expecting an identifier') - if not token.value.isdigit(): - raise dns.exception.SyntaxError('expecting an integer') - return int(token.value, base) - - def get_uint8(self): - """Read the next token and interpret it as an 8-bit unsigned - integer. - - Raises dns.exception.SyntaxError if not an 8-bit unsigned integer. - - Returns an int. - """ - - value = self.get_int() - if value < 0 or value > 255: - raise dns.exception.SyntaxError( - '%d is not an unsigned 8-bit integer' % value) - return value - - def get_uint16(self, base=10): - """Read the next token and interpret it as a 16-bit unsigned - integer. - - Raises dns.exception.SyntaxError if not a 16-bit unsigned integer. - - Returns an int. - """ - - value = self.get_int(base=base) - if value < 0 or value > 65535: - if base == 8: - raise dns.exception.SyntaxError( - '%o is not an octal unsigned 16-bit integer' % value) - else: - raise dns.exception.SyntaxError( - '%d is not an unsigned 16-bit integer' % value) - return value - - def get_uint32(self): - """Read the next token and interpret it as a 32-bit unsigned - integer. - - Raises dns.exception.SyntaxError if not a 32-bit unsigned integer. - - Returns an int. - """ - - token = self.get().unescape() - if not token.is_identifier(): - raise dns.exception.SyntaxError('expecting an identifier') - if not token.value.isdigit(): - raise dns.exception.SyntaxError('expecting an integer') - value = long(token.value) - if value < 0 or value > long(4294967296): - raise dns.exception.SyntaxError( - '%d is not an unsigned 32-bit integer' % value) - return value - - def get_string(self, origin=None): - """Read the next token and interpret it as a string. - - Raises dns.exception.SyntaxError if not a string. - - Returns a string. - """ - - token = self.get().unescape() - if not (token.is_identifier() or token.is_quoted_string()): - raise dns.exception.SyntaxError('expecting a string') - return token.value - - def get_identifier(self, origin=None): - """Read the next token, which should be an identifier. - - Raises dns.exception.SyntaxError if not an identifier. - - Returns a string. - """ - - token = self.get().unescape() - if not token.is_identifier(): - raise dns.exception.SyntaxError('expecting an identifier') - return token.value - - def get_name(self, origin=None): - """Read the next token and interpret it as a DNS name. - - Raises dns.exception.SyntaxError if not a name. - - Returns a dns.name.Name. - """ - - token = self.get() - if not token.is_identifier(): - raise dns.exception.SyntaxError('expecting an identifier') - return dns.name.from_text(token.value, origin) - - def get_eol(self): - """Read the next token and raise an exception if it isn't EOL or - EOF. - - Returns a string. - """ - - token = self.get() - if not token.is_eol_or_eof(): - raise dns.exception.SyntaxError( - 'expected EOL or EOF, got %d "%s"' % (token.ttype, - token.value)) - return token.value - - def get_ttl(self): - """Read the next token and interpret it as a DNS TTL. - - Raises dns.exception.SyntaxError or dns.ttl.BadTTL if not an - identifier or badly formed. - - Returns an int. 
- """ - - token = self.get().unescape() - if not token.is_identifier(): - raise dns.exception.SyntaxError('expecting an identifier') - return dns.ttl.from_text(token.value) diff --git a/venv/lib/python3.6/site-packages/dns/tsig.py b/venv/lib/python3.6/site-packages/dns/tsig.py deleted file mode 100644 index 3daa387..0000000 --- a/venv/lib/python3.6/site-packages/dns/tsig.py +++ /dev/null @@ -1,236 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2001-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -"""DNS TSIG support.""" - -import hashlib -import hmac -import struct - -import dns.exception -import dns.rdataclass -import dns.name -from ._compat import long, string_types, text_type - -class BadTime(dns.exception.DNSException): - - """The current time is not within the TSIG's validity time.""" - - -class BadSignature(dns.exception.DNSException): - - """The TSIG signature fails to verify.""" - - -class PeerError(dns.exception.DNSException): - - """Base class for all TSIG errors generated by the remote peer""" - - -class PeerBadKey(PeerError): - - """The peer didn't know the key we used""" - - -class PeerBadSignature(PeerError): - - """The peer didn't like the signature we sent""" - - -class PeerBadTime(PeerError): - - """The peer didn't like the time we sent""" - - -class PeerBadTruncation(PeerError): - - """The peer didn't like amount of truncation in the TSIG we sent""" - -# TSIG Algorithms - -HMAC_MD5 = dns.name.from_text("HMAC-MD5.SIG-ALG.REG.INT") -HMAC_SHA1 = dns.name.from_text("hmac-sha1") -HMAC_SHA224 = dns.name.from_text("hmac-sha224") -HMAC_SHA256 = dns.name.from_text("hmac-sha256") -HMAC_SHA384 = dns.name.from_text("hmac-sha384") -HMAC_SHA512 = dns.name.from_text("hmac-sha512") - -_hashes = { - HMAC_SHA224: hashlib.sha224, - HMAC_SHA256: hashlib.sha256, - HMAC_SHA384: hashlib.sha384, - HMAC_SHA512: hashlib.sha512, - HMAC_SHA1: hashlib.sha1, - HMAC_MD5: hashlib.md5, -} - -default_algorithm = HMAC_MD5 - -BADSIG = 16 -BADKEY = 17 -BADTIME = 18 -BADTRUNC = 22 - - -def sign(wire, keyname, secret, time, fudge, original_id, error, - other_data, request_mac, ctx=None, multi=False, first=True, - algorithm=default_algorithm): - """Return a (tsig_rdata, mac, ctx) tuple containing the HMAC TSIG rdata - for the input parameters, the HMAC MAC calculated by applying the - TSIG signature algorithm, and the TSIG digest context. 
- @rtype: (string, string, hmac.HMAC object) - @raises ValueError: I{other_data} is too long - @raises NotImplementedError: I{algorithm} is not supported - """ - - if isinstance(other_data, text_type): - other_data = other_data.encode() - (algorithm_name, digestmod) = get_algorithm(algorithm) - if first: - ctx = hmac.new(secret, digestmod=digestmod) - ml = len(request_mac) - if ml > 0: - ctx.update(struct.pack('!H', ml)) - ctx.update(request_mac) - id = struct.pack('!H', original_id) - ctx.update(id) - ctx.update(wire[2:]) - if first: - ctx.update(keyname.to_digestable()) - ctx.update(struct.pack('!H', dns.rdataclass.ANY)) - ctx.update(struct.pack('!I', 0)) - long_time = time + long(0) - upper_time = (long_time >> 32) & long(0xffff) - lower_time = long_time & long(0xffffffff) - time_mac = struct.pack('!HIH', upper_time, lower_time, fudge) - pre_mac = algorithm_name + time_mac - ol = len(other_data) - if ol > 65535: - raise ValueError('TSIG Other Data is > 65535 bytes') - post_mac = struct.pack('!HH', error, ol) + other_data - if first: - ctx.update(pre_mac) - ctx.update(post_mac) - else: - ctx.update(time_mac) - mac = ctx.digest() - mpack = struct.pack('!H', len(mac)) - tsig_rdata = pre_mac + mpack + mac + id + post_mac - if multi: - ctx = hmac.new(secret, digestmod=digestmod) - ml = len(mac) - ctx.update(struct.pack('!H', ml)) - ctx.update(mac) - else: - ctx = None - return (tsig_rdata, mac, ctx) - - -def hmac_md5(wire, keyname, secret, time, fudge, original_id, error, - other_data, request_mac, ctx=None, multi=False, first=True, - algorithm=default_algorithm): - return sign(wire, keyname, secret, time, fudge, original_id, error, - other_data, request_mac, ctx, multi, first, algorithm) - - -def validate(wire, keyname, secret, now, request_mac, tsig_start, tsig_rdata, - tsig_rdlen, ctx=None, multi=False, first=True): - """Validate the specified TSIG rdata against the other input parameters. - - @raises FormError: The TSIG is badly formed. - @raises BadTime: There is too much time skew between the client and the - server. 
- @raises BadSignature: The TSIG signature did not validate - @rtype: hmac.HMAC object""" - - (adcount,) = struct.unpack("!H", wire[10:12]) - if adcount == 0: - raise dns.exception.FormError - adcount -= 1 - new_wire = wire[0:10] + struct.pack("!H", adcount) + wire[12:tsig_start] - current = tsig_rdata - (aname, used) = dns.name.from_wire(wire, current) - current = current + used - (upper_time, lower_time, fudge, mac_size) = \ - struct.unpack("!HIHH", wire[current:current + 10]) - time = ((upper_time + long(0)) << 32) + (lower_time + long(0)) - current += 10 - mac = wire[current:current + mac_size] - current += mac_size - (original_id, error, other_size) = \ - struct.unpack("!HHH", wire[current:current + 6]) - current += 6 - other_data = wire[current:current + other_size] - current += other_size - if current != tsig_rdata + tsig_rdlen: - raise dns.exception.FormError - if error != 0: - if error == BADSIG: - raise PeerBadSignature - elif error == BADKEY: - raise PeerBadKey - elif error == BADTIME: - raise PeerBadTime - elif error == BADTRUNC: - raise PeerBadTruncation - else: - raise PeerError('unknown TSIG error code %d' % error) - time_low = time - fudge - time_high = time + fudge - if now < time_low or now > time_high: - raise BadTime - (junk, our_mac, ctx) = sign(new_wire, keyname, secret, time, fudge, - original_id, error, other_data, - request_mac, ctx, multi, first, aname) - if our_mac != mac: - raise BadSignature - return ctx - - -def get_algorithm(algorithm): - """Returns the wire format string and the hash module to use for the - specified TSIG algorithm - - @rtype: (string, hash constructor) - @raises NotImplementedError: I{algorithm} is not supported - """ - - if isinstance(algorithm, string_types): - algorithm = dns.name.from_text(algorithm) - - try: - return (algorithm.to_digestable(), _hashes[algorithm]) - except KeyError: - raise NotImplementedError("TSIG algorithm " + str(algorithm) + - " is not supported") - - -def get_algorithm_and_mac(wire, tsig_rdata, tsig_rdlen): - """Return the tsig algorithm for the specified tsig_rdata - @raises FormError: The TSIG is badly formed. - """ - current = tsig_rdata - (aname, used) = dns.name.from_wire(wire, current) - current = current + used - (upper_time, lower_time, fudge, mac_size) = \ - struct.unpack("!HIHH", wire[current:current + 10]) - current += 10 - mac = wire[current:current + mac_size] - current += mac_size - if current > tsig_rdata + tsig_rdlen: - raise dns.exception.FormError - return (aname, mac) diff --git a/venv/lib/python3.6/site-packages/dns/tsigkeyring.py b/venv/lib/python3.6/site-packages/dns/tsigkeyring.py deleted file mode 100644 index 5e5fe1c..0000000 --- a/venv/lib/python3.6/site-packages/dns/tsigkeyring.py +++ /dev/null @@ -1,50 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -"""A place to store TSIG keys.""" - -from dns._compat import maybe_decode, maybe_encode - -import base64 - -import dns.name - - -def from_text(textring): - """Convert a dictionary containing (textual DNS name, base64 secret) pairs - into a binary keyring which has (dns.name.Name, binary secret) pairs. - @rtype: dict""" - - keyring = {} - for keytext in textring: - keyname = dns.name.from_text(keytext) - secret = base64.decodestring(maybe_encode(textring[keytext])) - keyring[keyname] = secret - return keyring - - -def to_text(keyring): - """Convert a dictionary containing (dns.name.Name, binary secret) pairs - into a text keyring which has (textual DNS name, base64 secret) pairs. - @rtype: dict""" - - textring = {} - for keyname in keyring: - keytext = maybe_decode(keyname.to_text()) - secret = maybe_decode(base64.encodestring(keyring[keyname])) - textring[keytext] = secret - return textring diff --git a/venv/lib/python3.6/site-packages/dns/ttl.py b/venv/lib/python3.6/site-packages/dns/ttl.py deleted file mode 100644 index 4be16be..0000000 --- a/venv/lib/python3.6/site-packages/dns/ttl.py +++ /dev/null @@ -1,70 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -"""DNS TTL conversion.""" - -import dns.exception -from ._compat import long - - -class BadTTL(dns.exception.SyntaxError): - """DNS TTL value is not well-formed.""" - - -def from_text(text): - """Convert the text form of a TTL to an integer. - - The BIND 8 units syntax for TTLs (e.g. '1w6d4h3m10s') is supported. - - *text*, a ``text``, the textual TTL. - - Raises ``dns.ttl.BadTTL`` if the TTL is not well-formed. - - Returns an ``int``. 
- """ - - if text.isdigit(): - total = long(text) - else: - if not text[0].isdigit(): - raise BadTTL - total = long(0) - current = long(0) - for c in text: - if c.isdigit(): - current *= 10 - current += long(c) - else: - c = c.lower() - if c == 'w': - total += current * long(604800) - elif c == 'd': - total += current * long(86400) - elif c == 'h': - total += current * long(3600) - elif c == 'm': - total += current * long(60) - elif c == 's': - total += current - else: - raise BadTTL("unknown unit '%s'" % c) - current = 0 - if not current == 0: - raise BadTTL("trailing integer") - if total < long(0) or total > long(2147483647): - raise BadTTL("TTL should be between 0 and 2^31 - 1 (inclusive)") - return total diff --git a/venv/lib/python3.6/site-packages/dns/update.py b/venv/lib/python3.6/site-packages/dns/update.py deleted file mode 100644 index 96a00d5..0000000 --- a/venv/lib/python3.6/site-packages/dns/update.py +++ /dev/null @@ -1,279 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -"""DNS Dynamic Update Support""" - - -import dns.message -import dns.name -import dns.opcode -import dns.rdata -import dns.rdataclass -import dns.rdataset -import dns.tsig -from ._compat import string_types - - -class Update(dns.message.Message): - - def __init__(self, zone, rdclass=dns.rdataclass.IN, keyring=None, - keyname=None, keyalgorithm=dns.tsig.default_algorithm): - """Initialize a new DNS Update object. - - See the documentation of the Message class for a complete - description of the keyring dictionary. - - *zone*, a ``dns.name.Name`` or ``text``, the zone which is being - updated. - - *rdclass*, an ``int`` or ``text``, the class of the zone. - - *keyring*, a ``dict``, the TSIG keyring to use. If a - *keyring* is specified but a *keyname* is not, then the key - used will be the first key in the *keyring*. Note that the - order of keys in a dictionary is not defined, so applications - should supply a keyname when a keyring is used, unless they - know the keyring contains only one key. - - *keyname*, a ``dns.name.Name`` or ``None``, the name of the TSIG key - to use; defaults to ``None``. The key must be defined in the keyring. - - *keyalgorithm*, a ``dns.name.Name``, the TSIG algorithm to use. 
- """ - super(Update, self).__init__() - self.flags |= dns.opcode.to_flags(dns.opcode.UPDATE) - if isinstance(zone, string_types): - zone = dns.name.from_text(zone) - self.origin = zone - if isinstance(rdclass, string_types): - rdclass = dns.rdataclass.from_text(rdclass) - self.zone_rdclass = rdclass - self.find_rrset(self.question, self.origin, rdclass, dns.rdatatype.SOA, - create=True, force_unique=True) - if keyring is not None: - self.use_tsig(keyring, keyname, algorithm=keyalgorithm) - - def _add_rr(self, name, ttl, rd, deleting=None, section=None): - """Add a single RR to the update section.""" - - if section is None: - section = self.authority - covers = rd.covers() - rrset = self.find_rrset(section, name, self.zone_rdclass, rd.rdtype, - covers, deleting, True, True) - rrset.add(rd, ttl) - - def _add(self, replace, section, name, *args): - """Add records. - - *replace* is the replacement mode. If ``False``, - RRs are added to an existing RRset; if ``True``, the RRset - is replaced with the specified contents. The second - argument is the section to add to. The third argument - is always a name. The other arguments can be: - - - rdataset... - - - ttl, rdata... - - - ttl, rdtype, string... - """ - - if isinstance(name, string_types): - name = dns.name.from_text(name, None) - if isinstance(args[0], dns.rdataset.Rdataset): - for rds in args: - if replace: - self.delete(name, rds.rdtype) - for rd in rds: - self._add_rr(name, rds.ttl, rd, section=section) - else: - args = list(args) - ttl = int(args.pop(0)) - if isinstance(args[0], dns.rdata.Rdata): - if replace: - self.delete(name, args[0].rdtype) - for rd in args: - self._add_rr(name, ttl, rd, section=section) - else: - rdtype = args.pop(0) - if isinstance(rdtype, string_types): - rdtype = dns.rdatatype.from_text(rdtype) - if replace: - self.delete(name, rdtype) - for s in args: - rd = dns.rdata.from_text(self.zone_rdclass, rdtype, s, - self.origin) - self._add_rr(name, ttl, rd, section=section) - - def add(self, name, *args): - """Add records. - - The first argument is always a name. The other - arguments can be: - - - rdataset... - - - ttl, rdata... - - - ttl, rdtype, string... - """ - - self._add(False, self.authority, name, *args) - - def delete(self, name, *args): - """Delete records. - - The first argument is always a name. The other - arguments can be: - - - *empty* - - - rdataset... - - - rdata... - - - rdtype, [string...] - """ - - if isinstance(name, string_types): - name = dns.name.from_text(name, None) - if len(args) == 0: - self.find_rrset(self.authority, name, dns.rdataclass.ANY, - dns.rdatatype.ANY, dns.rdatatype.NONE, - dns.rdatatype.ANY, True, True) - elif isinstance(args[0], dns.rdataset.Rdataset): - for rds in args: - for rd in rds: - self._add_rr(name, 0, rd, dns.rdataclass.NONE) - else: - args = list(args) - if isinstance(args[0], dns.rdata.Rdata): - for rd in args: - self._add_rr(name, 0, rd, dns.rdataclass.NONE) - else: - rdtype = args.pop(0) - if isinstance(rdtype, string_types): - rdtype = dns.rdatatype.from_text(rdtype) - if len(args) == 0: - self.find_rrset(self.authority, name, - self.zone_rdclass, rdtype, - dns.rdatatype.NONE, - dns.rdataclass.ANY, - True, True) - else: - for s in args: - rd = dns.rdata.from_text(self.zone_rdclass, rdtype, s, - self.origin) - self._add_rr(name, 0, rd, dns.rdataclass.NONE) - - def replace(self, name, *args): - """Replace records. - - The first argument is always a name. The other - arguments can be: - - - rdataset... - - - ttl, rdata... - - - ttl, rdtype, string... 
- - Note that if you want to replace the entire node, you should do - a delete of the name followed by one or more calls to add. - """ - - self._add(True, self.authority, name, *args) - - def present(self, name, *args): - """Require that an owner name (and optionally an rdata type, - or specific rdataset) exists as a prerequisite to the - execution of the update. - - The first argument is always a name. - The other arguments can be: - - - rdataset... - - - rdata... - - - rdtype, string... - """ - - if isinstance(name, string_types): - name = dns.name.from_text(name, None) - if len(args) == 0: - self.find_rrset(self.answer, name, - dns.rdataclass.ANY, dns.rdatatype.ANY, - dns.rdatatype.NONE, None, - True, True) - elif isinstance(args[0], dns.rdataset.Rdataset) or \ - isinstance(args[0], dns.rdata.Rdata) or \ - len(args) > 1: - if not isinstance(args[0], dns.rdataset.Rdataset): - # Add a 0 TTL - args = list(args) - args.insert(0, 0) - self._add(False, self.answer, name, *args) - else: - rdtype = args[0] - if isinstance(rdtype, string_types): - rdtype = dns.rdatatype.from_text(rdtype) - self.find_rrset(self.answer, name, - dns.rdataclass.ANY, rdtype, - dns.rdatatype.NONE, None, - True, True) - - def absent(self, name, rdtype=None): - """Require that an owner name (and optionally an rdata type) does - not exist as a prerequisite to the execution of the update.""" - - if isinstance(name, string_types): - name = dns.name.from_text(name, None) - if rdtype is None: - self.find_rrset(self.answer, name, - dns.rdataclass.NONE, dns.rdatatype.ANY, - dns.rdatatype.NONE, None, - True, True) - else: - if isinstance(rdtype, string_types): - rdtype = dns.rdatatype.from_text(rdtype) - self.find_rrset(self.answer, name, - dns.rdataclass.NONE, rdtype, - dns.rdatatype.NONE, None, - True, True) - - def to_wire(self, origin=None, max_size=65535): - """Return a string containing the update in DNS compressed wire - format. - - *origin*, a ``dns.name.Name`` or ``None``, the origin to be - appended to any relative names. If *origin* is ``None``, then - the origin of the ``dns.update.Update`` message object is used - (i.e. the *zone* parameter passed when the Update object was - created). - - *max_size*, an ``int``, the maximum size of the wire format - output; default is 0, which means "the message's request - payload, if nonzero, or 65535". - - Returns a ``binary``. - """ - - if origin is None: - origin = self.origin - return super(Update, self).to_wire(origin, max_size) diff --git a/venv/lib/python3.6/site-packages/dns/version.py b/venv/lib/python3.6/site-packages/dns/version.py deleted file mode 100644 index f116904..0000000 --- a/venv/lib/python3.6/site-packages/dns/version.py +++ /dev/null @@ -1,43 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -"""dnspython release version information.""" - -#: MAJOR -MAJOR = 1 -#: MINOR -MINOR = 16 -#: MICRO -MICRO = 0 -#: RELEASELEVEL -RELEASELEVEL = 0x0f -#: SERIAL -SERIAL = 0 - -if RELEASELEVEL == 0x0f: - #: version - version = '%d.%d.%d' % (MAJOR, MINOR, MICRO) -elif RELEASELEVEL == 0x00: - version = '%d.%d.%dx%d' % \ - (MAJOR, MINOR, MICRO, SERIAL) -else: - version = '%d.%d.%d%x%d' % \ - (MAJOR, MINOR, MICRO, RELEASELEVEL, SERIAL) - -#: hexversion -hexversion = MAJOR << 24 | MINOR << 16 | MICRO << 8 | RELEASELEVEL << 4 | \ - SERIAL diff --git a/venv/lib/python3.6/site-packages/dns/wiredata.py b/venv/lib/python3.6/site-packages/dns/wiredata.py deleted file mode 100644 index ea3c1e6..0000000 --- a/venv/lib/python3.6/site-packages/dns/wiredata.py +++ /dev/null @@ -1,103 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2011,2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -"""DNS Wire Data Helper""" - -import dns.exception -from ._compat import binary_type, string_types, PY2 - -# Figure out what constant python passes for an unspecified slice bound. -# It's supposed to be sys.maxint, yet on 64-bit windows sys.maxint is 2^31 - 1 -# but Python uses 2^63 - 1 as the constant. Rather than making pointless -# extra comparisons, duplicating code, or weakening WireData, we just figure -# out what constant Python will use. 
- - -class _SliceUnspecifiedBound(binary_type): - - def __getitem__(self, key): - return key.stop - - if PY2: - def __getslice__(self, i, j): # pylint: disable=getslice-method - return self.__getitem__(slice(i, j)) - -_unspecified_bound = _SliceUnspecifiedBound()[1:] - - -class WireData(binary_type): - # WireData is a binary type with stricter slicing - - def __getitem__(self, key): - try: - if isinstance(key, slice): - # make sure we are not going outside of valid ranges, - # do stricter control of boundaries than python does - # by default - start = key.start - stop = key.stop - - if PY2: - if stop == _unspecified_bound: - # handle the case where the right bound is unspecified - stop = len(self) - - if start < 0 or stop < 0: - raise dns.exception.FormError - # If it's not an empty slice, access left and right bounds - # to make sure they're valid - if start != stop: - super(WireData, self).__getitem__(start) - super(WireData, self).__getitem__(stop - 1) - else: - for index in (start, stop): - if index is None: - continue - elif abs(index) > len(self): - raise dns.exception.FormError - - return WireData(super(WireData, self).__getitem__( - slice(start, stop))) - return bytearray(self.unwrap())[key] - except IndexError: - raise dns.exception.FormError - - if PY2: - def __getslice__(self, i, j): # pylint: disable=getslice-method - return self.__getitem__(slice(i, j)) - - def __iter__(self): - i = 0 - while 1: - try: - yield self[i] - i += 1 - except dns.exception.FormError: - raise StopIteration - - def unwrap(self): - return binary_type(self) - - -def maybe_wrap(wire): - if isinstance(wire, WireData): - return wire - elif isinstance(wire, binary_type): - return WireData(wire) - elif isinstance(wire, string_types): - return WireData(wire.encode()) - raise ValueError("unhandled type %s" % type(wire)) diff --git a/venv/lib/python3.6/site-packages/dns/zone.py b/venv/lib/python3.6/site-packages/dns/zone.py deleted file mode 100644 index 1e2fe78..0000000 --- a/venv/lib/python3.6/site-packages/dns/zone.py +++ /dev/null @@ -1,1127 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
- -"""DNS Zones.""" - -from __future__ import generators - -import sys -import re -import os -from io import BytesIO - -import dns.exception -import dns.name -import dns.node -import dns.rdataclass -import dns.rdatatype -import dns.rdata -import dns.rdtypes.ANY.SOA -import dns.rrset -import dns.tokenizer -import dns.ttl -import dns.grange -from ._compat import string_types, text_type, PY3 - - -class BadZone(dns.exception.DNSException): - - """The DNS zone is malformed.""" - - -class NoSOA(BadZone): - - """The DNS zone has no SOA RR at its origin.""" - - -class NoNS(BadZone): - - """The DNS zone has no NS RRset at its origin.""" - - -class UnknownOrigin(BadZone): - - """The DNS zone's origin is unknown.""" - - -class Zone(object): - - """A DNS zone. - - A Zone is a mapping from names to nodes. The zone object may be - treated like a Python dictionary, e.g. zone[name] will retrieve - the node associated with that name. The I{name} may be a - dns.name.Name object, or it may be a string. In the either case, - if the name is relative it is treated as relative to the origin of - the zone. - - @ivar rdclass: The zone's rdata class; the default is class IN. - @type rdclass: int - @ivar origin: The origin of the zone. - @type origin: dns.name.Name object - @ivar nodes: A dictionary mapping the names of nodes in the zone to the - nodes themselves. - @type nodes: dict - @ivar relativize: should names in the zone be relativized? - @type relativize: bool - @cvar node_factory: the factory used to create a new node - @type node_factory: class or callable - """ - - node_factory = dns.node.Node - - __slots__ = ['rdclass', 'origin', 'nodes', 'relativize'] - - def __init__(self, origin, rdclass=dns.rdataclass.IN, relativize=True): - """Initialize a zone object. - - @param origin: The origin of the zone. - @type origin: dns.name.Name object - @param rdclass: The zone's rdata class; the default is class IN. - @type rdclass: int""" - - if origin is not None: - if isinstance(origin, string_types): - origin = dns.name.from_text(origin) - elif not isinstance(origin, dns.name.Name): - raise ValueError("origin parameter must be convertible to a " - "DNS name") - if not origin.is_absolute(): - raise ValueError("origin parameter must be an absolute name") - self.origin = origin - self.rdclass = rdclass - self.nodes = {} - self.relativize = relativize - - def __eq__(self, other): - """Two zones are equal if they have the same origin, class, and - nodes. - @rtype: bool - """ - - if not isinstance(other, Zone): - return False - if self.rdclass != other.rdclass or \ - self.origin != other.origin or \ - self.nodes != other.nodes: - return False - return True - - def __ne__(self, other): - """Are two zones not equal? 
- @rtype: bool - """ - - return not self.__eq__(other) - - def _validate_name(self, name): - if isinstance(name, string_types): - name = dns.name.from_text(name, None) - elif not isinstance(name, dns.name.Name): - raise KeyError("name parameter must be convertible to a DNS name") - if name.is_absolute(): - if not name.is_subdomain(self.origin): - raise KeyError( - "name parameter must be a subdomain of the zone origin") - if self.relativize: - name = name.relativize(self.origin) - return name - - def __getitem__(self, key): - key = self._validate_name(key) - return self.nodes[key] - - def __setitem__(self, key, value): - key = self._validate_name(key) - self.nodes[key] = value - - def __delitem__(self, key): - key = self._validate_name(key) - del self.nodes[key] - - def __iter__(self): - return self.nodes.__iter__() - - def iterkeys(self): - if PY3: - return self.nodes.keys() # pylint: disable=dict-keys-not-iterating - else: - return self.nodes.iterkeys() # pylint: disable=dict-iter-method - - def keys(self): - return self.nodes.keys() # pylint: disable=dict-keys-not-iterating - - def itervalues(self): - if PY3: - return self.nodes.values() # pylint: disable=dict-values-not-iterating - else: - return self.nodes.itervalues() # pylint: disable=dict-iter-method - - def values(self): - return self.nodes.values() # pylint: disable=dict-values-not-iterating - - def items(self): - return self.nodes.items() # pylint: disable=dict-items-not-iterating - - iteritems = items - - def get(self, key): - key = self._validate_name(key) - return self.nodes.get(key) - - def __contains__(self, other): - return other in self.nodes - - def find_node(self, name, create=False): - """Find a node in the zone, possibly creating it. - - @param name: the name of the node to find - @type name: dns.name.Name object or string - @param create: should the node be created if it doesn't exist? - @type create: bool - @raises KeyError: the name is not known and create was not specified. - @rtype: dns.node.Node object - """ - - name = self._validate_name(name) - node = self.nodes.get(name) - if node is None: - if not create: - raise KeyError - node = self.node_factory() - self.nodes[name] = node - return node - - def get_node(self, name, create=False): - """Get a node in the zone, possibly creating it. - - This method is like L{find_node}, except it returns None instead - of raising an exception if the node does not exist and creation - has not been requested. - - @param name: the name of the node to find - @type name: dns.name.Name object or string - @param create: should the node be created if it doesn't exist? - @type create: bool - @rtype: dns.node.Node object or None - """ - - try: - node = self.find_node(name, create) - except KeyError: - node = None - return node - - def delete_node(self, name): - """Delete the specified node if it exists. - - It is not an error if the node does not exist. - """ - - name = self._validate_name(name) - if name in self.nodes: - del self.nodes[name] - - def find_rdataset(self, name, rdtype, covers=dns.rdatatype.NONE, - create=False): - """Look for rdata with the specified name and type in the zone, - and return an rdataset encapsulating it. - - The I{name}, I{rdtype}, and I{covers} parameters may be - strings, in which case they will be converted to their proper - type. - - The rdataset returned is not a copy; changes to it will change - the zone. - - KeyError is raised if the name or type are not found. - Use L{get_rdataset} if you want to have None returned instead. 
- - @param name: the owner name to look for - @type name: DNS.name.Name object or string - @param rdtype: the rdata type desired - @type rdtype: int or string - @param covers: the covered type (defaults to None) - @type covers: int or string - @param create: should the node and rdataset be created if they do not - exist? - @type create: bool - @raises KeyError: the node or rdata could not be found - @rtype: dns.rdataset.Rdataset object - """ - - name = self._validate_name(name) - if isinstance(rdtype, string_types): - rdtype = dns.rdatatype.from_text(rdtype) - if isinstance(covers, string_types): - covers = dns.rdatatype.from_text(covers) - node = self.find_node(name, create) - return node.find_rdataset(self.rdclass, rdtype, covers, create) - - def get_rdataset(self, name, rdtype, covers=dns.rdatatype.NONE, - create=False): - """Look for rdata with the specified name and type in the zone, - and return an rdataset encapsulating it. - - The I{name}, I{rdtype}, and I{covers} parameters may be - strings, in which case they will be converted to their proper - type. - - The rdataset returned is not a copy; changes to it will change - the zone. - - None is returned if the name or type are not found. - Use L{find_rdataset} if you want to have KeyError raised instead. - - @param name: the owner name to look for - @type name: DNS.name.Name object or string - @param rdtype: the rdata type desired - @type rdtype: int or string - @param covers: the covered type (defaults to None) - @type covers: int or string - @param create: should the node and rdataset be created if they do not - exist? - @type create: bool - @rtype: dns.rdataset.Rdataset object or None - """ - - try: - rdataset = self.find_rdataset(name, rdtype, covers, create) - except KeyError: - rdataset = None - return rdataset - - def delete_rdataset(self, name, rdtype, covers=dns.rdatatype.NONE): - """Delete the rdataset matching I{rdtype} and I{covers}, if it - exists at the node specified by I{name}. - - The I{name}, I{rdtype}, and I{covers} parameters may be - strings, in which case they will be converted to their proper - type. - - It is not an error if the node does not exist, or if there is no - matching rdataset at the node. - - If the node has no rdatasets after the deletion, it will itself - be deleted. - - @param name: the owner name to look for - @type name: DNS.name.Name object or string - @param rdtype: the rdata type desired - @type rdtype: int or string - @param covers: the covered type (defaults to None) - @type covers: int or string - """ - - name = self._validate_name(name) - if isinstance(rdtype, string_types): - rdtype = dns.rdatatype.from_text(rdtype) - if isinstance(covers, string_types): - covers = dns.rdatatype.from_text(covers) - node = self.get_node(name) - if node is not None: - node.delete_rdataset(self.rdclass, rdtype, covers) - if len(node) == 0: - self.delete_node(name) - - def replace_rdataset(self, name, replacement): - """Replace an rdataset at name. - - It is not an error if there is no rdataset matching I{replacement}. - - Ownership of the I{replacement} object is transferred to the zone; - in other words, this method does not store a copy of I{replacement} - at the node, it stores I{replacement} itself. - - If the I{name} node does not exist, it is created. 
- - @param name: the owner name - @type name: DNS.name.Name object or string - @param replacement: the replacement rdataset - @type replacement: dns.rdataset.Rdataset - """ - - if replacement.rdclass != self.rdclass: - raise ValueError('replacement.rdclass != zone.rdclass') - node = self.find_node(name, True) - node.replace_rdataset(replacement) - - def find_rrset(self, name, rdtype, covers=dns.rdatatype.NONE): - """Look for rdata with the specified name and type in the zone, - and return an RRset encapsulating it. - - The I{name}, I{rdtype}, and I{covers} parameters may be - strings, in which case they will be converted to their proper - type. - - This method is less efficient than the similar - L{find_rdataset} because it creates an RRset instead of - returning the matching rdataset. It may be more convenient - for some uses since it returns an object which binds the owner - name to the rdata. - - This method may not be used to create new nodes or rdatasets; - use L{find_rdataset} instead. - - KeyError is raised if the name or type are not found. - Use L{get_rrset} if you want to have None returned instead. - - @param name: the owner name to look for - @type name: DNS.name.Name object or string - @param rdtype: the rdata type desired - @type rdtype: int or string - @param covers: the covered type (defaults to None) - @type covers: int or string - @raises KeyError: the node or rdata could not be found - @rtype: dns.rrset.RRset object - """ - - name = self._validate_name(name) - if isinstance(rdtype, string_types): - rdtype = dns.rdatatype.from_text(rdtype) - if isinstance(covers, string_types): - covers = dns.rdatatype.from_text(covers) - rdataset = self.nodes[name].find_rdataset(self.rdclass, rdtype, covers) - rrset = dns.rrset.RRset(name, self.rdclass, rdtype, covers) - rrset.update(rdataset) - return rrset - - def get_rrset(self, name, rdtype, covers=dns.rdatatype.NONE): - """Look for rdata with the specified name and type in the zone, - and return an RRset encapsulating it. - - The I{name}, I{rdtype}, and I{covers} parameters may be - strings, in which case they will be converted to their proper - type. - - This method is less efficient than the similar L{get_rdataset} - because it creates an RRset instead of returning the matching - rdataset. It may be more convenient for some uses since it - returns an object which binds the owner name to the rdata. - - This method may not be used to create new nodes or rdatasets; - use L{find_rdataset} instead. - - None is returned if the name or type are not found. - Use L{find_rrset} if you want to have KeyError raised instead. - - @param name: the owner name to look for - @type name: DNS.name.Name object or string - @param rdtype: the rdata type desired - @type rdtype: int or string - @param covers: the covered type (defaults to None) - @type covers: int or string - @rtype: dns.rrset.RRset object - """ - - try: - rrset = self.find_rrset(name, rdtype, covers) - except KeyError: - rrset = None - return rrset - - def iterate_rdatasets(self, rdtype=dns.rdatatype.ANY, - covers=dns.rdatatype.NONE): - """Return a generator which yields (name, rdataset) tuples for - all rdatasets in the zone which have the specified I{rdtype} - and I{covers}. If I{rdtype} is dns.rdatatype.ANY, the default, - then all rdatasets will be matched. 
- - @param rdtype: int or string - @type rdtype: int or string - @param covers: the covered type (defaults to None) - @type covers: int or string - """ - - if isinstance(rdtype, string_types): - rdtype = dns.rdatatype.from_text(rdtype) - if isinstance(covers, string_types): - covers = dns.rdatatype.from_text(covers) - for (name, node) in self.iteritems(): # pylint: disable=dict-iter-method - for rds in node: - if rdtype == dns.rdatatype.ANY or \ - (rds.rdtype == rdtype and rds.covers == covers): - yield (name, rds) - - def iterate_rdatas(self, rdtype=dns.rdatatype.ANY, - covers=dns.rdatatype.NONE): - """Return a generator which yields (name, ttl, rdata) tuples for - all rdatas in the zone which have the specified I{rdtype} - and I{covers}. If I{rdtype} is dns.rdatatype.ANY, the default, - then all rdatas will be matched. - - @param rdtype: int or string - @type rdtype: int or string - @param covers: the covered type (defaults to None) - @type covers: int or string - """ - - if isinstance(rdtype, string_types): - rdtype = dns.rdatatype.from_text(rdtype) - if isinstance(covers, string_types): - covers = dns.rdatatype.from_text(covers) - for (name, node) in self.iteritems(): # pylint: disable=dict-iter-method - for rds in node: - if rdtype == dns.rdatatype.ANY or \ - (rds.rdtype == rdtype and rds.covers == covers): - for rdata in rds: - yield (name, rds.ttl, rdata) - - def to_file(self, f, sorted=True, relativize=True, nl=None): - """Write a zone to a file. - - @param f: file or string. If I{f} is a string, it is treated - as the name of a file to open. - @param sorted: if True, the file will be written with the - names sorted in DNSSEC order from least to greatest. Otherwise - the names will be written in whatever order they happen to have - in the zone's dictionary. - @param relativize: if True, domain names in the output will be - relativized to the zone's origin (if possible). - @type relativize: bool - @param nl: The end of line string. If not specified, the - output will use the platform's native end-of-line marker (i.e. - LF on POSIX, CRLF on Windows, CR on Macintosh). - @type nl: string or None - """ - - if isinstance(f, string_types): - f = open(f, 'wb') - want_close = True - else: - want_close = False - - # must be in this way, f.encoding may contain None, or even attribute - # may not be there - file_enc = getattr(f, 'encoding', None) - if file_enc is None: - file_enc = 'utf-8' - - if nl is None: - nl_b = os.linesep.encode(file_enc) # binary mode, '\n' is not enough - nl = u'\n' - elif isinstance(nl, string_types): - nl_b = nl.encode(file_enc) - else: - nl_b = nl - nl = nl.decode() - - try: - if sorted: - names = list(self.keys()) - names.sort() - else: - names = self.iterkeys() # pylint: disable=dict-iter-method - for n in names: - l = self[n].to_text(n, origin=self.origin, - relativize=relativize) - if isinstance(l, text_type): - l_b = l.encode(file_enc) - else: - l_b = l - l = l.decode() - - try: - f.write(l_b) - f.write(nl_b) - except TypeError: # textual mode - f.write(l) - f.write(nl) - finally: - if want_close: - f.close() - - def to_text(self, sorted=True, relativize=True, nl=None): - """Return a zone's text as though it were written to a file. - - @param sorted: if True, the file will be written with the - names sorted in DNSSEC order from least to greatest. Otherwise - the names will be written in whatever order they happen to have - in the zone's dictionary. - @param relativize: if True, domain names in the output will be - relativized to the zone's origin (if possible). 
- @type relativize: bool - @param nl: The end of line string. If not specified, the - output will use the platform's native end-of-line marker (i.e. - LF on POSIX, CRLF on Windows, CR on Macintosh). - @type nl: string or None - """ - temp_buffer = BytesIO() - self.to_file(temp_buffer, sorted, relativize, nl) - return_value = temp_buffer.getvalue() - temp_buffer.close() - return return_value - - def check_origin(self): - """Do some simple checking of the zone's origin. - - @raises dns.zone.NoSOA: there is no SOA RR - @raises dns.zone.NoNS: there is no NS RRset - @raises KeyError: there is no origin node - """ - if self.relativize: - name = dns.name.empty - else: - name = self.origin - if self.get_rdataset(name, dns.rdatatype.SOA) is None: - raise NoSOA - if self.get_rdataset(name, dns.rdatatype.NS) is None: - raise NoNS - - -class _MasterReader(object): - - """Read a DNS master file - - @ivar tok: The tokenizer - @type tok: dns.tokenizer.Tokenizer object - @ivar last_ttl: The last seen explicit TTL for an RR - @type last_ttl: int - @ivar last_ttl_known: Has last TTL been detected - @type last_ttl_known: bool - @ivar default_ttl: The default TTL from a $TTL directive or SOA RR - @type default_ttl: int - @ivar default_ttl_known: Has default TTL been detected - @type default_ttl_known: bool - @ivar last_name: The last name read - @type last_name: dns.name.Name object - @ivar current_origin: The current origin - @type current_origin: dns.name.Name object - @ivar relativize: should names in the zone be relativized? - @type relativize: bool - @ivar zone: the zone - @type zone: dns.zone.Zone object - @ivar saved_state: saved reader state (used when processing $INCLUDE) - @type saved_state: list of (tokenizer, current_origin, last_name, file, - last_ttl, last_ttl_known, default_ttl, default_ttl_known) tuples. - @ivar current_file: the file object of the $INCLUDed file being parsed - (None if no $INCLUDE is active). - @ivar allow_include: is $INCLUDE allowed? - @type allow_include: bool - @ivar check_origin: should sanity checks of the origin node be done? - The default is True. - @type check_origin: bool - """ - - def __init__(self, tok, origin, rdclass, relativize, zone_factory=Zone, - allow_include=False, check_origin=True): - if isinstance(origin, string_types): - origin = dns.name.from_text(origin) - self.tok = tok - self.current_origin = origin - self.relativize = relativize - self.last_ttl = 0 - self.last_ttl_known = False - self.default_ttl = 0 - self.default_ttl_known = False - self.last_name = self.current_origin - self.zone = zone_factory(origin, rdclass, relativize=relativize) - self.saved_state = [] - self.current_file = None - self.allow_include = allow_include - self.check_origin = check_origin - - def _eat_line(self): - while 1: - token = self.tok.get() - if token.is_eol_or_eof(): - break - - def _rr_line(self): - """Process one line from a DNS master file.""" - # Name - if self.current_origin is None: - raise UnknownOrigin - token = self.tok.get(want_leading=True) - if not token.is_whitespace(): - self.last_name = dns.name.from_text( - token.value, self.current_origin) - else: - token = self.tok.get() - if token.is_eol_or_eof(): - # treat leading WS followed by EOL/EOF as if they were EOL/EOF. 
- return - self.tok.unget(token) - name = self.last_name - if not name.is_subdomain(self.zone.origin): - self._eat_line() - return - if self.relativize: - name = name.relativize(self.zone.origin) - token = self.tok.get() - if not token.is_identifier(): - raise dns.exception.SyntaxError - # TTL - try: - ttl = dns.ttl.from_text(token.value) - self.last_ttl = ttl - self.last_ttl_known = True - token = self.tok.get() - if not token.is_identifier(): - raise dns.exception.SyntaxError - except dns.ttl.BadTTL: - if not (self.last_ttl_known or self.default_ttl_known): - raise dns.exception.SyntaxError("Missing default TTL value") - if self.default_ttl_known: - ttl = self.default_ttl - else: - ttl = self.last_ttl - # Class - try: - rdclass = dns.rdataclass.from_text(token.value) - token = self.tok.get() - if not token.is_identifier(): - raise dns.exception.SyntaxError - except dns.exception.SyntaxError: - raise dns.exception.SyntaxError - except Exception: - rdclass = self.zone.rdclass - if rdclass != self.zone.rdclass: - raise dns.exception.SyntaxError("RR class is not zone's class") - # Type - try: - rdtype = dns.rdatatype.from_text(token.value) - except: - raise dns.exception.SyntaxError( - "unknown rdatatype '%s'" % token.value) - n = self.zone.nodes.get(name) - if n is None: - n = self.zone.node_factory() - self.zone.nodes[name] = n - try: - rd = dns.rdata.from_text(rdclass, rdtype, self.tok, - self.current_origin, False) - except dns.exception.SyntaxError: - # Catch and reraise. - (ty, va) = sys.exc_info()[:2] - raise va - except: - # All exceptions that occur in the processing of rdata - # are treated as syntax errors. This is not strictly - # correct, but it is correct almost all of the time. - # We convert them to syntax errors so that we can emit - # helpful filename:line info. - (ty, va) = sys.exc_info()[:2] - raise dns.exception.SyntaxError( - "caught exception {}: {}".format(str(ty), str(va))) - - if not self.default_ttl_known and isinstance(rd, dns.rdtypes.ANY.SOA.SOA): - # The pre-RFC2308 and pre-BIND9 behavior inherits the zone default - # TTL from the SOA minttl if no $TTL statement is present before the - # SOA is parsed. - self.default_ttl = rd.minimum - self.default_ttl_known = True - - rd.choose_relativity(self.zone.origin, self.relativize) - covers = rd.covers() - rds = n.find_rdataset(rdclass, rdtype, covers, True) - rds.add(rd, ttl) - - def _parse_modify(self, side): - # Here we catch everything in '{' '}' in a group so we can replace it - # with ''. - is_generate1 = re.compile("^.*\$({(\+|-?)(\d+),(\d+),(.)}).*$") - is_generate2 = re.compile("^.*\$({(\+|-?)(\d+)}).*$") - is_generate3 = re.compile("^.*\$({(\+|-?)(\d+),(\d+)}).*$") - # Sometimes there are modifiers in the hostname. These come after - # the dollar sign. They are in the form: ${offset[,width[,base]]}. 
- # Make names - g1 = is_generate1.match(side) - if g1: - mod, sign, offset, width, base = g1.groups() - if sign == '': - sign = '+' - g2 = is_generate2.match(side) - if g2: - mod, sign, offset = g2.groups() - if sign == '': - sign = '+' - width = 0 - base = 'd' - g3 = is_generate3.match(side) - if g3: - mod, sign, offset, width = g1.groups() - if sign == '': - sign = '+' - width = g1.groups()[2] - base = 'd' - - if not (g1 or g2 or g3): - mod = '' - sign = '+' - offset = 0 - width = 0 - base = 'd' - - if base != 'd': - raise NotImplementedError() - - return mod, sign, offset, width, base - - def _generate_line(self): - # range lhs [ttl] [class] type rhs [ comment ] - """Process one line containing the GENERATE statement from a DNS - master file.""" - if self.current_origin is None: - raise UnknownOrigin - - token = self.tok.get() - # Range (required) - try: - start, stop, step = dns.grange.from_text(token.value) - token = self.tok.get() - if not token.is_identifier(): - raise dns.exception.SyntaxError - except: - raise dns.exception.SyntaxError - - # lhs (required) - try: - lhs = token.value - token = self.tok.get() - if not token.is_identifier(): - raise dns.exception.SyntaxError - except: - raise dns.exception.SyntaxError - - # TTL - try: - ttl = dns.ttl.from_text(token.value) - self.last_ttl = ttl - self.last_ttl_known = True - token = self.tok.get() - if not token.is_identifier(): - raise dns.exception.SyntaxError - except dns.ttl.BadTTL: - if not (self.last_ttl_known or self.default_ttl_known): - raise dns.exception.SyntaxError("Missing default TTL value") - if self.default_ttl_known: - ttl = self.default_ttl - else: - ttl = self.last_ttl - # Class - try: - rdclass = dns.rdataclass.from_text(token.value) - token = self.tok.get() - if not token.is_identifier(): - raise dns.exception.SyntaxError - except dns.exception.SyntaxError: - raise dns.exception.SyntaxError - except Exception: - rdclass = self.zone.rdclass - if rdclass != self.zone.rdclass: - raise dns.exception.SyntaxError("RR class is not zone's class") - # Type - try: - rdtype = dns.rdatatype.from_text(token.value) - token = self.tok.get() - if not token.is_identifier(): - raise dns.exception.SyntaxError - except Exception: - raise dns.exception.SyntaxError("unknown rdatatype '%s'" % - token.value) - - # lhs (required) - try: - rhs = token.value - except: - raise dns.exception.SyntaxError - - lmod, lsign, loffset, lwidth, lbase = self._parse_modify(lhs) - rmod, rsign, roffset, rwidth, rbase = self._parse_modify(rhs) - for i in range(start, stop + 1, step): - # +1 because bind is inclusive and python is exclusive - - if lsign == u'+': - lindex = i + int(loffset) - elif lsign == u'-': - lindex = i - int(loffset) - - if rsign == u'-': - rindex = i - int(roffset) - elif rsign == u'+': - rindex = i + int(roffset) - - lzfindex = str(lindex).zfill(int(lwidth)) - rzfindex = str(rindex).zfill(int(rwidth)) - - name = lhs.replace(u'$%s' % (lmod), lzfindex) - rdata = rhs.replace(u'$%s' % (rmod), rzfindex) - - self.last_name = dns.name.from_text(name, self.current_origin) - name = self.last_name - if not name.is_subdomain(self.zone.origin): - self._eat_line() - return - if self.relativize: - name = name.relativize(self.zone.origin) - - n = self.zone.nodes.get(name) - if n is None: - n = self.zone.node_factory() - self.zone.nodes[name] = n - try: - rd = dns.rdata.from_text(rdclass, rdtype, rdata, - self.current_origin, False) - except dns.exception.SyntaxError: - # Catch and reraise. 
- (ty, va) = sys.exc_info()[:2] - raise va - except: - # All exceptions that occur in the processing of rdata - # are treated as syntax errors. This is not strictly - # correct, but it is correct almost all of the time. - # We convert them to syntax errors so that we can emit - # helpful filename:line info. - (ty, va) = sys.exc_info()[:2] - raise dns.exception.SyntaxError("caught exception %s: %s" % - (str(ty), str(va))) - - rd.choose_relativity(self.zone.origin, self.relativize) - covers = rd.covers() - rds = n.find_rdataset(rdclass, rdtype, covers, True) - rds.add(rd, ttl) - - def read(self): - """Read a DNS master file and build a zone object. - - @raises dns.zone.NoSOA: No SOA RR was found at the zone origin - @raises dns.zone.NoNS: No NS RRset was found at the zone origin - """ - - try: - while 1: - token = self.tok.get(True, True) - if token.is_eof(): - if self.current_file is not None: - self.current_file.close() - if len(self.saved_state) > 0: - (self.tok, - self.current_origin, - self.last_name, - self.current_file, - self.last_ttl, - self.last_ttl_known, - self.default_ttl, - self.default_ttl_known) = self.saved_state.pop(-1) - continue - break - elif token.is_eol(): - continue - elif token.is_comment(): - self.tok.get_eol() - continue - elif token.value[0] == u'$': - c = token.value.upper() - if c == u'$TTL': - token = self.tok.get() - if not token.is_identifier(): - raise dns.exception.SyntaxError("bad $TTL") - self.default_ttl = dns.ttl.from_text(token.value) - self.default_ttl_known = True - self.tok.get_eol() - elif c == u'$ORIGIN': - self.current_origin = self.tok.get_name() - self.tok.get_eol() - if self.zone.origin is None: - self.zone.origin = self.current_origin - elif c == u'$INCLUDE' and self.allow_include: - token = self.tok.get() - filename = token.value - token = self.tok.get() - if token.is_identifier(): - new_origin =\ - dns.name.from_text(token.value, - self.current_origin) - self.tok.get_eol() - elif not token.is_eol_or_eof(): - raise dns.exception.SyntaxError( - "bad origin in $INCLUDE") - else: - new_origin = self.current_origin - self.saved_state.append((self.tok, - self.current_origin, - self.last_name, - self.current_file, - self.last_ttl, - self.last_ttl_known, - self.default_ttl, - self.default_ttl_known)) - self.current_file = open(filename, 'r') - self.tok = dns.tokenizer.Tokenizer(self.current_file, - filename) - self.current_origin = new_origin - elif c == u'$GENERATE': - self._generate_line() - else: - raise dns.exception.SyntaxError( - "Unknown master file directive '" + c + "'") - continue - self.tok.unget(token) - self._rr_line() - except dns.exception.SyntaxError as detail: - (filename, line_number) = self.tok.where() - if detail is None: - detail = "syntax error" - raise dns.exception.SyntaxError( - "%s:%d: %s" % (filename, line_number, detail)) - - # Now that we're done reading, do some basic checking of the zone. - if self.check_origin: - self.zone.check_origin() - - -def from_text(text, origin=None, rdclass=dns.rdataclass.IN, - relativize=True, zone_factory=Zone, filename=None, - allow_include=False, check_origin=True): - """Build a zone object from a master file format string. - - @param text: the master file format input - @type text: string. - @param origin: The origin of the zone; if not specified, the first - $ORIGIN statement in the master file will determine the origin of the - zone. - @type origin: dns.name.Name object or string - @param rdclass: The zone's rdata class; the default is class IN. 
- @type rdclass: int - @param relativize: should names be relativized? The default is True - @type relativize: bool - @param zone_factory: The zone factory to use - @type zone_factory: function returning a Zone - @param filename: The filename to emit when describing where an error - occurred; the default is ''. - @type filename: string - @param allow_include: is $INCLUDE allowed? - @type allow_include: bool - @param check_origin: should sanity checks of the origin node be done? - The default is True. - @type check_origin: bool - @raises dns.zone.NoSOA: No SOA RR was found at the zone origin - @raises dns.zone.NoNS: No NS RRset was found at the zone origin - @rtype: dns.zone.Zone object - """ - - # 'text' can also be a file, but we don't publish that fact - # since it's an implementation detail. The official file - # interface is from_file(). - - if filename is None: - filename = '' - tok = dns.tokenizer.Tokenizer(text, filename) - reader = _MasterReader(tok, origin, rdclass, relativize, zone_factory, - allow_include=allow_include, - check_origin=check_origin) - reader.read() - return reader.zone - - -def from_file(f, origin=None, rdclass=dns.rdataclass.IN, - relativize=True, zone_factory=Zone, filename=None, - allow_include=True, check_origin=True): - """Read a master file and build a zone object. - - @param f: file or string. If I{f} is a string, it is treated - as the name of a file to open. - @param origin: The origin of the zone; if not specified, the first - $ORIGIN statement in the master file will determine the origin of the - zone. - @type origin: dns.name.Name object or string - @param rdclass: The zone's rdata class; the default is class IN. - @type rdclass: int - @param relativize: should names be relativized? The default is True - @type relativize: bool - @param zone_factory: The zone factory to use - @type zone_factory: function returning a Zone - @param filename: The filename to emit when describing where an error - occurred; the default is '', or the value of I{f} if I{f} is a - string. - @type filename: string - @param allow_include: is $INCLUDE allowed? - @type allow_include: bool - @param check_origin: should sanity checks of the origin node be done? - The default is True. - @type check_origin: bool - @raises dns.zone.NoSOA: No SOA RR was found at the zone origin - @raises dns.zone.NoNS: No NS RRset was found at the zone origin - @rtype: dns.zone.Zone object - """ - - str_type = string_types - if PY3: - opts = 'r' - else: - opts = 'rU' - - if isinstance(f, str_type): - if filename is None: - filename = f - f = open(f, opts) - want_close = True - else: - if filename is None: - filename = '' - want_close = False - - try: - z = from_text(f, origin, rdclass, relativize, zone_factory, - filename, allow_include, check_origin) - finally: - if want_close: - f.close() - return z - - -def from_xfr(xfr, zone_factory=Zone, relativize=True, check_origin=True): - """Convert the output of a zone transfer generator into a zone object. - - @param xfr: The xfr generator - @type xfr: generator of dns.message.Message objects - @param relativize: should names be relativized? The default is True. - It is essential that the relativize setting matches the one specified - to dns.query.xfr(). - @type relativize: bool - @param check_origin: should sanity checks of the origin node be done? - The default is True. 
- @type check_origin: bool - @raises dns.zone.NoSOA: No SOA RR was found at the zone origin - @raises dns.zone.NoNS: No NS RRset was found at the zone origin - @rtype: dns.zone.Zone object - """ - - z = None - for r in xfr: - if z is None: - if relativize: - origin = r.origin - else: - origin = r.answer[0].name - rdclass = r.answer[0].rdclass - z = zone_factory(origin, rdclass, relativize=relativize) - for rrset in r.answer: - znode = z.nodes.get(rrset.name) - if not znode: - znode = z.node_factory() - z.nodes[rrset.name] = znode - zrds = znode.find_rdataset(rrset.rdclass, rrset.rdtype, - rrset.covers, True) - zrds.update_ttl(rrset.ttl) - for rd in rrset: - rd.choose_relativity(z.origin, relativize) - zrds.add(rd) - if check_origin: - z.check_origin() - return z diff --git a/venv/lib/python3.6/site-packages/dnspython-1.16.0.dist-info/DESCRIPTION.rst b/venv/lib/python3.6/site-packages/dnspython-1.16.0.dist-info/DESCRIPTION.rst deleted file mode 100644 index 615ae68..0000000 --- a/venv/lib/python3.6/site-packages/dnspython-1.16.0.dist-info/DESCRIPTION.rst +++ /dev/null @@ -1,9 +0,0 @@ -dnspython is a DNS toolkit for Python. It supports almost all -record types. It can be used for queries, zone transfers, and dynamic -updates. It supports TSIG authenticated messages and EDNS0. - -dnspython provides both high and low level access to DNS. The high -level classes perform queries for data of a given name, type, and -class, and return an answer set. The low level classes allow -direct manipulation of DNS zones, messages, names, and records. - diff --git a/venv/lib/python3.6/site-packages/dnspython-1.16.0.dist-info/INSTALLER b/venv/lib/python3.6/site-packages/dnspython-1.16.0.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/venv/lib/python3.6/site-packages/dnspython-1.16.0.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/venv/lib/python3.6/site-packages/dnspython-1.16.0.dist-info/LICENSE.txt b/venv/lib/python3.6/site-packages/dnspython-1.16.0.dist-info/LICENSE.txt deleted file mode 100644 index 390a726..0000000 --- a/venv/lib/python3.6/site-packages/dnspython-1.16.0.dist-info/LICENSE.txt +++ /dev/null @@ -1,35 +0,0 @@ -ISC License - -Copyright (C) Dnspython Contributors - -Permission to use, copy, modify, and/or distribute this software for -any purpose with or without fee is hereby granted, provided that the -above copyright notice and this permission notice appear in all -copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL -WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE -AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL -DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR -PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER -TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -PERFORMANCE OF THIS SOFTWARE. - - - -Copyright (C) 2001-2017 Nominum, Inc. -Copyright (C) Google Inc. - -Permission to use, copy, modify, and distribute this software and its -documentation for any purpose with or without fee is hereby granted, -provided that the above copyright notice and this permission notice -appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL NOMINUM BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/venv/lib/python3.6/site-packages/dnspython-1.16.0.dist-info/METADATA b/venv/lib/python3.6/site-packages/dnspython-1.16.0.dist-info/METADATA deleted file mode 100644 index 0dc8f67..0000000 --- a/venv/lib/python3.6/site-packages/dnspython-1.16.0.dist-info/METADATA +++ /dev/null @@ -1,44 +0,0 @@ -Metadata-Version: 2.0 -Name: dnspython -Version: 1.16.0 -Summary: DNS toolkit -Home-page: http://www.dnspython.org -Author: Bob Halley -Author-email: halley@dnspython.org -License: BSD-like -Download-URL: http://www.dnspython.org/kits/1.16.0/dnspython-1.16.0.tar.gz -Description-Content-Type: UNKNOWN -Platform: UNKNOWN -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: Intended Audience :: System Administrators -Classifier: License :: Freeware -Classifier: Operating System :: Microsoft :: Windows :: Windows 95/98/2000 -Classifier: Operating System :: POSIX -Classifier: Programming Language :: Python -Classifier: Topic :: Internet :: Name Service (DNS) -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Provides: dns -Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.* -Provides-Extra: DNSSEC -Requires-Dist: pycryptodome; extra == 'DNSSEC' -Requires-Dist: ecdsa (>=0.13); extra == 'DNSSEC' -Provides-Extra: IDNA -Requires-Dist: idna (>=2.1); extra == 'IDNA' - -dnspython is a DNS toolkit for Python. It supports almost all -record types. It can be used for queries, zone transfers, and dynamic -updates. It supports TSIG authenticated messages and EDNS0. - -dnspython provides both high and low level access to DNS. The high -level classes perform queries for data of a given name, type, and -class, and return an answer set. The low level classes allow -direct manipulation of DNS zones, messages, names, and records. 
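The description above covers both halves of the library. As a rough sketch of what each looks like in use, assuming the dnspython 1.16 release is installed from PyPI as a normal dependency rather than vendored under venv/ as this patch removes; the queried name and the zone data are illustrative, and the resolver call needs network access:

    import dns.resolver
    import dns.zone

    # High-level: ask the configured resolver for the A records of a name.
    answer = dns.resolver.query('example.com', 'A')
    for rdata in answer:
        print(rdata.address)

    # Low-level: build and walk a Zone object directly from master-file text.
    zone = dns.zone.from_text(
        "$TTL 300\n"
        "@ IN SOA ns1 hostmaster 1 7200 900 1209600 86400\n"
        "@ IN NS ns1\n"
        "ns1 IN A 192.0.2.1\n",
        origin='example.com.')
    for name, ttl, rdata in zone.iterate_rdatas('A'):
        print(name, ttl, rdata)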
- diff --git a/venv/lib/python3.6/site-packages/dnspython-1.16.0.dist-info/RECORD b/venv/lib/python3.6/site-packages/dnspython-1.16.0.dist-info/RECORD deleted file mode 100644 index db52be6..0000000 --- a/venv/lib/python3.6/site-packages/dnspython-1.16.0.dist-info/RECORD +++ /dev/null @@ -1,201 +0,0 @@ -dns/__init__.py,sha256=YT8dlV94eA9ycHtSAbLNjvBHJSYl3pM9nPkkR5d7HCw,1404 -dns/_compat.py,sha256=_z_CnqBp0U_8VXGaAEgM6YP0xWsGWC7Xibm3uBlgJpg,1503 -dns/dnssec.py,sha256=MjoQ6ldvWSW3KHBSIO0bxCmsLpLapeYW2T8Rzy2DbI0,16504 -dns/e164.py,sha256=uEHNlLy8p6j4S2WPQdMU9wAM4b2xbprWz_Hsc2EPwO8,3753 -dns/edns.py,sha256=ITjQuaPXdW7ZHlk7qSyHYsFk3Yk1jqagwaia10aQx7g,7424 -dns/entropy.py,sha256=LZRtKxcTTd7Ng4P5m2rBJGz3r_qq0G-JqbqybrbBx8I,4771 -dns/exception.py,sha256=_SYm0BLa0MrfpU2B0jMnRDPdKCVs6jmx8_ZSAcwzAJY,4876 -dns/flags.py,sha256=5nsxoZCLqeNmxsLGIKpJBVk8_hhhgpTleEc-421kh-8,2933 -dns/grange.py,sha256=sfbj8pZPMSIdYVORMrBXPcSXusjwRipOxRgMEoywqTk,2055 -dns/hash.py,sha256=2UG0QEO_5D8NNEvr1YdDXcRQcxmQdzgbNCaXgtKYlY0,1319 -dns/inet.py,sha256=C78xVq0uxIh0PYLH7S48DPVaUeJ1GfF9re1w7BOIu70,3377 -dns/ipv4.py,sha256=fkXKFagx7pLwccrtXA4AF7iQCj_6lSkFYRSTbyC7DNk,2096 -dns/ipv6.py,sha256=Z3O21oNPpah3vRG0l4qQ9fnTrG0dy5DvfPU-GVBb20Q,5527 -dns/message.py,sha256=GRq9c8TsZTOd6Pin2v4wnAFT5N0OZ_daAP_Epx_fvrc,41458 -dns/name.py,sha256=TaQOg32XZ2rJKMpQdt-MZk_Kf5QDwDsv87OIPjIuBrY,31259 -dns/namedict.py,sha256=9_rRigVZkJGyfxEepLTjX-aaNiyP_ZgmW_tlEP4mR-U,3958 -dns/node.py,sha256=lOuOEIBvo8t4XumjYc0DxUPpmjXntbYI9qP4kMuqrCw,6352 -dns/opcode.py,sha256=fV5_ysudkxiUsXRHJPKOnzopShXTzLSNLtb5CauRosA,2832 -dns/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -dns/query.py,sha256=wXvMrhGRmGejSa2OOBZBOpl83WeD3-5D7u_isQCvtaY,23691 -dns/rcode.py,sha256=5t18m2ZaDgrNQsyguKjzFWAOR0oBaHXjujjZUxs1uJ4,3613 -dns/rdata.py,sha256=zcn8qsDK3oZlcnfEGcf275XCwJ1Vh1c49yey8TEfF7M,14451 -dns/rdataclass.py,sha256=E1zIUb8f3BotxiKvrcDZGTLt8jaEJ8WThvTU7_hDv2g,3219 -dns/rdataset.py,sha256=4Nrn5tPsb8dpz78UAej98GDiK6uNeHh0Cqss1p7oxPQ,11463 -dns/rdatatype.py,sha256=qTGY1IPbR8cu4oGcGw0e5HPyXWEluBNcrsLL9navOtA,5917 -dns/renderer.py,sha256=TOrnxiYw91UnPUd4tR1C3OE9C5mVBNsuTlOrD1GeSp4,10826 -dns/resolver.py,sha256=yAXPLBxex3uD0780bZZSeVjBaclnG8BhjLpuwvZ-ikI,50210 -dns/reversename.py,sha256=I0rLFjFF0YXo1S6oZm2_1V_PVJyI_JfLdwv_YLIKpA4,3291 -dns/rrset.py,sha256=MaIKqdUfgo6BBRavfXb5w0M68Or9LtpDn5wdmSf12qc,6159 -dns/set.py,sha256=t3LFOnlRzMVY8jjit7kn4v9dDER1HHB8V1uMnHW2J44,7310 -dns/tokenizer.py,sha256=Z7pUpN__yNfWDhl4IBAQ96BaXOi8sCnOXnVRmaxGwT8,18040 -dns/tsig.py,sha256=GPnq7VBLfLjZrD3Cp7uYBeGjHpMHUfqwsQqKHZ0FIno,7813 -dns/tsigkeyring.py,sha256=HuOyV7UAT2TzcX2Xxi1YFGW7tDR_MlfXgsuzWJBT4A8,1814 -dns/ttl.py,sha256=5sukYG5gdkXgtkRjh8cDAJ0ZthSdR42QsPRshDQJJuc,2349 -dns/update.py,sha256=MOqvO2mNn3SOnTo9acDeTGq-YGRHjw60_FIRG309bGI,10343 -dns/version.py,sha256=iXOyp-lf9A1LC-XfdUU1NbkIKtqV4s61KI5XElbtMRs,1406 -dns/wiredata.py,sha256=RtfOFtbkzmxGUnJ9EuK4fdHU8shJLA46hWWH1y2NTtI,3751 -dns/zone.py,sha256=2alLKjAQ3X5VS22rvauolG0J-251kd5sKJSGzjJiu1M,42381 -dns/rdtypes/__init__.py,sha256=7pq8IJL8HfnyKC0beC-jhh7pKQ0Gr7KFtzGAvUndhRQ,982 -dns/rdtypes/dnskeybase.py,sha256=xqmBGcdg0uFl1FVTOBVtL9LJUip5VoeiBzk2o4cZRXs,4458 -dns/rdtypes/dsbase.py,sha256=2aKIy2azXkEDLAsf-GMXlHvpxtQ5a2FQUhbfT6PKDCg,3157 -dns/rdtypes/euibase.py,sha256=UaY1YOQBPZ3KKMEmFku9JL6S1CyQVJVzeFtJzyrwZHU,2777 -dns/rdtypes/mxbase.py,sha256=Hn9RJqnIz9LAt4kizE9jciw5wFazJs-BOA9MbGVE-SE,3737 -dns/rdtypes/nsbase.py,sha256=lYWBj09-aqMHwAL2tJF_jC5-_QcBS_t9v2o1uvGOzYA,2928 
-dns/rdtypes/txtbase.py,sha256=uai-BeKwCZZpK4IiftS4IckjYIk9T0zxKI5RcNVmCZM,3328 -dns/rdtypes/ANY/AFSDB.py,sha256=TivfOjrAjOCLrli_3xE33ufzbUGug28ZC2fHo6WOups,1925 -dns/rdtypes/ANY/AVC.py,sha256=eiCzYvivS3ecZjVWIW4KaJ_ki3WuNDXQ_Iv_1bnlrT0,1027 -dns/rdtypes/ANY/CAA.py,sha256=eOR3lhOk0PhVjhSwtup4XbstENKM_kn0XkeLRLF-DBw,2699 -dns/rdtypes/ANY/CDNSKEY.py,sha256=_C5m5PHSr3YWXJm-a50yRf11NQsGATjtQBm_9uGarso,1103 -dns/rdtypes/ANY/CDS.py,sha256=RglDWyG-ZQn8NiVhVeI1QuSj8Do5EREf2aclzE-JwGE,952 -dns/rdtypes/ANY/CERT.py,sha256=UGKOYvAtGmGnEDJFKgo1NOihsIyLJohQw4ExG4RHl_8,4028 -dns/rdtypes/ANY/CNAME.py,sha256=9kuO5FBKKIHaAjcvo02_65Sg6dletz2wAWd5qNGaHDw,1161 -dns/rdtypes/ANY/CSYNC.py,sha256=lNl6vrOgCWdrVx7WF0-l_T5_s7zYWI66_3D9VwCma8Q,4721 -dns/rdtypes/ANY/DLV.py,sha256=wM1BUX0ufYMXMOqNyfCTMVDFy5dBvwpqmrJydwEd4vQ,941 -dns/rdtypes/ANY/DNAME.py,sha256=Wf02V8IB-lEIJT2wVqTtvOP5_OCpHowKcG5wpLs-4Pk,1056 -dns/rdtypes/ANY/DNSKEY.py,sha256=gHEWjBjO0zHpz0yamASTfC3U7sUIom58gBRSOuPnxJ8,1101 -dns/rdtypes/ANY/DS.py,sha256=zE7c-rxxwzqnqBDEDotvCrD45piCgg_kTp7DOEKhpIA,950 -dns/rdtypes/ANY/EUI48.py,sha256=bX_FaWgbh96dRb_CNr9Ltc03rPRtheQp99luJJpP-WE,1124 -dns/rdtypes/ANY/EUI64.py,sha256=9FzHF90pc7Ls0RO1jfPFnezfC_5iDjAdZEdSljRqEt0,1134 -dns/rdtypes/ANY/GPOS.py,sha256=0A3uPPrb4FKuUQsKnPPxXcGkfR-STQi4P7ntlVOQi-k,5450 -dns/rdtypes/ANY/HINFO.py,sha256=OrmLq8oD-JxjlgZI5MJFP6OilW7YLLwKkKrGceuAtYQ,2750 -dns/rdtypes/ANY/HIP.py,sha256=qnjxZtJ27bh31o2RLWDdIm3uDis3oirqf5uwCrmiLvM,4221 -dns/rdtypes/ANY/ISDN.py,sha256=0Ysj_ugB1feFvSyjRh82cwnDlmzIECxk9R5HGrF8OcA,3350 -dns/rdtypes/ANY/LOC.py,sha256=ud8bHZ3dBJ4jtE_r8naH1rfKSI3TQnx6txohJMbDTaU,12351 -dns/rdtypes/ANY/MX.py,sha256=DMw5gGSBQcwokT6ScqaXqOuluXg9IebOg-Nyi__RS14,950 -dns/rdtypes/ANY/NS.py,sha256=OcTL4Cg9snF5tOBFcxm-qeB6aII-oXEzsxqJozNxQl4,950 -dns/rdtypes/ANY/NSEC.py,sha256=oYJZLMmj1isl7TOyo72zgkNLiskL1ud6aa52_qJy4CI,4771 -dns/rdtypes/ANY/NSEC3.py,sha256=vZJCQ7OWSgCsOIbOKLie5Q6a5LlfQuKOUj1mBN4As_M,7211 -dns/rdtypes/ANY/NSEC3PARAM.py,sha256=UcqvoUrFzsuu_6Ba6ufpwnqb-hwgnXi43qt69Pa2qGI,3175 -dns/rdtypes/ANY/OPENPGPKEY.py,sha256=eaOu9n_GSC1GBKpYzBshRnDzeObMoZ0G0csFZPvhZPg,2028 -dns/rdtypes/ANY/PTR.py,sha256=bWEntdSYSZHZl8yVOsiCaU1V_JRBOMYjmeRtzfJPUBU,952 -dns/rdtypes/ANY/RP.py,sha256=4xc4eEM-l6IQmKr0dlpzY3mRkZyAe1fUkeRKxd5JIWI,3151 -dns/rdtypes/ANY/RRSIG.py,sha256=PQp-jckD0s9GyI01vi24gwVv6mzDYtKX5I_X8jCi1l8,5740 -dns/rdtypes/ANY/RT.py,sha256=Pb57qTz4O_ZKzP3IBOeii4IX7gGV_DeUZADxh8UuDA4,968 -dns/rdtypes/ANY/SOA.py,sha256=hhcigkLhIMPlgoD9zBYSduKGsHJM4LO7hVjmnXUsfg8,4597 -dns/rdtypes/ANY/SPF.py,sha256=2shb4N_8gRfIy0Yi6WAHSE4kIau_4ca-3fACfIWxeCg,976 -dns/rdtypes/ANY/SSHFP.py,sha256=3uSooQ_wYkXIqi2jYeRkfPedLFdko90vqNexd9Wkyi8,2905 -dns/rdtypes/ANY/TLSA.py,sha256=7d3r3TqKDV9E_ElrfAv_ZIPi67OcXM2hvXRA3DWtom0,3033 -dns/rdtypes/ANY/TXT.py,sha256=6Xb_tt8MRgMv-G6FMBluoxUhMN1G18zrMGB7WvwZIyE,955 -dns/rdtypes/ANY/URI.py,sha256=-eXBeUQDClnaok4stZCyCg9GbkdZaBBqQ4Z5zrgFqmI,2975 -dns/rdtypes/ANY/X25.py,sha256=6IYIyZ2iVEh0fXnRJHHiO0XU-HwJi-eafbjbyU6CGK0,2196 -dns/rdtypes/ANY/__init__.py,sha256=Sn0vG1e7mSZuSfaRKauVLTLnalp8qLkjVE57bJ2F8ls,1362 -dns/rdtypes/CH/A.py,sha256=m1UXsasKInSoeNNmmEl2Tp7B_2nI90PUVu01HzttguM,2797 -dns/rdtypes/CH/__init__.py,sha256=Ef8Z58kdxbLpTqtgP_u3MfVQoqwb3If2T68YEwIMosw,923 -dns/rdtypes/IN/A.py,sha256=QQgxIU06-fPMyO9g_lcitItWMa9HQMxP4hVK3s-X9BY,1921 -dns/rdtypes/IN/AAAA.py,sha256=GUs2fIXxJmNGaOJPdTkbJPoSd4841RE61wuwrDaB1B4,2015 -dns/rdtypes/IN/APL.py,sha256=O-dQdw6sDOYDkP0k34NjO0jGQ802yWULHYVNf0Q_wUE,5344 -dns/rdtypes/IN/DHCID.py,sha256=4J6H3ByUhNYWDD8HqZ3iL-xc0EiNmnBFzrwtB899PL0,2096 
-dns/rdtypes/IN/IPSECKEY.py,sha256=twKeJ3fsWCAvJhwDCuF6TAKLLpQuoq0w6UElX1jk4cs,5758 -dns/rdtypes/IN/KX.py,sha256=vxNd0l6qH5lgxmyLpqB3wuAlBOdoCM1833zvoAgPuv4,958 -dns/rdtypes/IN/NAPTR.py,sha256=KF-NbGiG3156nolAn667sebs7oi8M3B54cNK9qHK1aw,4547 -dns/rdtypes/IN/NSAP.py,sha256=Z0WR3gba4PnOEL7JY-EiLEVOIFG1hUx7PPY446XuwaE,2156 -dns/rdtypes/IN/NSAP_PTR.py,sha256=EH_tKmNfHJqys5qmRv3GwwmRH2ojjGnw1mUgEDXOcaA,970 -dns/rdtypes/IN/PX.py,sha256=ZSsAI9zmQA2Io7Xbp-1cz3YwIIhRjaAsvYhYKH0b8JY,3471 -dns/rdtypes/IN/SRV.py,sha256=1S8PTQWI5WqcLSV4fheW32USi9edkUdkQEamJuF-RTg,3131 -dns/rdtypes/IN/WKS.py,sha256=5yMZhMZ6sxiyzKgeEEL-H-E4kyfnykjjeBCt6SqzEIU,3888 -dns/rdtypes/IN/__init__.py,sha256=mNZ9A4tfstUe-moGqbJUVC0QVih_72I1cyeOe7nyREY,1058 -dnspython-1.16.0.dist-info/DESCRIPTION.rst,sha256=6hmWFB2RbH9czYdOGp9iL1mpM00S6_G0I4kfijc1kwQ,454 -dnspython-1.16.0.dist-info/LICENSE.txt,sha256=w-o_9WVLMpwZ07xfdIGvYjw93tSmFFWFSZ-EOtPXQc0,1526 -dnspython-1.16.0.dist-info/METADATA,sha256=MhWnFzd-dWhj5UKVHxoet9SSCcqgH5JQc6crlSbnfBI,1837 -dnspython-1.16.0.dist-info/RECORD,, -dnspython-1.16.0.dist-info/WHEEL,sha256=kdsN-5OJAZIiHN-iO4Rhl82KyS0bDWf4uBwMbkNafr8,110 -dnspython-1.16.0.dist-info/metadata.json,sha256=tR-xBI8ymSFJzryXXPK8cQaAEdES4COhW5fSlxPtJHE,1469 -dnspython-1.16.0.dist-info/top_level.txt,sha256=pNgzNfVoNIdUseB5acMPCTmUyu6m6aH47olUCAzzWTo,4 -dnspython-1.16.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -dns/rdtypes/IN/__pycache__/NSAP.cpython-36.pyc,, -dns/rdtypes/IN/__pycache__/SRV.cpython-36.pyc,, -dns/rdtypes/IN/__pycache__/IPSECKEY.cpython-36.pyc,, -dns/rdtypes/IN/__pycache__/APL.cpython-36.pyc,, -dns/rdtypes/IN/__pycache__/AAAA.cpython-36.pyc,, -dns/rdtypes/IN/__pycache__/DHCID.cpython-36.pyc,, -dns/rdtypes/IN/__pycache__/KX.cpython-36.pyc,, -dns/rdtypes/IN/__pycache__/NAPTR.cpython-36.pyc,, -dns/rdtypes/IN/__pycache__/NSAP_PTR.cpython-36.pyc,, -dns/rdtypes/IN/__pycache__/WKS.cpython-36.pyc,, -dns/rdtypes/IN/__pycache__/PX.cpython-36.pyc,, -dns/rdtypes/IN/__pycache__/A.cpython-36.pyc,, -dns/rdtypes/IN/__pycache__/__init__.cpython-36.pyc,, -dns/rdtypes/CH/__pycache__/A.cpython-36.pyc,, -dns/rdtypes/CH/__pycache__/__init__.cpython-36.pyc,, -dns/rdtypes/__pycache__/euibase.cpython-36.pyc,, -dns/rdtypes/__pycache__/nsbase.cpython-36.pyc,, -dns/rdtypes/__pycache__/mxbase.cpython-36.pyc,, -dns/rdtypes/__pycache__/dsbase.cpython-36.pyc,, -dns/rdtypes/__pycache__/txtbase.cpython-36.pyc,, -dns/rdtypes/__pycache__/__init__.cpython-36.pyc,, -dns/rdtypes/__pycache__/dnskeybase.cpython-36.pyc,, -dns/rdtypes/ANY/__pycache__/ISDN.cpython-36.pyc,, -dns/rdtypes/ANY/__pycache__/RP.cpython-36.pyc,, -dns/rdtypes/ANY/__pycache__/NS.cpython-36.pyc,, -dns/rdtypes/ANY/__pycache__/URI.cpython-36.pyc,, -dns/rdtypes/ANY/__pycache__/EUI64.cpython-36.pyc,, -dns/rdtypes/ANY/__pycache__/GPOS.cpython-36.pyc,, -dns/rdtypes/ANY/__pycache__/RRSIG.cpython-36.pyc,, -dns/rdtypes/ANY/__pycache__/CNAME.cpython-36.pyc,, -dns/rdtypes/ANY/__pycache__/HIP.cpython-36.pyc,, -dns/rdtypes/ANY/__pycache__/RT.cpython-36.pyc,, -dns/rdtypes/ANY/__pycache__/TXT.cpython-36.pyc,, -dns/rdtypes/ANY/__pycache__/NSEC3PARAM.cpython-36.pyc,, -dns/rdtypes/ANY/__pycache__/NSEC.cpython-36.pyc,, -dns/rdtypes/ANY/__pycache__/X25.cpython-36.pyc,, -dns/rdtypes/ANY/__pycache__/CSYNC.cpython-36.pyc,, -dns/rdtypes/ANY/__pycache__/SPF.cpython-36.pyc,, -dns/rdtypes/ANY/__pycache__/DLV.cpython-36.pyc,, -dns/rdtypes/ANY/__pycache__/CERT.cpython-36.pyc,, -dns/rdtypes/ANY/__pycache__/DNSKEY.cpython-36.pyc,, -dns/rdtypes/ANY/__pycache__/SSHFP.cpython-36.pyc,, 
-dns/rdtypes/ANY/__pycache__/SOA.cpython-36.pyc,, -dns/rdtypes/ANY/__pycache__/EUI48.cpython-36.pyc,, -dns/rdtypes/ANY/__pycache__/TLSA.cpython-36.pyc,, -dns/rdtypes/ANY/__pycache__/AFSDB.cpython-36.pyc,, -dns/rdtypes/ANY/__pycache__/HINFO.cpython-36.pyc,, -dns/rdtypes/ANY/__pycache__/LOC.cpython-36.pyc,, -dns/rdtypes/ANY/__pycache__/CDS.cpython-36.pyc,, -dns/rdtypes/ANY/__pycache__/AVC.cpython-36.pyc,, -dns/rdtypes/ANY/__pycache__/DNAME.cpython-36.pyc,, -dns/rdtypes/ANY/__pycache__/__init__.cpython-36.pyc,, -dns/rdtypes/ANY/__pycache__/CDNSKEY.cpython-36.pyc,, -dns/rdtypes/ANY/__pycache__/NSEC3.cpython-36.pyc,, -dns/rdtypes/ANY/__pycache__/PTR.cpython-36.pyc,, -dns/rdtypes/ANY/__pycache__/CAA.cpython-36.pyc,, -dns/rdtypes/ANY/__pycache__/MX.cpython-36.pyc,, -dns/rdtypes/ANY/__pycache__/DS.cpython-36.pyc,, -dns/rdtypes/ANY/__pycache__/OPENPGPKEY.cpython-36.pyc,, -dns/__pycache__/renderer.cpython-36.pyc,, -dns/__pycache__/wiredata.cpython-36.pyc,, -dns/__pycache__/namedict.cpython-36.pyc,, -dns/__pycache__/edns.cpython-36.pyc,, -dns/__pycache__/update.cpython-36.pyc,, -dns/__pycache__/flags.cpython-36.pyc,, -dns/__pycache__/version.cpython-36.pyc,, -dns/__pycache__/tsigkeyring.cpython-36.pyc,, -dns/__pycache__/e164.cpython-36.pyc,, -dns/__pycache__/ipv4.cpython-36.pyc,, -dns/__pycache__/query.cpython-36.pyc,, -dns/__pycache__/inet.cpython-36.pyc,, -dns/__pycache__/rdata.cpython-36.pyc,, -dns/__pycache__/ipv6.cpython-36.pyc,, -dns/__pycache__/entropy.cpython-36.pyc,, -dns/__pycache__/ttl.cpython-36.pyc,, -dns/__pycache__/message.cpython-36.pyc,, -dns/__pycache__/dnssec.cpython-36.pyc,, -dns/__pycache__/hash.cpython-36.pyc,, -dns/__pycache__/rrset.cpython-36.pyc,, -dns/__pycache__/rdataclass.cpython-36.pyc,, -dns/__pycache__/set.cpython-36.pyc,, -dns/__pycache__/tsig.cpython-36.pyc,, -dns/__pycache__/_compat.cpython-36.pyc,, -dns/__pycache__/opcode.cpython-36.pyc,, -dns/__pycache__/zone.cpython-36.pyc,, -dns/__pycache__/grange.cpython-36.pyc,, -dns/__pycache__/resolver.cpython-36.pyc,, -dns/__pycache__/exception.cpython-36.pyc,, -dns/__pycache__/rcode.cpython-36.pyc,, -dns/__pycache__/rdatatype.cpython-36.pyc,, -dns/__pycache__/tokenizer.cpython-36.pyc,, -dns/__pycache__/reversename.cpython-36.pyc,, -dns/__pycache__/node.cpython-36.pyc,, -dns/__pycache__/__init__.cpython-36.pyc,, -dns/__pycache__/rdataset.cpython-36.pyc,, -dns/__pycache__/name.cpython-36.pyc,, diff --git a/venv/lib/python3.6/site-packages/dnspython-1.16.0.dist-info/WHEEL b/venv/lib/python3.6/site-packages/dnspython-1.16.0.dist-info/WHEEL deleted file mode 100644 index 7332a41..0000000 --- a/venv/lib/python3.6/site-packages/dnspython-1.16.0.dist-info/WHEEL +++ /dev/null @@ -1,6 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.30.0) -Root-Is-Purelib: true -Tag: py2-none-any -Tag: py3-none-any - diff --git a/venv/lib/python3.6/site-packages/dnspython-1.16.0.dist-info/metadata.json b/venv/lib/python3.6/site-packages/dnspython-1.16.0.dist-info/metadata.json deleted file mode 100644 index 893283d..0000000 --- a/venv/lib/python3.6/site-packages/dnspython-1.16.0.dist-info/metadata.json +++ /dev/null @@ -1 +0,0 @@ -{"classifiers": ["Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: Freeware", "Operating System :: Microsoft :: Windows :: Windows 95/98/2000", "Operating System :: POSIX", "Programming Language :: Python", "Topic :: Internet :: Name Service (DNS)", "Topic :: Software Development :: Libraries :: Python Modules", "Programming 
Language :: Python :: 2", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7"], "description_content_type": "UNKNOWN", "download_url": "http://www.dnspython.org/kits/1.16.0/dnspython-1.16.0.tar.gz", "extensions": {"python.details": {"contacts": [{"email": "halley@dnspython.org", "name": "Bob Halley", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst", "license": "LICENSE.txt"}, "project_urls": {"Home": "http://www.dnspython.org"}}}, "extras": ["DNSSEC", "IDNA"], "generator": "bdist_wheel (0.30.0)", "license": "BSD-like", "metadata_version": "2.0", "name": "dnspython", "provides": "dns", "requires_python": ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*", "run_requires": [{"extra": "DNSSEC", "requires": ["ecdsa (>=0.13)", "pycryptodome"]}, {"extra": "IDNA", "requires": ["idna (>=2.1)"]}], "summary": "DNS toolkit", "version": "1.16.0"} \ No newline at end of file diff --git a/venv/lib/python3.6/site-packages/dnspython-1.16.0.dist-info/top_level.txt b/venv/lib/python3.6/site-packages/dnspython-1.16.0.dist-info/top_level.txt deleted file mode 100644 index 2e7065c..0000000 --- a/venv/lib/python3.6/site-packages/dnspython-1.16.0.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -dns diff --git a/venv/lib/python3.6/site-packages/easy-install.pth b/venv/lib/python3.6/site-packages/easy-install.pth deleted file mode 100644 index a5580cc..0000000 --- a/venv/lib/python3.6/site-packages/easy-install.pth +++ /dev/null @@ -1 +0,0 @@ -./pip-10.0.1-py3.6.egg diff --git a/venv/lib/python3.6/site-packages/easy_install.py b/venv/lib/python3.6/site-packages/easy_install.py deleted file mode 100644 index d87e984..0000000 --- a/venv/lib/python3.6/site-packages/easy_install.py +++ /dev/null @@ -1,5 +0,0 @@ -"""Run the EasyInstall command""" - -if __name__ == '__main__': - from setuptools.command.easy_install import main - main() diff --git a/venv/lib/python3.6/site-packages/engineio/__init__.py b/venv/lib/python3.6/site-packages/engineio/__init__.py deleted file mode 100644 index f7b2172..0000000 --- a/venv/lib/python3.6/site-packages/engineio/__init__.py +++ /dev/null @@ -1,25 +0,0 @@ -import sys - -from .client import Client -from .middleware import WSGIApp, Middleware -from .server import Server -if sys.version_info >= (3, 5): # pragma: no cover - from .asyncio_server import AsyncServer - from .asyncio_client import AsyncClient - from .async_drivers.asgi import ASGIApp - try: - from .async_drivers.tornado import get_tornado_handler - except ImportError: - get_tornado_handler = None -else: # pragma: no cover - AsyncServer = None - AsyncClient = None - get_tornado_handler = None - ASGIApp = None - -__version__ = '3.9.3' - -__all__ = ['__version__', 'Server', 'WSGIApp', 'Middleware', 'Client'] -if AsyncServer is not None: # pragma: no cover - __all__ += ['AsyncServer', 'ASGIApp', 'get_tornado_handler', - 'AsyncClient'], diff --git a/venv/lib/python3.6/site-packages/engineio/async_aiohttp.py b/venv/lib/python3.6/site-packages/engineio/async_aiohttp.py deleted file mode 100644 index ad0252d..0000000 --- a/venv/lib/python3.6/site-packages/engineio/async_aiohttp.py +++ /dev/null @@ -1,129 +0,0 @@ -import asyncio -import sys -from urllib.parse import urlsplit - -from aiohttp.web import Response, WebSocketResponse -import six - - -def create_route(app, engineio_server, engineio_endpoint): - """This function sets 
up the engine.io endpoint as a route for the - application. - - Note that both GET and POST requests must be hooked up on the engine.io - endpoint. - """ - app.router.add_get(engineio_endpoint, engineio_server.handle_request) - app.router.add_post(engineio_endpoint, engineio_server.handle_request) - app.router.add_route('OPTIONS', engineio_endpoint, - engineio_server.handle_request) - - -def translate_request(request): - """This function takes the arguments passed to the request handler and - uses them to generate a WSGI compatible environ dictionary. - """ - message = request._message - payload = request._payload - - uri_parts = urlsplit(message.path) - environ = { - 'wsgi.input': payload, - 'wsgi.errors': sys.stderr, - 'wsgi.version': (1, 0), - 'wsgi.async': True, - 'wsgi.multithread': False, - 'wsgi.multiprocess': False, - 'wsgi.run_once': False, - 'SERVER_SOFTWARE': 'aiohttp', - 'REQUEST_METHOD': message.method, - 'QUERY_STRING': uri_parts.query or '', - 'RAW_URI': message.path, - 'SERVER_PROTOCOL': 'HTTP/%s.%s' % message.version, - 'REMOTE_ADDR': '127.0.0.1', - 'REMOTE_PORT': '0', - 'SERVER_NAME': 'aiohttp', - 'SERVER_PORT': '0', - 'aiohttp.request': request - } - - for hdr_name, hdr_value in message.headers.items(): - hdr_name = hdr_name.upper() - if hdr_name == 'CONTENT-TYPE': - environ['CONTENT_TYPE'] = hdr_value - continue - elif hdr_name == 'CONTENT-LENGTH': - environ['CONTENT_LENGTH'] = hdr_value - continue - - key = 'HTTP_%s' % hdr_name.replace('-', '_') - if key in environ: - hdr_value = '%s,%s' % (environ[key], hdr_value) - - environ[key] = hdr_value - - environ['wsgi.url_scheme'] = environ.get('HTTP_X_FORWARDED_PROTO', 'http') - - path_info = uri_parts.path - - environ['PATH_INFO'] = path_info - environ['SCRIPT_NAME'] = '' - - return environ - - -def make_response(status, headers, payload, environ): - """This function generates an appropriate response object for this async - mode. - """ - return Response(body=payload, status=int(status.split()[0]), - headers=headers) - - -class WebSocket(object): # pragma: no cover - """ - This wrapper class provides a aiohttp WebSocket interface that is - somewhat compatible with eventlet's implementation. - """ - def __init__(self, handler): - self.handler = handler - self._sock = None - - async def __call__(self, environ): - request = environ['aiohttp.request'] - self._sock = WebSocketResponse() - await self._sock.prepare(request) - - self.environ = environ - await self.handler(self) - return self._sock - - async def close(self): - await self._sock.close() - - async def send(self, message): - if isinstance(message, bytes): - f = self._sock.send_bytes - else: - f = self._sock.send_str - if asyncio.iscoroutinefunction(f): - await f(message) - else: - f(message) - - async def wait(self): - msg = await self._sock.receive() - if not isinstance(msg.data, six.binary_type) and \ - not isinstance(msg.data, six.text_type): - raise IOError() - return msg.data - - -_async = { - 'asyncio': True, - 'create_route': create_route, - 'translate_request': translate_request, - 'make_response': make_response, - 'websocket': sys.modules[__name__], - 'websocket_class': 'WebSocket' -} diff --git a/venv/lib/python3.6/site-packages/engineio/async_asgi.py b/venv/lib/python3.6/site-packages/engineio/async_asgi.py deleted file mode 100644 index d216f31..0000000 --- a/venv/lib/python3.6/site-packages/engineio/async_asgi.py +++ /dev/null @@ -1,223 +0,0 @@ -import sys - - -class ASGIApp: - """ASGI application middleware for Engine.IO. 
- - This middleware dispatches traffic to an Engine.IO application. It can - also serve a list of static files to the client, or forward unrelated - HTTP traffic to another ASGI application. - - :param engineio_server: The Engine.IO server. Must be an instance of the - ``engineio.AsyncServer`` class. - :param static_files: A dictionary where the keys are URLs that should be - served as static files. For each URL, the value is - a dictionary with ``content_type`` and ``filename`` - keys. This option is intended to be used for serving - client files during development. - :param other_asgi_app: A separate ASGI app that receives all other traffic. - :param engineio_path: The endpoint where the Engine.IO application should - be installed. The default value is appropriate for - most cases. - - Example usage:: - - import engineio - import uvicorn - - eio = engineio.AsyncServer() - app = engineio.ASGIApp(eio, static_files={ - '/': {'content_type': 'text/html', 'filename': 'index.html'}, - '/index.html': {'content_type': 'text/html', - 'filename': 'index.html'}, - }) - uvicorn.run(app, '127.0.0.1', 5000) - """ - def __init__(self, engineio_server, other_asgi_app=None, - static_files=None, engineio_path='engine.io'): - self.engineio_server = engineio_server - self.other_asgi_app = other_asgi_app - self.engineio_path = engineio_path.strip('/') - self.static_files = static_files or {} - - def __call__(self, scope): - if scope['type'] in ['http', 'websocket'] and \ - scope['path'].startswith('/{0}/'.format(self.engineio_path)): - return self.engineio_asgi_app(scope) - elif scope['type'] == 'http' and scope['path'] in self.static_files: - return self.serve_static_file(scope) - elif self.other_asgi_app is not None: - return self.other_asgi_app(scope) - elif scope['type'] == 'lifespan': - return self.lifespan - else: - return self.not_found - - def engineio_asgi_app(self, scope): - async def _app(receive, send): - await self.engineio_server.handle_request(scope, receive, send) - return _app - - def serve_static_file(self, scope): - async def _send_static_file(receive, send): # pragma: no cover - event = await receive() - if event['type'] == 'http.request': - if scope['path'] in self.static_files: - content_type = self.static_files[scope['path']][ - 'content_type'].encode('utf-8') - filename = self.static_files[scope['path']]['filename'] - status_code = 200 - with open(filename, 'rb') as f: - payload = f.read() - else: - content_type = b'text/plain' - status_code = 404 - payload = b'not found' - await send({'type': 'http.response.start', - 'status': status_code, - 'headers': [(b'Content-Type', content_type)]}) - await send({'type': 'http.response.body', - 'body': payload}) - return _send_static_file - - async def lifespan(self, receive, send): - event = await receive() - if event['type'] == 'lifespan.startup': - await send({'type': 'lifespan.startup.complete'}) - elif event['type'] == 'lifespan.shutdown': - await send({'type': 'lifespan.shutdown.complete'}) - - async def not_found(self, receive, send): - """Return a 404 Not Found error to the client.""" - await send({'type': 'http.response.start', - 'status': 404, - 'headers': [(b'Content-Type', b'text/plain')]}) - await send({'type': 'http.response.body', - 'body': b'not found'}) - - -async def translate_request(scope, receive, send): - class AwaitablePayload(object): # pragma: no cover - def __init__(self, payload): - self.payload = payload or b'' - - async def read(self, length=None): - if length is None: - r = self.payload - self.payload = b'' - else: 
- r = self.payload[:length] - self.payload = self.payload[length:] - return r - - event = await receive() - payload = b'' - if event['type'] == 'http.request': - payload += event.get('body') or b'' - while event.get('more_body'): - event = await receive() - if event['type'] == 'http.request': - payload += event.get('body') or b'' - elif event['type'] == 'websocket.connect': - await send({'type': 'websocket.accept'}) - else: - return {} - - raw_uri = scope['path'].encode('utf-8') - if 'query_string' in scope and scope['query_string']: - raw_uri += b'?' + scope['query_string'] - environ = { - 'wsgi.input': AwaitablePayload(payload), - 'wsgi.errors': sys.stderr, - 'wsgi.version': (1, 0), - 'wsgi.async': True, - 'wsgi.multithread': False, - 'wsgi.multiprocess': False, - 'wsgi.run_once': False, - 'SERVER_SOFTWARE': 'asgi', - 'REQUEST_METHOD': scope.get('method', 'GET'), - 'PATH_INFO': scope['path'], - 'QUERY_STRING': scope.get('query_string', b'').decode('utf-8'), - 'RAW_URI': raw_uri.decode('utf-8'), - 'SCRIPT_NAME': '', - 'SERVER_PROTOCOL': 'HTTP/1.1', - 'REMOTE_ADDR': '127.0.0.1', - 'REMOTE_PORT': '0', - 'SERVER_NAME': 'asgi', - 'SERVER_PORT': '0', - 'asgi.receive': receive, - 'asgi.send': send, - } - - for hdr_name, hdr_value in scope['headers']: - hdr_name = hdr_name.upper().decode('utf-8') - hdr_value = hdr_value.decode('utf-8') - if hdr_name == 'CONTENT-TYPE': - environ['CONTENT_TYPE'] = hdr_value - continue - elif hdr_name == 'CONTENT-LENGTH': - environ['CONTENT_LENGTH'] = hdr_value - continue - - key = 'HTTP_%s' % hdr_name.replace('-', '_') - if key in environ: - hdr_value = '%s,%s' % (environ[key], hdr_value) - - environ[key] = hdr_value - - environ['wsgi.url_scheme'] = environ.get('HTTP_X_FORWARDED_PROTO', 'http') - return environ - - -async def make_response(status, headers, payload, environ): - headers = [(h[0].encode('utf-8'), h[1].encode('utf-8')) for h in headers] - await environ['asgi.send']({'type': 'http.response.start', - 'status': int(status.split(' ')[0]), - 'headers': headers}) - await environ['asgi.send']({'type': 'http.response.body', - 'body': payload}) - - -class WebSocket(object): # pragma: no cover - """ - This wrapper class provides an asgi WebSocket interface that is - somewhat compatible with eventlet's implementation. 
- """ - def __init__(self, handler): - self.handler = handler - self.asgi_receive = None - self.asgi_send = None - - async def __call__(self, environ): - self.asgi_receive = environ['asgi.receive'] - self.asgi_send = environ['asgi.send'] - await self.handler(self) - - async def close(self): - await self.asgi_send({'type': 'websocket.close'}) - - async def send(self, message): - msg_bytes = None - msg_text = None - if isinstance(message, bytes): - msg_bytes = message - else: - msg_text = message - await self.asgi_send({'type': 'websocket.send', - 'bytes': msg_bytes, - 'text': msg_text}) - - async def wait(self): - event = await self.asgi_receive() - if event['type'] != 'websocket.receive': - raise IOError() - return event.get('bytes') or event.get('text') - - -_async = { - 'asyncio': True, - 'translate_request': translate_request, - 'make_response': make_response, - 'websocket': sys.modules[__name__], - 'websocket_class': 'WebSocket' -} diff --git a/venv/lib/python3.6/site-packages/engineio/async_drivers/__init__.py b/venv/lib/python3.6/site-packages/engineio/async_drivers/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/venv/lib/python3.6/site-packages/engineio/async_drivers/aiohttp.py b/venv/lib/python3.6/site-packages/engineio/async_drivers/aiohttp.py deleted file mode 100644 index ad69876..0000000 --- a/venv/lib/python3.6/site-packages/engineio/async_drivers/aiohttp.py +++ /dev/null @@ -1,128 +0,0 @@ -import asyncio -import sys -from urllib.parse import urlsplit - -from aiohttp.web import Response, WebSocketResponse -import six - - -def create_route(app, engineio_server, engineio_endpoint): - """This function sets up the engine.io endpoint as a route for the - application. - - Note that both GET and POST requests must be hooked up on the engine.io - endpoint. - """ - app.router.add_get(engineio_endpoint, engineio_server.handle_request) - app.router.add_post(engineio_endpoint, engineio_server.handle_request) - app.router.add_route('OPTIONS', engineio_endpoint, - engineio_server.handle_request) - - -def translate_request(request): - """This function takes the arguments passed to the request handler and - uses them to generate a WSGI compatible environ dictionary. 
- """ - message = request._message - payload = request._payload - - uri_parts = urlsplit(message.path) - environ = { - 'wsgi.input': payload, - 'wsgi.errors': sys.stderr, - 'wsgi.version': (1, 0), - 'wsgi.async': True, - 'wsgi.multithread': False, - 'wsgi.multiprocess': False, - 'wsgi.run_once': False, - 'SERVER_SOFTWARE': 'aiohttp', - 'REQUEST_METHOD': message.method, - 'QUERY_STRING': uri_parts.query or '', - 'RAW_URI': message.path, - 'SERVER_PROTOCOL': 'HTTP/%s.%s' % message.version, - 'REMOTE_ADDR': '127.0.0.1', - 'REMOTE_PORT': '0', - 'SERVER_NAME': 'aiohttp', - 'SERVER_PORT': '0', - 'aiohttp.request': request - } - - for hdr_name, hdr_value in message.headers.items(): - hdr_name = hdr_name.upper() - if hdr_name == 'CONTENT-TYPE': - environ['CONTENT_TYPE'] = hdr_value - continue - elif hdr_name == 'CONTENT-LENGTH': - environ['CONTENT_LENGTH'] = hdr_value - continue - - key = 'HTTP_%s' % hdr_name.replace('-', '_') - if key in environ: - hdr_value = '%s,%s' % (environ[key], hdr_value) - - environ[key] = hdr_value - - environ['wsgi.url_scheme'] = environ.get('HTTP_X_FORWARDED_PROTO', 'http') - - path_info = uri_parts.path - - environ['PATH_INFO'] = path_info - environ['SCRIPT_NAME'] = '' - - return environ - - -def make_response(status, headers, payload, environ): - """This function generates an appropriate response object for this async - mode. - """ - return Response(body=payload, status=int(status.split()[0]), - headers=headers) - - -class WebSocket(object): # pragma: no cover - """ - This wrapper class provides a aiohttp WebSocket interface that is - somewhat compatible with eventlet's implementation. - """ - def __init__(self, handler): - self.handler = handler - self._sock = None - - async def __call__(self, environ): - request = environ['aiohttp.request'] - self._sock = WebSocketResponse() - await self._sock.prepare(request) - - self.environ = environ - await self.handler(self) - return self._sock - - async def close(self): - await self._sock.close() - - async def send(self, message): - if isinstance(message, bytes): - f = self._sock.send_bytes - else: - f = self._sock.send_str - if asyncio.iscoroutinefunction(f): - await f(message) - else: - f(message) - - async def wait(self): - msg = await self._sock.receive() - if not isinstance(msg.data, six.binary_type) and \ - not isinstance(msg.data, six.text_type): - raise IOError() - return msg.data - - -_async = { - 'asyncio': True, - 'create_route': create_route, - 'translate_request': translate_request, - 'make_response': make_response, - 'websocket': WebSocket, -} diff --git a/venv/lib/python3.6/site-packages/engineio/async_drivers/asgi.py b/venv/lib/python3.6/site-packages/engineio/async_drivers/asgi.py deleted file mode 100644 index 9f14ef0..0000000 --- a/venv/lib/python3.6/site-packages/engineio/async_drivers/asgi.py +++ /dev/null @@ -1,214 +0,0 @@ -import os -import sys - -from engineio.static_files import get_static_file - - -class ASGIApp: - """ASGI application middleware for Engine.IO. - - This middleware dispatches traffic to an Engine.IO application. It can - also serve a list of static files to the client, or forward unrelated - HTTP traffic to another ASGI application. - - :param engineio_server: The Engine.IO server. Must be an instance of the - ``engineio.AsyncServer`` class. - :param static_files: A dictionary with static file mapping rules. See the - documentation for details on this argument. - :param other_asgi_app: A separate ASGI app that receives all other traffic. 
- :param engineio_path: The endpoint where the Engine.IO application should - be installed. The default value is appropriate for - most cases. - - Example usage:: - - import engineio - import uvicorn - - eio = engineio.AsyncServer() - app = engineio.ASGIApp(eio, static_files={ - '/': {'content_type': 'text/html', 'filename': 'index.html'}, - '/index.html': {'content_type': 'text/html', - 'filename': 'index.html'}, - }) - uvicorn.run(app, '127.0.0.1', 5000) - """ - def __init__(self, engineio_server, other_asgi_app=None, - static_files=None, engineio_path='engine.io'): - self.engineio_server = engineio_server - self.other_asgi_app = other_asgi_app - self.engineio_path = engineio_path.strip('/') - self.static_files = static_files or {} - - async def __call__(self, scope, receive, send): - if scope['type'] in ['http', 'websocket'] and \ - scope['path'].startswith('/{0}/'.format(self.engineio_path)): - await self.engineio_server.handle_request(scope, receive, send) - else: - static_file = get_static_file(scope['path'], self.static_files) \ - if scope['type'] == 'http' and self.static_files else None - if static_file: - await self.serve_static_file(static_file, receive, send) - elif self.other_asgi_app is not None: - await self.other_asgi_app(scope, receive, send) - elif scope['type'] == 'lifespan': - await self.lifespan(receive, send) - else: - await self.not_found(receive, send) - - async def serve_static_file(self, static_file, receive, - send): # pragma: no cover - event = await receive() - if event['type'] == 'http.request': - if os.path.exists(static_file['filename']): - with open(static_file['filename'], 'rb') as f: - payload = f.read() - await send({'type': 'http.response.start', - 'status': 200, - 'headers': [(b'Content-Type', static_file[ - 'content_type'].encode('utf-8'))]}) - await send({'type': 'http.response.body', - 'body': payload}) - else: - await self.not_found(receive, send) - - async def lifespan(self, receive, send): - event = await receive() - if event['type'] == 'lifespan.startup': - await send({'type': 'lifespan.startup.complete'}) - elif event['type'] == 'lifespan.shutdown': - await send({'type': 'lifespan.shutdown.complete'}) - - async def not_found(self, receive, send): - """Return a 404 Not Found error to the client.""" - await send({'type': 'http.response.start', - 'status': 404, - 'headers': [(b'Content-Type', b'text/plain')]}) - await send({'type': 'http.response.body', - 'body': b'Not Found'}) - - -async def translate_request(scope, receive, send): - class AwaitablePayload(object): # pragma: no cover - def __init__(self, payload): - self.payload = payload or b'' - - async def read(self, length=None): - if length is None: - r = self.payload - self.payload = b'' - else: - r = self.payload[:length] - self.payload = self.payload[length:] - return r - - event = await receive() - payload = b'' - if event['type'] == 'http.request': - payload += event.get('body') or b'' - while event.get('more_body'): - event = await receive() - if event['type'] == 'http.request': - payload += event.get('body') or b'' - elif event['type'] == 'websocket.connect': - await send({'type': 'websocket.accept'}) - else: - return {} - - raw_uri = scope['path'].encode('utf-8') - if 'query_string' in scope and scope['query_string']: - raw_uri += b'?' 
+ scope['query_string'] - environ = { - 'wsgi.input': AwaitablePayload(payload), - 'wsgi.errors': sys.stderr, - 'wsgi.version': (1, 0), - 'wsgi.async': True, - 'wsgi.multithread': False, - 'wsgi.multiprocess': False, - 'wsgi.run_once': False, - 'SERVER_SOFTWARE': 'asgi', - 'REQUEST_METHOD': scope.get('method', 'GET'), - 'PATH_INFO': scope['path'], - 'QUERY_STRING': scope.get('query_string', b'').decode('utf-8'), - 'RAW_URI': raw_uri.decode('utf-8'), - 'SCRIPT_NAME': '', - 'SERVER_PROTOCOL': 'HTTP/1.1', - 'REMOTE_ADDR': '127.0.0.1', - 'REMOTE_PORT': '0', - 'SERVER_NAME': 'asgi', - 'SERVER_PORT': '0', - 'asgi.receive': receive, - 'asgi.send': send, - } - - for hdr_name, hdr_value in scope['headers']: - hdr_name = hdr_name.upper().decode('utf-8') - hdr_value = hdr_value.decode('utf-8') - if hdr_name == 'CONTENT-TYPE': - environ['CONTENT_TYPE'] = hdr_value - continue - elif hdr_name == 'CONTENT-LENGTH': - environ['CONTENT_LENGTH'] = hdr_value - continue - - key = 'HTTP_%s' % hdr_name.replace('-', '_') - if key in environ: - hdr_value = '%s,%s' % (environ[key], hdr_value) - - environ[key] = hdr_value - - environ['wsgi.url_scheme'] = environ.get('HTTP_X_FORWARDED_PROTO', 'http') - return environ - - -async def make_response(status, headers, payload, environ): - headers = [(h[0].encode('utf-8'), h[1].encode('utf-8')) for h in headers] - await environ['asgi.send']({'type': 'http.response.start', - 'status': int(status.split(' ')[0]), - 'headers': headers}) - await environ['asgi.send']({'type': 'http.response.body', - 'body': payload}) - - -class WebSocket(object): # pragma: no cover - """ - This wrapper class provides an asgi WebSocket interface that is - somewhat compatible with eventlet's implementation. - """ - def __init__(self, handler): - self.handler = handler - self.asgi_receive = None - self.asgi_send = None - - async def __call__(self, environ): - self.asgi_receive = environ['asgi.receive'] - self.asgi_send = environ['asgi.send'] - await self.handler(self) - - async def close(self): - await self.asgi_send({'type': 'websocket.close'}) - - async def send(self, message): - msg_bytes = None - msg_text = None - if isinstance(message, bytes): - msg_bytes = message - else: - msg_text = message - await self.asgi_send({'type': 'websocket.send', - 'bytes': msg_bytes, - 'text': msg_text}) - - async def wait(self): - event = await self.asgi_receive() - if event['type'] != 'websocket.receive': - raise IOError() - return event.get('bytes') or event.get('text') - - -_async = { - 'asyncio': True, - 'translate_request': translate_request, - 'make_response': make_response, - 'websocket': WebSocket, -} diff --git a/venv/lib/python3.6/site-packages/engineio/async_drivers/eventlet.py b/venv/lib/python3.6/site-packages/engineio/async_drivers/eventlet.py deleted file mode 100644 index 9be3797..0000000 --- a/venv/lib/python3.6/site-packages/engineio/async_drivers/eventlet.py +++ /dev/null @@ -1,30 +0,0 @@ -from __future__ import absolute_import - -from eventlet.green.threading import Thread, Event -from eventlet import queue -from eventlet import sleep -from eventlet.websocket import WebSocketWSGI as _WebSocketWSGI - - -class WebSocketWSGI(_WebSocketWSGI): - def __init__(self, *args, **kwargs): - super(WebSocketWSGI, self).__init__(*args, **kwargs) - self._sock = None - - def __call__(self, environ, start_response): - if 'eventlet.input' not in environ: - raise RuntimeError('You need to use the eventlet server. 
' - 'See the Deployment section of the ' - 'documentation for more information.') - self._sock = environ['eventlet.input'].get_socket() - return super(WebSocketWSGI, self).__call__(environ, start_response) - - -_async = { - 'thread': Thread, - 'queue': queue.Queue, - 'queue_empty': queue.Empty, - 'event': Event, - 'websocket': WebSocketWSGI, - 'sleep': sleep, -} diff --git a/venv/lib/python3.6/site-packages/engineio/async_drivers/gevent.py b/venv/lib/python3.6/site-packages/engineio/async_drivers/gevent.py deleted file mode 100644 index 024dd0a..0000000 --- a/venv/lib/python3.6/site-packages/engineio/async_drivers/gevent.py +++ /dev/null @@ -1,63 +0,0 @@ -from __future__ import absolute_import - -import gevent -from gevent import queue -from gevent.event import Event -try: - import geventwebsocket # noqa - _websocket_available = True -except ImportError: - _websocket_available = False - - -class Thread(gevent.Greenlet): # pragma: no cover - """ - This wrapper class provides gevent Greenlet interface that is compatible - with the standard library's Thread class. - """ - def __init__(self, target, args=[], kwargs={}): - super(Thread, self).__init__(target, *args, **kwargs) - - def _run(self): - return self.run() - - -class WebSocketWSGI(object): # pragma: no cover - """ - This wrapper class provides a gevent WebSocket interface that is - compatible with eventlet's implementation. - """ - def __init__(self, app): - self.app = app - - def __call__(self, environ, start_response): - if 'wsgi.websocket' not in environ: - raise RuntimeError('You need to use the gevent-websocket server. ' - 'See the Deployment section of the ' - 'documentation for more information.') - self._sock = environ['wsgi.websocket'] - self.environ = environ - self.version = self._sock.version - self.path = self._sock.path - self.origin = self._sock.origin - self.protocol = self._sock.protocol - return self.app(self) - - def close(self): - return self._sock.close() - - def send(self, message): - return self._sock.send(message) - - def wait(self): - return self._sock.receive() - - -_async = { - 'thread': Thread, - 'queue': queue.JoinableQueue, - 'queue_empty': queue.Empty, - 'event': Event, - 'websocket': WebSocketWSGI if _websocket_available else None, - 'sleep': gevent.sleep, -} diff --git a/venv/lib/python3.6/site-packages/engineio/async_drivers/gevent_uwsgi.py b/venv/lib/python3.6/site-packages/engineio/async_drivers/gevent_uwsgi.py deleted file mode 100644 index 07fa2a7..0000000 --- a/venv/lib/python3.6/site-packages/engineio/async_drivers/gevent_uwsgi.py +++ /dev/null @@ -1,156 +0,0 @@ -from __future__ import absolute_import - -import six - -import gevent -from gevent import queue -from gevent.event import Event -import uwsgi -_websocket_available = hasattr(uwsgi, 'websocket_handshake') - - -class Thread(gevent.Greenlet): # pragma: no cover - """ - This wrapper class provides gevent Greenlet interface that is compatible - with the standard library's Thread class. - """ - def __init__(self, target, args=[], kwargs={}): - super(Thread, self).__init__(target, *args, **kwargs) - - def _run(self): - return self.run() - - -class uWSGIWebSocket(object): # pragma: no cover - """ - This wrapper class provides a uWSGI WebSocket interface that is - compatible with eventlet's implementation. 
- """ - def __init__(self, app): - self.app = app - self._sock = None - - def __call__(self, environ, start_response): - self._sock = uwsgi.connection_fd() - self.environ = environ - - uwsgi.websocket_handshake() - - self._req_ctx = None - if hasattr(uwsgi, 'request_context'): - # uWSGI >= 2.1.x with support for api access across-greenlets - self._req_ctx = uwsgi.request_context() - else: - # use event and queue for sending messages - from gevent.event import Event - from gevent.queue import Queue - from gevent.select import select - self._event = Event() - self._send_queue = Queue() - - # spawn a select greenlet - def select_greenlet_runner(fd, event): - """Sets event when data becomes available to read on fd.""" - while True: - event.set() - try: - select([fd], [], [])[0] - except ValueError: - break - self._select_greenlet = gevent.spawn( - select_greenlet_runner, - self._sock, - self._event) - - self.app(self) - - def close(self): - """Disconnects uWSGI from the client.""" - uwsgi.disconnect() - if self._req_ctx is None: - # better kill it here in case wait() is not called again - self._select_greenlet.kill() - self._event.set() - - def _send(self, msg): - """Transmits message either in binary or UTF-8 text mode, - depending on its type.""" - if isinstance(msg, six.binary_type): - method = uwsgi.websocket_send_binary - else: - method = uwsgi.websocket_send - if self._req_ctx is not None: - method(msg, request_context=self._req_ctx) - else: - method(msg) - - def _decode_received(self, msg): - """Returns either bytes or str, depending on message type.""" - if not isinstance(msg, six.binary_type): - # already decoded - do nothing - return msg - # only decode from utf-8 if message is not binary data - type = six.byte2int(msg[0:1]) - if type >= 48: # no binary - return msg.decode('utf-8') - # binary message, don't try to decode - return msg - - def send(self, msg): - """Queues a message for sending. Real transmission is done in - wait method. - Sends directly if uWSGI version is new enough.""" - if self._req_ctx is not None: - self._send(msg) - else: - self._send_queue.put(msg) - self._event.set() - - def wait(self): - """Waits and returns received messages. - If running in compatibility mode for older uWSGI versions, - it also sends messages that have been queued by send(). - A return value of None means that connection was closed. - This must be called repeatedly. 
For uWSGI < 2.1.x it must - be called from the main greenlet.""" - while True: - if self._req_ctx is not None: - try: - msg = uwsgi.websocket_recv(request_context=self._req_ctx) - except IOError: # connection closed - return None - return self._decode_received(msg) - else: - # we wake up at least every 3 seconds to let uWSGI - # do its ping/ponging - event_set = self._event.wait(timeout=3) - if event_set: - self._event.clear() - # maybe there is something to send - msgs = [] - while True: - try: - msgs.append(self._send_queue.get(block=False)) - except gevent.queue.Empty: - break - for msg in msgs: - self._send(msg) - # maybe there is something to receive, if not, at least - # ensure uWSGI does its ping/ponging - try: - msg = uwsgi.websocket_recv_nb() - except IOError: # connection closed - self._select_greenlet.kill() - return None - if msg: # message available - return self._decode_received(msg) - - -_async = { - 'thread': Thread, - 'queue': queue.JoinableQueue, - 'queue_empty': queue.Empty, - 'event': Event, - 'websocket': uWSGIWebSocket if _websocket_available else None, - 'sleep': gevent.sleep, -} diff --git a/venv/lib/python3.6/site-packages/engineio/async_drivers/sanic.py b/venv/lib/python3.6/site-packages/engineio/async_drivers/sanic.py deleted file mode 100644 index 6929654..0000000 --- a/venv/lib/python3.6/site-packages/engineio/async_drivers/sanic.py +++ /dev/null @@ -1,144 +0,0 @@ -import sys -from urllib.parse import urlsplit - -from sanic.response import HTTPResponse -try: - from sanic.websocket import WebSocketProtocol -except ImportError: - # the installed version of sanic does not have websocket support - WebSocketProtocol = None -import six - - -def create_route(app, engineio_server, engineio_endpoint): - """This function sets up the engine.io endpoint as a route for the - application. - - Note that both GET and POST requests must be hooked up on the engine.io - endpoint. - """ - app.add_route(engineio_server.handle_request, engineio_endpoint, - methods=['GET', 'POST', 'OPTIONS']) - try: - app.enable_websocket() - except AttributeError: - # ignore, this version does not support websocket - pass - - -def translate_request(request): - """This function takes the arguments passed to the request handler and - uses them to generate a WSGI compatible environ dictionary. 
- """ - class AwaitablePayload(object): - def __init__(self, payload): - self.payload = payload or b'' - - async def read(self, length=None): - if length is None: - r = self.payload - self.payload = b'' - else: - r = self.payload[:length] - self.payload = self.payload[length:] - return r - - uri_parts = urlsplit(request.url) - environ = { - 'wsgi.input': AwaitablePayload(request.body), - 'wsgi.errors': sys.stderr, - 'wsgi.version': (1, 0), - 'wsgi.async': True, - 'wsgi.multithread': False, - 'wsgi.multiprocess': False, - 'wsgi.run_once': False, - 'SERVER_SOFTWARE': 'sanic', - 'REQUEST_METHOD': request.method, - 'QUERY_STRING': uri_parts.query or '', - 'RAW_URI': request.url, - 'SERVER_PROTOCOL': 'HTTP/' + request.version, - 'REMOTE_ADDR': '127.0.0.1', - 'REMOTE_PORT': '0', - 'SERVER_NAME': 'sanic', - 'SERVER_PORT': '0', - 'sanic.request': request - } - - for hdr_name, hdr_value in request.headers.items(): - hdr_name = hdr_name.upper() - if hdr_name == 'CONTENT-TYPE': - environ['CONTENT_TYPE'] = hdr_value - continue - elif hdr_name == 'CONTENT-LENGTH': - environ['CONTENT_LENGTH'] = hdr_value - continue - - key = 'HTTP_%s' % hdr_name.replace('-', '_') - if key in environ: - hdr_value = '%s,%s' % (environ[key], hdr_value) - - environ[key] = hdr_value - - environ['wsgi.url_scheme'] = environ.get('HTTP_X_FORWARDED_PROTO', 'http') - - path_info = uri_parts.path - - environ['PATH_INFO'] = path_info - environ['SCRIPT_NAME'] = '' - - return environ - - -def make_response(status, headers, payload, environ): - """This function generates an appropriate response object for this async - mode. - """ - headers_dict = {} - content_type = None - for h in headers: - if h[0].lower() == 'content-type': - content_type = h[1] - else: - headers_dict[h[0]] = h[1] - return HTTPResponse(body_bytes=payload, content_type=content_type, - status=int(status.split()[0]), headers=headers_dict) - - -class WebSocket(object): # pragma: no cover - """ - This wrapper class provides a sanic WebSocket interface that is - somewhat compatible with eventlet's implementation. 
- """ - def __init__(self, handler): - self.handler = handler - self._sock = None - - async def __call__(self, environ): - request = environ['sanic.request'] - protocol = request.transport.get_protocol() - self._sock = await protocol.websocket_handshake(request) - - self.environ = environ - await self.handler(self) - - async def close(self): - await self._sock.close() - - async def send(self, message): - await self._sock.send(message) - - async def wait(self): - data = await self._sock.recv() - if not isinstance(data, six.binary_type) and \ - not isinstance(data, six.text_type): - raise IOError() - return data - - -_async = { - 'asyncio': True, - 'create_route': create_route, - 'translate_request': translate_request, - 'make_response': make_response, - 'websocket': WebSocket if WebSocketProtocol else None, -} diff --git a/venv/lib/python3.6/site-packages/engineio/async_drivers/threading.py b/venv/lib/python3.6/site-packages/engineio/async_drivers/threading.py deleted file mode 100644 index 9b53756..0000000 --- a/venv/lib/python3.6/site-packages/engineio/async_drivers/threading.py +++ /dev/null @@ -1,17 +0,0 @@ -from __future__ import absolute_import -import threading -import time - -try: - import queue -except ImportError: # pragma: no cover - import Queue as queue - -_async = { - 'thread': threading.Thread, - 'queue': queue.Queue, - 'queue_empty': queue.Empty, - 'event': threading.Event, - 'websocket': None, - 'sleep': time.sleep, -} diff --git a/venv/lib/python3.6/site-packages/engineio/async_drivers/tornado.py b/venv/lib/python3.6/site-packages/engineio/async_drivers/tornado.py deleted file mode 100644 index adfe18f..0000000 --- a/venv/lib/python3.6/site-packages/engineio/async_drivers/tornado.py +++ /dev/null @@ -1,184 +0,0 @@ -import asyncio -import sys -from urllib.parse import urlsplit -from .. 
import exceptions - -import tornado.web -import tornado.websocket -import six - - -def get_tornado_handler(engineio_server): - class Handler(tornado.websocket.WebSocketHandler): # pragma: no cover - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - if isinstance(engineio_server.cors_allowed_origins, - six.string_types): - if engineio_server.cors_allowed_origins == '*': - self.allowed_origins = None - else: - self.allowed_origins = [ - engineio_server.cors_allowed_origins] - else: - self.allowed_origins = engineio_server.cors_allowed_origins - self.receive_queue = asyncio.Queue() - - async def get(self, *args, **kwargs): - if self.request.headers.get('Upgrade', '').lower() == 'websocket': - ret = super().get(*args, **kwargs) - if asyncio.iscoroutine(ret): - await ret - else: - await engineio_server.handle_request(self) - - async def open(self, *args, **kwargs): - # this is the handler for the websocket request - asyncio.ensure_future(engineio_server.handle_request(self)) - - async def post(self, *args, **kwargs): - await engineio_server.handle_request(self) - - async def options(self, *args, **kwargs): - await engineio_server.handle_request(self) - - async def on_message(self, message): - await self.receive_queue.put(message) - - async def get_next_message(self): - return await self.receive_queue.get() - - def on_close(self): - self.receive_queue.put_nowait(None) - - def check_origin(self, origin): - if self.allowed_origins is None or origin in self.allowed_origins: - return True - return super().check_origin(origin) - - def get_compression_options(self): - # enable compression - return {} - - return Handler - - -def translate_request(handler): - """This function takes the arguments passed to the request handler and - uses them to generate a WSGI compatible environ dictionary. - """ - class AwaitablePayload(object): - def __init__(self, payload): - self.payload = payload or b'' - - async def read(self, length=None): - if length is None: - r = self.payload - self.payload = b'' - else: - r = self.payload[:length] - self.payload = self.payload[length:] - return r - - payload = handler.request.body - - uri_parts = urlsplit(handler.request.path) - full_uri = handler.request.path - if handler.request.query: # pragma: no cover - full_uri += '?' 
+ handler.request.query - environ = { - 'wsgi.input': AwaitablePayload(payload), - 'wsgi.errors': sys.stderr, - 'wsgi.version': (1, 0), - 'wsgi.async': True, - 'wsgi.multithread': False, - 'wsgi.multiprocess': False, - 'wsgi.run_once': False, - 'SERVER_SOFTWARE': 'aiohttp', - 'REQUEST_METHOD': handler.request.method, - 'QUERY_STRING': handler.request.query or '', - 'RAW_URI': full_uri, - 'SERVER_PROTOCOL': 'HTTP/%s' % handler.request.version, - 'REMOTE_ADDR': '127.0.0.1', - 'REMOTE_PORT': '0', - 'SERVER_NAME': 'aiohttp', - 'SERVER_PORT': '0', - 'tornado.handler': handler - } - - for hdr_name, hdr_value in handler.request.headers.items(): - hdr_name = hdr_name.upper() - if hdr_name == 'CONTENT-TYPE': - environ['CONTENT_TYPE'] = hdr_value - continue - elif hdr_name == 'CONTENT-LENGTH': - environ['CONTENT_LENGTH'] = hdr_value - continue - - key = 'HTTP_%s' % hdr_name.replace('-', '_') - environ[key] = hdr_value - - environ['wsgi.url_scheme'] = environ.get('HTTP_X_FORWARDED_PROTO', 'http') - - path_info = uri_parts.path - - environ['PATH_INFO'] = path_info - environ['SCRIPT_NAME'] = '' - - return environ - - -def make_response(status, headers, payload, environ): - """This function generates an appropriate response object for this async - mode. - """ - tornado_handler = environ['tornado.handler'] - try: - tornado_handler.set_status(int(status.split()[0])) - except RuntimeError: # pragma: no cover - # for websocket connections Tornado does not accept a response, since - # it already emitted the 101 status code - return - for header, value in headers: - tornado_handler.set_header(header, value) - tornado_handler.write(payload) - tornado_handler.finish() - - -class WebSocket(object): # pragma: no cover - """ - This wrapper class provides a tornado WebSocket interface that is - somewhat compatible with eventlet's implementation. - """ - def __init__(self, handler): - self.handler = handler - self.tornado_handler = None - - async def __call__(self, environ): - self.tornado_handler = environ['tornado.handler'] - self.environ = environ - await self.handler(self) - - async def close(self): - self.tornado_handler.close() - - async def send(self, message): - try: - self.tornado_handler.write_message( - message, binary=isinstance(message, bytes)) - except tornado.websocket.WebSocketClosedError: - raise exceptions.EngineIOError() - - async def wait(self): - msg = await self.tornado_handler.get_next_message() - if not isinstance(msg, six.binary_type) and \ - not isinstance(msg, six.text_type): - raise IOError() - return msg - - -_async = { - 'asyncio': True, - 'translate_request': translate_request, - 'make_response': make_response, - 'websocket': WebSocket, -} diff --git a/venv/lib/python3.6/site-packages/engineio/async_eventlet.py b/venv/lib/python3.6/site-packages/engineio/async_eventlet.py deleted file mode 100644 index 042a674..0000000 --- a/venv/lib/python3.6/site-packages/engineio/async_eventlet.py +++ /dev/null @@ -1,30 +0,0 @@ -import importlib -import sys - -from eventlet import sleep -from eventlet.websocket import WebSocketWSGI as _WebSocketWSGI - - -class WebSocketWSGI(_WebSocketWSGI): - def __init__(self, *args, **kwargs): - super(WebSocketWSGI, self).__init__(*args, **kwargs) - self._sock = None - - def __call__(self, environ, start_response): - if 'eventlet.input' not in environ: - raise RuntimeError('You need to use the eventlet server. 
' - 'See the Deployment section of the ' - 'documentation for more information.') - self._sock = environ['eventlet.input'].get_socket() - return super(WebSocketWSGI, self).__call__(environ, start_response) - - -_async = { - 'threading': importlib.import_module('eventlet.green.threading'), - 'thread_class': 'Thread', - 'queue': importlib.import_module('eventlet.queue'), - 'queue_class': 'Queue', - 'websocket': sys.modules[__name__], - 'websocket_class': 'WebSocketWSGI', - 'sleep': sleep -} diff --git a/venv/lib/python3.6/site-packages/engineio/async_gevent.py b/venv/lib/python3.6/site-packages/engineio/async_gevent.py deleted file mode 100644 index 34a9e43..0000000 --- a/venv/lib/python3.6/site-packages/engineio/async_gevent.py +++ /dev/null @@ -1,63 +0,0 @@ -import importlib -import sys - -import gevent -try: - import geventwebsocket # noqa - _websocket_available = True -except ImportError: - _websocket_available = False - - -class Thread(gevent.Greenlet): # pragma: no cover - """ - This wrapper class provides gevent Greenlet interface that is compatible - with the standard library's Thread class. - """ - def __init__(self, target, args=[], kwargs={}): - super(Thread, self).__init__(target, *args, **kwargs) - - def _run(self): - return self.run() - - -class WebSocketWSGI(object): # pragma: no cover - """ - This wrapper class provides a gevent WebSocket interface that is - compatible with eventlet's implementation. - """ - def __init__(self, app): - self.app = app - - def __call__(self, environ, start_response): - if 'wsgi.websocket' not in environ: - raise RuntimeError('You need to use the gevent-websocket server. ' - 'See the Deployment section of the ' - 'documentation for more information.') - self._sock = environ['wsgi.websocket'] - self.environ = environ - self.version = self._sock.version - self.path = self._sock.path - self.origin = self._sock.origin - self.protocol = self._sock.protocol - return self.app(self) - - def close(self): - return self._sock.close() - - def send(self, message): - return self._sock.send(message) - - def wait(self): - return self._sock.receive() - - -_async = { - 'threading': sys.modules[__name__], - 'thread_class': 'Thread', - 'queue': importlib.import_module('gevent.queue'), - 'queue_class': 'JoinableQueue', - 'websocket': sys.modules[__name__] if _websocket_available else None, - 'websocket_class': 'WebSocketWSGI' if _websocket_available else None, - 'sleep': gevent.sleep -} diff --git a/venv/lib/python3.6/site-packages/engineio/async_gevent_uwsgi.py b/venv/lib/python3.6/site-packages/engineio/async_gevent_uwsgi.py deleted file mode 100644 index d2d6983..0000000 --- a/venv/lib/python3.6/site-packages/engineio/async_gevent_uwsgi.py +++ /dev/null @@ -1,155 +0,0 @@ -import importlib -import sys -import six - -import gevent -import uwsgi -_websocket_available = hasattr(uwsgi, 'websocket_handshake') - - -class Thread(gevent.Greenlet): # pragma: no cover - """ - This wrapper class provides gevent Greenlet interface that is compatible - with the standard library's Thread class. - """ - def __init__(self, target, args=[], kwargs={}): - super(Thread, self).__init__(target, *args, **kwargs) - - def _run(self): - return self.run() - - -class uWSGIWebSocket(object): # pragma: no cover - """ - This wrapper class provides a uWSGI WebSocket interface that is - compatible with eventlet's implementation. 
- """ - def __init__(self, app): - self.app = app - self._sock = None - - def __call__(self, environ, start_response): - self._sock = uwsgi.connection_fd() - self.environ = environ - - uwsgi.websocket_handshake() - - self._req_ctx = None - if hasattr(uwsgi, 'request_context'): - # uWSGI >= 2.1.x with support for api access across-greenlets - self._req_ctx = uwsgi.request_context() - else: - # use event and queue for sending messages - from gevent.event import Event - from gevent.queue import Queue - from gevent.select import select - self._event = Event() - self._send_queue = Queue() - - # spawn a select greenlet - def select_greenlet_runner(fd, event): - """Sets event when data becomes available to read on fd.""" - while True: - event.set() - try: - select([fd], [], [])[0] - except ValueError: - break - self._select_greenlet = gevent.spawn( - select_greenlet_runner, - self._sock, - self._event) - - self.app(self) - - def close(self): - """Disconnects uWSGI from the client.""" - uwsgi.disconnect() - if self._req_ctx is None: - # better kill it here in case wait() is not called again - self._select_greenlet.kill() - self._event.set() - - def _send(self, msg): - """Transmits message either in binary or UTF-8 text mode, - depending on its type.""" - if isinstance(msg, six.binary_type): - method = uwsgi.websocket_send_binary - else: - method = uwsgi.websocket_send - if self._req_ctx is not None: - method(msg, request_context=self._req_ctx) - else: - method(msg) - - def _decode_received(self, msg): - """Returns either bytes or str, depending on message type.""" - if not isinstance(msg, six.binary_type): - # already decoded - do nothing - return msg - # only decode from utf-8 if message is not binary data - type = six.byte2int(msg[0:1]) - if type >= 48: # no binary - return msg.decode('utf-8') - # binary message, don't try to decode - return msg - - def send(self, msg): - """Queues a message for sending. Real transmission is done in - wait method. - Sends directly if uWSGI version is new enough.""" - if self._req_ctx is not None: - self._send(msg) - else: - self._send_queue.put(msg) - self._event.set() - - def wait(self): - """Waits and returns received messages. - If running in compatibility mode for older uWSGI versions, - it also sends messages that have been queued by send(). - A return value of None means that connection was closed. - This must be called repeatedly. 
For uWSGI < 2.1.x it must - be called from the main greenlet.""" - while True: - if self._req_ctx is not None: - try: - msg = uwsgi.websocket_recv(request_context=self._req_ctx) - except IOError: # connection closed - return None - return self._decode_received(msg) - else: - # we wake up at least every 3 seconds to let uWSGI - # do its ping/ponging - event_set = self._event.wait(timeout=3) - if event_set: - self._event.clear() - # maybe there is something to send - msgs = [] - while True: - try: - msgs.append(self._send_queue.get(block=False)) - except gevent.queue.Empty: - break - for msg in msgs: - self._send(msg) - # maybe there is something to receive, if not, at least - # ensure uWSGI does its ping/ponging - try: - msg = uwsgi.websocket_recv_nb() - except IOError: # connection closed - self._select_greenlet.kill() - return None - if msg: # message available - return self._decode_received(msg) - - -_async = { - 'threading': sys.modules[__name__], - 'thread_class': 'Thread', - 'queue': importlib.import_module('gevent.queue'), - 'queue_class': 'JoinableQueue', - 'websocket': sys.modules[__name__] if _websocket_available else None, - 'websocket_class': 'uWSGIWebSocket' if _websocket_available else None, - 'sleep': gevent.sleep -} diff --git a/venv/lib/python3.6/site-packages/engineio/async_sanic.py b/venv/lib/python3.6/site-packages/engineio/async_sanic.py deleted file mode 100644 index d150aac..0000000 --- a/venv/lib/python3.6/site-packages/engineio/async_sanic.py +++ /dev/null @@ -1,145 +0,0 @@ -import sys -from urllib.parse import urlsplit - -from sanic.response import HTTPResponse -try: - from sanic.websocket import WebSocketProtocol -except ImportError: - # the installed version of sanic does not have websocket support - WebSocketProtocol = None -import six - - -def create_route(app, engineio_server, engineio_endpoint): - """This function sets up the engine.io endpoint as a route for the - application. - - Note that both GET and POST requests must be hooked up on the engine.io - endpoint. - """ - app.add_route(engineio_server.handle_request, engineio_endpoint, - methods=['GET', 'POST', 'OPTIONS']) - try: - app.enable_websocket() - except AttributeError: - # ignore, this version does not support websocket - pass - - -def translate_request(request): - """This function takes the arguments passed to the request handler and - uses them to generate a WSGI compatible environ dictionary. 
- """ - class AwaitablePayload(object): - def __init__(self, payload): - self.payload = payload or b'' - - async def read(self, length=None): - if length is None: - r = self.payload - self.payload = b'' - else: - r = self.payload[:length] - self.payload = self.payload[length:] - return r - - uri_parts = urlsplit(request.url) - environ = { - 'wsgi.input': AwaitablePayload(request.body), - 'wsgi.errors': sys.stderr, - 'wsgi.version': (1, 0), - 'wsgi.async': True, - 'wsgi.multithread': False, - 'wsgi.multiprocess': False, - 'wsgi.run_once': False, - 'SERVER_SOFTWARE': 'sanic', - 'REQUEST_METHOD': request.method, - 'QUERY_STRING': uri_parts.query or '', - 'RAW_URI': request.url, - 'SERVER_PROTOCOL': 'HTTP/' + request.version, - 'REMOTE_ADDR': '127.0.0.1', - 'REMOTE_PORT': '0', - 'SERVER_NAME': 'sanic', - 'SERVER_PORT': '0', - 'sanic.request': request - } - - for hdr_name, hdr_value in request.headers.items(): - hdr_name = hdr_name.upper() - if hdr_name == 'CONTENT-TYPE': - environ['CONTENT_TYPE'] = hdr_value - continue - elif hdr_name == 'CONTENT-LENGTH': - environ['CONTENT_LENGTH'] = hdr_value - continue - - key = 'HTTP_%s' % hdr_name.replace('-', '_') - if key in environ: - hdr_value = '%s,%s' % (environ[key], hdr_value) - - environ[key] = hdr_value - - environ['wsgi.url_scheme'] = environ.get('HTTP_X_FORWARDED_PROTO', 'http') - - path_info = uri_parts.path - - environ['PATH_INFO'] = path_info - environ['SCRIPT_NAME'] = '' - - return environ - - -def make_response(status, headers, payload, environ): - """This function generates an appropriate response object for this async - mode. - """ - headers_dict = {} - content_type = None - for h in headers: - if h[0].lower() == 'content-type': - content_type = h[1] - else: - headers_dict[h[0]] = h[1] - return HTTPResponse(body_bytes=payload, content_type=content_type, - status=int(status.split()[0]), headers=headers_dict) - - -class WebSocket(object): # pragma: no cover - """ - This wrapper class provides a sanic WebSocket interface that is - somewhat compatible with eventlet's implementation. 
- """ - def __init__(self, handler): - self.handler = handler - self._sock = None - - async def __call__(self, environ): - request = environ['sanic.request'] - protocol = request.transport.get_protocol() - self._sock = await protocol.websocket_handshake(request) - - self.environ = environ - await self.handler(self) - - async def close(self): - await self._sock.close() - - async def send(self, message): - await self._sock.send(message) - - async def wait(self): - data = await self._sock.recv() - if not isinstance(data, six.binary_type) and \ - not isinstance(data, six.text_type): - raise IOError() - return data - - -_async = { - 'asyncio': True, - 'create_route': create_route, - 'translate_request': translate_request, - 'make_response': make_response, - 'websocket': sys.modules[__name__] if WebSocketProtocol else None, - 'websocket_class': 'WebSocket' if WebSocketProtocol else None -} diff --git a/venv/lib/python3.6/site-packages/engineio/async_threading.py b/venv/lib/python3.6/site-packages/engineio/async_threading.py deleted file mode 100644 index fb458b0..0000000 --- a/venv/lib/python3.6/site-packages/engineio/async_threading.py +++ /dev/null @@ -1,17 +0,0 @@ -import importlib -import time - -try: - queue = importlib.import_module('queue') -except ImportError: # pragma: no cover - queue = importlib.import_module('Queue') # pragma: no cover - -_async = { - 'threading': importlib.import_module('threading'), - 'thread_class': 'Thread', - 'queue': queue, - 'queue_class': 'Queue', - 'websocket': None, - 'websocket_class': None, - 'sleep': time.sleep -} diff --git a/venv/lib/python3.6/site-packages/engineio/async_tornado.py b/venv/lib/python3.6/site-packages/engineio/async_tornado.py deleted file mode 100644 index 4b4f9c7..0000000 --- a/venv/lib/python3.6/site-packages/engineio/async_tornado.py +++ /dev/null @@ -1,154 +0,0 @@ -import asyncio -import sys -from urllib.parse import urlsplit - -try: - import tornado.web - import tornado.websocket -except ImportError: # pragma: no cover - pass -import six - - -def get_tornado_handler(engineio_server): - class Handler(tornado.websocket.WebSocketHandler): # pragma: no cover - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - self.receive_queue = asyncio.Queue() - - async def get(self): - if self.request.headers.get('Upgrade', '').lower() == 'websocket': - super().get() - await engineio_server.handle_request(self) - - async def post(self): - await engineio_server.handle_request(self) - - async def options(self): - await engineio_server.handle_request(self) - - async def on_message(self, message): - await self.receive_queue.put(message) - - async def get_next_message(self): - return await self.receive_queue.get() - - def on_close(self): - self.receive_queue.put_nowait(None) - - return Handler - - -def translate_request(handler): - """This function takes the arguments passed to the request handler and - uses them to generate a WSGI compatible environ dictionary. - """ - class AwaitablePayload(object): - def __init__(self, payload): - self.payload = payload or b'' - - async def read(self, length=None): - if length is None: - r = self.payload - self.payload = b'' - else: - r = self.payload[:length] - self.payload = self.payload[length:] - return r - - payload = handler.request.body - - uri_parts = urlsplit(handler.request.path) - full_uri = handler.request.path - if handler.request.query: # pragma: no cover - full_uri += '?' 
+ handler.request.query - environ = { - 'wsgi.input': AwaitablePayload(payload), - 'wsgi.errors': sys.stderr, - 'wsgi.version': (1, 0), - 'wsgi.async': True, - 'wsgi.multithread': False, - 'wsgi.multiprocess': False, - 'wsgi.run_once': False, - 'SERVER_SOFTWARE': 'aiohttp', - 'REQUEST_METHOD': handler.request.method, - 'QUERY_STRING': handler.request.query or '', - 'RAW_URI': full_uri, - 'SERVER_PROTOCOL': 'HTTP/%s' % handler.request.version, - 'REMOTE_ADDR': '127.0.0.1', - 'REMOTE_PORT': '0', - 'SERVER_NAME': 'aiohttp', - 'SERVER_PORT': '0', - 'tornado.handler': handler - } - - for hdr_name, hdr_value in handler.request.headers.items(): - hdr_name = hdr_name.upper() - if hdr_name == 'CONTENT-TYPE': - environ['CONTENT_TYPE'] = hdr_value - continue - elif hdr_name == 'CONTENT-LENGTH': - environ['CONTENT_LENGTH'] = hdr_value - continue - - key = 'HTTP_%s' % hdr_name.replace('-', '_') - environ[key] = hdr_value - - environ['wsgi.url_scheme'] = environ.get('HTTP_X_FORWARDED_PROTO', 'http') - - path_info = uri_parts.path - - environ['PATH_INFO'] = path_info - environ['SCRIPT_NAME'] = '' - - return environ - - -def make_response(status, headers, payload, environ): - """This function generates an appropriate response object for this async - mode. - """ - tornado_handler = environ['tornado.handler'] - tornado_handler.set_status(int(status.split()[0])) - for header, value in headers: - tornado_handler.set_header(header, value) - tornado_handler.write(payload) - tornado_handler.finish() - - -class WebSocket(object): # pragma: no cover - """ - This wrapper class provides a tornado WebSocket interface that is - somewhat compatible with eventlet's implementation. - """ - def __init__(self, handler): - self.handler = handler - self.tornado_handler = None - - async def __call__(self, environ): - self.tornado_handler = environ['tornado.handler'] - self.environ = environ - await self.handler(self) - - async def close(self): - self.tornado_handler.close() - - async def send(self, message): - self.tornado_handler.write_message( - message, binary=isinstance(message, bytes)) - - async def wait(self): - msg = await self.tornado_handler.get_next_message() - if not isinstance(msg, six.binary_type) and \ - not isinstance(msg, six.text_type): - raise IOError() - return msg - - -_async = { - 'asyncio': True, - 'translate_request': translate_request, - 'make_response': make_response, - 'websocket': sys.modules[__name__], - 'websocket_class': 'WebSocket' -} diff --git a/venv/lib/python3.6/site-packages/engineio/asyncio_client.py b/venv/lib/python3.6/site-packages/engineio/asyncio_client.py deleted file mode 100644 index abaab3c..0000000 --- a/venv/lib/python3.6/site-packages/engineio/asyncio_client.py +++ /dev/null @@ -1,566 +0,0 @@ -import asyncio - -try: - import aiohttp -except ImportError: # pragma: no cover - aiohttp = None -import six -try: - import websockets -except ImportError: # pragma: no cover - websockets = None - -from . import client -from . import exceptions -from . import packet -from . import payload - - -class AsyncClient(client.Client): - """An Engine.IO client for asyncio. - - This class implements a fully compliant Engine.IO web client with support - for websocket and long-polling transports, compatible with the asyncio - framework on Python 3.5 or newer. - - :param logger: To enable logging set to ``True`` or pass a logger object to - use. To disable logging set to ``False``. The default is - ``False``. - :param json: An alternative json module to use for encoding and decoding - packets. 
Custom json modules must have ``dumps`` and ``loads`` - functions that are compatible with the standard library - versions. - :param request_timeout: A timeout in seconds for requests. The default is - 5 seconds. - """ - def is_asyncio_based(self): - return True - - async def connect(self, url, headers={}, transports=None, - engineio_path='engine.io'): - """Connect to an Engine.IO server. - - :param url: The URL of the Engine.IO server. It can include custom - query string parameters if required by the server. - :param headers: A dictionary with custom headers to send with the - connection request. - :param transports: The list of allowed transports. Valid transports - are ``'polling'`` and ``'websocket'``. If not - given, the polling transport is connected first, - then an upgrade to websocket is attempted. - :param engineio_path: The endpoint where the Engine.IO server is - installed. The default value is appropriate for - most cases. - - Note: this method is a coroutine. - - Example usage:: - - eio = engineio.Client() - await eio.connect('http://localhost:5000') - """ - if self.state != 'disconnected': - raise ValueError('Client is not in a disconnected state') - valid_transports = ['polling', 'websocket'] - if transports is not None: - if isinstance(transports, six.text_type): - transports = [transports] - transports = [transport for transport in transports - if transport in valid_transports] - if not transports: - raise ValueError('No valid transports provided') - self.transports = transports or valid_transports - self.queue = self.create_queue() - return await getattr(self, '_connect_' + self.transports[0])( - url, headers, engineio_path) - - async def wait(self): - """Wait until the connection with the server ends. - - Client applications can use this function to block the main thread - during the life of the connection. - - Note: this method is a coroutine. - """ - if self.read_loop_task: - await self.read_loop_task - - async def send(self, data, binary=None): - """Send a message to a client. - - :param data: The data to send to the client. Data can be of type - ``str``, ``bytes``, ``list`` or ``dict``. If a ``list`` - or ``dict``, the data will be serialized as JSON. - :param binary: ``True`` to send packet as binary, ``False`` to send - as text. If not given, unicode (Python 2) and str - (Python 3) are sent as text, and str (Python 2) and - bytes (Python 3) are sent as binary. - - Note: this method is a coroutine. - """ - await self._send_packet(packet.Packet(packet.MESSAGE, data=data, - binary=binary)) - - async def disconnect(self, abort=False): - """Disconnect from the server. - - :param abort: If set to ``True``, do not wait for background tasks - associated with the connection to end. - - Note: this method is a coroutine. - """ - if self.state == 'connected': - await self._send_packet(packet.Packet(packet.CLOSE)) - await self.queue.put(None) - self.state = 'disconnecting' - await self._trigger_event('disconnect', run_async=False) - if self.current_transport == 'websocket': - await self.ws.close() - if not abort: - await self.read_loop_task - self.state = 'disconnected' - try: - client.connected_clients.remove(self) - except ValueError: # pragma: no cover - pass - self._reset() - - def start_background_task(self, target, *args, **kwargs): - """Start a background task. - - This is a utility function that applications can use to start a - background task. - - :param target: the target function to execute. - :param args: arguments to pass to the function. 
- :param kwargs: keyword arguments to pass to the function. - - This function returns an object compatible with the `Thread` class in - the Python standard library. The `start()` method on this object is - already called by this function. - - Note: this method is a coroutine. - """ - return asyncio.ensure_future(target(*args, **kwargs)) - - async def sleep(self, seconds=0): - """Sleep for the requested amount of time. - - Note: this method is a coroutine. - """ - return await asyncio.sleep(seconds) - - def create_queue(self): - """Create a queue object.""" - q = asyncio.Queue() - q.Empty = asyncio.QueueEmpty - return q - - def create_event(self): - """Create an event object.""" - return asyncio.Event() - - def _reset(self): - if self.http: # pragma: no cover - asyncio.ensure_future(self.http.close()) - super()._reset() - - async def _connect_polling(self, url, headers, engineio_path): - """Establish a long-polling connection to the Engine.IO server.""" - if aiohttp is None: # pragma: no cover - self.logger.error('aiohttp not installed -- cannot make HTTP ' - 'requests!') - return - self.base_url = self._get_engineio_url(url, engineio_path, 'polling') - self.logger.info('Attempting polling connection to ' + self.base_url) - r = await self._send_request( - 'GET', self.base_url + self._get_url_timestamp(), headers=headers, - timeout=self.request_timeout) - if r is None: - self._reset() - raise exceptions.ConnectionError( - 'Connection refused by the server') - if r.status != 200: - raise exceptions.ConnectionError( - 'Unexpected status code {} in server response'.format( - r.status)) - try: - p = payload.Payload(encoded_payload=await r.read()) - except ValueError: - six.raise_from(exceptions.ConnectionError( - 'Unexpected response from server'), None) - open_packet = p.packets[0] - if open_packet.packet_type != packet.OPEN: - raise exceptions.ConnectionError( - 'OPEN packet not returned by server') - self.logger.info( - 'Polling connection accepted with ' + str(open_packet.data)) - self.sid = open_packet.data['sid'] - self.upgrades = open_packet.data['upgrades'] - self.ping_interval = open_packet.data['pingInterval'] / 1000.0 - self.ping_timeout = open_packet.data['pingTimeout'] / 1000.0 - self.current_transport = 'polling' - self.base_url += '&sid=' + self.sid - - self.state = 'connected' - client.connected_clients.append(self) - await self._trigger_event('connect', run_async=False) - - for pkt in p.packets[1:]: - await self._receive_packet(pkt) - - if 'websocket' in self.upgrades and 'websocket' in self.transports: - # attempt to upgrade to websocket - if await self._connect_websocket(url, headers, engineio_path): - # upgrade to websocket succeeded, we're done here - return - - self.ping_loop_task = self.start_background_task(self._ping_loop) - self.write_loop_task = self.start_background_task(self._write_loop) - self.read_loop_task = self.start_background_task( - self._read_loop_polling) - - async def _connect_websocket(self, url, headers, engineio_path): - """Establish or upgrade to a WebSocket connection with the server.""" - if websockets is None: # pragma: no cover - self.logger.error('websockets package not installed') - return False - websocket_url = self._get_engineio_url(url, engineio_path, - 'websocket') - if self.sid: - self.logger.info( - 'Attempting WebSocket upgrade to ' + websocket_url) - upgrade = True - websocket_url += '&sid=' + self.sid - else: - upgrade = False - self.base_url = websocket_url - self.logger.info( - 'Attempting WebSocket connection to ' + websocket_url) - 
- # get the cookies from the long-polling connection so that they can - # also be sent the the WebSocket route - cookies = None - if self.http: - cookies = '; '.join(["{}={}".format(cookie.key, cookie.value) - for cookie in self.http._cookie_jar]) - headers = headers.copy() - headers['Cookie'] = cookies - - try: - ws = await websockets.connect( - websocket_url + self._get_url_timestamp(), - extra_headers=headers) - except (websockets.exceptions.InvalidURI, - websockets.exceptions.InvalidHandshake, - OSError): - if upgrade: - self.logger.warning( - 'WebSocket upgrade failed: connection error') - return False - else: - raise exceptions.ConnectionError('Connection error') - if upgrade: - p = packet.Packet(packet.PING, data='probe').encode( - always_bytes=False) - try: - await ws.send(p) - except Exception as e: # pragma: no cover - self.logger.warning( - 'WebSocket upgrade failed: unexpected send exception: %s', - str(e)) - return False - try: - p = await ws.recv() - except Exception as e: # pragma: no cover - self.logger.warning( - 'WebSocket upgrade failed: unexpected recv exception: %s', - str(e)) - return False - pkt = packet.Packet(encoded_packet=p) - if pkt.packet_type != packet.PONG or pkt.data != 'probe': - self.logger.warning( - 'WebSocket upgrade failed: no PONG packet') - return False - p = packet.Packet(packet.UPGRADE).encode(always_bytes=False) - try: - await ws.send(p) - except Exception as e: # pragma: no cover - self.logger.warning( - 'WebSocket upgrade failed: unexpected send exception: %s', - str(e)) - return False - self.current_transport = 'websocket' - if self.http: # pragma: no cover - await self.http.close() - self.logger.info('WebSocket upgrade was successful') - else: - try: - p = await ws.recv() - except Exception as e: # pragma: no cover - raise exceptions.ConnectionError( - 'Unexpected recv exception: ' + str(e)) - open_packet = packet.Packet(encoded_packet=p) - if open_packet.packet_type != packet.OPEN: - raise exceptions.ConnectionError('no OPEN packet') - self.logger.info( - 'WebSocket connection accepted with ' + str(open_packet.data)) - self.sid = open_packet.data['sid'] - self.upgrades = open_packet.data['upgrades'] - self.ping_interval = open_packet.data['pingInterval'] / 1000.0 - self.ping_timeout = open_packet.data['pingTimeout'] / 1000.0 - self.current_transport = 'websocket' - - self.state = 'connected' - client.connected_clients.append(self) - await self._trigger_event('connect', run_async=False) - - self.ws = ws - self.ping_loop_task = self.start_background_task(self._ping_loop) - self.write_loop_task = self.start_background_task(self._write_loop) - self.read_loop_task = self.start_background_task( - self._read_loop_websocket) - return True - - async def _receive_packet(self, pkt): - """Handle incoming packets from the server.""" - packet_name = packet.packet_names[pkt.packet_type] \ - if pkt.packet_type < len(packet.packet_names) else 'UNKNOWN' - self.logger.info( - 'Received packet %s data %s', packet_name, - pkt.data if not isinstance(pkt.data, bytes) else '') - if pkt.packet_type == packet.MESSAGE: - await self._trigger_event('message', pkt.data, run_async=True) - elif pkt.packet_type == packet.PONG: - self.pong_received = True - elif pkt.packet_type == packet.CLOSE: - await self.disconnect(abort=True) - elif pkt.packet_type == packet.NOOP: - pass - else: - self.logger.error('Received unexpected packet of type %s', - pkt.packet_type) - - async def _send_packet(self, pkt): - """Queue a packet to be sent to the server.""" - if self.state != 
'connected': - return - await self.queue.put(pkt) - self.logger.info( - 'Sending packet %s data %s', - packet.packet_names[pkt.packet_type], - pkt.data if not isinstance(pkt.data, bytes) else '') - - async def _send_request( - self, method, url, headers=None, body=None, - timeout=None): # pragma: no cover - if self.http is None or self.http.closed: - self.http = aiohttp.ClientSession() - http_method = getattr(self.http, method.lower()) - try: - return await http_method( - url, headers=headers, data=body, - timeout=aiohttp.ClientTimeout(total=timeout)) - except (aiohttp.ClientError, asyncio.TimeoutError) as exc: - self.logger.info('HTTP %s request to %s failed with error %s.', - method, url, exc) - - async def _trigger_event(self, event, *args, **kwargs): - """Invoke an event handler.""" - run_async = kwargs.pop('run_async', False) - ret = None - if event in self.handlers: - if asyncio.iscoroutinefunction(self.handlers[event]) is True: - if run_async: - return self.start_background_task(self.handlers[event], - *args) - else: - try: - ret = await self.handlers[event](*args) - except asyncio.CancelledError: # pragma: no cover - pass - except: - self.logger.exception(event + ' async handler error') - if event == 'connect': - # if connect handler raised error we reject the - # connection - return False - else: - if run_async: - async def async_handler(): - return self.handlers[event](*args) - - return self.start_background_task(async_handler) - else: - try: - ret = self.handlers[event](*args) - except: - self.logger.exception(event + ' handler error') - if event == 'connect': - # if connect handler raised error we reject the - # connection - return False - return ret - - async def _ping_loop(self): - """This background task sends a PING to the server at the requested - interval. 
- """ - self.pong_received = True - self.ping_loop_event.clear() - while self.state == 'connected': - if not self.pong_received: - self.logger.info( - 'PONG response has not been received, aborting') - if self.ws: - await self.ws.close() - await self.queue.put(None) - break - self.pong_received = False - await self._send_packet(packet.Packet(packet.PING)) - try: - await asyncio.wait_for(self.ping_loop_event.wait(), - self.ping_interval) - except (asyncio.TimeoutError, - asyncio.CancelledError): # pragma: no cover - pass - self.logger.info('Exiting ping task') - - async def _read_loop_polling(self): - """Read packets by polling the Engine.IO server.""" - while self.state == 'connected': - self.logger.info( - 'Sending polling GET request to ' + self.base_url) - r = await self._send_request( - 'GET', self.base_url + self._get_url_timestamp(), - timeout=max(self.ping_interval, self.ping_timeout) + 5) - if r is None: - self.logger.warning( - 'Connection refused by the server, aborting') - await self.queue.put(None) - break - if r.status != 200: - self.logger.warning('Unexpected status code %s in server ' - 'response, aborting', r.status) - await self.queue.put(None) - break - try: - p = payload.Payload(encoded_payload=await r.read()) - except ValueError: - self.logger.warning( - 'Unexpected packet from server, aborting') - await self.queue.put(None) - break - for pkt in p.packets: - await self._receive_packet(pkt) - - self.logger.info('Waiting for write loop task to end') - await self.write_loop_task - self.logger.info('Waiting for ping loop task to end') - self.ping_loop_event.set() - await self.ping_loop_task - if self.state == 'connected': - await self._trigger_event('disconnect', run_async=False) - try: - client.connected_clients.remove(self) - except ValueError: # pragma: no cover - pass - self._reset() - self.logger.info('Exiting read loop task') - - async def _read_loop_websocket(self): - """Read packets from the Engine.IO WebSocket connection.""" - while self.state == 'connected': - p = None - try: - p = await self.ws.recv() - except websockets.exceptions.ConnectionClosed: - self.logger.info( - 'Read loop: WebSocket connection was closed, aborting') - await self.queue.put(None) - break - except Exception as e: - self.logger.info( - 'Unexpected error "%s", aborting', str(e)) - await self.queue.put(None) - break - if isinstance(p, six.text_type): # pragma: no cover - p = p.encode('utf-8') - pkt = packet.Packet(encoded_packet=p) - await self._receive_packet(pkt) - - self.logger.info('Waiting for write loop task to end') - await self.write_loop_task - self.logger.info('Waiting for ping loop task to end') - self.ping_loop_event.set() - await self.ping_loop_task - if self.state == 'connected': - await self._trigger_event('disconnect', run_async=False) - try: - client.connected_clients.remove(self) - except ValueError: # pragma: no cover - pass - self._reset() - self.logger.info('Exiting read loop task') - - async def _write_loop(self): - """This background task sends packages to the server as they are - pushed to the send queue. 
- """ - while self.state == 'connected': - # to simplify the timeout handling, use the maximum of the - # ping interval and ping timeout as timeout, with an extra 5 - # seconds grace period - timeout = max(self.ping_interval, self.ping_timeout) + 5 - packets = None - try: - packets = [await asyncio.wait_for(self.queue.get(), timeout)] - except (self.queue.Empty, asyncio.TimeoutError, - asyncio.CancelledError): - self.logger.error('packet queue is empty, aborting') - break - if packets == [None]: - self.queue.task_done() - packets = [] - else: - while True: - try: - packets.append(self.queue.get_nowait()) - except self.queue.Empty: - break - if packets[-1] is None: - packets = packets[:-1] - self.queue.task_done() - break - if not packets: - # empty packet list returned -> connection closed - break - if self.current_transport == 'polling': - p = payload.Payload(packets=packets) - r = await self._send_request( - 'POST', self.base_url, body=p.encode(), - headers={'Content-Type': 'application/octet-stream'}, - timeout=self.request_timeout) - for pkt in packets: - self.queue.task_done() - if r is None: - self.logger.warning( - 'Connection refused by the server, aborting') - break - if r.status != 200: - self.logger.warning('Unexpected status code %s in server ' - 'response, aborting', r.status) - self._reset() - break - else: - # websocket - try: - for pkt in packets: - await self.ws.send(pkt.encode(always_bytes=False)) - self.queue.task_done() - except websockets.exceptions.ConnectionClosed: - self.logger.info( - 'Write loop: WebSocket connection was closed, ' - 'aborting') - break - self.logger.info('Exiting write loop task') diff --git a/venv/lib/python3.6/site-packages/engineio/asyncio_server.py b/venv/lib/python3.6/site-packages/engineio/asyncio_server.py deleted file mode 100644 index 5e203d2..0000000 --- a/venv/lib/python3.6/site-packages/engineio/asyncio_server.py +++ /dev/null @@ -1,463 +0,0 @@ -import asyncio - -import six -from six.moves import urllib - -from . import exceptions -from . import packet -from . import server -from . import asyncio_socket - - -class AsyncServer(server.Server): - """An Engine.IO server for asyncio. - - This class implements a fully compliant Engine.IO web server with support - for websocket and long-polling transports, compatible with the asyncio - framework on Python 3.5 or newer. - - :param async_mode: The asynchronous model to use. See the Deployment - section in the documentation for a description of the - available options. Valid async modes are "aiohttp", - "sanic", "tornado" and "asgi". If this argument is not - given, "aiohttp" is tried first, followed by "sanic", - "tornado", and finally "asgi". The first async mode that - has all its dependencies installed is the one that is - chosen. - :param ping_timeout: The time in seconds that the client waits for the - server to respond before disconnecting. - :param ping_interval: The interval in seconds at which the client pings - the server. - :param max_http_buffer_size: The maximum size of a message when using the - polling transport. - :param allow_upgrades: Whether to allow transport upgrades or not. - :param http_compression: Whether to compress packages when using the - polling transport. - :param compression_threshold: Only compress messages when their byte size - is greater than this value. - :param cookie: Name of the HTTP cookie that contains the client session - id. If set to ``None``, a cookie is not sent to the client. 
- :param cors_allowed_origins: Origin or list of origins that are allowed to - connect to this server. Only the same origin - is allowed by default. Set this argument to - ``'*'`` to allow all origins, or to ``[]`` to - disable CORS handling. - :param cors_credentials: Whether credentials (cookies, authentication) are - allowed in requests to this server. - :param logger: To enable logging set to ``True`` or pass a logger object to - use. To disable logging set to ``False``. - :param json: An alternative json module to use for encoding and decoding - packets. Custom json modules must have ``dumps`` and ``loads`` - functions that are compatible with the standard library - versions. - :param async_handlers: If set to ``True``, run message event handlers in - non-blocking threads. To run handlers synchronously, - set to ``False``. The default is ``True``. - :param kwargs: Reserved for future extensions, any additional parameters - given as keyword arguments will be silently ignored. - """ - def is_asyncio_based(self): - return True - - def async_modes(self): - return ['aiohttp', 'sanic', 'tornado', 'asgi'] - - def attach(self, app, engineio_path='engine.io'): - """Attach the Engine.IO server to an application.""" - engineio_path = engineio_path.strip('/') - self._async['create_route'](app, self, '/{}/'.format(engineio_path)) - - async def send(self, sid, data, binary=None): - """Send a message to a client. - - :param sid: The session id of the recipient client. - :param data: The data to send to the client. Data can be of type - ``str``, ``bytes``, ``list`` or ``dict``. If a ``list`` - or ``dict``, the data will be serialized as JSON. - :param binary: ``True`` to send packet as binary, ``False`` to send - as text. If not given, unicode (Python 2) and str - (Python 3) are sent as text, and str (Python 2) and - bytes (Python 3) are sent as binary. - - Note: this method is a coroutine. - """ - try: - socket = self._get_socket(sid) - except KeyError: - # the socket is not available - self.logger.warning('Cannot send to sid %s', sid) - return - await socket.send(packet.Packet(packet.MESSAGE, data=data, - binary=binary)) - - async def get_session(self, sid): - """Return the user session for a client. - - :param sid: The session id of the client. - - The return value is a dictionary. Modifications made to this - dictionary are not guaranteed to be preserved. If you want to modify - the user session, use the ``session`` context manager instead. - """ - socket = self._get_socket(sid) - return socket.session - - async def save_session(self, sid, session): - """Store the user session for a client. - - :param sid: The session id of the client. - :param session: The session dictionary. - """ - socket = self._get_socket(sid) - socket.session = session - - def session(self, sid): - """Return the user session for a client with context manager syntax. - - :param sid: The session id of the client. - - This is a context manager that returns the user session dictionary for - the client. Any changes that are made to this dictionary inside the - context manager block are saved back to the session. 
Example usage:: - - @eio.on('connect') - def on_connect(sid, environ): - username = authenticate_user(environ) - if not username: - return False - with eio.session(sid) as session: - session['username'] = username - - @eio.on('message') - def on_message(sid, msg): - async with eio.session(sid) as session: - print('received message from ', session['username']) - """ - class _session_context_manager(object): - def __init__(self, server, sid): - self.server = server - self.sid = sid - self.session = None - - async def __aenter__(self): - self.session = await self.server.get_session(sid) - return self.session - - async def __aexit__(self, *args): - await self.server.save_session(sid, self.session) - - return _session_context_manager(self, sid) - - async def disconnect(self, sid=None): - """Disconnect a client. - - :param sid: The session id of the client to close. If this parameter - is not given, then all clients are closed. - - Note: this method is a coroutine. - """ - if sid is not None: - try: - socket = self._get_socket(sid) - except KeyError: # pragma: no cover - # the socket was already closed or gone - pass - else: - await socket.close() - del self.sockets[sid] - else: - await asyncio.wait([client.close() - for client in six.itervalues(self.sockets)]) - self.sockets = {} - - async def handle_request(self, *args, **kwargs): - """Handle an HTTP request from the client. - - This is the entry point of the Engine.IO application. This function - returns the HTTP response to deliver to the client. - - Note: this method is a coroutine. - """ - translate_request = self._async['translate_request'] - if asyncio.iscoroutinefunction(translate_request): - environ = await translate_request(*args, **kwargs) - else: - environ = translate_request(*args, **kwargs) - - if self.cors_allowed_origins != []: - # Validate the origin header if present - # This is important for WebSocket more than for HTTP, since - # browsers only apply CORS controls to HTTP. 
- origin = environ.get('HTTP_ORIGIN') - if origin: - allowed_origins = self._cors_allowed_origins(environ) - if allowed_origins is not None and origin not in \ - allowed_origins: - self.logger.info(origin + ' is not an accepted origin.') - r = self._bad_request() - make_response = self._async['make_response'] - response = make_response(r['status'], r['headers'], - r['response'], environ) - return response - - method = environ['REQUEST_METHOD'] - query = urllib.parse.parse_qs(environ.get('QUERY_STRING', '')) - - sid = query['sid'][0] if 'sid' in query else None - b64 = False - jsonp = False - jsonp_index = None - - if 'b64' in query: - if query['b64'][0] == "1" or query['b64'][0].lower() == "true": - b64 = True - if 'j' in query: - jsonp = True - try: - jsonp_index = int(query['j'][0]) - except (ValueError, KeyError, IndexError): - # Invalid JSONP index number - pass - - if jsonp and jsonp_index is None: - self.logger.warning('Invalid JSONP index number') - r = self._bad_request() - elif method == 'GET': - if sid is None: - transport = query.get('transport', ['polling'])[0] - if transport != 'polling' and transport != 'websocket': - self.logger.warning('Invalid transport %s', transport) - r = self._bad_request() - else: - r = await self._handle_connect(environ, transport, - b64, jsonp_index) - else: - if sid not in self.sockets: - self.logger.warning('Invalid session %s', sid) - r = self._bad_request() - else: - socket = self._get_socket(sid) - try: - packets = await socket.handle_get_request(environ) - if isinstance(packets, list): - r = self._ok(packets, b64=b64, - jsonp_index=jsonp_index) - else: - r = packets - except exceptions.EngineIOError: - if sid in self.sockets: # pragma: no cover - await self.disconnect(sid) - r = self._bad_request() - if sid in self.sockets and self.sockets[sid].closed: - del self.sockets[sid] - elif method == 'POST': - if sid is None or sid not in self.sockets: - self.logger.warning('Invalid session %s', sid) - r = self._bad_request() - else: - socket = self._get_socket(sid) - try: - await socket.handle_post_request(environ) - r = self._ok(jsonp_index=jsonp_index) - except exceptions.EngineIOError: - if sid in self.sockets: # pragma: no cover - await self.disconnect(sid) - r = self._bad_request() - except: # pragma: no cover - # for any other unexpected errors, we log the error - # and keep going - self.logger.exception('post request handler error') - r = self._ok(jsonp_index=jsonp_index) - elif method == 'OPTIONS': - r = self._ok() - else: - self.logger.warning('Method %s not supported', method) - r = self._method_not_found() - if not isinstance(r, dict): - return r - if self.http_compression and \ - len(r['response']) >= self.compression_threshold: - encodings = [e.split(';')[0].strip() for e in - environ.get('HTTP_ACCEPT_ENCODING', '').split(',')] - for encoding in encodings: - if encoding in self.compression_methods: - r['response'] = \ - getattr(self, '_' + encoding)(r['response']) - r['headers'] += [('Content-Encoding', encoding)] - break - cors_headers = self._cors_headers(environ) - make_response = self._async['make_response'] - if asyncio.iscoroutinefunction(make_response): - response = await make_response(r['status'], - r['headers'] + cors_headers, - r['response'], environ) - else: - response = make_response(r['status'], r['headers'] + cors_headers, - r['response'], environ) - return response - - def start_background_task(self, target, *args, **kwargs): - """Start a background task using the appropriate async model. 
- - This is a utility function that applications can use to start a - background task using the method that is compatible with the - selected async mode. - - :param target: the target function to execute. - :param args: arguments to pass to the function. - :param kwargs: keyword arguments to pass to the function. - - The return value is a ``asyncio.Task`` object. - """ - return asyncio.ensure_future(target(*args, **kwargs)) - - async def sleep(self, seconds=0): - """Sleep for the requested amount of time using the appropriate async - model. - - This is a utility function that applications can use to put a task to - sleep without having to worry about using the correct call for the - selected async mode. - - Note: this method is a coroutine. - """ - return await asyncio.sleep(seconds) - - def create_queue(self, *args, **kwargs): - """Create a queue object using the appropriate async model. - - This is a utility function that applications can use to create a queue - without having to worry about using the correct call for the selected - async mode. For asyncio based async modes, this returns an instance of - ``asyncio.Queue``. - """ - return asyncio.Queue(*args, **kwargs) - - def get_queue_empty_exception(self): - """Return the queue empty exception for the appropriate async model. - - This is a utility function that applications can use to work with a - queue without having to worry about using the correct call for the - selected async mode. For asyncio based async modes, this returns an - instance of ``asyncio.QueueEmpty``. - """ - return asyncio.QueueEmpty - - def create_event(self, *args, **kwargs): - """Create an event object using the appropriate async model. - - This is a utility function that applications can use to create an - event without having to worry about using the correct call for the - selected async mode. For asyncio based async modes, this returns - an instance of ``asyncio.Event``. 
- """ - return asyncio.Event(*args, **kwargs) - - async def _handle_connect(self, environ, transport, b64=False, - jsonp_index=None): - """Handle a client connection request.""" - if self.start_service_task: - # start the service task to monitor connected clients - self.start_service_task = False - self.start_background_task(self._service_task) - - sid = self._generate_id() - s = asyncio_socket.AsyncSocket(self, sid) - self.sockets[sid] = s - - pkt = packet.Packet( - packet.OPEN, {'sid': sid, - 'upgrades': self._upgrades(sid, transport), - 'pingTimeout': int(self.ping_timeout * 1000), - 'pingInterval': int(self.ping_interval * 1000)}) - await s.send(pkt) - - ret = await self._trigger_event('connect', sid, environ, - run_async=False) - if ret is False: - del self.sockets[sid] - self.logger.warning('Application rejected connection') - return self._unauthorized() - - if transport == 'websocket': - ret = await s.handle_get_request(environ) - if s.closed: - # websocket connection ended, so we are done - del self.sockets[sid] - return ret - else: - s.connected = True - headers = None - if self.cookie: - headers = [('Set-Cookie', self.cookie + '=' + sid)] - try: - return self._ok(await s.poll(), headers=headers, b64=b64, - jsonp_index=jsonp_index) - except exceptions.QueueEmpty: - return self._bad_request() - - async def _trigger_event(self, event, *args, **kwargs): - """Invoke an event handler.""" - run_async = kwargs.pop('run_async', False) - ret = None - if event in self.handlers: - if asyncio.iscoroutinefunction(self.handlers[event]) is True: - if run_async: - return self.start_background_task(self.handlers[event], - *args) - else: - try: - ret = await self.handlers[event](*args) - except asyncio.CancelledError: # pragma: no cover - pass - except: - self.logger.exception(event + ' async handler error') - if event == 'connect': - # if connect handler raised error we reject the - # connection - return False - else: - if run_async: - async def async_handler(): - return self.handlers[event](*args) - - return self.start_background_task(async_handler) - else: - try: - ret = self.handlers[event](*args) - except: - self.logger.exception(event + ' handler error') - if event == 'connect': - # if connect handler raised error we reject the - # connection - return False - return ret - - async def _service_task(self): # pragma: no cover - """Monitor connected clients and clean up those that time out.""" - while True: - if len(self.sockets) == 0: - # nothing to do - await self.sleep(self.ping_timeout) - continue - - # go through the entire client list in a ping interval cycle - sleep_interval = self.ping_timeout / len(self.sockets) - - try: - # iterate over the current clients - for socket in self.sockets.copy().values(): - if not socket.closing and not socket.closed: - await socket.check_ping_timeout() - await self.sleep(sleep_interval) - except (SystemExit, KeyboardInterrupt, asyncio.CancelledError): - self.logger.info('service task canceled') - break - except: - if asyncio.get_event_loop().is_closed(): - self.logger.info('event loop is closed, exiting service ' - 'task') - break - - # an unexpected exception has occurred, log it and continue - self.logger.exception('service task exception') diff --git a/venv/lib/python3.6/site-packages/engineio/asyncio_socket.py b/venv/lib/python3.6/site-packages/engineio/asyncio_socket.py deleted file mode 100644 index c4afcfe..0000000 --- a/venv/lib/python3.6/site-packages/engineio/asyncio_socket.py +++ /dev/null @@ -1,235 +0,0 @@ -import asyncio -import six -import 
sys -import time - -from . import exceptions -from . import packet -from . import payload -from . import socket - - -class AsyncSocket(socket.Socket): - async def poll(self): - """Wait for packets to send to the client.""" - try: - packets = [await asyncio.wait_for(self.queue.get(), - self.server.ping_timeout)] - self.queue.task_done() - except (asyncio.TimeoutError, asyncio.CancelledError): - raise exceptions.QueueEmpty() - if packets == [None]: - return [] - try: - packets.append(self.queue.get_nowait()) - self.queue.task_done() - except asyncio.QueueEmpty: - pass - return packets - - async def receive(self, pkt): - """Receive packet from the client.""" - self.server.logger.info('%s: Received packet %s data %s', - self.sid, packet.packet_names[pkt.packet_type], - pkt.data if not isinstance(pkt.data, bytes) - else '') - if pkt.packet_type == packet.PING: - self.last_ping = time.time() - await self.send(packet.Packet(packet.PONG, pkt.data)) - elif pkt.packet_type == packet.MESSAGE: - await self.server._trigger_event( - 'message', self.sid, pkt.data, - run_async=self.server.async_handlers) - elif pkt.packet_type == packet.UPGRADE: - await self.send(packet.Packet(packet.NOOP)) - elif pkt.packet_type == packet.CLOSE: - await self.close(wait=False, abort=True) - else: - raise exceptions.UnknownPacketError() - - async def check_ping_timeout(self): - """Make sure the client is still sending pings. - - This helps detect disconnections for long-polling clients. - """ - if self.closed: - raise exceptions.SocketIsClosedError() - if time.time() - self.last_ping > self.server.ping_interval + 5: - self.server.logger.info('%s: Client is gone, closing socket', - self.sid) - # Passing abort=False here will cause close() to write a - # CLOSE packet. This has the effect of updating half-open sockets - # to their correct state of disconnected - await self.close(wait=False, abort=False) - return False - return True - - async def send(self, pkt): - """Send a packet to the client.""" - if not await self.check_ping_timeout(): - return - if self.upgrading: - self.packet_backlog.append(pkt) - else: - await self.queue.put(pkt) - self.server.logger.info('%s: Sending packet %s data %s', - self.sid, packet.packet_names[pkt.packet_type], - pkt.data if not isinstance(pkt.data, bytes) - else '') - - async def handle_get_request(self, environ): - """Handle a long-polling GET request from the client.""" - connections = [ - s.strip() - for s in environ.get('HTTP_CONNECTION', '').lower().split(',')] - transport = environ.get('HTTP_UPGRADE', '').lower() - if 'upgrade' in connections and transport in self.upgrade_protocols: - self.server.logger.info('%s: Received request to upgrade to %s', - self.sid, transport) - return await getattr(self, '_upgrade_' + transport)(environ) - try: - packets = await self.poll() - except exceptions.QueueEmpty: - exc = sys.exc_info() - await self.close(wait=False) - six.reraise(*exc) - return packets - - async def handle_post_request(self, environ): - """Handle a long-polling POST request from the client.""" - length = int(environ.get('CONTENT_LENGTH', '0')) - if length > self.server.max_http_buffer_size: - raise exceptions.ContentTooLongError() - else: - body = await environ['wsgi.input'].read(length) - p = payload.Payload(encoded_payload=body) - for pkt in p.packets: - await self.receive(pkt) - - async def close(self, wait=True, abort=False): - """Close the socket connection.""" - if not self.closed and not self.closing: - self.closing = True - await self.server._trigger_event('disconnect', 
self.sid) - if not abort: - await self.send(packet.Packet(packet.CLOSE)) - self.closed = True - if wait: - await self.queue.join() - - async def _upgrade_websocket(self, environ): - """Upgrade the connection from polling to websocket.""" - if self.upgraded: - raise IOError('Socket has been upgraded already') - if self.server._async['websocket'] is None: - # the selected async mode does not support websocket - return self.server._bad_request() - ws = self.server._async['websocket'](self._websocket_handler) - return await ws(environ) - - async def _websocket_handler(self, ws): - """Engine.IO handler for websocket transport.""" - if self.connected: - # the socket was already connected, so this is an upgrade - self.upgrading = True # hold packet sends during the upgrade - - try: - pkt = await ws.wait() - except IOError: # pragma: no cover - return - decoded_pkt = packet.Packet(encoded_packet=pkt) - if decoded_pkt.packet_type != packet.PING or \ - decoded_pkt.data != 'probe': - self.server.logger.info( - '%s: Failed websocket upgrade, no PING packet', self.sid) - return - await ws.send(packet.Packet( - packet.PONG, - data=six.text_type('probe')).encode(always_bytes=False)) - await self.queue.put(packet.Packet(packet.NOOP)) # end poll - - try: - pkt = await ws.wait() - except IOError: # pragma: no cover - return - decoded_pkt = packet.Packet(encoded_packet=pkt) - if decoded_pkt.packet_type != packet.UPGRADE: - self.upgraded = False - self.server.logger.info( - ('%s: Failed websocket upgrade, expected UPGRADE packet, ' - 'received %s instead.'), - self.sid, pkt) - return - self.upgraded = True - - # flush any packets that were sent during the upgrade - for pkt in self.packet_backlog: - await self.queue.put(pkt) - self.packet_backlog = [] - self.upgrading = False - else: - self.connected = True - self.upgraded = True - - # start separate writer thread - async def writer(): - while True: - packets = None - try: - packets = await self.poll() - except exceptions.QueueEmpty: - break - if not packets: - # empty packet list returned -> connection closed - break - try: - for pkt in packets: - await ws.send(pkt.encode(always_bytes=False)) - except: - break - writer_task = asyncio.ensure_future(writer()) - - self.server.logger.info( - '%s: Upgrade to websocket successful', self.sid) - - while True: - p = None - wait_task = asyncio.ensure_future(ws.wait()) - try: - p = await asyncio.wait_for(wait_task, self.server.ping_timeout) - except asyncio.CancelledError: # pragma: no cover - # there is a bug (https://bugs.python.org/issue30508) in - # asyncio that causes a "Task exception never retrieved" error - # to appear when wait_task raises an exception before it gets - # cancelled. Calling wait_task.exception() prevents the error - # from being issued in Python 3.6, but causes other errors in - # other versions, so we run it with all errors suppressed and - # hope for the best. 
- try: - wait_task.exception() - except: - pass - break - except: - break - if p is None: - # connection closed by client - break - if isinstance(p, six.text_type): # pragma: no cover - p = p.encode('utf-8') - pkt = packet.Packet(encoded_packet=p) - try: - await self.receive(pkt) - except exceptions.UnknownPacketError: # pragma: no cover - pass - except exceptions.SocketIsClosedError: # pragma: no cover - self.server.logger.info('Receive error -- socket is closed') - break - except: # pragma: no cover - # if we get an unexpected exception we log the error and exit - # the connection properly - self.server.logger.exception('Unknown receive error') - - await self.queue.put(None) # unlock the writer task so it can exit - await asyncio.wait_for(writer_task, timeout=None) - await self.close(wait=False, abort=True) diff --git a/venv/lib/python3.6/site-packages/engineio/client.py b/venv/lib/python3.6/site-packages/engineio/client.py deleted file mode 100644 index 192e924..0000000 --- a/venv/lib/python3.6/site-packages/engineio/client.py +++ /dev/null @@ -1,651 +0,0 @@ -import logging -try: - import queue -except ImportError: # pragma: no cover - import Queue as queue -import signal -import threading -import time - -import six -from six.moves import urllib -try: - import requests -except ImportError: # pragma: no cover - requests = None -try: - import websocket -except ImportError: # pragma: no cover - websocket = None -from . import exceptions -from . import packet -from . import payload - -default_logger = logging.getLogger('engineio.client') -connected_clients = [] - -if six.PY2: # pragma: no cover - ConnectionError = OSError - - -def signal_handler(sig, frame): - """SIGINT handler. - - Disconnect all active clients and then invoke the original signal handler. - """ - for client in connected_clients[:]: - if client.is_asyncio_based(): - client.start_background_task(client.disconnect, abort=True) - else: - client.disconnect(abort=True) - return original_signal_handler(sig, frame) - - -original_signal_handler = signal.signal(signal.SIGINT, signal_handler) - - -class Client(object): - """An Engine.IO client. - - This class implements a fully compliant Engine.IO web client with support - for websocket and long-polling transports. - - :param logger: To enable logging set to ``True`` or pass a logger object to - use. To disable logging set to ``False``. The default is - ``False``. - :param json: An alternative json module to use for encoding and decoding - packets. Custom json modules must have ``dumps`` and ``loads`` - functions that are compatible with the standard library - versions. - :param request_timeout: A timeout in seconds for requests. The default is - 5 seconds. 
- """ - event_names = ['connect', 'disconnect', 'message'] - - def __init__(self, logger=False, json=None, request_timeout=5): - self.handlers = {} - self.base_url = None - self.transports = None - self.current_transport = None - self.sid = None - self.upgrades = None - self.ping_interval = None - self.ping_timeout = None - self.pong_received = True - self.http = None - self.ws = None - self.read_loop_task = None - self.write_loop_task = None - self.ping_loop_task = None - self.ping_loop_event = self.create_event() - self.queue = None - self.state = 'disconnected' - - if json is not None: - packet.Packet.json = json - if not isinstance(logger, bool): - self.logger = logger - else: - self.logger = default_logger - if not logging.root.handlers and \ - self.logger.level == logging.NOTSET: - if logger: - self.logger.setLevel(logging.INFO) - else: - self.logger.setLevel(logging.ERROR) - self.logger.addHandler(logging.StreamHandler()) - - self.request_timeout = request_timeout - - def is_asyncio_based(self): - return False - - def on(self, event, handler=None): - """Register an event handler. - - :param event: The event name. Can be ``'connect'``, ``'message'`` or - ``'disconnect'``. - :param handler: The function that should be invoked to handle the - event. When this parameter is not given, the method - acts as a decorator for the handler function. - - Example usage:: - - # as a decorator: - @eio.on('connect') - def connect_handler(): - print('Connection request') - - # as a method: - def message_handler(msg): - print('Received message: ', msg) - eio.send('response') - eio.on('message', message_handler) - """ - if event not in self.event_names: - raise ValueError('Invalid event') - - def set_handler(handler): - self.handlers[event] = handler - return handler - - if handler is None: - return set_handler - set_handler(handler) - - def connect(self, url, headers={}, transports=None, - engineio_path='engine.io'): - """Connect to an Engine.IO server. - - :param url: The URL of the Engine.IO server. It can include custom - query string parameters if required by the server. - :param headers: A dictionary with custom headers to send with the - connection request. - :param transports: The list of allowed transports. Valid transports - are ``'polling'`` and ``'websocket'``. If not - given, the polling transport is connected first, - then an upgrade to websocket is attempted. - :param engineio_path: The endpoint where the Engine.IO server is - installed. The default value is appropriate for - most cases. - - Example usage:: - - eio = engineio.Client() - eio.connect('http://localhost:5000') - """ - if self.state != 'disconnected': - raise ValueError('Client is not in a disconnected state') - valid_transports = ['polling', 'websocket'] - if transports is not None: - if isinstance(transports, six.string_types): - transports = [transports] - transports = [transport for transport in transports - if transport in valid_transports] - if not transports: - raise ValueError('No valid transports provided') - self.transports = transports or valid_transports - self.queue = self.create_queue() - return getattr(self, '_connect_' + self.transports[0])( - url, headers, engineio_path) - - def wait(self): - """Wait until the connection with the server ends. - - Client applications can use this function to block the main thread - during the life of the connection. - """ - if self.read_loop_task: - self.read_loop_task.join() - - def send(self, data, binary=None): - """Send a message to a client. 
- - :param data: The data to send to the client. Data can be of type - ``str``, ``bytes``, ``list`` or ``dict``. If a ``list`` - or ``dict``, the data will be serialized as JSON. - :param binary: ``True`` to send packet as binary, ``False`` to send - as text. If not given, unicode (Python 2) and str - (Python 3) are sent as text, and str (Python 2) and - bytes (Python 3) are sent as binary. - """ - self._send_packet(packet.Packet(packet.MESSAGE, data=data, - binary=binary)) - - def disconnect(self, abort=False): - """Disconnect from the server. - - :param abort: If set to ``True``, do not wait for background tasks - associated with the connection to end. - """ - if self.state == 'connected': - self._send_packet(packet.Packet(packet.CLOSE)) - self.queue.put(None) - self.state = 'disconnecting' - self._trigger_event('disconnect', run_async=False) - if self.current_transport == 'websocket': - self.ws.close() - if not abort: - self.read_loop_task.join() - self.state = 'disconnected' - try: - connected_clients.remove(self) - except ValueError: # pragma: no cover - pass - self._reset() - - def transport(self): - """Return the name of the transport currently in use. - - The possible values returned by this function are ``'polling'`` and - ``'websocket'``. - """ - return self.current_transport - - def start_background_task(self, target, *args, **kwargs): - """Start a background task. - - This is a utility function that applications can use to start a - background task. - - :param target: the target function to execute. - :param args: arguments to pass to the function. - :param kwargs: keyword arguments to pass to the function. - - This function returns an object compatible with the `Thread` class in - the Python standard library. The `start()` method on this object is - already called by this function. 
- """ - th = threading.Thread(target=target, args=args, kwargs=kwargs) - th.start() - return th - - def sleep(self, seconds=0): - """Sleep for the requested amount of time.""" - return time.sleep(seconds) - - def create_queue(self, *args, **kwargs): - """Create a queue object.""" - q = queue.Queue(*args, **kwargs) - q.Empty = queue.Empty - return q - - def create_event(self, *args, **kwargs): - """Create an event object.""" - return threading.Event(*args, **kwargs) - - def _reset(self): - self.state = 'disconnected' - self.sid = None - - def _connect_polling(self, url, headers, engineio_path): - """Establish a long-polling connection to the Engine.IO server.""" - if requests is None: # pragma: no cover - # not installed - self.logger.error('requests package is not installed -- cannot ' - 'send HTTP requests!') - return - self.base_url = self._get_engineio_url(url, engineio_path, 'polling') - self.logger.info('Attempting polling connection to ' + self.base_url) - r = self._send_request( - 'GET', self.base_url + self._get_url_timestamp(), headers=headers, - timeout=self.request_timeout) - if r is None: - self._reset() - raise exceptions.ConnectionError( - 'Connection refused by the server') - if r.status_code != 200: - raise exceptions.ConnectionError( - 'Unexpected status code {} in server response'.format( - r.status_code)) - try: - p = payload.Payload(encoded_payload=r.content) - except ValueError: - six.raise_from(exceptions.ConnectionError( - 'Unexpected response from server'), None) - open_packet = p.packets[0] - if open_packet.packet_type != packet.OPEN: - raise exceptions.ConnectionError( - 'OPEN packet not returned by server') - self.logger.info( - 'Polling connection accepted with ' + str(open_packet.data)) - self.sid = open_packet.data['sid'] - self.upgrades = open_packet.data['upgrades'] - self.ping_interval = open_packet.data['pingInterval'] / 1000.0 - self.ping_timeout = open_packet.data['pingTimeout'] / 1000.0 - self.current_transport = 'polling' - self.base_url += '&sid=' + self.sid - - self.state = 'connected' - connected_clients.append(self) - self._trigger_event('connect', run_async=False) - - for pkt in p.packets[1:]: - self._receive_packet(pkt) - - if 'websocket' in self.upgrades and 'websocket' in self.transports: - # attempt to upgrade to websocket - if self._connect_websocket(url, headers, engineio_path): - # upgrade to websocket succeeded, we're done here - return - - # start background tasks associated with this client - self.ping_loop_task = self.start_background_task(self._ping_loop) - self.write_loop_task = self.start_background_task(self._write_loop) - self.read_loop_task = self.start_background_task( - self._read_loop_polling) - - def _connect_websocket(self, url, headers, engineio_path): - """Establish or upgrade to a WebSocket connection with the server.""" - if websocket is None: # pragma: no cover - # not installed - self.logger.warning('websocket-client package not installed, only ' - 'polling transport is available') - return False - websocket_url = self._get_engineio_url(url, engineio_path, 'websocket') - if self.sid: - self.logger.info( - 'Attempting WebSocket upgrade to ' + websocket_url) - upgrade = True - websocket_url += '&sid=' + self.sid - else: - upgrade = False - self.base_url = websocket_url - self.logger.info( - 'Attempting WebSocket connection to ' + websocket_url) - - # get the cookies from the long-polling connection so that they can - # also be sent the the WebSocket route - cookies = None - if self.http: - cookies = '; 
'.join(["{}={}".format(cookie.name, cookie.value) - for cookie in self.http.cookies]) - try: - ws = websocket.create_connection( - websocket_url + self._get_url_timestamp(), header=headers, - cookie=cookies) - except ConnectionError: - if upgrade: - self.logger.warning( - 'WebSocket upgrade failed: connection error') - return False - else: - raise exceptions.ConnectionError('Connection error') - if upgrade: - p = packet.Packet(packet.PING, - data=six.text_type('probe')).encode() - try: - ws.send(p) - except Exception as e: # pragma: no cover - self.logger.warning( - 'WebSocket upgrade failed: unexpected send exception: %s', - str(e)) - return False - try: - p = ws.recv() - except Exception as e: # pragma: no cover - self.logger.warning( - 'WebSocket upgrade failed: unexpected recv exception: %s', - str(e)) - return False - pkt = packet.Packet(encoded_packet=p) - if pkt.packet_type != packet.PONG or pkt.data != 'probe': - self.logger.warning( - 'WebSocket upgrade failed: no PONG packet') - return False - p = packet.Packet(packet.UPGRADE).encode() - try: - ws.send(p) - except Exception as e: # pragma: no cover - self.logger.warning( - 'WebSocket upgrade failed: unexpected send exception: %s', - str(e)) - return False - self.current_transport = 'websocket' - self.logger.info('WebSocket upgrade was successful') - else: - try: - p = ws.recv() - except Exception as e: # pragma: no cover - raise exceptions.ConnectionError( - 'Unexpected recv exception: ' + str(e)) - open_packet = packet.Packet(encoded_packet=p) - if open_packet.packet_type != packet.OPEN: - raise exceptions.ConnectionError('no OPEN packet') - self.logger.info( - 'WebSocket connection accepted with ' + str(open_packet.data)) - self.sid = open_packet.data['sid'] - self.upgrades = open_packet.data['upgrades'] - self.ping_interval = open_packet.data['pingInterval'] / 1000.0 - self.ping_timeout = open_packet.data['pingTimeout'] / 1000.0 - self.current_transport = 'websocket' - - self.state = 'connected' - connected_clients.append(self) - self._trigger_event('connect', run_async=False) - self.ws = ws - - # start background tasks associated with this client - self.ping_loop_task = self.start_background_task(self._ping_loop) - self.write_loop_task = self.start_background_task(self._write_loop) - self.read_loop_task = self.start_background_task( - self._read_loop_websocket) - return True - - def _receive_packet(self, pkt): - """Handle incoming packets from the server.""" - packet_name = packet.packet_names[pkt.packet_type] \ - if pkt.packet_type < len(packet.packet_names) else 'UNKNOWN' - self.logger.info( - 'Received packet %s data %s', packet_name, - pkt.data if not isinstance(pkt.data, bytes) else '') - if pkt.packet_type == packet.MESSAGE: - self._trigger_event('message', pkt.data, run_async=True) - elif pkt.packet_type == packet.PONG: - self.pong_received = True - elif pkt.packet_type == packet.CLOSE: - self.disconnect(abort=True) - elif pkt.packet_type == packet.NOOP: - pass - else: - self.logger.error('Received unexpected packet of type %s', - pkt.packet_type) - - def _send_packet(self, pkt): - """Queue a packet to be sent to the server.""" - if self.state != 'connected': - return - self.queue.put(pkt) - self.logger.info( - 'Sending packet %s data %s', - packet.packet_names[pkt.packet_type], - pkt.data if not isinstance(pkt.data, bytes) else '') - - def _send_request( - self, method, url, headers=None, body=None, - timeout=None): # pragma: no cover - if self.http is None: - self.http = requests.Session() - try: - return 
self.http.request(method, url, headers=headers, data=body, - timeout=timeout) - except requests.exceptions.RequestException as exc: - self.logger.info('HTTP %s request to %s failed with error %s.', - method, url, exc) - - def _trigger_event(self, event, *args, **kwargs): - """Invoke an event handler.""" - run_async = kwargs.pop('run_async', False) - if event in self.handlers: - if run_async: - return self.start_background_task(self.handlers[event], *args) - else: - try: - return self.handlers[event](*args) - except: - self.logger.exception(event + ' handler error') - - def _get_engineio_url(self, url, engineio_path, transport): - """Generate the Engine.IO connection URL.""" - engineio_path = engineio_path.strip('/') - parsed_url = urllib.parse.urlparse(url) - - if transport == 'polling': - scheme = 'http' - elif transport == 'websocket': - scheme = 'ws' - else: # pragma: no cover - raise ValueError('invalid transport') - if parsed_url.scheme in ['https', 'wss']: - scheme += 's' - - return ('{scheme}://{netloc}/{path}/?{query}' - '{sep}transport={transport}&EIO=3').format( - scheme=scheme, netloc=parsed_url.netloc, - path=engineio_path, query=parsed_url.query, - sep='&' if parsed_url.query else '', - transport=transport) - - def _get_url_timestamp(self): - """Generate the Engine.IO query string timestamp.""" - return '&t=' + str(time.time()) - - def _ping_loop(self): - """This background task sends a PING to the server at the requested - interval. - """ - self.pong_received = True - self.ping_loop_event.clear() - while self.state == 'connected': - if not self.pong_received: - self.logger.info( - 'PONG response has not been received, aborting') - if self.ws: - self.ws.shutdown() - self.queue.put(None) - break - self.pong_received = False - self._send_packet(packet.Packet(packet.PING)) - self.ping_loop_event.wait(timeout=self.ping_interval) - self.logger.info('Exiting ping task') - - def _read_loop_polling(self): - """Read packets by polling the Engine.IO server.""" - while self.state == 'connected': - self.logger.info( - 'Sending polling GET request to ' + self.base_url) - r = self._send_request( - 'GET', self.base_url + self._get_url_timestamp(), - timeout=max(self.ping_interval, self.ping_timeout) + 5) - if r is None: - self.logger.warning( - 'Connection refused by the server, aborting') - self.queue.put(None) - break - if r.status_code != 200: - self.logger.warning('Unexpected status code %s in server ' - 'response, aborting', r.status_code) - self.queue.put(None) - break - try: - p = payload.Payload(encoded_payload=r.content) - except ValueError: - self.logger.warning( - 'Unexpected packet from server, aborting') - self.queue.put(None) - break - for pkt in p.packets: - self._receive_packet(pkt) - - self.logger.info('Waiting for write loop task to end') - self.write_loop_task.join() - self.logger.info('Waiting for ping loop task to end') - self.ping_loop_event.set() - self.ping_loop_task.join() - if self.state == 'connected': - self._trigger_event('disconnect', run_async=False) - try: - connected_clients.remove(self) - except ValueError: # pragma: no cover - pass - self._reset() - self.logger.info('Exiting read loop task') - - def _read_loop_websocket(self): - """Read packets from the Engine.IO WebSocket connection.""" - while self.state == 'connected': - p = None - try: - p = self.ws.recv() - except websocket.WebSocketConnectionClosedException: - self.logger.warning( - 'WebSocket connection was closed, aborting') - self.queue.put(None) - break - except Exception as e: - self.logger.info( 
- 'Unexpected error "%s", aborting', str(e)) - self.queue.put(None) - break - if isinstance(p, six.text_type): # pragma: no cover - p = p.encode('utf-8') - pkt = packet.Packet(encoded_packet=p) - self._receive_packet(pkt) - - self.logger.info('Waiting for write loop task to end') - self.write_loop_task.join() - self.logger.info('Waiting for ping loop task to end') - self.ping_loop_event.set() - self.ping_loop_task.join() - if self.state == 'connected': - self._trigger_event('disconnect', run_async=False) - try: - connected_clients.remove(self) - except ValueError: # pragma: no cover - pass - self._reset() - self.logger.info('Exiting read loop task') - - def _write_loop(self): - """This background task sends packages to the server as they are - pushed to the send queue. - """ - while self.state == 'connected': - # to simplify the timeout handling, use the maximum of the - # ping interval and ping timeout as timeout, with an extra 5 - # seconds grace period - timeout = max(self.ping_interval, self.ping_timeout) + 5 - packets = None - try: - packets = [self.queue.get(timeout=timeout)] - except self.queue.Empty: - self.logger.error('packet queue is empty, aborting') - break - if packets == [None]: - self.queue.task_done() - packets = [] - else: - while True: - try: - packets.append(self.queue.get(block=False)) - except self.queue.Empty: - break - if packets[-1] is None: - packets = packets[:-1] - self.queue.task_done() - break - if not packets: - # empty packet list returned -> connection closed - break - if self.current_transport == 'polling': - p = payload.Payload(packets=packets) - r = self._send_request( - 'POST', self.base_url, body=p.encode(), - headers={'Content-Type': 'application/octet-stream'}, - timeout=self.request_timeout) - for pkt in packets: - self.queue.task_done() - if r is None: - self.logger.warning( - 'Connection refused by the server, aborting') - break - if r.status_code != 200: - self.logger.warning('Unexpected status code %s in server ' - 'response, aborting', r.status_code) - self._reset() - break - else: - # websocket - try: - for pkt in packets: - encoded_packet = pkt.encode(always_bytes=False) - if pkt.binary: - self.ws.send_binary(encoded_packet) - else: - self.ws.send(encoded_packet) - self.queue.task_done() - except websocket.WebSocketConnectionClosedException: - self.logger.warning( - 'WebSocket connection was closed, aborting') - break - self.logger.info('Exiting write loop task') diff --git a/venv/lib/python3.6/site-packages/engineio/exceptions.py b/venv/lib/python3.6/site-packages/engineio/exceptions.py deleted file mode 100644 index fb0b3e0..0000000 --- a/venv/lib/python3.6/site-packages/engineio/exceptions.py +++ /dev/null @@ -1,22 +0,0 @@ -class EngineIOError(Exception): - pass - - -class ContentTooLongError(EngineIOError): - pass - - -class UnknownPacketError(EngineIOError): - pass - - -class QueueEmpty(EngineIOError): - pass - - -class SocketIsClosedError(EngineIOError): - pass - - -class ConnectionError(EngineIOError): - pass diff --git a/venv/lib/python3.6/site-packages/engineio/middleware.py b/venv/lib/python3.6/site-packages/engineio/middleware.py deleted file mode 100644 index d0bdcc7..0000000 --- a/venv/lib/python3.6/site-packages/engineio/middleware.py +++ /dev/null @@ -1,87 +0,0 @@ -import os -from engineio.static_files import get_static_file - - -class WSGIApp(object): - """WSGI application middleware for Engine.IO. - - This middleware dispatches traffic to an Engine.IO application. 
It can - also serve a list of static files to the client, or forward unrelated - HTTP traffic to another WSGI application. - - :param engineio_app: The Engine.IO server. Must be an instance of the - ``engineio.Server`` class. - :param wsgi_app: The WSGI app that receives all other traffic. - :param static_files: A dictionary with static file mapping rules. See the - documentation for details on this argument. - :param engineio_path: The endpoint where the Engine.IO application should - be installed. The default value is appropriate for - most cases. - - Example usage:: - - import engineio - import eventlet - - eio = engineio.Server() - app = engineio.WSGIApp(eio, static_files={ - '/': {'content_type': 'text/html', 'filename': 'index.html'}, - '/index.html': {'content_type': 'text/html', - 'filename': 'index.html'}, - }) - eventlet.wsgi.server(eventlet.listen(('', 8000)), app) - """ - def __init__(self, engineio_app, wsgi_app=None, static_files=None, - engineio_path='engine.io'): - self.engineio_app = engineio_app - self.wsgi_app = wsgi_app - self.engineio_path = engineio_path.strip('/') - self.static_files = static_files or {} - - def __call__(self, environ, start_response): - if 'gunicorn.socket' in environ: - # gunicorn saves the socket under environ['gunicorn.socket'], while - # eventlet saves it under environ['eventlet.input']. Eventlet also - # stores the socket inside a wrapper class, while gunicon writes it - # directly into the environment. To give eventlet's WebSocket - # module access to this socket when running under gunicorn, here we - # copy the socket to the eventlet format. - class Input(object): - def __init__(self, socket): - self.socket = socket - - def get_socket(self): - return self.socket - - environ['eventlet.input'] = Input(environ['gunicorn.socket']) - path = environ['PATH_INFO'] - if path is not None and \ - path.startswith('/{0}/'.format(self.engineio_path)): - return self.engineio_app.handle_request(environ, start_response) - else: - static_file = get_static_file(path, self.static_files) \ - if self.static_files else None - if static_file: - if os.path.exists(static_file['filename']): - start_response( - '200 OK', - [('Content-Type', static_file['content_type'])]) - with open(static_file['filename'], 'rb') as f: - return [f.read()] - else: - return self.not_found(start_response) - elif self.wsgi_app is not None: - return self.wsgi_app(environ, start_response) - return self.not_found(start_response) - - def not_found(self, start_response): - start_response("404 Not Found", [('Content-Type', 'text/plain')]) - return [b'Not Found'] - - -class Middleware(WSGIApp): - """This class has been renamed to ``WSGIApp`` and is now deprecated.""" - def __init__(self, engineio_app, wsgi_app=None, - engineio_path='engine.io'): - super(Middleware, self).__init__(engineio_app, wsgi_app, - engineio_path=engineio_path) diff --git a/venv/lib/python3.6/site-packages/engineio/packet.py b/venv/lib/python3.6/site-packages/engineio/packet.py deleted file mode 100644 index a3aa6d4..0000000 --- a/venv/lib/python3.6/site-packages/engineio/packet.py +++ /dev/null @@ -1,92 +0,0 @@ -import base64 -import json as _json - -import six - -(OPEN, CLOSE, PING, PONG, MESSAGE, UPGRADE, NOOP) = (0, 1, 2, 3, 4, 5, 6) -packet_names = ['OPEN', 'CLOSE', 'PING', 'PONG', 'MESSAGE', 'UPGRADE', 'NOOP'] - -binary_types = (six.binary_type, bytearray) - - -class Packet(object): - """Engine.IO packet.""" - - json = _json - - def __init__(self, packet_type=NOOP, data=None, binary=None, - encoded_packet=None): - 
self.packet_type = packet_type - self.data = data - if binary is not None: - self.binary = binary - elif isinstance(data, six.text_type): - self.binary = False - elif isinstance(data, binary_types): - self.binary = True - else: - self.binary = False - if encoded_packet: - self.decode(encoded_packet) - - def encode(self, b64=False, always_bytes=True): - """Encode the packet for transmission.""" - if self.binary and not b64: - encoded_packet = six.int2byte(self.packet_type) - else: - encoded_packet = six.text_type(self.packet_type) - if self.binary and b64: - encoded_packet = 'b' + encoded_packet - if self.binary: - if b64: - encoded_packet += base64.b64encode(self.data).decode('utf-8') - else: - encoded_packet += self.data - elif isinstance(self.data, six.string_types): - encoded_packet += self.data - elif isinstance(self.data, dict) or isinstance(self.data, list): - encoded_packet += self.json.dumps(self.data, - separators=(',', ':')) - elif self.data is not None: - encoded_packet += str(self.data) - if always_bytes and not isinstance(encoded_packet, binary_types): - encoded_packet = encoded_packet.encode('utf-8') - return encoded_packet - - def decode(self, encoded_packet): - """Decode a transmitted package.""" - b64 = False - if not isinstance(encoded_packet, binary_types): - encoded_packet = encoded_packet.encode('utf-8') - elif not isinstance(encoded_packet, bytes): - encoded_packet = bytes(encoded_packet) - self.packet_type = six.byte2int(encoded_packet[0:1]) - if self.packet_type == 98: # 'b' --> binary base64 encoded packet - self.binary = True - encoded_packet = encoded_packet[1:] - self.packet_type = six.byte2int(encoded_packet[0:1]) - self.packet_type -= 48 - b64 = True - elif self.packet_type >= 48: - self.packet_type -= 48 - self.binary = False - else: - self.binary = True - self.data = None - if len(encoded_packet) > 1: - if self.binary: - if b64: - self.data = base64.b64decode(encoded_packet[1:]) - else: - self.data = encoded_packet[1:] - else: - try: - self.data = self.json.loads( - encoded_packet[1:].decode('utf-8')) - if isinstance(self.data, int): - # do not allow integer payloads, see - # github.com/miguelgrinberg/python-engineio/issues/75 - # for background on this decision - raise ValueError - except ValueError: - self.data = encoded_packet[1:].decode('utf-8') diff --git a/venv/lib/python3.6/site-packages/engineio/payload.py b/venv/lib/python3.6/site-packages/engineio/payload.py deleted file mode 100644 index cfff557..0000000 --- a/venv/lib/python3.6/site-packages/engineio/payload.py +++ /dev/null @@ -1,80 +0,0 @@ -import six - -from . 
import packet - -from six.moves import urllib - - -class Payload(object): - """Engine.IO payload.""" - def __init__(self, packets=None, encoded_payload=None): - self.packets = packets or [] - if encoded_payload is not None: - self.decode(encoded_payload) - - def encode(self, b64=False, jsonp_index=None): - """Encode the payload for transmission.""" - encoded_payload = b'' - for pkt in self.packets: - encoded_packet = pkt.encode(b64=b64) - packet_len = len(encoded_packet) - if b64: - encoded_payload += str(packet_len).encode('utf-8') + b':' + \ - encoded_packet - else: - binary_len = b'' - while packet_len != 0: - binary_len = six.int2byte(packet_len % 10) + binary_len - packet_len = int(packet_len / 10) - if not pkt.binary: - encoded_payload += b'\0' - else: - encoded_payload += b'\1' - encoded_payload += binary_len + b'\xff' + encoded_packet - if jsonp_index is not None: - encoded_payload = b'___eio[' + \ - str(jsonp_index).encode() + \ - b']("' + \ - encoded_payload.replace(b'"', b'\\"') + \ - b'");' - return encoded_payload - - def decode(self, encoded_payload): - """Decode a transmitted payload.""" - self.packets = [] - while encoded_payload: - # JSONP POST payload starts with 'd=' - if encoded_payload.startswith(b'd='): - encoded_payload = urllib.parse.parse_qs( - encoded_payload)[b'd'][0] - - if six.byte2int(encoded_payload[0:1]) <= 1: - packet_len = 0 - i = 1 - while six.byte2int(encoded_payload[i:i + 1]) != 255: - packet_len = packet_len * 10 + six.byte2int( - encoded_payload[i:i + 1]) - i += 1 - self.packets.append(packet.Packet( - encoded_packet=encoded_payload[i + 1:i + 1 + packet_len])) - else: - i = encoded_payload.find(b':') - if i == -1: - raise ValueError('invalid payload') - - # extracting the packet out of the payload is extremely - # inefficient, because the payload needs to be treated as - # binary, but the non-binary packets have to be parsed as - # unicode. Luckily this complication only applies to long - # polling, as the websocket transport sends packets - # individually wrapped. - packet_len = int(encoded_payload[0:i]) - pkt = encoded_payload.decode('utf-8', errors='ignore')[ - i + 1: i + 1 + packet_len].encode('utf-8') - self.packets.append(packet.Packet(encoded_packet=pkt)) - - # the engine.io protocol sends the packet length in - # utf-8 characters, but we need it in bytes to be able to - # jump to the next packet in the payload - packet_len = len(pkt) - encoded_payload = encoded_payload[i + 1 + packet_len:] diff --git a/venv/lib/python3.6/site-packages/engineio/server.py b/venv/lib/python3.6/site-packages/engineio/server.py deleted file mode 100644 index 24c5ded..0000000 --- a/venv/lib/python3.6/site-packages/engineio/server.py +++ /dev/null @@ -1,659 +0,0 @@ -import gzip -import importlib -import logging -import uuid -import zlib - -import six -from six.moves import urllib - -from . import exceptions -from . import packet -from . import payload -from . import socket - -default_logger = logging.getLogger('engineio.server') - - -class Server(object): - """An Engine.IO server. - - This class implements a fully compliant Engine.IO web server with support - for websocket and long-polling transports. - - :param async_mode: The asynchronous model to use. See the Deployment - section in the documentation for a description of the - available options. Valid async modes are "threading", - "eventlet", "gevent" and "gevent_uwsgi". If this - argument is not given, "eventlet" is tried first, then - "gevent_uwsgi", then "gevent", and finally "threading". 
- The first async mode that has all its dependencies - installed is the one that is chosen. - :param ping_timeout: The time in seconds that the client waits for the - server to respond before disconnecting. The default - is 60 seconds. - :param ping_interval: The interval in seconds at which the client pings - the server. The default is 25 seconds. - :param max_http_buffer_size: The maximum size of a message when using the - polling transport. The default is 100,000,000 - bytes. - :param allow_upgrades: Whether to allow transport upgrades or not. The - default is ``True``. - :param http_compression: Whether to compress packages when using the - polling transport. The default is ``True``. - :param compression_threshold: Only compress messages when their byte size - is greater than this value. The default is - 1024 bytes. - :param cookie: Name of the HTTP cookie that contains the client session - id. If set to ``None``, a cookie is not sent to the client. - The default is ``'io'``. - :param cors_allowed_origins: Origin or list of origins that are allowed to - connect to this server. Only the same origin - is allowed by default. Set this argument to - ``'*'`` to allow all origins, or to ``[]`` to - disable CORS handling. - :param cors_credentials: Whether credentials (cookies, authentication) are - allowed in requests to this server. The default - is ``True``. - :param logger: To enable logging set to ``True`` or pass a logger object to - use. To disable logging set to ``False``. The default is - ``False``. - :param json: An alternative json module to use for encoding and decoding - packets. Custom json modules must have ``dumps`` and ``loads`` - functions that are compatible with the standard library - versions. - :param async_handlers: If set to ``True``, run message event handlers in - non-blocking threads. To run handlers synchronously, - set to ``False``. The default is ``True``. - :param monitor_clients: If set to ``True``, a background task will ensure - inactive clients are closed. Set to ``False`` to - disable the monitoring task (not recommended). The - default is ``True``. - :param kwargs: Reserved for future extensions, any additional parameters - given as keyword arguments will be silently ignored. 
- """ - compression_methods = ['gzip', 'deflate'] - event_names = ['connect', 'disconnect', 'message'] - _default_monitor_clients = True - - def __init__(self, async_mode=None, ping_timeout=60, ping_interval=25, - max_http_buffer_size=100000000, allow_upgrades=True, - http_compression=True, compression_threshold=1024, - cookie='io', cors_allowed_origins=None, - cors_credentials=True, logger=False, json=None, - async_handlers=True, monitor_clients=None, **kwargs): - self.ping_timeout = ping_timeout - self.ping_interval = ping_interval - self.max_http_buffer_size = max_http_buffer_size - self.allow_upgrades = allow_upgrades - self.http_compression = http_compression - self.compression_threshold = compression_threshold - self.cookie = cookie - self.cors_allowed_origins = cors_allowed_origins - self.cors_credentials = cors_credentials - self.async_handlers = async_handlers - self.sockets = {} - self.handlers = {} - self.start_service_task = monitor_clients \ - if monitor_clients is not None else self._default_monitor_clients - if json is not None: - packet.Packet.json = json - if not isinstance(logger, bool): - self.logger = logger - else: - self.logger = default_logger - if not logging.root.handlers and \ - self.logger.level == logging.NOTSET: - if logger: - self.logger.setLevel(logging.INFO) - else: - self.logger.setLevel(logging.ERROR) - self.logger.addHandler(logging.StreamHandler()) - modes = self.async_modes() - if async_mode is not None: - modes = [async_mode] if async_mode in modes else [] - self._async = None - self.async_mode = None - for mode in modes: - try: - self._async = importlib.import_module( - 'engineio.async_drivers.' + mode)._async - asyncio_based = self._async['asyncio'] \ - if 'asyncio' in self._async else False - if asyncio_based != self.is_asyncio_based(): - continue # pragma: no cover - self.async_mode = mode - break - except ImportError: - pass - if self.async_mode is None: - raise ValueError('Invalid async_mode specified') - if self.is_asyncio_based() and \ - ('asyncio' not in self._async or not - self._async['asyncio']): # pragma: no cover - raise ValueError('The selected async_mode is not asyncio ' - 'compatible') - if not self.is_asyncio_based() and 'asyncio' in self._async and \ - self._async['asyncio']: # pragma: no cover - raise ValueError('The selected async_mode requires asyncio and ' - 'must use the AsyncServer class') - self.logger.info('Server initialized for %s.', self.async_mode) - - def is_asyncio_based(self): - return False - - def async_modes(self): - return ['eventlet', 'gevent_uwsgi', 'gevent', 'threading'] - - def on(self, event, handler=None): - """Register an event handler. - - :param event: The event name. Can be ``'connect'``, ``'message'`` or - ``'disconnect'``. - :param handler: The function that should be invoked to handle the - event. When this parameter is not given, the method - acts as a decorator for the handler function. - - Example usage:: - - # as a decorator: - @eio.on('connect') - def connect_handler(sid, environ): - print('Connection request') - if environ['REMOTE_ADDR'] in blacklisted: - return False # reject - - # as a method: - def message_handler(sid, msg): - print('Received message: ', msg) - eio.send(sid, 'response') - eio.on('message', message_handler) - - The handler function receives the ``sid`` (session ID) for the - client as first argument. The ``'connect'`` event handler receives the - WSGI environment as a second argument, and can return ``False`` to - reject the connection. 
The ``'message'`` handler receives the message - payload as a second argument. The ``'disconnect'`` handler does not - take a second argument. - """ - if event not in self.event_names: - raise ValueError('Invalid event') - - def set_handler(handler): - self.handlers[event] = handler - return handler - - if handler is None: - return set_handler - set_handler(handler) - - def send(self, sid, data, binary=None): - """Send a message to a client. - - :param sid: The session id of the recipient client. - :param data: The data to send to the client. Data can be of type - ``str``, ``bytes``, ``list`` or ``dict``. If a ``list`` - or ``dict``, the data will be serialized as JSON. - :param binary: ``True`` to send packet as binary, ``False`` to send - as text. If not given, unicode (Python 2) and str - (Python 3) are sent as text, and str (Python 2) and - bytes (Python 3) are sent as binary. - """ - try: - socket = self._get_socket(sid) - except KeyError: - # the socket is not available - self.logger.warning('Cannot send to sid %s', sid) - return - socket.send(packet.Packet(packet.MESSAGE, data=data, binary=binary)) - - def get_session(self, sid): - """Return the user session for a client. - - :param sid: The session id of the client. - - The return value is a dictionary. Modifications made to this - dictionary are not guaranteed to be preserved unless - ``save_session()`` is called, or when the ``session`` context manager - is used. - """ - socket = self._get_socket(sid) - return socket.session - - def save_session(self, sid, session): - """Store the user session for a client. - - :param sid: The session id of the client. - :param session: The session dictionary. - """ - socket = self._get_socket(sid) - socket.session = session - - def session(self, sid): - """Return the user session for a client with context manager syntax. - - :param sid: The session id of the client. - - This is a context manager that returns the user session dictionary for - the client. Any changes that are made to this dictionary inside the - context manager block are saved back to the session. Example usage:: - - @eio.on('connect') - def on_connect(sid, environ): - username = authenticate_user(environ) - if not username: - return False - with eio.session(sid) as session: - session['username'] = username - - @eio.on('message') - def on_message(sid, msg): - with eio.session(sid) as session: - print('received message from ', session['username']) - """ - class _session_context_manager(object): - def __init__(self, server, sid): - self.server = server - self.sid = sid - self.session = None - - def __enter__(self): - self.session = self.server.get_session(sid) - return self.session - - def __exit__(self, *args): - self.server.save_session(sid, self.session) - - return _session_context_manager(self, sid) - - def disconnect(self, sid=None): - """Disconnect a client. - - :param sid: The session id of the client to close. If this parameter - is not given, then all clients are closed. - """ - if sid is not None: - try: - socket = self._get_socket(sid) - except KeyError: # pragma: no cover - # the socket was already closed or gone - pass - else: - socket.close() - del self.sockets[sid] - else: - for client in six.itervalues(self.sockets): - client.close() - self.sockets = {} - - def transport(self, sid): - """Return the name of the transport used by the client. - - The two possible values returned by this function are ``'polling'`` - and ``'websocket'``. - - :param sid: The session of the client. 
- """ - return 'websocket' if self._get_socket(sid).upgraded else 'polling' - - def handle_request(self, environ, start_response): - """Handle an HTTP request from the client. - - This is the entry point of the Engine.IO application, using the same - interface as a WSGI application. For the typical usage, this function - is invoked by the :class:`Middleware` instance, but it can be invoked - directly when the middleware is not used. - - :param environ: The WSGI environment. - :param start_response: The WSGI ``start_response`` function. - - This function returns the HTTP response body to deliver to the client - as a byte sequence. - """ - if self.cors_allowed_origins != []: - # Validate the origin header if present - # This is important for WebSocket more than for HTTP, since - # browsers only apply CORS controls to HTTP. - origin = environ.get('HTTP_ORIGIN') - if origin: - allowed_origins = self._cors_allowed_origins(environ) - if allowed_origins is not None and origin not in \ - allowed_origins: - self.logger.info(origin + ' is not an accepted origin.') - r = self._bad_request() - start_response(r['status'], r['headers']) - return [r['response']] - - method = environ['REQUEST_METHOD'] - query = urllib.parse.parse_qs(environ.get('QUERY_STRING', '')) - - sid = query['sid'][0] if 'sid' in query else None - b64 = False - jsonp = False - jsonp_index = None - - if 'b64' in query: - if query['b64'][0] == "1" or query['b64'][0].lower() == "true": - b64 = True - if 'j' in query: - jsonp = True - try: - jsonp_index = int(query['j'][0]) - except (ValueError, KeyError, IndexError): - # Invalid JSONP index number - pass - - if jsonp and jsonp_index is None: - self.logger.warning('Invalid JSONP index number') - r = self._bad_request() - elif method == 'GET': - if sid is None: - transport = query.get('transport', ['polling'])[0] - if transport != 'polling' and transport != 'websocket': - self.logger.warning('Invalid transport %s', transport) - r = self._bad_request() - else: - r = self._handle_connect(environ, start_response, - transport, b64, jsonp_index) - else: - if sid not in self.sockets: - self.logger.warning('Invalid session %s', sid) - r = self._bad_request() - else: - socket = self._get_socket(sid) - try: - packets = socket.handle_get_request( - environ, start_response) - if isinstance(packets, list): - r = self._ok(packets, b64=b64, - jsonp_index=jsonp_index) - else: - r = packets - except exceptions.EngineIOError: - if sid in self.sockets: # pragma: no cover - self.disconnect(sid) - r = self._bad_request() - if sid in self.sockets and self.sockets[sid].closed: - del self.sockets[sid] - elif method == 'POST': - if sid is None or sid not in self.sockets: - self.logger.warning('Invalid session %s', sid) - r = self._bad_request() - else: - socket = self._get_socket(sid) - try: - socket.handle_post_request(environ) - r = self._ok(jsonp_index=jsonp_index) - except exceptions.EngineIOError: - if sid in self.sockets: # pragma: no cover - self.disconnect(sid) - r = self._bad_request() - except: # pragma: no cover - # for any other unexpected errors, we log the error - # and keep going - self.logger.exception('post request handler error') - r = self._ok(jsonp_index=jsonp_index) - elif method == 'OPTIONS': - r = self._ok() - else: - self.logger.warning('Method %s not supported', method) - r = self._method_not_found() - - if not isinstance(r, dict): - return r or [] - if self.http_compression and \ - len(r['response']) >= self.compression_threshold: - encodings = [e.split(';')[0].strip() for e in - 
environ.get('HTTP_ACCEPT_ENCODING', '').split(',')] - for encoding in encodings: - if encoding in self.compression_methods: - r['response'] = \ - getattr(self, '_' + encoding)(r['response']) - r['headers'] += [('Content-Encoding', encoding)] - break - cors_headers = self._cors_headers(environ) - start_response(r['status'], r['headers'] + cors_headers) - return [r['response']] - - def start_background_task(self, target, *args, **kwargs): - """Start a background task using the appropriate async model. - - This is a utility function that applications can use to start a - background task using the method that is compatible with the - selected async mode. - - :param target: the target function to execute. - :param args: arguments to pass to the function. - :param kwargs: keyword arguments to pass to the function. - - This function returns an object compatible with the `Thread` class in - the Python standard library. The `start()` method on this object is - already called by this function. - """ - th = self._async['thread'](target=target, args=args, kwargs=kwargs) - th.start() - return th # pragma: no cover - - def sleep(self, seconds=0): - """Sleep for the requested amount of time using the appropriate async - model. - - This is a utility function that applications can use to put a task to - sleep without having to worry about using the correct call for the - selected async mode. - """ - return self._async['sleep'](seconds) - - def create_queue(self, *args, **kwargs): - """Create a queue object using the appropriate async model. - - This is a utility function that applications can use to create a queue - without having to worry about using the correct call for the selected - async mode. - """ - return self._async['queue'](*args, **kwargs) - - def get_queue_empty_exception(self): - """Return the queue empty exception for the appropriate async model. - - This is a utility function that applications can use to work with a - queue without having to worry about using the correct call for the - selected async mode. - """ - return self._async['queue_empty'] - - def create_event(self, *args, **kwargs): - """Create an event object using the appropriate async model. - - This is a utility function that applications can use to create an - event without having to worry about using the correct call for the - selected async mode. 
- """ - return self._async['event'](*args, **kwargs) - - def _generate_id(self): - """Generate a unique session id.""" - return uuid.uuid4().hex - - def _handle_connect(self, environ, start_response, transport, b64=False, - jsonp_index=None): - """Handle a client connection request.""" - if self.start_service_task: - # start the service task to monitor connected clients - self.start_service_task = False - self.start_background_task(self._service_task) - - sid = self._generate_id() - s = socket.Socket(self, sid) - self.sockets[sid] = s - - pkt = packet.Packet( - packet.OPEN, {'sid': sid, - 'upgrades': self._upgrades(sid, transport), - 'pingTimeout': int(self.ping_timeout * 1000), - 'pingInterval': int(self.ping_interval * 1000)}) - s.send(pkt) - - ret = self._trigger_event('connect', sid, environ, run_async=False) - if ret is False: - del self.sockets[sid] - self.logger.warning('Application rejected connection') - return self._unauthorized() - - if transport == 'websocket': - ret = s.handle_get_request(environ, start_response) - if s.closed: - # websocket connection ended, so we are done - del self.sockets[sid] - return ret - else: - s.connected = True - headers = None - if self.cookie: - headers = [('Set-Cookie', self.cookie + '=' + sid)] - try: - return self._ok(s.poll(), headers=headers, b64=b64, - jsonp_index=jsonp_index) - except exceptions.QueueEmpty: - return self._bad_request() - - def _upgrades(self, sid, transport): - """Return the list of possible upgrades for a client connection.""" - if not self.allow_upgrades or self._get_socket(sid).upgraded or \ - self._async['websocket'] is None or transport == 'websocket': - return [] - return ['websocket'] - - def _trigger_event(self, event, *args, **kwargs): - """Invoke an event handler.""" - run_async = kwargs.pop('run_async', False) - if event in self.handlers: - if run_async: - return self.start_background_task(self.handlers[event], *args) - else: - try: - return self.handlers[event](*args) - except: - self.logger.exception(event + ' handler error') - if event == 'connect': - # if connect handler raised error we reject the - # connection - return False - - def _get_socket(self, sid): - """Return the socket object for a given session.""" - try: - s = self.sockets[sid] - except KeyError: - raise KeyError('Session not found') - if s.closed: - del self.sockets[sid] - raise KeyError('Session is disconnected') - return s - - def _ok(self, packets=None, headers=None, b64=False, jsonp_index=None): - """Generate a successful HTTP response.""" - if packets is not None: - if headers is None: - headers = [] - if b64: - headers += [('Content-Type', 'text/plain; charset=UTF-8')] - else: - headers += [('Content-Type', 'application/octet-stream')] - return {'status': '200 OK', - 'headers': headers, - 'response': payload.Payload(packets=packets).encode( - b64=b64, jsonp_index=jsonp_index)} - else: - return {'status': '200 OK', - 'headers': [('Content-Type', 'text/plain')], - 'response': b'OK'} - - def _bad_request(self): - """Generate a bad request HTTP error response.""" - return {'status': '400 BAD REQUEST', - 'headers': [('Content-Type', 'text/plain')], - 'response': b'Bad Request'} - - def _method_not_found(self): - """Generate a method not found HTTP error response.""" - return {'status': '405 METHOD NOT FOUND', - 'headers': [('Content-Type', 'text/plain')], - 'response': b'Method Not Found'} - - def _unauthorized(self): - """Generate a unauthorized HTTP error response.""" - return {'status': '401 UNAUTHORIZED', - 'headers': [('Content-Type', 
'text/plain')], - 'response': b'Unauthorized'} - - def _cors_allowed_origins(self, environ): - default_origin = None - if 'wsgi.url_scheme' in environ and 'HTTP_HOST' in environ: - default_origin = '{scheme}://{host}'.format( - scheme=environ['wsgi.url_scheme'], host=environ['HTTP_HOST']) - if self.cors_allowed_origins is None: - allowed_origins = [default_origin] \ - if default_origin is not None else [] - elif self.cors_allowed_origins == '*': - allowed_origins = None - elif isinstance(self.cors_allowed_origins, six.string_types): - allowed_origins = [self.cors_allowed_origins] - else: - allowed_origins = self.cors_allowed_origins - return allowed_origins - - def _cors_headers(self, environ): - """Return the cross-origin-resource-sharing headers.""" - if self.cors_allowed_origins == []: - # special case, CORS handling is completely disabled - return [] - headers = [] - allowed_origins = self._cors_allowed_origins(environ) - if 'HTTP_ORIGIN' in environ and \ - (allowed_origins is None or environ['HTTP_ORIGIN'] in - allowed_origins): - headers = [('Access-Control-Allow-Origin', environ['HTTP_ORIGIN'])] - if environ['REQUEST_METHOD'] == 'OPTIONS': - headers += [('Access-Control-Allow-Methods', 'OPTIONS, GET, POST')] - if 'HTTP_ACCESS_CONTROL_REQUEST_HEADERS' in environ: - headers += [('Access-Control-Allow-Headers', - environ['HTTP_ACCESS_CONTROL_REQUEST_HEADERS'])] - if self.cors_credentials: - headers += [('Access-Control-Allow-Credentials', 'true')] - return headers - - def _gzip(self, response): - """Apply gzip compression to a response.""" - bytesio = six.BytesIO() - with gzip.GzipFile(fileobj=bytesio, mode='w') as gz: - gz.write(response) - return bytesio.getvalue() - - def _deflate(self, response): - """Apply deflate compression to a response.""" - return zlib.compress(response) - - def _service_task(self): # pragma: no cover - """Monitor connected clients and clean up those that time out.""" - while True: - if len(self.sockets) == 0: - # nothing to do - self.sleep(self.ping_timeout) - continue - - # go through the entire client list in a ping interval cycle - sleep_interval = self.ping_timeout / len(self.sockets) - - try: - # iterate over the current clients - for s in self.sockets.copy().values(): - if not s.closing and not s.closed: - s.check_ping_timeout() - self.sleep(sleep_interval) - except (SystemExit, KeyboardInterrupt): - self.logger.info('service task canceled') - break - except: - # an unexpected exception has occurred, log it and continue - self.logger.exception('service task exception') diff --git a/venv/lib/python3.6/site-packages/engineio/socket.py b/venv/lib/python3.6/site-packages/engineio/socket.py deleted file mode 100644 index 495088a..0000000 --- a/venv/lib/python3.6/site-packages/engineio/socket.py +++ /dev/null @@ -1,247 +0,0 @@ -import six -import sys -import time - -from . import exceptions -from . import packet -from . 
import payload - - -class Socket(object): - """An Engine.IO socket.""" - upgrade_protocols = ['websocket'] - - def __init__(self, server, sid): - self.server = server - self.sid = sid - self.queue = self.server.create_queue() - self.last_ping = time.time() - self.connected = False - self.upgrading = False - self.upgraded = False - self.packet_backlog = [] - self.closing = False - self.closed = False - self.session = {} - - def poll(self): - """Wait for packets to send to the client.""" - queue_empty = self.server.get_queue_empty_exception() - try: - packets = [self.queue.get(timeout=self.server.ping_timeout)] - self.queue.task_done() - except queue_empty: - raise exceptions.QueueEmpty() - if packets == [None]: - return [] - while True: - try: - packets.append(self.queue.get(block=False)) - self.queue.task_done() - except queue_empty: - break - return packets - - def receive(self, pkt): - """Receive packet from the client.""" - packet_name = packet.packet_names[pkt.packet_type] \ - if pkt.packet_type < len(packet.packet_names) else 'UNKNOWN' - self.server.logger.info('%s: Received packet %s data %s', - self.sid, packet_name, - pkt.data if not isinstance(pkt.data, bytes) - else '') - if pkt.packet_type == packet.PING: - self.last_ping = time.time() - self.send(packet.Packet(packet.PONG, pkt.data)) - elif pkt.packet_type == packet.MESSAGE: - self.server._trigger_event('message', self.sid, pkt.data, - run_async=self.server.async_handlers) - elif pkt.packet_type == packet.UPGRADE: - self.send(packet.Packet(packet.NOOP)) - elif pkt.packet_type == packet.CLOSE: - self.close(wait=False, abort=True) - else: - raise exceptions.UnknownPacketError() - - def check_ping_timeout(self): - """Make sure the client is still sending pings. - - This helps detect disconnections for long-polling clients. - """ - if self.closed: - raise exceptions.SocketIsClosedError() - if time.time() - self.last_ping > self.server.ping_interval + 5: - self.server.logger.info('%s: Client is gone, closing socket', - self.sid) - # Passing abort=False here will cause close() to write a - # CLOSE packet. 
This has the effect of updating half-open sockets - # to their correct state of disconnected - self.close(wait=False, abort=False) - return False - return True - - def send(self, pkt): - """Send a packet to the client.""" - if not self.check_ping_timeout(): - return - if self.upgrading: - self.packet_backlog.append(pkt) - else: - self.queue.put(pkt) - self.server.logger.info('%s: Sending packet %s data %s', - self.sid, packet.packet_names[pkt.packet_type], - pkt.data if not isinstance(pkt.data, bytes) - else '') - - def handle_get_request(self, environ, start_response): - """Handle a long-polling GET request from the client.""" - connections = [ - s.strip() - for s in environ.get('HTTP_CONNECTION', '').lower().split(',')] - transport = environ.get('HTTP_UPGRADE', '').lower() - if 'upgrade' in connections and transport in self.upgrade_protocols: - self.server.logger.info('%s: Received request to upgrade to %s', - self.sid, transport) - return getattr(self, '_upgrade_' + transport)(environ, - start_response) - try: - packets = self.poll() - except exceptions.QueueEmpty: - exc = sys.exc_info() - self.close(wait=False) - six.reraise(*exc) - return packets - - def handle_post_request(self, environ): - """Handle a long-polling POST request from the client.""" - length = int(environ.get('CONTENT_LENGTH', '0')) - if length > self.server.max_http_buffer_size: - raise exceptions.ContentTooLongError() - else: - body = environ['wsgi.input'].read(length) - p = payload.Payload(encoded_payload=body) - for pkt in p.packets: - self.receive(pkt) - - def close(self, wait=True, abort=False): - """Close the socket connection.""" - if not self.closed and not self.closing: - self.closing = True - self.server._trigger_event('disconnect', self.sid, run_async=False) - if not abort: - self.send(packet.Packet(packet.CLOSE)) - self.closed = True - self.queue.put(None) - if wait: - self.queue.join() - - def _upgrade_websocket(self, environ, start_response): - """Upgrade the connection from polling to websocket.""" - if self.upgraded: - raise IOError('Socket has been upgraded already') - if self.server._async['websocket'] is None: - # the selected async mode does not support websocket - return self.server._bad_request() - ws = self.server._async['websocket'](self._websocket_handler) - return ws(environ, start_response) - - def _websocket_handler(self, ws): - """Engine.IO handler for websocket transport.""" - # try to set a socket timeout matching the configured ping interval - for attr in ['_sock', 'socket']: # pragma: no cover - if hasattr(ws, attr) and hasattr(getattr(ws, attr), 'settimeout'): - getattr(ws, attr).settimeout(self.server.ping_timeout) - - if self.connected: - # the socket was already connected, so this is an upgrade - self.upgrading = True # hold packet sends during the upgrade - - pkt = ws.wait() - decoded_pkt = packet.Packet(encoded_packet=pkt) - if decoded_pkt.packet_type != packet.PING or \ - decoded_pkt.data != 'probe': - self.server.logger.info( - '%s: Failed websocket upgrade, no PING packet', self.sid) - return [] - ws.send(packet.Packet( - packet.PONG, - data=six.text_type('probe')).encode(always_bytes=False)) - self.queue.put(packet.Packet(packet.NOOP)) # end poll - - pkt = ws.wait() - decoded_pkt = packet.Packet(encoded_packet=pkt) - if decoded_pkt.packet_type != packet.UPGRADE: - self.upgraded = False - self.server.logger.info( - ('%s: Failed websocket upgrade, expected UPGRADE packet, ' - 'received %s instead.'), - self.sid, pkt) - return [] - self.upgraded = True - - # flush any packets 
that were sent during the upgrade - for pkt in self.packet_backlog: - self.queue.put(pkt) - self.packet_backlog = [] - self.upgrading = False - else: - self.connected = True - self.upgraded = True - - # start separate writer thread - def writer(): - while True: - packets = None - try: - packets = self.poll() - except exceptions.QueueEmpty: - break - if not packets: - # empty packet list returned -> connection closed - break - try: - for pkt in packets: - ws.send(pkt.encode(always_bytes=False)) - except: - break - writer_task = self.server.start_background_task(writer) - - self.server.logger.info( - '%s: Upgrade to websocket successful', self.sid) - - while True: - p = None - try: - p = ws.wait() - except Exception as e: - # if the socket is already closed, we can assume this is a - # downstream error of that - if not self.closed: # pragma: no cover - self.server.logger.info( - '%s: Unexpected error "%s", closing connection', - self.sid, str(e)) - break - if p is None: - # connection closed by client - break - if isinstance(p, six.text_type): # pragma: no cover - p = p.encode('utf-8') - pkt = packet.Packet(encoded_packet=p) - try: - self.receive(pkt) - except exceptions.UnknownPacketError: # pragma: no cover - pass - except exceptions.SocketIsClosedError: # pragma: no cover - self.server.logger.info('Receive error -- socket is closed') - break - except: # pragma: no cover - # if we get an unexpected exception we log the error and exit - # the connection properly - self.server.logger.exception('Unknown receive error') - break - - self.queue.put(None) # unlock the writer task so that it can exit - writer_task.join() - self.close(wait=False, abort=True) - - return [] diff --git a/venv/lib/python3.6/site-packages/engineio/static_files.py b/venv/lib/python3.6/site-packages/engineio/static_files.py deleted file mode 100644 index 3058f6e..0000000 --- a/venv/lib/python3.6/site-packages/engineio/static_files.py +++ /dev/null @@ -1,55 +0,0 @@ -content_types = { - 'css': 'text/css', - 'gif': 'image/gif', - 'html': 'text/html', - 'jpg': 'image/jpeg', - 'js': 'application/javascript', - 'json': 'application/json', - 'png': 'image/png', - 'txt': 'text/plain', -} - - -def get_static_file(path, static_files): - """Return the local filename and content type for the requested static - file URL. - - :param path: the path portion of the requested URL. - :param static_files: a static file configuration dictionary. - - This function returns a dictionary with two keys, "filename" and - "content_type". If the requested URL does not match any static file, the - return value is None. 
- """ - if path in static_files: - f = static_files[path] - else: - f = None - rest = '' - while path != '': - path, last = path.rsplit('/', 1) - rest = '/' + last + rest - if path in static_files: - f = static_files[path] + rest - break - elif path + '/' in static_files: - f = static_files[path + '/'] + rest[1:] - break - if f: - if isinstance(f, str): - f = {'filename': f} - if f['filename'].endswith('/'): - if '' in static_files: - if isinstance(static_files[''], str): - f['filename'] += static_files[''] - else: - f['filename'] += static_files['']['filename'] - if 'content_type' in static_files['']: - f['content_type'] = static_files['']['content_type'] - else: - f['filename'] += 'index.html' - if 'content_type' not in f: - ext = f['filename'].rsplit('.')[-1] - f['content_type'] = content_types.get( - ext, 'application/octet-stream') - return f diff --git a/venv/lib/python3.6/site-packages/eventlet-0.25.0.dist-info/AUTHORS b/venv/lib/python3.6/site-packages/eventlet-0.25.0.dist-info/AUTHORS deleted file mode 100644 index 1190dce..0000000 --- a/venv/lib/python3.6/site-packages/eventlet-0.25.0.dist-info/AUTHORS +++ /dev/null @@ -1,174 +0,0 @@ -Maintainer (i.e., Who To Hassle If You Find Bugs) -------------------------------------------------- -Jakub Stasiak -Nat Goodspeed - -The current maintainer(s) are volunteers with unrelated jobs. -We can only pay sporadic attention to responding to your issue and pull request submissions. -Your patience is greatly appreciated! - -Original Authors ----------------- -* Bob Ippolito -* Donovan Preston - -Contributors ------------- -* AG Projects -* Chris AtLee -* R\. Tyler Ballance -* Denis Bilenko -* Mike Barton -* Patrick Carlisle -* Ben Ford -* Andrew Godwin -* Brantley Harris -* Gregory Holt -* Joe Malicki -* Chet Murthy -* Eugene Oden -* radix -* Scott Robinson -* Tavis Rudd -* Sergey Shepelev -* Chuck Thier -* Nick V -* Daniele Varrazzo -* Ryan Williams -* Geoff Salmon -* Edward George -* Floris Bruynooghe -* Paul Oppenheim -* Jakub Stasiak -* Aldona Majorek -* Victor Sergeyev -* David Szotten -* Victor Stinner -* Samuel Merritt -* Eric Urban - -Linden Lab Contributors ------------------------ -* John Beisley -* Tess Chu -* Nat Goodspeed -* Dave Kaprielian -* Kartic Krishnamurthy -* Bryan O'Sullivan -* Kent Quirk -* Ryan Williams - -Thanks To ---------- -* AdamKG, giving the hint that invalid argument errors were introduced post-0.9.0 -* Luke Tucker, bug report regarding wsgi + webob -* Taso Du Val, reproing an exception squelching bug, saving children's lives ;-) -* Luci Stanescu, for reporting twisted hub bug -* Marcus Cavanaugh, for test case code that has been incredibly useful in tracking down bugs -* Brian Brunswick, for many helpful questions and suggestions on the mailing list -* Cesar Alaniz, for uncovering bugs of great import -* the grugq, for contributing patches, suggestions, and use cases -* Ralf Schmitt, for wsgi/webob incompatibility bug report and suggested fix -* Benoit Chesneau, bug report on green.os and patch to fix it -* Slant, better iterator implementation in tpool -* Ambroff, nice pygtk hub example -* Michael Carter, websocket patch to improve location handling -* Marcin Bachry, nice repro of a bug and good diagnosis leading to the fix -* David Ziegler, reporting issue #53 -* Favo Yang, twisted hub patch -* Schmir, patch that fixes readline method with chunked encoding in wsgi.py, advice on patcher -* Slide, for open-sourcing gogreen -* Holger Krekel, websocket example small fix -* mikepk, debugging MySQLdb/tpool issues 
-* Malcolm Cleaton, patch for Event exception handling -* Alexey Borzenkov, for finding and fixing issues with Windows error detection (#66, #69), reducing dependencies in zeromq hub (#71) -* Anonymous, finding and fixing error in websocket chat example (#70) -* Edward George, finding and fixing an issue in the [e]poll hubs (#74), and in convenience (#86) -* Ruijun Luo, figuring out incorrect openssl import for wrap_ssl (#73) -* rfk, patch to get green zmq to respect noblock flag. -* Soren Hansen, finding and fixing issue in subprocess (#77) -* Stefano Rivera, making tests pass in absence of postgres (#78) -* Joshua Kwan, fixing busy-wait in eventlet.green.ssl. -* Nick Vatamaniuc, Windows SO_REUSEADDR patch (#83) -* Clay Gerrard, wsgi handle socket closed by client (#95) -* Eric Windisch, zmq getsockopt(EVENTS) wake correct threads (pull request 22) -* Raymond Lu, fixing busy-wait in eventlet.green.ssl.socket.sendall() -* Thomas Grainger, webcrawler example small fix, "requests" library import bug report, Travis integration -* Peter Portante, save syscalls in socket.dup(), environ[REMOTE_PORT] in wsgi -* Peter Skirko, fixing socket.settimeout(0) bug -* Derk Tegeler, Pre-cache proxied GreenSocket methods (Bitbucket #136) -* David Malcolm, optional "timeout" argument to the subprocess module (Bitbucket #89) -* David Goetz, wsgi: Allow minimum_chunk_size to be overriden on a per request basis -* Dmitry Orlov, websocket: accept Upgrade: websocket (lowercase) -* Zhang Hua, profile: accumulate results between runs (Bitbucket #162) -* Astrum Kuo, python3 compatibility fixes; greenthread.unlink() method -* Davanum Srinivas, Python3 compatibility fixes -* Dmitriy Kruglyak, PyPy 2.3 compatibility fix -* Jan Grant, Michael Kerrin, second simultaneous read (GH-94) -* Simon Jagoe, Python3 octal literal fix -* Tushar Gohad, wsgi: Support optional headers w/ "100 Continue" responses -* raylu, fixing operator precedence bug in eventlet.wsgi -* Christoph Gysin, PEP 8 conformance -* Andrey Gubarev -* Corey Wright -* Deva -* Johannes Erdfelt -* Kevin -* QthCN -* Steven Hardy -* Stuart McLaren -* Tomaz Muraus -* ChangBo Guo(gcb), fixing typos in the documentation (GH-194) -* Marc Abramowitz, fixing the README so it renders correctly on PyPI (GH-183) -* Shaun Stanworth, equal chance to acquire semaphore from different greenthreads (GH-136) -* Lior Neudorfer, Make sure SSL retries are done using the exact same data buffer -* Sean Dague, wsgi: Provide python logging compatibility -* Tim Simmons, Use _socket_nodns and select in dnspython support -* Antonio Cuni, fix fd double close on PyPy -* Seyeong Kim -* Ihar Hrachyshka -* Janusz Harkot -* Fukuchi Daisuke -* Ramakrishnan G -* ashutosh-mishra -* Azhar Hussain -* Josh VanderLinden -* Levente Polyak -* Phus Lu -* Collin Stocks, fixing eventlet.green.urllib2.urlopen() so it accepts cafile, capath, or cadefault arguments -* Alexis Lee -* Steven Erenst -* Piët Delport -* Alex Villacís Lasso -* Yashwardhan Singh -* Tim Burke -* OndÅ™ej Nový -* Jarrod Johnson -* Whitney Young -* Matthew D. 
Pagel -* Matt Yule-Bennett -* Artur Stawiarski -* Tal Wrii -* Roman Podoliaka -* Gevorg Davoian -* OndÅ™ej Kobližek -* Yuichi Bando -* Feng -* Aayush Kasurde -* Linbing -* Geoffrey Thomas -* Costas Christofi, adding permessage-deflate weboscket extension support -* Peter Kovary, adding permessage-deflate weboscket extension support -* Konstantin Enchant -* James Page -* Stefan Nica -* Haikel Guemar -* Miguel Grinberg -* Chris Kerr -* Anthony Sottile -* Quan Tian -* orishoshan -* Matt Bennett -* Ralf Haferkamp -* Jake Tesler -* Aayush Kasurde diff --git a/venv/lib/python3.6/site-packages/eventlet-0.25.0.dist-info/INSTALLER b/venv/lib/python3.6/site-packages/eventlet-0.25.0.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/venv/lib/python3.6/site-packages/eventlet-0.25.0.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/venv/lib/python3.6/site-packages/eventlet-0.25.0.dist-info/LICENSE b/venv/lib/python3.6/site-packages/eventlet-0.25.0.dist-info/LICENSE deleted file mode 100644 index 2ddd0d9..0000000 --- a/venv/lib/python3.6/site-packages/eventlet-0.25.0.dist-info/LICENSE +++ /dev/null @@ -1,23 +0,0 @@ -Unless otherwise noted, the files in Eventlet are under the following MIT license: - -Copyright (c) 2005-2006, Bob Ippolito -Copyright (c) 2007-2010, Linden Research, Inc. -Copyright (c) 2008-2010, Eventlet Contributors (see AUTHORS) - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. 
diff --git a/venv/lib/python3.6/site-packages/eventlet-0.25.0.dist-info/METADATA b/venv/lib/python3.6/site-packages/eventlet-0.25.0.dist-info/METADATA deleted file mode 100644 index 2492dbe..0000000 --- a/venv/lib/python3.6/site-packages/eventlet-0.25.0.dist-info/METADATA +++ /dev/null @@ -1,106 +0,0 @@ -Metadata-Version: 2.1 -Name: eventlet -Version: 0.25.0 -Summary: Highly concurrent networking library -Home-page: http://eventlet.net -Author: Linden Lab -Author-email: eventletdev@lists.secondlife.com -License: UNKNOWN -Platform: UNKNOWN -Classifier: Development Status :: 4 - Beta -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Operating System :: Microsoft :: Windows -Classifier: Operating System :: POSIX -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python -Classifier: Topic :: Internet -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Requires-Dist: dnspython (>=1.15.0) -Requires-Dist: greenlet (>=0.3) -Requires-Dist: monotonic (>=1.4) -Requires-Dist: six (>=1.10.0) -Requires-Dist: enum34 ; python_version < "3.4" - -Eventlet is a concurrent networking library for Python that allows you to change how you run your code, not how you write it. - -It uses epoll or libevent for highly scalable non-blocking I/O. Coroutines ensure that the developer uses a blocking style of programming that is similar to threading, but provide the benefits of non-blocking I/O. The event dispatch is implicit, which means you can easily use Eventlet from the Python interpreter, or as a small part of a larger application. - -It's easy to get started using Eventlet, and easy to convert existing -applications to use it. Start off by looking at the `examples`_, -`common design patterns`_, and the list of `basic API primitives`_. - -.. _examples: http://eventlet.net/doc/examples.html -.. _common design patterns: http://eventlet.net/doc/design_patterns.html -.. _basic API primitives: http://eventlet.net/doc/basic_usage.html - - -Quick Example -=============== - -Here's something you can try right on the command line:: - - % python - >>> import eventlet - >>> from eventlet.green import urllib2 - >>> gt = eventlet.spawn(urllib2.urlopen, 'http://eventlet.net') - >>> gt2 = eventlet.spawn(urllib2.urlopen, 'http://secondlife.com') - >>> gt2.wait() - >>> gt.wait() - - -Getting Eventlet -================== - -The easiest way to get Eventlet is to use pip:: - - pip install -U eventlet - -To install latest development version once:: - - pip install -U https://github.com/eventlet/eventlet/archive/master.zip - - -Building the Docs Locally -========================= - -To build a complete set of HTML documentation, you must have Sphinx, which can be found at http://sphinx.pocoo.org/ (or installed with `pip install Sphinx`):: - - cd doc - make html - -The built html files can be found in doc/_build/html afterward. - - -Twisted -======= - -Eventlet had Twisted hub in the past, but community interest to this integration has dropped over time, -now it is not supported, so with apologies for any inconvenience we discontinue Twisted integration. 
- -If you have a project that uses Eventlet with Twisted, your options are: - -* use last working release eventlet==0.14 -* start a new project with only Twisted hub code, identify and fix problems. As of eventlet 0.13, `EVENTLET_HUB` environment variable can point to external modules. -* fork Eventlet, revert Twisted removal, identify and fix problems. This work may be merged back into main project. - -Apologies for any inconvenience. - - -Flair -===== - -.. image:: https://travis-ci.org/eventlet/eventlet.svg?branch=master - :target: https://travis-ci.org/eventlet/eventlet - -.. image:: https://codecov.io/gh/eventlet/eventlet/branch/master/graph/badge.svg - :target: https://codecov.io/gh/eventlet/eventlet - - diff --git a/venv/lib/python3.6/site-packages/eventlet-0.25.0.dist-info/RECORD b/venv/lib/python3.6/site-packages/eventlet-0.25.0.dist-info/RECORD deleted file mode 100644 index c2da3fc..0000000 --- a/venv/lib/python3.6/site-packages/eventlet-0.25.0.dist-info/RECORD +++ /dev/null @@ -1,187 +0,0 @@ -eventlet/__init__.py,sha256=ttio9pCPAVFiww_8ZaEZ6FRE7j2KTagFEH6CdxXBW90,2214 -eventlet/backdoor.py,sha256=QehoeG4OKstofEZVv6xa7D41sC8OKKKJLL9TKyvsozs,3869 -eventlet/convenience.py,sha256=xjaCXWHxhs_iFe83ygOTqQ-ui1tsYbt9lMtJxM04Awk,7173 -eventlet/corolocal.py,sha256=XLpOq3jdWyRxwvD9esg3edoGBIDSZ2NSZqX1WXeosAU,1741 -eventlet/coros.py,sha256=horXHsHTANuqNzWRY4wC-54OUVSy8ERC8U2jQKE-kUU,2077 -eventlet/dagpool.py,sha256=EvLUDyvKE_jWlkiwiA97E_-OR2zvjRWxekMVwGuIM34,26303 -eventlet/db_pool.py,sha256=n4wU6wpgqjCkdwhxjycbyrA46dqzs7yayRWD4cfSZ8E,15763 -eventlet/debug.py,sha256=B6as5ianRkkwKGzE3o4iWEMh93gTnhB9316w0En3cmk,6326 -eventlet/event.py,sha256=iH9HQwWGN6py8Izqyc1X6rO_3OEWShRQZiuj3Xj9834,7518 -eventlet/greenpool.py,sha256=01zT2bQGYY9njOt_mEsSO4-HxvYKq7dAl-yanJseQHg,9825 -eventlet/greenthread.py,sha256=ij4C-56p3yfW_LNt1nLGEixeem-tTWfnxy2s8sRwhmU,11596 -eventlet/patcher.py,sha256=G2dRv20vmOjXQT6BTYb5otHTRLUt_CiHgvwzVIFU13k,17324 -eventlet/pools.py,sha256=LuqUR_95_fNuppZ6OK9CVIj2JxsUttc7HL-tqHnRsmo,6299 -eventlet/queue.py,sha256=hGMLS5ztBwHGepwW8hxAnwIkv2qbgVTTdt61eUPvlF8,18282 -eventlet/semaphore.py,sha256=0ZV1Uf9D1-0av2rTOqstiIjkHX6xz6F8-cowqtQUgXI,12226 -eventlet/timeout.py,sha256=HfbOXARbGLHITJ7M8cGoOigEIyD6a6zUXPzAL76um3U,6481 -eventlet/tpool.py,sha256=Ej31KYL8lQw7F8VUbWXG7WnMwxPR0gu6JLesSaU_WRc,10714 -eventlet/websocket.py,sha256=g0mwQ3ZUlEDDP3RYuvzgZKmv6zsK6bQJx-AW5FM6Y6w,32462 -eventlet/wsgi.py,sha256=gnRzMxarprq7eR-oOSgaSFdN37_HVEhcHZvo5v3iogI,37611 -eventlet/green/BaseHTTPServer.py,sha256=2Ft0aojrxibdqpoeMlv7kcJi_QzFWbsbVbQ798wQOQg,346 -eventlet/green/CGIHTTPServer.py,sha256=BKc7T1hs9ZG9BWIAYih9D5Dq6OYqv_ntEo4LcDIC2mA,552 -eventlet/green/MySQLdb.py,sha256=X_FURsoOfZnv2htH6nG8_uoBygAqY16_g4groiSt9R8,1201 -eventlet/green/Queue.py,sha256=9M3RUmEBVR5P96TlKgCcwOIQvVlc7u16TyllHa9WPrQ,833 -eventlet/green/SimpleHTTPServer.py,sha256=WkrEBwvEdUNuKCJp1n4NY_4x_xhJR-lfws-BWbfclOA,277 -eventlet/green/SocketServer.py,sha256=wkmsIglVoBXR-XuRevVmEopJ4ylQtCXCXp1f-pVooiw,365 -eventlet/green/__init__.py,sha256=upnrKC57DQQBDNvpxXf_IhDapQ6NtEt2hgxIs1pZDao,84 -eventlet/green/_socket_nodns.py,sha256=Oc-5EYs3AST-0HH4Hpi24t2tLp_CrzRX3jDFHN_rPH4,795 -eventlet/green/asynchat.py,sha256=xEpHhpElLssgykgDAUKGRyUnuKS3Ddu0MRRKSt9YciE,212 -eventlet/green/asyncore.py,sha256=ZA8ZR2MYCP9guK51D2LTXxcjibhNERr0OHfZTjx2ymY,258 -eventlet/green/builtin.py,sha256=87kgxDoFRl9HvLKTwXhuaaTjvPRl6nBdB-UBJg8XzJE,1192 -eventlet/green/ftplib.py,sha256=d23VMcAPqw7ZILheDJmueM8qOlWHnq0WFjjSgWouRdA,307 
-eventlet/green/httplib.py,sha256=a3RJ82XL7Z_2TbYBcvUeQ7S8d82jhmEITrUq6MNTMws,493 -eventlet/green/os.py,sha256=HCvO3r2XDKbreKGtdWj2rtPZXfRDkrA4T3TQ0ykDsGw,2850 -eventlet/green/profile.py,sha256=ROmWKx2ZThDlYdchObK0sUB0lup7J-UgFx5CoqFWwdQ,9554 -eventlet/green/select.py,sha256=Yay3PQaRZRkfTZyylZi7utQXHsvtv8HB_QWNVfpvEUs,2769 -eventlet/green/selectors.py,sha256=C_aeln-t0FsMG2WosmkIBhGst0KfKglcaJG8U50pxQM,948 -eventlet/green/socket.py,sha256=np5_HqSjA4_y_kYKdSFyHQN0vjzLW_qi_oLFH8bB0T0,1918 -eventlet/green/ssl.py,sha256=IFfUsAUbTaXmQhoXdXVnmJ1w6JhLQHpfaVk5lAkXnIE,19253 -eventlet/green/subprocess.py,sha256=GRTiwbwzduBxOpEsvulVnu8sBkO_iCR66E1CK1GvqxE,5837 -eventlet/green/thread.py,sha256=Q8mndzGr38ZUJ18VJK6WwptyOeIeTjqFZzMidg81n2w,3111 -eventlet/green/threading.py,sha256=9L-pKOAhzXQn-LT1zb_q80mQhUazHDiI9tKXQWd_WXY,3874 -eventlet/green/time.py,sha256=1W7BKbGrfTI1v2-pDnBvzBn01tbQ8zwyqz458BFrjt0,240 -eventlet/green/urllib2.py,sha256=Su3dEhDc8VsKK9PqhIXwgFVOOHVI37TTXU_beqzvg44,488 -eventlet/green/zmq.py,sha256=fm9tpTPavBs0iorleCV08dfXDb6pv4OXaSrIL2J3kUA,18066 -eventlet/green/OpenSSL/SSL.py,sha256=ChOnAol6IPHQ2nBvxhq7zMj_g5V4C9FrLs08AzCPkuk,4533 -eventlet/green/OpenSSL/__init__.py,sha256=uFsYC2F5qrSnhESVsr-fIzfRCRKxnoABnYmO7lgne7Q,92 -eventlet/green/OpenSSL/crypto.py,sha256=dcnjSGP6K274eAxalZEOttUZ1djAStBnbRH-wGBSJu4,29 -eventlet/green/OpenSSL/tsafe.py,sha256=DuY1rHdT2R0tiJkD13ECj-IU7_v-zQKjhTsK6CG8UEM,28 -eventlet/green/OpenSSL/version.py,sha256=3Ti2k01zP3lM6r0YuLbLS_QReJBEHaTJt5k0dNdXtI4,49 -eventlet/green/http/__init__.py,sha256=ve29vEVORq8ZV-_mp6ULOhZUGjdKkSnAISiFDwLhZd4,8793 -eventlet/green/http/client.py,sha256=Lr6t7JJ3BzumxQ3qB40lCMkA8vIS9YVa8vu-UnAqHVM,58730 -eventlet/green/http/cookiejar.py,sha256=0HSmj_sac1TlnGDxJ27dSezwljwOvlAdUH_Bt3-Wy-w,79240 -eventlet/green/http/cookies.py,sha256=6ZJL0cirlTaG9Rmohy5M9NfwMkYRRbJxly8Nrd4446s,24188 -eventlet/green/http/server.py,sha256=jHfdMtiF8_WQHahLCEspBHpm2cCm7wmBKbBRByn7vQs,46596 -eventlet/green/urllib/__init__.py,sha256=9tOImBCBOp1wPQ0C8UkfJ_9KJLwN0IDYZcLiSKd1N8E,1401 -eventlet/green/urllib/error.py,sha256=xlpHJIa8U4QTFolAa3NEy5gEVj_nM3oF2bB-FvdhCQg,157 -eventlet/green/urllib/parse.py,sha256=uJ1R4rbgqlQgINjKm_-oTxveLvCR9anu7U0i7aRS87k,83 -eventlet/green/urllib/request.py,sha256=Upa4bT0-9pSxJOmPuIk3t1rNmGbLEkAOUlpD4aUfCGM,1504 -eventlet/green/urllib/response.py,sha256=ytsGn0pXE94tlZh75hl9X1cFGagjGNBWm6k_PRXOBmM,86 -eventlet/greenio/__init__.py,sha256=U-_kIaeIjRmZOSfrzUOWjjhqJETuO4A79KWgua4foYc,169 -eventlet/greenio/base.py,sha256=USzoiAIOjYUUakEzu6n0xg3L-5cJNqxS8zgfGKU7k7g,18037 -eventlet/greenio/py2.py,sha256=6_yuNN_odTCkIU68KNA4spgPT4dh2aZo00BjVKXwmHo,6670 -eventlet/greenio/py3.py,sha256=FeiT88Ld2cjllNX38DWy6nTaLg4NWkKBJDmslr1dkw8,6384 -eventlet/hubs/__init__.py,sha256=82EPuFBcd7-xBgNeYpX9ta20FbUwpbH0Y4NK2RDuaME,6055 -eventlet/hubs/epolls.py,sha256=h55rCw84ZTCU4iHK0FKoIHSa-ZqhZNXK6KaTOKBkN9w,1027 -eventlet/hubs/hub.py,sha256=Z5Ib5uGBVFz9iT82DMMxpP63qnL8Kn4SB_YwHD7pj2w,17184 -eventlet/hubs/kqueue.py,sha256=CmfUJHquHoUyzrZbmxEVHoMG-0rWqjXaiVhgSDO_38s,3546 -eventlet/hubs/poll.py,sha256=tpEW-RCJIbO17crqYRwBkdXgOYUTL0T92qliw_sTOMw,4021 -eventlet/hubs/pyevent.py,sha256=cr8wHQfbxI8RSbDqc4xE_GytItPLmpH3qdFhEpVIAic,5607 -eventlet/hubs/selects.py,sha256=tei0L7X00M7lDXtJwfzxv9goe5_s75U7kE9PevI9WuY,2054 -eventlet/hubs/timer.py,sha256=y0TJFRGRgHZchJXZSodWhY4FpnrC9wuMfr7VMbJ42fc,3195 -eventlet/support/__init__.py,sha256=RVxVJQdT9srs1CtnA0Dgz4LvbmK4NPYZppEiQ7-fWIE,2085 -eventlet/support/greendns.py,sha256=qQ70qvim0t1yyMTHPmi0lGSIpv92NEUxO2qGPDZdmsI,30698 
-eventlet/support/greenlets.py,sha256=H8D0ym8RDU92fzoqgl1OusAJSBOeA5PnLUF57oUUOIM,298 -eventlet/support/psycopg2_patcher.py,sha256=Rzm9GYS7PmrNpKAw04lqJV7KPcxLovnaCUI8CXE328A,2272 -eventlet/support/pylib.py,sha256=EvZ1JZEX3wqWtzfga5HeVL-sLLb805_f_ywX2k5BDHo,274 -eventlet/support/stacklesspypys.py,sha256=6BwZcnsCtb1m4wdK6GygoiPvYV03v7P7YlBxPIE6Zns,275 -eventlet/support/stacklesss.py,sha256=aZfBXegfo_-jGl3-lAaZGTeXs0ulA7CleF9qPRNHq8s,1867 -eventlet/zipkin/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -eventlet/zipkin/api.py,sha256=8h3YuMLuKpwUuzRmGeCFRR_JC3QoduOKAFXnQFiyDa8,5444 -eventlet/zipkin/client.py,sha256=R5Epha0YSyPAdb2_cWH1jShPz1DX4mZQNi9zhW1Cu4Q,1700 -eventlet/zipkin/greenthread.py,sha256=ify1VnsJmrFneAwfPl6QE8kgHIPJE5fAE9Ks9wQzeVI,843 -eventlet/zipkin/http.py,sha256=qNd89Fq5iv4xXri_zTJyW9IKjjHa4dF8CWiX8R6KRxg,1767 -eventlet/zipkin/log.py,sha256=jElBHT8H3_vs9T3r8Q-JG30xyajQ7u6wNGWmmMPQ4AA,337 -eventlet/zipkin/patcher.py,sha256=t1g5tXcbuEvNix3ICtZyuIWaJKQtUHJ5ZUqsi14j9Dc,1388 -eventlet/zipkin/wsgi.py,sha256=732J1h_VKs3iAUynQ76joXYZDIA3utU0dF_-_H5qYBM,2276 -eventlet/zipkin/_thrift/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -eventlet/zipkin/_thrift/zipkinCore/__init__.py,sha256=YFcZTT8Cm-6Y4oTiCaqq0DT1lw2W09WqoEc5_pTAwW0,34 -eventlet/zipkin/_thrift/zipkinCore/constants.py,sha256=cbgWT_mN04BRZbyzjr1LzT40xvotzFyz-vbYp8Q_klo,275 -eventlet/zipkin/_thrift/zipkinCore/ttypes.py,sha256=94RG3YtkmpeMmJ-EvKiwnYUtovYlfjrRVnh6sI27cJ0,13497 -eventlet-0.25.0.dist-info/AUTHORS,sha256=qyCotEDXx0wuNTpDiFDomaSiPUSTrZm49r3m8LsMmY0,5837 -eventlet-0.25.0.dist-info/LICENSE,sha256=vOygSX96gUdRFr_0E4cz-yAGC2sitnHmV7YVioYGVuI,1254 -eventlet-0.25.0.dist-info/METADATA,sha256=Oi_mtcE2Wl3LCevOZe1bF9Ugw46gXVhpDDa8jAzxe8I,3878 -eventlet-0.25.0.dist-info/WHEEL,sha256=h_aVn5OB2IERUjMbi2pucmR_zzWJtk303YXvhh60NJ8,110 -eventlet-0.25.0.dist-info/top_level.txt,sha256=k0ONa2J0QJkHxkk-bj2NgX9uRyjA1ebSZzU_hOFSXCc,9 -eventlet-0.25.0.dist-info/RECORD,, -eventlet-0.25.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -eventlet/green/urllib/__pycache__/response.cpython-36.pyc,, -eventlet/green/urllib/__pycache__/parse.cpython-36.pyc,, -eventlet/green/urllib/__pycache__/request.cpython-36.pyc,, -eventlet/green/urllib/__pycache__/__init__.cpython-36.pyc,, -eventlet/green/urllib/__pycache__/error.cpython-36.pyc,, -eventlet/green/__pycache__/time.cpython-36.pyc,, -eventlet/green/__pycache__/threading.cpython-36.pyc,, -eventlet/green/__pycache__/builtin.cpython-36.pyc,, -eventlet/green/__pycache__/thread.cpython-36.pyc,, -eventlet/green/__pycache__/BaseHTTPServer.cpython-36.pyc,, -eventlet/green/__pycache__/profile.cpython-36.pyc,, -eventlet/green/__pycache__/select.cpython-36.pyc,, -eventlet/green/__pycache__/zmq.cpython-36.pyc,, -eventlet/green/__pycache__/urllib2.cpython-36.pyc,, -eventlet/green/__pycache__/socket.cpython-36.pyc,, -eventlet/green/__pycache__/MySQLdb.cpython-36.pyc,, -eventlet/green/__pycache__/httplib.cpython-36.pyc,, -eventlet/green/__pycache__/os.cpython-36.pyc,, -eventlet/green/__pycache__/ssl.cpython-36.pyc,, -eventlet/green/__pycache__/_socket_nodns.cpython-36.pyc,, -eventlet/green/__pycache__/selectors.cpython-36.pyc,, -eventlet/green/__pycache__/subprocess.cpython-36.pyc,, -eventlet/green/__pycache__/asyncore.cpython-36.pyc,, -eventlet/green/__pycache__/asynchat.cpython-36.pyc,, -eventlet/green/__pycache__/ftplib.cpython-36.pyc,, -eventlet/green/__pycache__/SocketServer.cpython-36.pyc,, 
-eventlet/green/__pycache__/CGIHTTPServer.cpython-36.pyc,, -eventlet/green/__pycache__/__init__.cpython-36.pyc,, -eventlet/green/__pycache__/SimpleHTTPServer.cpython-36.pyc,, -eventlet/green/__pycache__/Queue.cpython-36.pyc,, -eventlet/green/http/__pycache__/client.cpython-36.pyc,, -eventlet/green/http/__pycache__/cookiejar.cpython-36.pyc,, -eventlet/green/http/__pycache__/server.cpython-36.pyc,, -eventlet/green/http/__pycache__/cookies.cpython-36.pyc,, -eventlet/green/http/__pycache__/__init__.cpython-36.pyc,, -eventlet/green/OpenSSL/__pycache__/version.cpython-36.pyc,, -eventlet/green/OpenSSL/__pycache__/SSL.cpython-36.pyc,, -eventlet/green/OpenSSL/__pycache__/crypto.cpython-36.pyc,, -eventlet/green/OpenSSL/__pycache__/tsafe.cpython-36.pyc,, -eventlet/green/OpenSSL/__pycache__/__init__.cpython-36.pyc,, -eventlet/greenio/__pycache__/py3.cpython-36.pyc,, -eventlet/greenio/__pycache__/py2.cpython-36.pyc,, -eventlet/greenio/__pycache__/base.cpython-36.pyc,, -eventlet/greenio/__pycache__/__init__.cpython-36.pyc,, -eventlet/__pycache__/timeout.cpython-36.pyc,, -eventlet/__pycache__/greenpool.cpython-36.pyc,, -eventlet/__pycache__/debug.cpython-36.pyc,, -eventlet/__pycache__/websocket.cpython-36.pyc,, -eventlet/__pycache__/backdoor.cpython-36.pyc,, -eventlet/__pycache__/pools.cpython-36.pyc,, -eventlet/__pycache__/event.cpython-36.pyc,, -eventlet/__pycache__/convenience.cpython-36.pyc,, -eventlet/__pycache__/patcher.cpython-36.pyc,, -eventlet/__pycache__/corolocal.cpython-36.pyc,, -eventlet/__pycache__/semaphore.cpython-36.pyc,, -eventlet/__pycache__/greenthread.cpython-36.pyc,, -eventlet/__pycache__/dagpool.cpython-36.pyc,, -eventlet/__pycache__/wsgi.cpython-36.pyc,, -eventlet/__pycache__/coros.cpython-36.pyc,, -eventlet/__pycache__/tpool.cpython-36.pyc,, -eventlet/__pycache__/__init__.cpython-36.pyc,, -eventlet/__pycache__/queue.cpython-36.pyc,, -eventlet/__pycache__/db_pool.cpython-36.pyc,, -eventlet/zipkin/_thrift/__pycache__/__init__.cpython-36.pyc,, -eventlet/zipkin/_thrift/zipkinCore/__pycache__/constants.cpython-36.pyc,, -eventlet/zipkin/_thrift/zipkinCore/__pycache__/ttypes.cpython-36.pyc,, -eventlet/zipkin/_thrift/zipkinCore/__pycache__/__init__.cpython-36.pyc,, -eventlet/zipkin/__pycache__/client.cpython-36.pyc,, -eventlet/zipkin/__pycache__/patcher.cpython-36.pyc,, -eventlet/zipkin/__pycache__/greenthread.cpython-36.pyc,, -eventlet/zipkin/__pycache__/log.cpython-36.pyc,, -eventlet/zipkin/__pycache__/http.cpython-36.pyc,, -eventlet/zipkin/__pycache__/wsgi.cpython-36.pyc,, -eventlet/zipkin/__pycache__/__init__.cpython-36.pyc,, -eventlet/zipkin/__pycache__/api.cpython-36.pyc,, -eventlet/support/__pycache__/pylib.cpython-36.pyc,, -eventlet/support/__pycache__/stacklesss.cpython-36.pyc,, -eventlet/support/__pycache__/psycopg2_patcher.cpython-36.pyc,, -eventlet/support/__pycache__/greendns.cpython-36.pyc,, -eventlet/support/__pycache__/greenlets.cpython-36.pyc,, -eventlet/support/__pycache__/stacklesspypys.cpython-36.pyc,, -eventlet/support/__pycache__/__init__.cpython-36.pyc,, -eventlet/hubs/__pycache__/epolls.cpython-36.pyc,, -eventlet/hubs/__pycache__/kqueue.cpython-36.pyc,, -eventlet/hubs/__pycache__/timer.cpython-36.pyc,, -eventlet/hubs/__pycache__/selects.cpython-36.pyc,, -eventlet/hubs/__pycache__/poll.cpython-36.pyc,, -eventlet/hubs/__pycache__/hub.cpython-36.pyc,, -eventlet/hubs/__pycache__/__init__.cpython-36.pyc,, -eventlet/hubs/__pycache__/pyevent.cpython-36.pyc,, diff --git a/venv/lib/python3.6/site-packages/eventlet-0.25.0.dist-info/WHEEL 
b/venv/lib/python3.6/site-packages/eventlet-0.25.0.dist-info/WHEEL deleted file mode 100644 index 78e6f69..0000000 --- a/venv/lib/python3.6/site-packages/eventlet-0.25.0.dist-info/WHEEL +++ /dev/null @@ -1,6 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.4) -Root-Is-Purelib: true -Tag: py2-none-any -Tag: py3-none-any - diff --git a/venv/lib/python3.6/site-packages/eventlet-0.25.0.dist-info/top_level.txt b/venv/lib/python3.6/site-packages/eventlet-0.25.0.dist-info/top_level.txt deleted file mode 100644 index bfe34bc..0000000 --- a/venv/lib/python3.6/site-packages/eventlet-0.25.0.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -eventlet diff --git a/venv/lib/python3.6/site-packages/eventlet/__init__.py b/venv/lib/python3.6/site-packages/eventlet/__init__.py deleted file mode 100644 index 1d33db0..0000000 --- a/venv/lib/python3.6/site-packages/eventlet/__init__.py +++ /dev/null @@ -1,68 +0,0 @@ -import os - - -version_info = (0, 25, 0) -__version__ = '.'.join(map(str, version_info)) -# This is to make Debian packaging easier, it ignores import -# errors of greenlet so that the packager can still at least -# access the version. Also this makes easy_install a little quieter -if os.environ.get('EVENTLET_IMPORT_VERSION_ONLY') != '1': - from eventlet import convenience - from eventlet import event - from eventlet import greenpool - from eventlet import greenthread - from eventlet import patcher - from eventlet import queue - from eventlet import semaphore - from eventlet import support - from eventlet import timeout - import greenlet - # Force monotonic library search as early as possible. - # Helpful when CPython < 3.5 on Linux blocked in `os.waitpid(-1)` before first use of hub. - # Example: gunicorn - # https://github.com/eventlet/eventlet/issues/401#issuecomment-327500352 - import monotonic - del monotonic - - connect = convenience.connect - listen = convenience.listen - serve = convenience.serve - StopServe = convenience.StopServe - wrap_ssl = convenience.wrap_ssl - - Event = event.Event - - GreenPool = greenpool.GreenPool - GreenPile = greenpool.GreenPile - - sleep = greenthread.sleep - spawn = greenthread.spawn - spawn_n = greenthread.spawn_n - spawn_after = greenthread.spawn_after - kill = greenthread.kill - - import_patched = patcher.import_patched - monkey_patch = patcher.monkey_patch - - Queue = queue.Queue - - Semaphore = semaphore.Semaphore - CappedSemaphore = semaphore.CappedSemaphore - BoundedSemaphore = semaphore.BoundedSemaphore - - Timeout = timeout.Timeout - with_timeout = timeout.with_timeout - wrap_is_timeout = timeout.wrap_is_timeout - is_timeout = timeout.is_timeout - - getcurrent = greenlet.greenlet.getcurrent - - # deprecated - TimeoutError, exc_after, call_after_global = ( - support.wrap_deprecated(old, new)(fun) for old, new, fun in ( - ('TimeoutError', 'Timeout', Timeout), - ('exc_after', 'greenthread.exc_after', greenthread.exc_after), - ('call_after_global', 'greenthread.call_after_global', greenthread.call_after_global), - )) - -del os diff --git a/venv/lib/python3.6/site-packages/eventlet/backdoor.py b/venv/lib/python3.6/site-packages/eventlet/backdoor.py deleted file mode 100644 index 9a6797a..0000000 --- a/venv/lib/python3.6/site-packages/eventlet/backdoor.py +++ /dev/null @@ -1,136 +0,0 @@ -from __future__ import print_function - -from code import InteractiveConsole -import errno -import socket -import sys -import errno -import traceback - -import eventlet -from eventlet import hubs -from eventlet.support import greenlets, get_errno - -try: - 
sys.ps1 -except AttributeError: - sys.ps1 = '>>> ' -try: - sys.ps2 -except AttributeError: - sys.ps2 = '... ' - - -class FileProxy(object): - def __init__(self, f): - self.f = f - - def isatty(self): - return True - - def flush(self): - pass - - def write(self, data, *a, **kw): - self.f.write(data, *a, **kw) - self.f.flush() - - def readline(self, *a): - return self.f.readline(*a).replace('\r\n', '\n') - - def __getattr__(self, attr): - return getattr(self.f, attr) - - -# @@tavis: the `locals` args below mask the built-in function. Should -# be renamed. -class SocketConsole(greenlets.greenlet): - def __init__(self, desc, hostport, locals): - self.hostport = hostport - self.locals = locals - # mangle the socket - self.desc = FileProxy(desc) - greenlets.greenlet.__init__(self) - - def run(self): - try: - console = InteractiveConsole(self.locals) - console.interact() - finally: - self.switch_out() - self.finalize() - - def switch(self, *args, **kw): - self.saved = sys.stdin, sys.stderr, sys.stdout - sys.stdin = sys.stdout = sys.stderr = self.desc - greenlets.greenlet.switch(self, *args, **kw) - - def switch_out(self): - sys.stdin, sys.stderr, sys.stdout = self.saved - - def finalize(self): - # restore the state of the socket - self.desc = None - if len(self.hostport) >= 2: - host = self.hostport[0] - port = self.hostport[1] - print("backdoor closed to %s:%s" % (host, port,)) - else: - print('backdoor closed') - - -def backdoor_server(sock, locals=None): - """ Blocking function that runs a backdoor server on the socket *sock*, - accepting connections and running backdoor consoles for each client that - connects. - - The *locals* argument is a dictionary that will be included in the locals() - of the interpreters. It can be convenient to stick important application - variables in here. - """ - listening_on = sock.getsockname() - if sock.family == socket.AF_INET: - # Expand result to IP + port - listening_on = '%s:%s' % listening_on - elif sock.family == socket.AF_INET6: - ip, port, _, _ = listening_on - listening_on = '%s:%s' % (ip, port,) - # No action needed if sock.family == socket.AF_UNIX - - print("backdoor server listening on %s" % (listening_on,)) - try: - try: - while True: - socketpair = sock.accept() - backdoor(socketpair, locals) - except socket.error as e: - # Broken pipe means it was shutdown - if get_errno(e) != errno.EPIPE: - raise - finally: - sock.close() - - -def backdoor(conn_info, locals=None): - """Sets up an interactive console on a socket with a single connected - client. This does not block the caller, as it spawns a new greenlet to - handle the console. This is meant to be called from within an accept loop - (such as backdoor_server). 
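The backdoor_server()/backdoor() docstrings above describe serving an interactive console over a socket for live debugging. A minimal sketch of wiring it into an application; the port number and the shared dict are illustrative:

    import eventlet
    from eventlet import backdoor

    app_state = {'requests_served': 0}   # objects worth inspecting from the console

    # Run the REPL server in its own greenthread; `telnet 127.0.0.1 3000`
    # then drops into an interactive console with `state` bound to app_state.
    eventlet.spawn_n(backdoor.backdoor_server,
                     eventlet.listen(('127.0.0.1', 3000)),
                     {'state': app_state})

    eventlet.sleep(300)                  # stand-in for the application's real main loop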
- """ - conn, addr = conn_info - if conn.family == socket.AF_INET: - host, port = addr - print("backdoor to %s:%s" % (host, port)) - elif conn.family == socket.AF_INET6: - host, port, _, _ = addr - print("backdoor to %s:%s" % (host, port)) - else: - print('backdoor opened') - fl = conn.makefile("rw") - console = SocketConsole(fl, addr, locals) - hub = hubs.get_hub() - hub.schedule_call_global(0, console.switch) - - -if __name__ == '__main__': - backdoor_server(eventlet.listen(('127.0.0.1', 9000)), {}) diff --git a/venv/lib/python3.6/site-packages/eventlet/convenience.py b/venv/lib/python3.6/site-packages/eventlet/convenience.py deleted file mode 100644 index 5f99ade..0000000 --- a/venv/lib/python3.6/site-packages/eventlet/convenience.py +++ /dev/null @@ -1,189 +0,0 @@ -import sys -import warnings - -from eventlet import greenpool -from eventlet import greenthread -from eventlet import support -from eventlet.green import socket -from eventlet.support import greenlets as greenlet - - -def connect(addr, family=socket.AF_INET, bind=None): - """Convenience function for opening client sockets. - - :param addr: Address of the server to connect to. For TCP sockets, this is a (host, port) tuple. - :param family: Socket family, optional. See :mod:`socket` documentation for available families. - :param bind: Local address to bind to, optional. - :return: The connected green socket object. - """ - sock = socket.socket(family, socket.SOCK_STREAM) - if bind is not None: - sock.bind(bind) - sock.connect(addr) - return sock - - -class ReuseRandomPortWarning(Warning): - pass - - -class ReusePortUnavailableWarning(Warning): - pass - - -def listen(addr, family=socket.AF_INET, backlog=50, reuse_addr=True, reuse_port=None): - """Convenience function for opening server sockets. This - socket can be used in :func:`~eventlet.serve` or a custom ``accept()`` loop. - - Sets SO_REUSEADDR on the socket to save on annoyance. - - :param addr: Address to listen on. For TCP sockets, this is a (host, port) tuple. - :param family: Socket family, optional. See :mod:`socket` documentation for available families. - :param backlog: - - The maximum number of queued connections. Should be at least 1; the maximum - value is system-dependent. - - :return: The listening green socket object. - """ - sock = socket.socket(family, socket.SOCK_STREAM) - if reuse_addr and sys.platform[:3] != 'win': - sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) - if family in (socket.AF_INET, socket.AF_INET6) and addr[1] == 0: - if reuse_port: - warnings.warn( - '''listen on random port (0) with SO_REUSEPORT is dangerous. - Double check your intent. - Example problem: https://github.com/eventlet/eventlet/issues/411''', - ReuseRandomPortWarning, stacklevel=3) - elif reuse_port is None: - reuse_port = True - if reuse_port and hasattr(socket, 'SO_REUSEPORT'): - # NOTE(zhengwei): linux kernel >= 3.9 - try: - sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1) - # OSError is enough on Python 3+ - except (OSError, socket.error) as ex: - if support.get_errno(ex) in (22, 92): - # A famous platform defines unsupported socket option. - # https://github.com/eventlet/eventlet/issues/380 - # https://github.com/eventlet/eventlet/issues/418 - warnings.warn( - '''socket.SO_REUSEPORT is defined but not supported. - On Windows: known bug, wontfix. - On other systems: please comment in the issue linked below. 
- More information: https://github.com/eventlet/eventlet/issues/380''', - ReusePortUnavailableWarning, stacklevel=3) - - sock.bind(addr) - sock.listen(backlog) - return sock - - -class StopServe(Exception): - """Exception class used for quitting :func:`~eventlet.serve` gracefully.""" - pass - - -def _stop_checker(t, server_gt, conn): - try: - try: - t.wait() - finally: - conn.close() - except greenlet.GreenletExit: - pass - except Exception: - greenthread.kill(server_gt, *sys.exc_info()) - - -def serve(sock, handle, concurrency=1000): - """Runs a server on the supplied socket. Calls the function *handle* in a - separate greenthread for every incoming client connection. *handle* takes - two arguments: the client socket object, and the client address:: - - def myhandle(client_sock, client_addr): - print("client connected", client_addr) - - eventlet.serve(eventlet.listen(('127.0.0.1', 9999)), myhandle) - - Returning from *handle* closes the client socket. - - :func:`serve` blocks the calling greenthread; it won't return until - the server completes. If you desire an immediate return, - spawn a new greenthread for :func:`serve`. - - Any uncaught exceptions raised in *handle* are raised as exceptions - from :func:`serve`, terminating the server, so be sure to be aware of the - exceptions your application can raise. The return value of *handle* is - ignored. - - Raise a :class:`~eventlet.StopServe` exception to gracefully terminate the - server -- that's the only way to get the server() function to return rather - than raise. - - The value in *concurrency* controls the maximum number of - greenthreads that will be open at any time handling requests. When - the server hits the concurrency limit, it stops accepting new - connections until the existing ones complete. - """ - pool = greenpool.GreenPool(concurrency) - server_gt = greenthread.getcurrent() - - while True: - try: - conn, addr = sock.accept() - gt = pool.spawn(handle, conn, addr) - gt.link(_stop_checker, server_gt, conn) - conn, addr, gt = None, None, None - except StopServe: - return - - -def wrap_ssl(sock, *a, **kw): - """Convenience function for converting a regular socket into an - SSL socket. Has the same interface as :func:`ssl.wrap_socket`, - but can also use PyOpenSSL. Though, note that it ignores the - `cert_reqs`, `ssl_version`, `ca_certs`, `do_handshake_on_connect`, - and `suppress_ragged_eofs` arguments when using PyOpenSSL. - - The preferred idiom is to call wrap_ssl directly on the creation - method, e.g., ``wrap_ssl(connect(addr))`` or - ``wrap_ssl(listen(addr), server_side=True)``. This way there is - no "naked" socket sitting around to accidentally corrupt the SSL - session. - - :return Green SSL object. 
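The listen()/serve() docstrings above give the core server pattern; a small sketch of an echo server that shuts down cleanly by raising StopServe from the handler. The port and the b'quit' sentinel are illustrative:

    import eventlet
    from eventlet import StopServe

    def echo(client_sock, client_addr):
        data = client_sock.recv(1024)
        if data.strip() == b'quit':
            raise StopServe()            # gracefully stops eventlet.serve()
        client_sock.sendall(data)        # returning closes the client socket

    # listen() sets SO_REUSEADDR; serve() blocks until StopServe is raised.
    eventlet.serve(eventlet.listen(('127.0.0.1', 6000)), echo, concurrency=100)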
- """ - return wrap_ssl_impl(sock, *a, **kw) - -try: - from eventlet.green import ssl - wrap_ssl_impl = ssl.wrap_socket -except ImportError: - # trying PyOpenSSL - try: - from eventlet.green.OpenSSL import SSL - except ImportError: - def wrap_ssl_impl(*a, **kw): - raise ImportError( - "To use SSL with Eventlet, you must install PyOpenSSL or use Python 2.7 or later.") - else: - def wrap_ssl_impl(sock, keyfile=None, certfile=None, server_side=False, - cert_reqs=None, ssl_version=None, ca_certs=None, - do_handshake_on_connect=True, - suppress_ragged_eofs=True, ciphers=None): - # theoretically the ssl_version could be respected in this line - context = SSL.Context(SSL.SSLv23_METHOD) - if certfile is not None: - context.use_certificate_file(certfile) - if keyfile is not None: - context.use_privatekey_file(keyfile) - context.set_verify(SSL.VERIFY_NONE, lambda *x: True) - - connection = SSL.Connection(context, sock) - if server_side: - connection.set_accept_state() - else: - connection.set_connect_state() - return connection diff --git a/venv/lib/python3.6/site-packages/eventlet/corolocal.py b/venv/lib/python3.6/site-packages/eventlet/corolocal.py deleted file mode 100644 index 1a1a8df..0000000 --- a/venv/lib/python3.6/site-packages/eventlet/corolocal.py +++ /dev/null @@ -1,53 +0,0 @@ -import weakref - -from eventlet import greenthread - -__all__ = ['get_ident', 'local'] - - -def get_ident(): - """ Returns ``id()`` of current greenlet. Useful for debugging.""" - return id(greenthread.getcurrent()) - - -# the entire purpose of this class is to store off the constructor -# arguments in a local variable without calling __init__ directly -class _localbase(object): - __slots__ = '_local__args', '_local__greens' - - def __new__(cls, *args, **kw): - self = object.__new__(cls) - object.__setattr__(self, '_local__args', (args, kw)) - object.__setattr__(self, '_local__greens', weakref.WeakKeyDictionary()) - if (args or kw) and (cls.__init__ is object.__init__): - raise TypeError("Initialization arguments are not supported") - return self - - -def _patch(thrl): - greens = object.__getattribute__(thrl, '_local__greens') - # until we can store the localdict on greenlets themselves, - # we store it in _local__greens on the local object - cur = greenthread.getcurrent() - if cur not in greens: - # must be the first time we've seen this greenlet, call __init__ - greens[cur] = {} - cls = type(thrl) - if cls.__init__ is not object.__init__: - args, kw = object.__getattribute__(thrl, '_local__args') - thrl.__init__(*args, **kw) - object.__setattr__(thrl, '__dict__', greens[cur]) - - -class local(_localbase): - def __getattribute__(self, attr): - _patch(self) - return object.__getattribute__(self, attr) - - def __setattr__(self, attr, value): - _patch(self) - return object.__setattr__(self, attr, value) - - def __delattr__(self, attr): - _patch(self) - return object.__delattr__(self, attr) diff --git a/venv/lib/python3.6/site-packages/eventlet/coros.py b/venv/lib/python3.6/site-packages/eventlet/coros.py deleted file mode 100644 index 431e6f0..0000000 --- a/venv/lib/python3.6/site-packages/eventlet/coros.py +++ /dev/null @@ -1,61 +0,0 @@ -from __future__ import print_function - -from eventlet import event as _event - - -class metaphore(object): - """This is sort of an inverse semaphore: a counter that starts at 0 and - waits only if nonzero. It's used to implement a "wait for all" scenario. 
- - >>> from eventlet import coros, spawn_n - >>> count = coros.metaphore() - >>> count.wait() - >>> def decrementer(count, id): - ... print("{0} decrementing".format(id)) - ... count.dec() - ... - >>> _ = spawn_n(decrementer, count, 'A') - >>> _ = spawn_n(decrementer, count, 'B') - >>> count.inc(2) - >>> count.wait() - A decrementing - B decrementing - """ - - def __init__(self): - self.counter = 0 - self.event = _event.Event() - # send() right away, else we'd wait on the default 0 count! - self.event.send() - - def inc(self, by=1): - """Increment our counter. If this transitions the counter from zero to - nonzero, make any subsequent :meth:`wait` call wait. - """ - assert by > 0 - self.counter += by - if self.counter == by: - # If we just incremented self.counter by 'by', and the new count - # equals 'by', then the old value of self.counter was 0. - # Transitioning from 0 to a nonzero value means wait() must - # actually wait. - self.event.reset() - - def dec(self, by=1): - """Decrement our counter. If this transitions the counter from nonzero - to zero, a current or subsequent wait() call need no longer wait. - """ - assert by > 0 - self.counter -= by - if self.counter <= 0: - # Don't leave self.counter < 0, that will screw things up in - # future calls. - self.counter = 0 - # Transitioning from nonzero to 0 means wait() need no longer wait. - self.event.send() - - def wait(self): - """Suspend the caller only if our count is nonzero. In that case, - resume the caller once the count decrements to zero again. - """ - self.event.wait() diff --git a/venv/lib/python3.6/site-packages/eventlet/dagpool.py b/venv/lib/python3.6/site-packages/eventlet/dagpool.py deleted file mode 100644 index 68bb49a..0000000 --- a/venv/lib/python3.6/site-packages/eventlet/dagpool.py +++ /dev/null @@ -1,602 +0,0 @@ -# @file dagpool.py -# @author Nat Goodspeed -# @date 2016-08-08 -# @brief Provide DAGPool class - -from eventlet.event import Event -from eventlet import greenthread -import six -import collections - - -# value distinguished from any other Python value including None -_MISSING = object() - - -class Collision(Exception): - """ - DAGPool raises Collision when you try to launch two greenthreads with the - same key, or post() a result for a key corresponding to a greenthread, or - post() twice for the same key. As with KeyError, str(collision) names the - key in question. - """ - pass - - -class PropagateError(Exception): - """ - When a DAGPool greenthread terminates with an exception instead of - returning a result, attempting to retrieve its value raises - PropagateError. - - Attributes: - - key - the key of the greenthread which raised the exception - - exc - the exception object raised by the greenthread - """ - def __init__(self, key, exc): - # initialize base class with a reasonable string message - msg = "PropagateError({0}): {1}: {2}" \ - .format(key, exc.__class__.__name__, exc) - super(PropagateError, self).__init__(msg) - self.msg = msg - # Unless we set args, this is unpickleable: - # https://bugs.python.org/issue1692335 - self.args = (key, exc) - self.key = key - self.exc = exc - - def __str__(self): - return self.msg - - -class DAGPool(object): - """ - A DAGPool is a pool that constrains greenthreads, not by max concurrency, - but by data dependencies. - - This is a way to implement general DAG dependencies. A simple dependency - tree (flowing in either direction) can straightforwardly be implemented - using recursion and (e.g.) - :meth:`GreenThread.imap() `. 
- What gets complicated is when a given node depends on several other nodes - as well as contributing to several other nodes. - - With DAGPool, you concurrently launch all applicable greenthreads; each - will proceed as soon as it has all required inputs. The DAG is implicit in - which items are required by each greenthread. - - Each greenthread is launched in a DAGPool with a key: any value that can - serve as a Python dict key. The caller also specifies an iterable of other - keys on which this greenthread depends. This iterable may be empty. - - The greenthread callable must accept (key, results), where: - - key - is its own key - - results - is an iterable of (key, value) pairs. - - A newly-launched DAGPool greenthread is entered immediately, and can - perform any necessary setup work. At some point it will iterate over the - (key, value) pairs from the passed 'results' iterable. Doing so blocks the - greenthread until a value is available for each of the keys specified in - its initial dependencies iterable. These (key, value) pairs are delivered - in chronological order, *not* the order in which they are initially - specified: each value will be delivered as soon as it becomes available. - - The value returned by a DAGPool greenthread becomes the value for its - key, which unblocks any other greenthreads waiting on that key. - - If a DAGPool greenthread terminates with an exception instead of returning - a value, attempting to retrieve the value raises :class:`PropagateError`, - which binds the key of the original greenthread and the original - exception. Unless the greenthread attempting to retrieve the value handles - PropagateError, that exception will in turn be wrapped in a PropagateError - of its own, and so forth. The code that ultimately handles PropagateError - can follow the chain of PropagateError.exc attributes to discover the flow - of that exception through the DAG of greenthreads. - - External greenthreads may also interact with a DAGPool. See :meth:`wait_each`, - :meth:`waitall`, :meth:`post`. - - It is not recommended to constrain external DAGPool producer greenthreads - in a :class:`GreenPool `: it may be hard to - provably avoid deadlock. - - .. automethod:: __init__ - .. automethod:: __getitem__ - """ - - _Coro = collections.namedtuple("_Coro", ("greenthread", "pending")) - - def __init__(self, preload={}): - """ - DAGPool can be prepopulated with an initial dict or iterable of (key, - value) pairs. These (key, value) pairs are of course immediately - available for any greenthread that depends on any of those keys. - """ - try: - # If a dict is passed, copy it. Don't risk a subsequent - # modification to passed dict affecting our internal state. - iteritems = six.iteritems(preload) - except AttributeError: - # Not a dict, just an iterable of (key, value) pairs - iteritems = preload - - # Load the initial dict - self.values = dict(iteritems) - - # track greenthreads - self.coros = {} - - # The key to blocking greenthreads is the Event. - self.event = Event() - - def waitall(self): - """ - waitall() blocks the calling greenthread until there is a value for - every DAGPool greenthread launched by :meth:`spawn`. It returns a dict - containing all :class:`preload data `, all data from - :meth:`post` and all values returned by spawned greenthreads. - - See also :meth:`wait`. - """ - # waitall() is an alias for compatibility with GreenPool - return self.wait() - - def wait(self, keys=_MISSING): - """ - *keys* is an optional iterable of keys. 
If you omit the argument, it - waits for all the keys from :class:`preload data `, from - :meth:`post` calls and from :meth:`spawn` calls: in other words, all - the keys of which this DAGPool is aware. - - wait() blocks the calling greenthread until all of the relevant keys - have values. wait() returns a dict whose keys are the relevant keys, - and whose values come from the *preload* data, from values returned by - DAGPool greenthreads or from :meth:`post` calls. - - If a DAGPool greenthread terminates with an exception, wait() will - raise :class:`PropagateError` wrapping that exception. If more than - one greenthread terminates with an exception, it is indeterminate - which one wait() will raise. - - If an external greenthread posts a :class:`PropagateError` instance, - wait() will raise that PropagateError. If more than one greenthread - posts PropagateError, it is indeterminate which one wait() will raise. - - See also :meth:`wait_each_success`, :meth:`wait_each_exception`. - """ - # This is mostly redundant with wait_each() functionality. - return dict(self.wait_each(keys)) - - def wait_each(self, keys=_MISSING): - """ - *keys* is an optional iterable of keys. If you omit the argument, it - waits for all the keys from :class:`preload data `, from - :meth:`post` calls and from :meth:`spawn` calls: in other words, all - the keys of which this DAGPool is aware. - - wait_each() is a generator producing (key, value) pairs as a value - becomes available for each requested key. wait_each() blocks the - calling greenthread until the next value becomes available. If the - DAGPool was prepopulated with values for any of the relevant keys, of - course those can be delivered immediately without waiting. - - Delivery order is intentionally decoupled from the initial sequence of - keys: each value is delivered as soon as it becomes available. If - multiple keys are available at the same time, wait_each() delivers - each of the ready ones in arbitrary order before blocking again. - - The DAGPool does not distinguish between a value returned by one of - its own greenthreads and one provided by a :meth:`post` call or *preload* data. - - The wait_each() generator terminates (raises StopIteration) when all - specified keys have been delivered. Thus, typical usage might be: - - :: - - for key, value in dagpool.wait_each(keys): - # process this ready key and value - # continue processing now that we've gotten values for all keys - - By implication, if you pass wait_each() an empty iterable of keys, it - returns immediately without yielding anything. - - If the value to be delivered is a :class:`PropagateError` exception object, the - generator raises that PropagateError instead of yielding it. - - See also :meth:`wait_each_success`, :meth:`wait_each_exception`. - """ - # Build a local set() and then call _wait_each(). - return self._wait_each(self._get_keyset_for_wait_each(keys)) - - def wait_each_success(self, keys=_MISSING): - """ - wait_each_success() filters results so that only success values are - yielded. In other words, unlike :meth:`wait_each`, wait_each_success() - will not raise :class:`PropagateError`. Not every provided (or - defaulted) key will necessarily be represented, though naturally the - generator will not finish until all have completed. - - In all other respects, wait_each_success() behaves like :meth:`wait_each`. 
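The wait_each_success()/wait_each_exception() docstrings above describe filtering results by outcome; a short sketch in which one greenthread fails, with illustrative key names:

    from eventlet.dagpool import DAGPool

    def work(key, results):
        if key == 'bad':
            raise RuntimeError('boom')       # becomes a PropagateError for key 'bad'
        return key.upper()

    pool = DAGPool()
    pool.spawn('good', (), work)             # empty dependency iterable
    pool.spawn('bad', (), work)

    # Successes only: never raises PropagateError.
    for key, value in pool.wait_each_success():
        print('ok', key, value)              # -> ok good GOOD

    # Failures only: yields PropagateError instances instead of raising them.
    for key, err in pool.wait_each_exception():
        print('failed', key, err.exc)        # -> failed bad boom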
- """ - for key, value in self._wait_each_raw(self._get_keyset_for_wait_each(keys)): - if not isinstance(value, PropagateError): - yield key, value - - def wait_each_exception(self, keys=_MISSING): - """ - wait_each_exception() filters results so that only exceptions are - yielded. Not every provided (or defaulted) key will necessarily be - represented, though naturally the generator will not finish until - all have completed. - - Unlike other DAGPool methods, wait_each_exception() simply yields - :class:`PropagateError` instances as values rather than raising them. - - In all other respects, wait_each_exception() behaves like :meth:`wait_each`. - """ - for key, value in self._wait_each_raw(self._get_keyset_for_wait_each(keys)): - if isinstance(value, PropagateError): - yield key, value - - def _get_keyset_for_wait_each(self, keys): - """ - wait_each(), wait_each_success() and wait_each_exception() promise - that if you pass an iterable of keys, the method will wait for results - from those keys -- but if you omit the keys argument, the method will - wait for results from all known keys. This helper implements that - distinction, returning a set() of the relevant keys. - """ - if keys is not _MISSING: - return set(keys) - else: - # keys arg omitted -- use all the keys we know about - return set(six.iterkeys(self.coros)) | set(six.iterkeys(self.values)) - - def _wait_each(self, pending): - """ - When _wait_each() encounters a value of PropagateError, it raises it. - - In all other respects, _wait_each() behaves like _wait_each_raw(). - """ - for key, value in self._wait_each_raw(pending): - yield key, self._value_or_raise(value) - - @staticmethod - def _value_or_raise(value): - # Most methods attempting to deliver PropagateError should raise that - # instead of simply returning it. - if isinstance(value, PropagateError): - raise value - return value - - def _wait_each_raw(self, pending): - """ - pending is a set() of keys for which we intend to wait. THIS SET WILL - BE DESTRUCTIVELY MODIFIED: as each key acquires a value, that key will - be removed from the passed 'pending' set. - - _wait_each_raw() does not treat a PropagateError instance specially: - it will be yielded to the caller like any other value. - - In all other respects, _wait_each_raw() behaves like wait_each(). - """ - while True: - # Before even waiting, show caller any (key, value) pairs that - # are already available. Copy 'pending' because we want to be able - # to remove items from the original set while iterating. - for key in pending.copy(): - value = self.values.get(key, _MISSING) - if value is not _MISSING: - # found one, it's no longer pending - pending.remove(key) - yield (key, value) - - if not pending: - # Once we've yielded all the caller's keys, done. - break - - # There are still more keys pending, so wait. - self.event.wait() - - def spawn(self, key, depends, function, *args, **kwds): - """ - Launch the passed *function(key, results, ...)* as a greenthread, - passing it: - - - the specified *key* - - an iterable of (key, value) pairs - - whatever other positional args or keywords you specify. - - Iterating over the *results* iterable behaves like calling - :meth:`wait_each(depends) `. - - Returning from *function()* behaves like - :meth:`post(key, return_value) `. - - If *function()* terminates with an exception, that exception is wrapped - in :class:`PropagateError` with the greenthread's *key* and (effectively) posted - as the value for that key. 
Attempting to retrieve that value will - raise that PropagateError. - - Thus, if the greenthread with key 'a' terminates with an exception, - and greenthread 'b' depends on 'a', when greenthread 'b' attempts to - iterate through its *results* argument, it will encounter - PropagateError. So by default, an uncaught exception will propagate - through all the downstream dependencies. - - If you pass :meth:`spawn` a key already passed to spawn() or :meth:`post`, spawn() - raises :class:`Collision`. - """ - if key in self.coros or key in self.values: - raise Collision(key) - - # The order is a bit tricky. First construct the set() of keys. - pending = set(depends) - # It's important that we pass to _wait_each() the same 'pending' set() - # that we store in self.coros for this key. The generator-iterator - # returned by _wait_each() becomes the function's 'results' iterable. - newcoro = greenthread.spawn(self._wrapper, function, key, - self._wait_each(pending), - *args, **kwds) - # Also capture the same (!) set in the new _Coro object for this key. - # We must be able to observe ready keys being removed from the set. - self.coros[key] = self._Coro(newcoro, pending) - - def _wrapper(self, function, key, results, *args, **kwds): - """ - This wrapper runs the top-level function in a DAGPool greenthread, - posting its return value (or PropagateError) to the DAGPool. - """ - try: - # call our passed function - result = function(key, results, *args, **kwds) - except Exception as err: - # Wrap any exception it may raise in a PropagateError. - result = PropagateError(key, err) - finally: - # function() has returned (or terminated with an exception). We no - # longer need to track this greenthread in self.coros. Remove it - # first so post() won't complain about a running greenthread. - del self.coros[key] - - try: - # as advertised, try to post() our return value - self.post(key, result) - except Collision: - # if we've already post()ed a result, oh well - pass - - # also, in case anyone cares... - return result - - def spawn_many(self, depends, function, *args, **kwds): - """ - spawn_many() accepts a single *function* whose parameters are the same - as for :meth:`spawn`. - - The difference is that spawn_many() accepts a dependency dict - *depends*. A new greenthread is spawned for each key in the dict. That - dict key's value should be an iterable of other keys on which this - greenthread depends. - - If the *depends* dict contains any key already passed to :meth:`spawn` - or :meth:`post`, spawn_many() raises :class:`Collision`. It is - indeterminate how many of the other keys in *depends* will have - successfully spawned greenthreads. - """ - # Iterate over 'depends' items, relying on self.spawn() not to - # context-switch so no one can modify 'depends' along the way. - for key, deps in six.iteritems(depends): - self.spawn(key, deps, function, *args, **kwds) - - def kill(self, key): - """ - Kill the greenthread that was spawned with the specified *key*. - - If no such greenthread was spawned, raise KeyError. - """ - # let KeyError, if any, propagate - self.coros[key].greenthread.kill() - # once killed, remove it - del self.coros[key] - - def post(self, key, value, replace=False): - """ - post(key, value) stores the passed *value* for the passed *key*. It - then causes each greenthread blocked on its results iterable, or on - :meth:`wait_each(keys) `, to check for new values. 
- A waiting greenthread might not literally resume on every single - post() of a relevant key, but the first post() of a relevant key - ensures that it will resume eventually, and when it does it will catch - up with all relevant post() calls. - - Calling post(key, value) when there is a running greenthread with that - same *key* raises :class:`Collision`. If you must post(key, value) instead of - letting the greenthread run to completion, you must first call - :meth:`kill(key) `. - - The DAGPool implicitly post()s the return value from each of its - greenthreads. But a greenthread may explicitly post() a value for its - own key, which will cause its return value to be discarded. - - Calling post(key, value, replace=False) (the default *replace*) when a - value for that key has already been posted, by any means, raises - :class:`Collision`. - - Calling post(key, value, replace=True) when a value for that key has - already been posted, by any means, replaces the previously-stored - value. However, that may make it complicated to reason about the - behavior of greenthreads waiting on that key. - - After a post(key, value1) followed by post(key, value2, replace=True), - it is unspecified which pending :meth:`wait_each([key...]) ` - calls (or greenthreads iterating over *results* involving that key) - will observe *value1* versus *value2*. It is guaranteed that - subsequent wait_each([key...]) calls (or greenthreads spawned after - that point) will observe *value2*. - - A successful call to - post(key, :class:`PropagateError(key, ExceptionSubclass) `) - ensures that any subsequent attempt to retrieve that key's value will - raise that PropagateError instance. - """ - # First, check if we're trying to post() to a key with a running - # greenthread. - # A DAGPool greenthread is explicitly permitted to post() to its - # OWN key. - coro = self.coros.get(key, _MISSING) - if coro is not _MISSING and coro.greenthread is not greenthread.getcurrent(): - # oh oh, trying to post a value for running greenthread from - # some other greenthread - raise Collision(key) - - # Here, either we're posting a value for a key with no greenthread or - # we're posting from that greenthread itself. - - # Has somebody already post()ed a value for this key? - # Unless replace == True, this is a problem. - if key in self.values and not replace: - raise Collision(key) - - # Either we've never before posted a value for this key, or we're - # posting with replace == True. - - # update our database - self.values[key] = value - # and wake up pending waiters - self.event.send() - # The comment in Event.reset() says: "it's better to create a new - # event rather than reset an old one". Okay, fine. We do want to be - # able to support new waiters, so create a new Event. - self.event = Event() - - def __getitem__(self, key): - """ - __getitem__(key) (aka dagpool[key]) blocks until *key* has a value, - then delivers that value. - """ - # This is a degenerate case of wait_each(). Construct a tuple - # containing only this 'key'. wait_each() will yield exactly one (key, - # value) pair. Return just its value. - for _, value in self.wait_each((key,)): - return value - - def get(self, key, default=None): - """ - get() returns the value for *key*. If *key* does not yet have a value, - get() returns *default*. - """ - return self._value_or_raise(self.values.get(key, default)) - - def keys(self): - """ - Return a snapshot tuple of keys for which we currently have values. 
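The post()/get()/keys() docstrings above cover feeding a DAGPool from an external greenthread; a small sketch, with the sleep and values chosen for illustration:

    import eventlet
    from eventlet.dagpool import DAGPool

    def consumer(key, results):
        # Blocks until an external greenthread post()s a value for 'raw'.
        inputs = dict(results)
        return inputs['raw'] * 2

    pool = DAGPool()
    pool.spawn('doubled', ('raw',), consumer)

    def producer():
        eventlet.sleep(0.1)              # pretend to fetch something
        pool.post('raw', 21)             # unblocks the 'doubled' greenthread

    eventlet.spawn_n(producer)
    print(pool.get('doubled'))           # None: no value yet, and get() never blocks
    print(pool['doubled'])               # blocks until the value arrives -> 42
    print(pool.keys())                   # snapshot of keys that have values, order unspecified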
- """ - # Explicitly return a copy rather than an iterator: don't assume our - # caller will finish iterating before new values are posted. - return tuple(six.iterkeys(self.values)) - - def items(self): - """ - Return a snapshot tuple of currently-available (key, value) pairs. - """ - # Don't assume our caller will finish iterating before new values are - # posted. - return tuple((key, self._value_or_raise(value)) - for key, value in six.iteritems(self.values)) - - def running(self): - """ - Return number of running DAGPool greenthreads. This includes - greenthreads blocked while iterating through their *results* iterable, - that is, greenthreads waiting on values from other keys. - """ - return len(self.coros) - - def running_keys(self): - """ - Return keys for running DAGPool greenthreads. This includes - greenthreads blocked while iterating through their *results* iterable, - that is, greenthreads waiting on values from other keys. - """ - # return snapshot; don't assume caller will finish iterating before we - # next modify self.coros - return tuple(six.iterkeys(self.coros)) - - def waiting(self): - """ - Return number of waiting DAGPool greenthreads, that is, greenthreads - still waiting on values from other keys. This explicitly does *not* - include external greenthreads waiting on :meth:`wait`, - :meth:`waitall`, :meth:`wait_each`. - """ - # n.b. if Event would provide a count of its waiters, we could say - # something about external greenthreads as well. - # The logic to determine this count is exactly the same as the general - # waiting_for() call. - return len(self.waiting_for()) - - # Use _MISSING instead of None as the default 'key' param so we can permit - # None as a supported key. - def waiting_for(self, key=_MISSING): - """ - waiting_for(key) returns a set() of the keys for which the DAGPool - greenthread spawned with that *key* is still waiting. If you pass a - *key* for which no greenthread was spawned, waiting_for() raises - KeyError. - - waiting_for() without argument returns a dict. Its keys are the keys - of DAGPool greenthreads still waiting on one or more values. In the - returned dict, the value of each such key is the set of other keys for - which that greenthread is still waiting. - - This method allows diagnosing a "hung" DAGPool. If certain - greenthreads are making no progress, it's possible that they are - waiting on keys for which there is no greenthread and no :meth:`post` data. - """ - # We may have greenthreads whose 'pending' entry indicates they're - # waiting on some keys even though values have now been posted for - # some or all of those keys, because those greenthreads have not yet - # regained control since values were posted. So make a point of - # excluding values that are now available. - available = set(six.iterkeys(self.values)) - - if key is not _MISSING: - # waiting_for(key) is semantically different than waiting_for(). - # It's just that they both seem to want the same method name. - coro = self.coros.get(key, _MISSING) - if coro is _MISSING: - # Hmm, no running greenthread with this key. But was there - # EVER a greenthread with this key? If not, let KeyError - # propagate. - self.values[key] - # Oh good, there's a value for this key. Either the - # greenthread finished, or somebody posted a value. Just say - # the greenthread isn't waiting for anything. - return set() - else: - # coro is the _Coro for the running greenthread with the - # specified key. - return coro.pending - available - - # This is a waiting_for() call, i.e. 
a general query rather than for a - # specific key. - - # Start by iterating over (key, coro) pairs in self.coros. Generate - # (key, pending) pairs in which 'pending' is the set of keys on which - # the greenthread believes it's waiting, minus the set of keys that - # are now available. Filter out any pair in which 'pending' is empty, - # that is, that greenthread will be unblocked next time it resumes. - # Make a dict from those pairs. - return dict((key, pending) - for key, pending in ((key, (coro.pending - available)) - for key, coro in six.iteritems(self.coros)) - if pending) diff --git a/venv/lib/python3.6/site-packages/eventlet/db_pool.py b/venv/lib/python3.6/site-packages/eventlet/db_pool.py deleted file mode 100644 index 301c28c..0000000 --- a/venv/lib/python3.6/site-packages/eventlet/db_pool.py +++ /dev/null @@ -1,461 +0,0 @@ -from __future__ import print_function - -from collections import deque -from contextlib import contextmanager -import sys -import time - -from eventlet.pools import Pool -from eventlet import timeout -from eventlet import hubs -from eventlet.hubs.timer import Timer -from eventlet.greenthread import GreenThread - - -_MISSING = object() - - -class ConnectTimeout(Exception): - pass - - -def cleanup_rollback(conn): - conn.rollback() - - -class BaseConnectionPool(Pool): - def __init__(self, db_module, - min_size=0, max_size=4, - max_idle=10, max_age=30, - connect_timeout=5, - cleanup=cleanup_rollback, - *args, **kwargs): - """ - Constructs a pool with at least *min_size* connections and at most - *max_size* connections. Uses *db_module* to construct new connections. - - The *max_idle* parameter determines how long pooled connections can - remain idle, in seconds. After *max_idle* seconds have elapsed - without the connection being used, the pool closes the connection. - - *max_age* is how long any particular connection is allowed to live. - Connections that have been open for longer than *max_age* seconds are - closed, regardless of idle time. If *max_age* is 0, all connections are - closed on return to the pool, reducing it to a concurrency limiter. - - *connect_timeout* is the duration in seconds that the pool will wait - before timing out on connect() to the database. If triggered, the - timeout will raise a ConnectTimeout from get(). - - The remainder of the arguments are used as parameters to the - *db_module*'s connection constructor. - """ - assert(db_module) - self._db_module = db_module - self._args = args - self._kwargs = kwargs - self.max_idle = max_idle - self.max_age = max_age - self.connect_timeout = connect_timeout - self._expiration_timer = None - self.cleanup = cleanup - super(BaseConnectionPool, self).__init__(min_size=min_size, - max_size=max_size, - order_as_stack=True) - - def _schedule_expiration(self): - """Sets up a timer that will call _expire_old_connections when the - oldest connection currently in the free pool is ready to expire. This - is the earliest possible time that a connection could expire, thus, the - timer will be running as infrequently as possible without missing a - possible expiration. - - If this function is called when a timer is already scheduled, it does - nothing. - - If max_age or max_idle is 0, _schedule_expiration likewise does nothing. 
- """ - if self.max_age is 0 or self.max_idle is 0: - # expiration is unnecessary because all connections will be expired - # on put - return - - if (self._expiration_timer is not None - and not getattr(self._expiration_timer, 'called', False)): - # the next timer is already scheduled - return - - try: - now = time.time() - self._expire_old_connections(now) - # the last item in the list, because of the stack ordering, - # is going to be the most-idle - idle_delay = (self.free_items[-1][0] - now) + self.max_idle - oldest = min([t[1] for t in self.free_items]) - age_delay = (oldest - now) + self.max_age - - next_delay = min(idle_delay, age_delay) - except (IndexError, ValueError): - # no free items, unschedule ourselves - self._expiration_timer = None - return - - if next_delay > 0: - # set up a continuous self-calling loop - self._expiration_timer = Timer(next_delay, GreenThread(hubs.get_hub().greenlet).switch, - self._schedule_expiration, [], {}) - self._expiration_timer.schedule() - - def _expire_old_connections(self, now): - """Iterates through the open connections contained in the pool, closing - ones that have remained idle for longer than max_idle seconds, or have - been in existence for longer than max_age seconds. - - *now* is the current time, as returned by time.time(). - """ - original_count = len(self.free_items) - expired = [ - conn - for last_used, created_at, conn in self.free_items - if self._is_expired(now, last_used, created_at)] - - new_free = [ - (last_used, created_at, conn) - for last_used, created_at, conn in self.free_items - if not self._is_expired(now, last_used, created_at)] - self.free_items.clear() - self.free_items.extend(new_free) - - # adjust the current size counter to account for expired - # connections - self.current_size -= original_count - len(self.free_items) - - for conn in expired: - self._safe_close(conn, quiet=True) - - def _is_expired(self, now, last_used, created_at): - """Returns true and closes the connection if it's expired. - """ - if (self.max_idle <= 0 or self.max_age <= 0 - or now - last_used > self.max_idle - or now - created_at > self.max_age): - return True - return False - - def _unwrap_connection(self, conn): - """If the connection was wrapped by a subclass of - BaseConnectionWrapper and is still functional (as determined - by the __nonzero__, or __bool__ in python3, method), returns - the unwrapped connection. If anything goes wrong with this - process, returns None. - """ - base = None - try: - if conn: - base = conn._base - conn._destroy() - else: - base = None - except AttributeError: - pass - return base - - def _safe_close(self, conn, quiet=False): - """Closes the (already unwrapped) connection, squelching any - exceptions. 
- """ - try: - conn.close() - except AttributeError: - pass # conn is None, or junk - except Exception: - if not quiet: - print("Connection.close raised: %s" % (sys.exc_info()[1])) - - def get(self): - conn = super(BaseConnectionPool, self).get() - - # None is a flag value that means that put got called with - # something it couldn't use - if conn is None: - try: - conn = self.create() - except Exception: - # unconditionally increase the free pool because - # even if there are waiters, doing a full put - # would incur a greenlib switch and thus lose the - # exception stack - self.current_size -= 1 - raise - - # if the call to get() draws from the free pool, it will come - # back as a tuple - if isinstance(conn, tuple): - _last_used, created_at, conn = conn - else: - created_at = time.time() - - # wrap the connection so the consumer can call close() safely - wrapped = PooledConnectionWrapper(conn, self) - # annotating the wrapper so that when it gets put in the pool - # again, we'll know how old it is - wrapped._db_pool_created_at = created_at - return wrapped - - def put(self, conn, cleanup=_MISSING): - created_at = getattr(conn, '_db_pool_created_at', 0) - now = time.time() - conn = self._unwrap_connection(conn) - - if self._is_expired(now, now, created_at): - self._safe_close(conn, quiet=False) - conn = None - elif cleanup is not None: - if cleanup is _MISSING: - cleanup = self.cleanup - # by default, call rollback in case the connection is in the middle - # of a transaction. However, rollback has performance implications - # so optionally do nothing or call something else like ping - try: - if conn: - cleanup(conn) - except Exception as e: - # we don't care what the exception was, we just know the - # connection is dead - print("WARNING: cleanup %s raised: %s" % (cleanup, e)) - conn = None - except: - conn = None - raise - - if conn is not None: - super(BaseConnectionPool, self).put((now, created_at, conn)) - else: - # wake up any waiters with a flag value that indicates - # they need to manufacture a connection - if self.waiting() > 0: - super(BaseConnectionPool, self).put(None) - else: - # no waiters -- just change the size - self.current_size -= 1 - self._schedule_expiration() - - @contextmanager - def item(self, cleanup=_MISSING): - conn = self.get() - try: - yield conn - finally: - self.put(conn, cleanup=cleanup) - - def clear(self): - """Close all connections that this pool still holds a reference to, - and removes all references to them. - """ - if self._expiration_timer: - self._expiration_timer.cancel() - free_items, self.free_items = self.free_items, deque() - for item in free_items: - # Free items created using min_size>0 are not tuples. - conn = item[2] if isinstance(item, tuple) else item - self._safe_close(conn, quiet=True) - self.current_size -= 1 - - def __del__(self): - self.clear() - - -class TpooledConnectionPool(BaseConnectionPool): - """A pool which gives out :class:`~eventlet.tpool.Proxy`-based database - connections. 
- """ - - def create(self): - now = time.time() - return now, now, self.connect( - self._db_module, self.connect_timeout, *self._args, **self._kwargs) - - @classmethod - def connect(cls, db_module, connect_timeout, *args, **kw): - t = timeout.Timeout(connect_timeout, ConnectTimeout()) - try: - from eventlet import tpool - conn = tpool.execute(db_module.connect, *args, **kw) - return tpool.Proxy(conn, autowrap_names=('cursor',)) - finally: - t.cancel() - - -class RawConnectionPool(BaseConnectionPool): - """A pool which gives out plain database connections. - """ - - def create(self): - now = time.time() - return now, now, self.connect( - self._db_module, self.connect_timeout, *self._args, **self._kwargs) - - @classmethod - def connect(cls, db_module, connect_timeout, *args, **kw): - t = timeout.Timeout(connect_timeout, ConnectTimeout()) - try: - return db_module.connect(*args, **kw) - finally: - t.cancel() - - -# default connection pool is the tpool one -ConnectionPool = TpooledConnectionPool - - -class GenericConnectionWrapper(object): - def __init__(self, baseconn): - self._base = baseconn - - # Proxy all method calls to self._base - # FIXME: remove repetition; options to consider: - # * for name in (...): - # setattr(class, name, lambda self, *a, **kw: getattr(self._base, name)(*a, **kw)) - # * def __getattr__(self, name): if name in (...): return getattr(self._base, name) - # * other? - def __enter__(self): - return self._base.__enter__() - - def __exit__(self, exc, value, tb): - return self._base.__exit__(exc, value, tb) - - def __repr__(self): - return self._base.__repr__() - - _proxy_funcs = ( - 'affected_rows', - 'autocommit', - 'begin', - 'change_user', - 'character_set_name', - 'close', - 'commit', - 'cursor', - 'dump_debug_info', - 'errno', - 'error', - 'errorhandler', - 'insert_id', - 'literal', - 'ping', - 'query', - 'rollback', - 'select_db', - 'server_capabilities', - 'set_character_set', - 'set_isolation_level', - 'set_server_option', - 'set_sql_mode', - 'show_warnings', - 'shutdown', - 'sqlstate', - 'stat', - 'store_result', - 'string_literal', - 'thread_id', - 'use_result', - 'warning_count', - ) -for _proxy_fun in GenericConnectionWrapper._proxy_funcs: - # excess wrapper for early binding (closure by value) - def _wrapper(_proxy_fun=_proxy_fun): - def _proxy_method(self, *args, **kwargs): - return getattr(self._base, _proxy_fun)(*args, **kwargs) - _proxy_method.func_name = _proxy_fun - _proxy_method.__name__ = _proxy_fun - _proxy_method.__qualname__ = 'GenericConnectionWrapper.' + _proxy_fun - return _proxy_method - setattr(GenericConnectionWrapper, _proxy_fun, _wrapper(_proxy_fun)) -del GenericConnectionWrapper._proxy_funcs -del _proxy_fun -del _wrapper - - -class PooledConnectionWrapper(GenericConnectionWrapper): - """A connection wrapper where: - - the close method returns the connection to the pool instead of closing it directly - - ``bool(conn)`` returns a reasonable value - - returns itself to the pool if it gets garbage collected - """ - - def __init__(self, baseconn, pool): - super(PooledConnectionWrapper, self).__init__(baseconn) - self._pool = pool - - def __nonzero__(self): - return (hasattr(self, '_base') and bool(self._base)) - - __bool__ = __nonzero__ - - def _destroy(self): - self._pool = None - try: - del self._base - except AttributeError: - pass - - def close(self): - """Return the connection to the pool, and remove the - reference to it so that you can't use it again through this - wrapper object. 
- """ - if self and self._pool: - self._pool.put(self) - self._destroy() - - def __del__(self): - return # this causes some issues if __del__ is called in the - # main coroutine, so for now this is disabled - # self.close() - - -class DatabaseConnector(object): - """ - This is an object which will maintain a collection of database - connection pools on a per-host basis. - """ - - def __init__(self, module, credentials, - conn_pool=None, *args, **kwargs): - """constructor - *module* - Database module to use. - *credentials* - Mapping of hostname to connect arguments (e.g. username and password) - """ - assert(module) - self._conn_pool_class = conn_pool - if self._conn_pool_class is None: - self._conn_pool_class = ConnectionPool - self._module = module - self._args = args - self._kwargs = kwargs - # this is a map of hostname to username/password - self._credentials = credentials - self._databases = {} - - def credentials_for(self, host): - if host in self._credentials: - return self._credentials[host] - else: - return self._credentials.get('default', None) - - def get(self, host, dbname): - """Returns a ConnectionPool to the target host and schema. - """ - key = (host, dbname) - if key not in self._databases: - new_kwargs = self._kwargs.copy() - new_kwargs['db'] = dbname - new_kwargs['host'] = host - new_kwargs.update(self.credentials_for(host)) - dbpool = self._conn_pool_class( - self._module, *self._args, **new_kwargs) - self._databases[key] = dbpool - - return self._databases[key] diff --git a/venv/lib/python3.6/site-packages/eventlet/debug.py b/venv/lib/python3.6/site-packages/eventlet/debug.py deleted file mode 100644 index 6481aea..0000000 --- a/venv/lib/python3.6/site-packages/eventlet/debug.py +++ /dev/null @@ -1,174 +0,0 @@ -"""The debug module contains utilities and functions for better -debugging Eventlet-powered applications.""" -from __future__ import print_function - -import os -import sys -import linecache -import re -import inspect - -__all__ = ['spew', 'unspew', 'format_hub_listeners', 'format_hub_timers', - 'hub_listener_stacks', 'hub_exceptions', 'tpool_exceptions', - 'hub_prevent_multiple_readers', 'hub_timer_stacks', - 'hub_blocking_detection'] - -_token_splitter = re.compile('\W+') - - -class Spew(object): - - def __init__(self, trace_names=None, show_values=True): - self.trace_names = trace_names - self.show_values = show_values - - def __call__(self, frame, event, arg): - if event == 'line': - lineno = frame.f_lineno - if '__file__' in frame.f_globals: - filename = frame.f_globals['__file__'] - if (filename.endswith('.pyc') or - filename.endswith('.pyo')): - filename = filename[:-1] - name = frame.f_globals['__name__'] - line = linecache.getline(filename, lineno) - else: - name = '[unknown]' - try: - src = inspect.getsourcelines(frame) - line = src[lineno] - except IOError: - line = 'Unknown code named [%s]. 
VM instruction #%d' % ( - frame.f_code.co_name, frame.f_lasti) - if self.trace_names is None or name in self.trace_names: - print('%s:%s: %s' % (name, lineno, line.rstrip())) - if not self.show_values: - return self - details = [] - tokens = _token_splitter.split(line) - for tok in tokens: - if tok in frame.f_globals: - details.append('%s=%r' % (tok, frame.f_globals[tok])) - if tok in frame.f_locals: - details.append('%s=%r' % (tok, frame.f_locals[tok])) - if details: - print("\t%s" % ' '.join(details)) - return self - - -def spew(trace_names=None, show_values=False): - """Install a trace hook which writes incredibly detailed logs - about what code is being executed to stdout. - """ - sys.settrace(Spew(trace_names, show_values)) - - -def unspew(): - """Remove the trace hook installed by spew. - """ - sys.settrace(None) - - -def format_hub_listeners(): - """ Returns a formatted string of the current listeners on the current - hub. This can be useful in determining what's going on in the event system, - especially when used in conjunction with :func:`hub_listener_stacks`. - """ - from eventlet import hubs - hub = hubs.get_hub() - result = ['READERS:'] - for l in hub.get_readers(): - result.append(repr(l)) - result.append('WRITERS:') - for l in hub.get_writers(): - result.append(repr(l)) - return os.linesep.join(result) - - -def format_hub_timers(): - """ Returns a formatted string of the current timers on the current - hub. This can be useful in determining what's going on in the event system, - especially when used in conjunction with :func:`hub_timer_stacks`. - """ - from eventlet import hubs - hub = hubs.get_hub() - result = ['TIMERS:'] - for l in hub.timers: - result.append(repr(l)) - return os.linesep.join(result) - - -def hub_listener_stacks(state=False): - """Toggles whether or not the hub records the stack when clients register - listeners on file descriptors. This can be useful when trying to figure - out what the hub is up to at any given moment. To inspect the stacks - of the current listeners, call :func:`format_hub_listeners` at critical - junctures in the application logic. - """ - from eventlet import hubs - hubs.get_hub().set_debug_listeners(state) - - -def hub_timer_stacks(state=False): - """Toggles whether or not the hub records the stack when timers are set. - To inspect the stacks of the current timers, call :func:`format_hub_timers` - at critical junctures in the application logic. - """ - from eventlet.hubs import timer - timer._g_debug = state - - -def hub_prevent_multiple_readers(state=True): - """Toggle prevention of multiple greenlets reading from a socket - - When multiple greenlets read from the same socket it is often hard - to predict which greenlet will receive what data. To achieve - resource sharing consider using ``eventlet.pools.Pool`` instead. - - But if you really know what you are doing you can change the state - to ``False`` to stop the hub from protecting against this mistake. - """ - from eventlet.hubs import hub - hub.g_prevent_multiple_readers = state - - -def hub_exceptions(state=True): - """Toggles whether the hub prints exceptions that are raised from its - timers. This can be useful to see how greenthreads are terminating. 
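Editor's note: the debug helpers deleted in this hunk stay available from the pip-installed eventlet. A short sketch of how they are usually toggled during development; the module's own docstrings warn against leaving blocking detection on in production, and it relies on an alarm signal, so it is POSIX only.

import eventlet
from eventlet import debug

debug.hub_exceptions(True)                    # print exceptions raised from hub timers
debug.hub_listener_stacks(True)               # record stacks when listeners are added
debug.hub_blocking_detection(True, resolution=0.5)  # interrupt code that never yields

def worker():
    eventlet.sleep(0.1)

eventlet.spawn(worker)
eventlet.sleep(0.2)
print(debug.format_hub_listeners())           # dump current readers and writers
print(debug.format_hub_timers())              # dump pending timers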
- """ - from eventlet import hubs - hubs.get_hub().set_timer_exceptions(state) - from eventlet import greenpool - greenpool.DEBUG = state - - -def tpool_exceptions(state=False): - """Toggles whether tpool itself prints exceptions that are raised from - functions that are executed in it, in addition to raising them like - it normally does.""" - from eventlet import tpool - tpool.QUIET = not state - - -def hub_blocking_detection(state=False, resolution=1): - """Toggles whether Eventlet makes an effort to detect blocking - behavior in an application. - - It does this by telling the kernel to raise a SIGALARM after a - short timeout, and clearing the timeout every time the hub - greenlet is resumed. Therefore, any code that runs for a long - time without yielding to the hub will get interrupted by the - blocking detector (don't use it in production!). - - The *resolution* argument governs how long the SIGALARM timeout - waits in seconds. The implementation uses :func:`signal.setitimer` - and can be specified as a floating-point value. - The shorter the resolution, the greater the chance of false - positives. - """ - from eventlet import hubs - assert resolution > 0 - hubs.get_hub().debug_blocking = state - hubs.get_hub().debug_blocking_resolution = resolution - if not state: - hubs.get_hub().block_detect_post() diff --git a/venv/lib/python3.6/site-packages/eventlet/event.py b/venv/lib/python3.6/site-packages/eventlet/event.py deleted file mode 100644 index 6ab455f..0000000 --- a/venv/lib/python3.6/site-packages/eventlet/event.py +++ /dev/null @@ -1,220 +0,0 @@ -from __future__ import print_function - -from eventlet import hubs -from eventlet.support import greenlets as greenlet - -__all__ = ['Event'] - - -class NOT_USED: - def __repr__(self): - return 'NOT_USED' - -NOT_USED = NOT_USED() - - -class Event(object): - """An abstraction where an arbitrary number of coroutines - can wait for one event from another. - - Events are similar to a Queue that can only hold one item, but differ - in two important ways: - - 1. calling :meth:`send` never unschedules the current greenthread - 2. :meth:`send` can only be called once; create a new event to send again. - - They are good for communicating results between coroutines, and - are the basis for how - :meth:`GreenThread.wait() ` - is implemented. - - >>> from eventlet import event - >>> import eventlet - >>> evt = event.Event() - >>> def baz(b): - ... evt.send(b + 1) - ... - >>> _ = eventlet.spawn_n(baz, 3) - >>> evt.wait() - 4 - """ - _result = None - _exc = None - - def __init__(self): - self._waiters = set() - self.reset() - - def __str__(self): - params = (self.__class__.__name__, hex(id(self)), - self._result, self._exc, len(self._waiters)) - return '<%s at %s result=%r _exc=%r _waiters[%d]>' % params - - def reset(self): - # this is kind of a misfeature and doesn't work perfectly well, - # it's better to create a new event rather than reset an old one - # removing documentation so that we don't get new use cases for it - assert self._result is not NOT_USED, 'Trying to re-reset() a fresh event.' - self._result = NOT_USED - self._exc = None - - def ready(self): - """ Return true if the :meth:`wait` call will return immediately. - Used to avoid waiting for things that might take a while to time out. 
- For example, you can put a bunch of events into a list, and then visit - them all repeatedly, calling :meth:`ready` until one returns ``True``, - and then you can :meth:`wait` on that one.""" - return self._result is not NOT_USED - - def has_exception(self): - return self._exc is not None - - def has_result(self): - return self._result is not NOT_USED and self._exc is None - - def poll(self, notready=None): - if self.ready(): - return self.wait() - return notready - - # QQQ make it return tuple (type, value, tb) instead of raising - # because - # 1) "poll" does not imply raising - # 2) it's better not to screw up caller's sys.exc_info() by default - # (e.g. if caller wants to calls the function in except or finally) - def poll_exception(self, notready=None): - if self.has_exception(): - return self.wait() - return notready - - def poll_result(self, notready=None): - if self.has_result(): - return self.wait() - return notready - - def wait(self, timeout=None): - """Wait until another coroutine calls :meth:`send`. - Returns the value the other coroutine passed to :meth:`send`. - - >>> import eventlet - >>> evt = eventlet.Event() - >>> def wait_on(): - ... retval = evt.wait() - ... print("waited for {0}".format(retval)) - >>> _ = eventlet.spawn(wait_on) - >>> evt.send('result') - >>> eventlet.sleep(0) - waited for result - - Returns immediately if the event has already occurred. - - >>> evt.wait() - 'result' - - When the timeout argument is present and not None, it should be a floating point number - specifying a timeout for the operation in seconds (or fractions thereof). - """ - current = greenlet.getcurrent() - if self._result is NOT_USED: - hub = hubs.get_hub() - self._waiters.add(current) - timer = None - if timeout is not None: - timer = hub.schedule_call_local(timeout, self._do_send, None, None, current) - try: - result = hub.switch() - if timer is not None: - timer.cancel() - return result - finally: - self._waiters.discard(current) - if self._exc is not None: - current.throw(*self._exc) - return self._result - - def send(self, result=None, exc=None): - """Makes arrangements for the waiters to be woken with the - result and then returns immediately to the parent. - - >>> from eventlet import event - >>> import eventlet - >>> evt = event.Event() - >>> def waiter(): - ... print('about to wait') - ... result = evt.wait() - ... print('waited for {0}'.format(result)) - >>> _ = eventlet.spawn(waiter) - >>> eventlet.sleep(0) - about to wait - >>> evt.send('a') - >>> eventlet.sleep(0) - waited for a - - It is an error to call :meth:`send` multiple times on the same event. - - >>> evt.send('whoops') - Traceback (most recent call last): - ... - AssertionError: Trying to re-send() an already-triggered event. - - Use :meth:`reset` between :meth:`send` s to reuse an event object. - """ - assert self._result is NOT_USED, 'Trying to re-send() an already-triggered event.' - self._result = result - if exc is not None and not isinstance(exc, tuple): - exc = (exc, ) - self._exc = exc - hub = hubs.get_hub() - for waiter in self._waiters: - hub.schedule_call_global( - 0, self._do_send, self._result, self._exc, waiter) - - def _do_send(self, result, exc, waiter): - if waiter in self._waiters: - if exc is None: - waiter.switch(result) - else: - waiter.throw(*exc) - - def send_exception(self, *args): - """Same as :meth:`send`, but sends an exception to waiters. - - The arguments to send_exception are the same as the arguments - to ``raise``. 
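Editor's note: a compact sketch of the Event API being removed from the vendored tree, combining wait() with send() and send_exception(); it assumes the pip-installed eventlet exposes the same eventlet.event.Event shown in this hunk.

import sys
import eventlet
from eventlet import event

done = event.Event()
eventlet.spawn_after(0.05, done.send, 'result')
print(done.wait())        # blocks this greenthread until send() fires, prints 'result'

failed = event.Event()

def worker():
    try:
        raise RuntimeError('worker failed')
    except RuntimeError:
        # pass the whole exc_info tuple so the original traceback is preserved
        failed.send_exception(*sys.exc_info())

eventlet.spawn(worker)
try:
    failed.wait()         # re-raises the exception sent by the worker
except RuntimeError as exc:
    print('caught:', exc)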
If a single exception object is passed in, it - will be re-raised when :meth:`wait` is called, generating a - new stacktrace. - - >>> from eventlet import event - >>> evt = event.Event() - >>> evt.send_exception(RuntimeError()) - >>> evt.wait() - Traceback (most recent call last): - File "", line 1, in - File "eventlet/event.py", line 120, in wait - current.throw(*self._exc) - RuntimeError - - If it's important to preserve the entire original stack trace, - you must pass in the entire :func:`sys.exc_info` tuple. - - >>> import sys - >>> evt = event.Event() - >>> try: - ... raise RuntimeError() - ... except RuntimeError: - ... evt.send_exception(*sys.exc_info()) - ... - >>> evt.wait() - Traceback (most recent call last): - File "", line 1, in - File "eventlet/event.py", line 120, in wait - current.throw(*self._exc) - File "", line 2, in - RuntimeError - - Note that doing so stores a traceback object directly on the - Event object, which may cause reference cycles. See the - :func:`sys.exc_info` documentation. - """ - # the arguments and the same as for greenlet.throw - return self.send(None, args) diff --git a/venv/lib/python3.6/site-packages/eventlet/green/BaseHTTPServer.py b/venv/lib/python3.6/site-packages/eventlet/green/BaseHTTPServer.py deleted file mode 100644 index 493efd2..0000000 --- a/venv/lib/python3.6/site-packages/eventlet/green/BaseHTTPServer.py +++ /dev/null @@ -1,16 +0,0 @@ -from eventlet import patcher -from eventlet.green import socket -from eventlet.green import SocketServer -import six - -patcher.inject( - 'BaseHTTPServer' if six.PY2 else 'http.server', - globals(), - ('socket', socket), - ('SocketServer', SocketServer), - ('socketserver', SocketServer)) - -del patcher - -if __name__ == '__main__': - test() diff --git a/venv/lib/python3.6/site-packages/eventlet/green/CGIHTTPServer.py b/venv/lib/python3.6/site-packages/eventlet/green/CGIHTTPServer.py deleted file mode 100644 index c384db5..0000000 --- a/venv/lib/python3.6/site-packages/eventlet/green/CGIHTTPServer.py +++ /dev/null @@ -1,19 +0,0 @@ -from eventlet import patcher -from eventlet.green import BaseHTTPServer -from eventlet.green import SimpleHTTPServer -from eventlet.green import urllib -from eventlet.green import select - -test = None # bind prior to patcher.inject to silence pyflakes warning below -patcher.inject( - 'CGIHTTPServer', - globals(), - ('BaseHTTPServer', BaseHTTPServer), - ('SimpleHTTPServer', SimpleHTTPServer), - ('urllib', urllib), - ('select', select)) - -del patcher - -if __name__ == '__main__': - test() # pyflakes false alarm here unless test = None above diff --git a/venv/lib/python3.6/site-packages/eventlet/green/MySQLdb.py b/venv/lib/python3.6/site-packages/eventlet/green/MySQLdb.py deleted file mode 100644 index 3593542..0000000 --- a/venv/lib/python3.6/site-packages/eventlet/green/MySQLdb.py +++ /dev/null @@ -1,37 +0,0 @@ -__MySQLdb = __import__('MySQLdb') - -__all__ = __MySQLdb.__all__ -__patched__ = ["connect", "Connect", 'Connection', 'connections'] - -from eventlet.patcher import slurp_properties -slurp_properties( - __MySQLdb, globals(), - ignore=__patched__, srckeys=dir(__MySQLdb)) - -from eventlet import tpool - -__orig_connections = __import__('MySQLdb.connections').connections - - -def Connection(*args, **kw): - conn = tpool.execute(__orig_connections.Connection, *args, **kw) - return tpool.Proxy(conn, autowrap_names=('cursor',)) -connect = Connect = Connection - - -# replicate the MySQLdb.connections module but with a tpooled Connection factory -class 
MySQLdbConnectionsModule(object): - pass - -connections = MySQLdbConnectionsModule() -for var in dir(__orig_connections): - if not var.startswith('__'): - setattr(connections, var, getattr(__orig_connections, var)) -connections.Connection = Connection - -cursors = __import__('MySQLdb.cursors').cursors -converters = __import__('MySQLdb.converters').converters - -# TODO support instantiating cursors.FooCursor objects directly -# TODO though this is a low priority, it would be nice if we supported -# subclassing eventlet.green.MySQLdb.connections.Connection diff --git a/venv/lib/python3.6/site-packages/eventlet/green/OpenSSL/SSL.py b/venv/lib/python3.6/site-packages/eventlet/green/OpenSSL/SSL.py deleted file mode 100644 index f534cea..0000000 --- a/venv/lib/python3.6/site-packages/eventlet/green/OpenSSL/SSL.py +++ /dev/null @@ -1,124 +0,0 @@ -from OpenSSL import SSL as orig_SSL -from OpenSSL.SSL import * -from eventlet.support import get_errno -from eventlet import greenio -from eventlet.hubs import trampoline -import socket - - -class GreenConnection(greenio.GreenSocket): - """ Nonblocking wrapper for SSL.Connection objects. - """ - - def __init__(self, ctx, sock=None): - if sock is not None: - fd = orig_SSL.Connection(ctx, sock) - else: - # if we're given a Connection object directly, use it; - # this is used in the inherited accept() method - fd = ctx - super(ConnectionType, self).__init__(fd) - - def do_handshake(self): - """ Perform an SSL handshake (usually called after renegotiate or one of - set_accept_state or set_accept_state). This can raise the same exceptions as - send and recv. """ - if self.act_non_blocking: - return self.fd.do_handshake() - while True: - try: - return self.fd.do_handshake() - except WantReadError: - trampoline(self.fd.fileno(), - read=True, - timeout=self.gettimeout(), - timeout_exc=socket.timeout) - except WantWriteError: - trampoline(self.fd.fileno(), - write=True, - timeout=self.gettimeout(), - timeout_exc=socket.timeout) - - def dup(self): - raise NotImplementedError("Dup not supported on SSL sockets") - - def makefile(self, mode='r', bufsize=-1): - raise NotImplementedError("Makefile not supported on SSL sockets") - - def read(self, size): - """Works like a blocking call to SSL_read(), whose behavior is - described here: http://www.openssl.org/docs/ssl/SSL_read.html""" - if self.act_non_blocking: - return self.fd.read(size) - while True: - try: - return self.fd.read(size) - except WantReadError: - trampoline(self.fd.fileno(), - read=True, - timeout=self.gettimeout(), - timeout_exc=socket.timeout) - except WantWriteError: - trampoline(self.fd.fileno(), - write=True, - timeout=self.gettimeout(), - timeout_exc=socket.timeout) - except SysCallError as e: - if get_errno(e) == -1 or get_errno(e) > 0: - return '' - - recv = read - - def write(self, data): - """Works like a blocking call to SSL_write(), whose behavior is - described here: http://www.openssl.org/docs/ssl/SSL_write.html""" - if not data: - return 0 # calling SSL_write() with 0 bytes to be sent is undefined - if self.act_non_blocking: - return self.fd.write(data) - while True: - try: - return self.fd.write(data) - except WantReadError: - trampoline(self.fd.fileno(), - read=True, - timeout=self.gettimeout(), - timeout_exc=socket.timeout) - except WantWriteError: - trampoline(self.fd.fileno(), - write=True, - timeout=self.gettimeout(), - timeout_exc=socket.timeout) - - send = write - - def sendall(self, data): - """Send "all" data on the connection. 
This calls send() repeatedly until - all data is sent. If an error occurs, it's impossible to tell how much data - has been sent. - - No return value.""" - tail = self.send(data) - while tail < len(data): - tail += self.send(data[tail:]) - - def shutdown(self): - if self.act_non_blocking: - return self.fd.shutdown() - while True: - try: - return self.fd.shutdown() - except WantReadError: - trampoline(self.fd.fileno(), - read=True, - timeout=self.gettimeout(), - timeout_exc=socket.timeout) - except WantWriteError: - trampoline(self.fd.fileno(), - write=True, - timeout=self.gettimeout(), - timeout_exc=socket.timeout) - -Connection = ConnectionType = GreenConnection - -del greenio diff --git a/venv/lib/python3.6/site-packages/eventlet/green/OpenSSL/__init__.py b/venv/lib/python3.6/site-packages/eventlet/green/OpenSSL/__init__.py deleted file mode 100644 index d861476..0000000 --- a/venv/lib/python3.6/site-packages/eventlet/green/OpenSSL/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -from . import crypto -from . import SSL -from . import tsafe -from .version import __version__ diff --git a/venv/lib/python3.6/site-packages/eventlet/green/OpenSSL/crypto.py b/venv/lib/python3.6/site-packages/eventlet/green/OpenSSL/crypto.py deleted file mode 100644 index 0a57f6f..0000000 --- a/venv/lib/python3.6/site-packages/eventlet/green/OpenSSL/crypto.py +++ /dev/null @@ -1 +0,0 @@ -from OpenSSL.crypto import * diff --git a/venv/lib/python3.6/site-packages/eventlet/green/OpenSSL/tsafe.py b/venv/lib/python3.6/site-packages/eventlet/green/OpenSSL/tsafe.py deleted file mode 100644 index dd0dd8c..0000000 --- a/venv/lib/python3.6/site-packages/eventlet/green/OpenSSL/tsafe.py +++ /dev/null @@ -1 +0,0 @@ -from OpenSSL.tsafe import * diff --git a/venv/lib/python3.6/site-packages/eventlet/green/OpenSSL/version.py b/venv/lib/python3.6/site-packages/eventlet/green/OpenSSL/version.py deleted file mode 100644 index c886ef0..0000000 --- a/venv/lib/python3.6/site-packages/eventlet/green/OpenSSL/version.py +++ /dev/null @@ -1 +0,0 @@ -from OpenSSL.version import __version__, __doc__ diff --git a/venv/lib/python3.6/site-packages/eventlet/green/Queue.py b/venv/lib/python3.6/site-packages/eventlet/green/Queue.py deleted file mode 100644 index f999c3b..0000000 --- a/venv/lib/python3.6/site-packages/eventlet/green/Queue.py +++ /dev/null @@ -1,32 +0,0 @@ -from eventlet import queue - -__all__ = ['Empty', 'Full', 'LifoQueue', 'PriorityQueue', 'Queue'] - -__patched__ = ['LifoQueue', 'PriorityQueue', 'Queue'] - -# these classes exist to paper over the major operational difference between -# eventlet.queue.Queue and the stdlib equivalents - - -class Queue(queue.Queue): - def __init__(self, maxsize=0): - if maxsize == 0: - maxsize = None - super(Queue, self).__init__(maxsize) - - -class PriorityQueue(queue.PriorityQueue): - def __init__(self, maxsize=0): - if maxsize == 0: - maxsize = None - super(PriorityQueue, self).__init__(maxsize) - - -class LifoQueue(queue.LifoQueue): - def __init__(self, maxsize=0): - if maxsize == 0: - maxsize = None - super(LifoQueue, self).__init__(maxsize) - -Empty = queue.Empty -Full = queue.Full diff --git a/venv/lib/python3.6/site-packages/eventlet/green/SimpleHTTPServer.py b/venv/lib/python3.6/site-packages/eventlet/green/SimpleHTTPServer.py deleted file mode 100644 index 89d8b28..0000000 --- a/venv/lib/python3.6/site-packages/eventlet/green/SimpleHTTPServer.py +++ /dev/null @@ -1,14 +0,0 @@ -from eventlet import patcher -from eventlet.green import BaseHTTPServer -from eventlet.green import urllib - 
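Editor's note: the eventlet.green.* shims removed in these hunks are generated with patcher.inject(), which re-executes a stdlib module with green replacements for its imports. Application code normally does not build such shims by hand; it either imports the green module directly or monkey-patches the process. A sketch of both styles, assuming eventlet installed from PyPI:

# Style 1: import the cooperative drop-in explicitly.
from eventlet.green import socket as green_socket
s = green_socket.socket()     # yields to the hub instead of blocking the thread

# Style 2: patch the stdlib in place; normally done at the very top of the
# program, before anything else imports socket, select, time or threading.
import eventlet
eventlet.monkey_patch()

import urllib.request         # now runs on the green socket under the hood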
-patcher.inject( - 'SimpleHTTPServer', - globals(), - ('BaseHTTPServer', BaseHTTPServer), - ('urllib', urllib)) - -del patcher - -if __name__ == '__main__': - test() diff --git a/venv/lib/python3.6/site-packages/eventlet/green/SocketServer.py b/venv/lib/python3.6/site-packages/eventlet/green/SocketServer.py deleted file mode 100644 index 17a4d43..0000000 --- a/venv/lib/python3.6/site-packages/eventlet/green/SocketServer.py +++ /dev/null @@ -1,15 +0,0 @@ -from eventlet import patcher - -from eventlet.green import socket -from eventlet.green import select -from eventlet.green import threading -import six - -patcher.inject( - 'SocketServer' if six.PY2 else 'socketserver', - globals(), - ('socket', socket), - ('select', select), - ('threading', threading)) - -# QQQ ForkingMixIn should be fixed to use green waitpid? diff --git a/venv/lib/python3.6/site-packages/eventlet/green/__init__.py b/venv/lib/python3.6/site-packages/eventlet/green/__init__.py deleted file mode 100644 index d965325..0000000 --- a/venv/lib/python3.6/site-packages/eventlet/green/__init__.py +++ /dev/null @@ -1 +0,0 @@ -# this package contains modules from the standard library converted to use eventlet diff --git a/venv/lib/python3.6/site-packages/eventlet/green/_socket_nodns.py b/venv/lib/python3.6/site-packages/eventlet/green/_socket_nodns.py deleted file mode 100644 index 7dca20a..0000000 --- a/venv/lib/python3.6/site-packages/eventlet/green/_socket_nodns.py +++ /dev/null @@ -1,33 +0,0 @@ -__socket = __import__('socket') - -__all__ = __socket.__all__ -__patched__ = ['fromfd', 'socketpair', 'ssl', 'socket', 'timeout'] - -import eventlet.patcher -eventlet.patcher.slurp_properties(__socket, globals(), ignore=__patched__, srckeys=dir(__socket)) - -os = __import__('os') -import sys -from eventlet import greenio - - -socket = greenio.GreenSocket -_GLOBAL_DEFAULT_TIMEOUT = greenio._GLOBAL_DEFAULT_TIMEOUT -timeout = greenio.socket_timeout - -try: - __original_fromfd__ = __socket.fromfd - - def fromfd(*args): - return socket(__original_fromfd__(*args)) -except AttributeError: - pass - -try: - __original_socketpair__ = __socket.socketpair - - def socketpair(*args): - one, two = __original_socketpair__(*args) - return socket(one), socket(two) -except AttributeError: - pass diff --git a/venv/lib/python3.6/site-packages/eventlet/green/asynchat.py b/venv/lib/python3.6/site-packages/eventlet/green/asynchat.py deleted file mode 100644 index e074749..0000000 --- a/venv/lib/python3.6/site-packages/eventlet/green/asynchat.py +++ /dev/null @@ -1,11 +0,0 @@ -from eventlet import patcher -from eventlet.green import asyncore -from eventlet.green import socket - -patcher.inject( - 'asynchat', - globals(), - ('asyncore', asyncore), - ('socket', socket)) - -del patcher diff --git a/venv/lib/python3.6/site-packages/eventlet/green/asyncore.py b/venv/lib/python3.6/site-packages/eventlet/green/asyncore.py deleted file mode 100644 index 6a5f797..0000000 --- a/venv/lib/python3.6/site-packages/eventlet/green/asyncore.py +++ /dev/null @@ -1,13 +0,0 @@ -from eventlet import patcher -from eventlet.green import select -from eventlet.green import socket -from eventlet.green import time - -patcher.inject( - "asyncore", - globals(), - ('select', select), - ('socket', socket), - ('time', time)) - -del patcher diff --git a/venv/lib/python3.6/site-packages/eventlet/green/builtin.py b/venv/lib/python3.6/site-packages/eventlet/green/builtin.py deleted file mode 100644 index 3dd2c76..0000000 --- a/venv/lib/python3.6/site-packages/eventlet/green/builtin.py +++ 
/dev/null @@ -1,47 +0,0 @@ -""" -In order to detect a filehandle that's been closed, our only clue may be -the operating system returning the same filehandle in response to some -other operation. - -The builtins 'file' and 'open' are patched to collaborate with the -notify_opened protocol. -""" - -builtins_orig = __builtins__ - -from eventlet import hubs -from eventlet.hubs import hub -from eventlet.patcher import slurp_properties -import sys - -__all__ = dir(builtins_orig) -__patched__ = ['file', 'open'] - -slurp_properties(builtins_orig, globals(), - ignore=__patched__, srckeys=dir(builtins_orig)) - -hubs.get_hub() - -__original_file = file - - -class file(__original_file): - def __init__(self, *args, **kwargs): - super(file, self).__init__(*args, **kwargs) - hubs.notify_opened(self.fileno()) - -__original_open = open -__opening = False - - -def open(*args): - global __opening - result = __original_open(*args) - if not __opening: - # This is incredibly ugly. 'open' is used under the hood by - # the import process. So, ensure we don't wind up in an - # infinite loop. - __opening = True - hubs.notify_opened(result.fileno()) - __opening = False - return result diff --git a/venv/lib/python3.6/site-packages/eventlet/green/ftplib.py b/venv/lib/python3.6/site-packages/eventlet/green/ftplib.py deleted file mode 100644 index b452e1d..0000000 --- a/venv/lib/python3.6/site-packages/eventlet/green/ftplib.py +++ /dev/null @@ -1,13 +0,0 @@ -from eventlet import patcher - -# *NOTE: there might be some funny business with the "SOCKS" module -# if it even still exists -from eventlet.green import socket - -patcher.inject('ftplib', globals(), ('socket', socket)) - -del patcher - -# Run test program when run as a script -if __name__ == '__main__': - test() diff --git a/venv/lib/python3.6/site-packages/eventlet/green/http/__init__.py b/venv/lib/python3.6/site-packages/eventlet/green/http/__init__.py deleted file mode 100644 index 2e86175..0000000 --- a/venv/lib/python3.6/site-packages/eventlet/green/http/__init__.py +++ /dev/null @@ -1,191 +0,0 @@ -# This is part of Python source code with Eventlet-specific modifications. -# -# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, -# 2011, 2012, 2013, 2014, 2015, 2016 Python Software Foundation; All Rights -# Reserved -# -# PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 -# -------------------------------------------- -# -# 1. This LICENSE AGREEMENT is between the Python Software Foundation -# ("PSF"), and the Individual or Organization ("Licensee") accessing and -# otherwise using this software ("Python") in source or binary form and -# its associated documentation. -# -# 2. Subject to the terms and conditions of this License Agreement, PSF hereby -# grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, -# analyze, test, perform and/or display publicly, prepare derivative works, -# distribute, and otherwise use Python alone or in any derivative version, -# provided, however, that PSF's License Agreement and PSF's notice of copyright, -# i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, -# 2011, 2012, 2013, 2014, 2015, 2016 Python Software Foundation; All Rights -# Reserved" are retained in Python alone or in any derivative version prepared by -# Licensee. -# -# 3. 
In the event Licensee prepares a derivative work that is based on -# or incorporates Python or any part thereof, and wants to make -# the derivative work available to others as provided herein, then -# Licensee hereby agrees to include in any such work a brief summary of -# the changes made to Python. -# -# 4. PSF is making Python available to Licensee on an "AS IS" -# basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR -# IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND -# DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS -# FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT -# INFRINGE ANY THIRD PARTY RIGHTS. -# -# 5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON -# FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS -# A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, -# OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. -# -# 6. This License Agreement will automatically terminate upon a material -# breach of its terms and conditions. -# -# 7. Nothing in this License Agreement shall be deemed to create any -# relationship of agency, partnership, or joint venture between PSF and -# Licensee. This License Agreement does not grant permission to use PSF -# trademarks or trade name in a trademark sense to endorse or promote -# products or services of Licensee, or any third party. -# -# 8. By copying, installing or otherwise using Python, Licensee -# agrees to be bound by the terms and conditions of this License -# Agreement. -import six -assert six.PY3, 'This is a Python 3 module' - -from enum import IntEnum - -__all__ = ['HTTPStatus'] - -class HTTPStatus(IntEnum): - """HTTP status codes and reason phrases - - Status codes from the following RFCs are all observed: - - * RFC 7231: Hypertext Transfer Protocol (HTTP/1.1), obsoletes 2616 - * RFC 6585: Additional HTTP Status Codes - * RFC 3229: Delta encoding in HTTP - * RFC 4918: HTTP Extensions for WebDAV, obsoletes 2518 - * RFC 5842: Binding Extensions to WebDAV - * RFC 7238: Permanent Redirect - * RFC 2295: Transparent Content Negotiation in HTTP - * RFC 2774: An HTTP Extension Framework - """ - def __new__(cls, value, phrase, description=''): - obj = int.__new__(cls, value) - obj._value_ = value - - obj.phrase = phrase - obj.description = description - return obj - - # informational - CONTINUE = 100, 'Continue', 'Request received, please continue' - SWITCHING_PROTOCOLS = (101, 'Switching Protocols', - 'Switching to new protocol; obey Upgrade header') - PROCESSING = 102, 'Processing' - - # success - OK = 200, 'OK', 'Request fulfilled, document follows' - CREATED = 201, 'Created', 'Document created, URL follows' - ACCEPTED = (202, 'Accepted', - 'Request accepted, processing continues off-line') - NON_AUTHORITATIVE_INFORMATION = (203, - 'Non-Authoritative Information', 'Request fulfilled from cache') - NO_CONTENT = 204, 'No Content', 'Request fulfilled, nothing follows' - RESET_CONTENT = 205, 'Reset Content', 'Clear input form for further input' - PARTIAL_CONTENT = 206, 'Partial Content', 'Partial content follows' - MULTI_STATUS = 207, 'Multi-Status' - ALREADY_REPORTED = 208, 'Already Reported' - IM_USED = 226, 'IM Used' - - # redirection - MULTIPLE_CHOICES = (300, 'Multiple Choices', - 'Object has several resources -- see URI list') - MOVED_PERMANENTLY = (301, 'Moved Permanently', - 'Object moved permanently -- see URI list') - FOUND = 302, 'Found', 'Object moved temporarily -- see URI list' - SEE_OTHER = 303, 'See 
Other', 'Object moved -- see Method and URL list' - NOT_MODIFIED = (304, 'Not Modified', - 'Document has not changed since given time') - USE_PROXY = (305, 'Use Proxy', - 'You must use proxy specified in Location to access this resource') - TEMPORARY_REDIRECT = (307, 'Temporary Redirect', - 'Object moved temporarily -- see URI list') - PERMANENT_REDIRECT = (308, 'Permanent Redirect', - 'Object moved temporarily -- see URI list') - - # client error - BAD_REQUEST = (400, 'Bad Request', - 'Bad request syntax or unsupported method') - UNAUTHORIZED = (401, 'Unauthorized', - 'No permission -- see authorization schemes') - PAYMENT_REQUIRED = (402, 'Payment Required', - 'No payment -- see charging schemes') - FORBIDDEN = (403, 'Forbidden', - 'Request forbidden -- authorization will not help') - NOT_FOUND = (404, 'Not Found', - 'Nothing matches the given URI') - METHOD_NOT_ALLOWED = (405, 'Method Not Allowed', - 'Specified method is invalid for this resource') - NOT_ACCEPTABLE = (406, 'Not Acceptable', - 'URI not available in preferred format') - PROXY_AUTHENTICATION_REQUIRED = (407, - 'Proxy Authentication Required', - 'You must authenticate with this proxy before proceeding') - REQUEST_TIMEOUT = (408, 'Request Timeout', - 'Request timed out; try again later') - CONFLICT = 409, 'Conflict', 'Request conflict' - GONE = (410, 'Gone', - 'URI no longer exists and has been permanently removed') - LENGTH_REQUIRED = (411, 'Length Required', - 'Client must specify Content-Length') - PRECONDITION_FAILED = (412, 'Precondition Failed', - 'Precondition in headers is false') - REQUEST_ENTITY_TOO_LARGE = (413, 'Request Entity Too Large', - 'Entity is too large') - REQUEST_URI_TOO_LONG = (414, 'Request-URI Too Long', - 'URI is too long') - UNSUPPORTED_MEDIA_TYPE = (415, 'Unsupported Media Type', - 'Entity body in unsupported format') - REQUESTED_RANGE_NOT_SATISFIABLE = (416, - 'Requested Range Not Satisfiable', - 'Cannot satisfy request range') - EXPECTATION_FAILED = (417, 'Expectation Failed', - 'Expect condition could not be satisfied') - UNPROCESSABLE_ENTITY = 422, 'Unprocessable Entity' - LOCKED = 423, 'Locked' - FAILED_DEPENDENCY = 424, 'Failed Dependency' - UPGRADE_REQUIRED = 426, 'Upgrade Required' - PRECONDITION_REQUIRED = (428, 'Precondition Required', - 'The origin server requires the request to be conditional') - TOO_MANY_REQUESTS = (429, 'Too Many Requests', - 'The user has sent too many requests in ' - 'a given amount of time ("rate limiting")') - REQUEST_HEADER_FIELDS_TOO_LARGE = (431, - 'Request Header Fields Too Large', - 'The server is unwilling to process the request because its header ' - 'fields are too large') - - # server errors - INTERNAL_SERVER_ERROR = (500, 'Internal Server Error', - 'Server got itself in trouble') - NOT_IMPLEMENTED = (501, 'Not Implemented', - 'Server does not support this operation') - BAD_GATEWAY = (502, 'Bad Gateway', - 'Invalid responses from another server/proxy') - SERVICE_UNAVAILABLE = (503, 'Service Unavailable', - 'The server cannot process the request due to a high load') - GATEWAY_TIMEOUT = (504, 'Gateway Timeout', - 'The gateway server did not receive a timely response') - HTTP_VERSION_NOT_SUPPORTED = (505, 'HTTP Version Not Supported', - 'Cannot fulfill request') - VARIANT_ALSO_NEGOTIATES = 506, 'Variant Also Negotiates' - INSUFFICIENT_STORAGE = 507, 'Insufficient Storage' - LOOP_DETECTED = 508, 'Loop Detected' - NOT_EXTENDED = 510, 'Not Extended' - NETWORK_AUTHENTICATION_REQUIRED = (511, - 'Network Authentication Required', - 'The client needs to 
authenticate to gain network access') diff --git a/venv/lib/python3.6/site-packages/eventlet/green/http/client.py b/venv/lib/python3.6/site-packages/eventlet/green/http/client.py deleted file mode 100644 index e4bd2ad..0000000 --- a/venv/lib/python3.6/site-packages/eventlet/green/http/client.py +++ /dev/null @@ -1,1567 +0,0 @@ -# This is part of Python source code with Eventlet-specific modifications. -# -# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, -# 2011, 2012, 2013, 2014, 2015, 2016 Python Software Foundation; All Rights -# Reserved -# -# PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 -# -------------------------------------------- -# -# 1. This LICENSE AGREEMENT is between the Python Software Foundation -# ("PSF"), and the Individual or Organization ("Licensee") accessing and -# otherwise using this software ("Python") in source or binary form and -# its associated documentation. -# -# 2. Subject to the terms and conditions of this License Agreement, PSF hereby -# grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, -# analyze, test, perform and/or display publicly, prepare derivative works, -# distribute, and otherwise use Python alone or in any derivative version, -# provided, however, that PSF's License Agreement and PSF's notice of copyright, -# i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, -# 2011, 2012, 2013, 2014, 2015, 2016 Python Software Foundation; All Rights -# Reserved" are retained in Python alone or in any derivative version prepared by -# Licensee. -# -# 3. In the event Licensee prepares a derivative work that is based on -# or incorporates Python or any part thereof, and wants to make -# the derivative work available to others as provided herein, then -# Licensee hereby agrees to include in any such work a brief summary of -# the changes made to Python. -# -# 4. PSF is making Python available to Licensee on an "AS IS" -# basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR -# IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND -# DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS -# FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT -# INFRINGE ANY THIRD PARTY RIGHTS. -# -# 5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON -# FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS -# A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, -# OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. -# -# 6. This License Agreement will automatically terminate upon a material -# breach of its terms and conditions. -# -# 7. Nothing in this License Agreement shall be deemed to create any -# relationship of agency, partnership, or joint venture between PSF and -# Licensee. This License Agreement does not grant permission to use PSF -# trademarks or trade name in a trademark sense to endorse or promote -# products or services of Licensee, or any third party. -# -# 8. By copying, installing or otherwise using Python, Licensee -# agrees to be bound by the terms and conditions of this License -# Agreement. -"""HTTP/1.1 client library - - - - -HTTPConnection goes through a number of "states", which define when a client -may legally make another request or fetch the response for a particular -request. 
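Editor's note: the HTTPStatus enum deleted just above mirrors the stdlib http.HTTPStatus, so callers of the vendored copy can switch to the standard one. A small illustration of the phrase and description attributes attached by its __new__:

from http import HTTPStatus   # behaves like the removed eventlet.green.http copy

status = HTTPStatus.NOT_FOUND
print(int(status))            # 404
print(status.phrase)          # Not Found
print(status.description)     # Nothing matches the given URI

assert HTTPStatus(301) is HTTPStatus.MOVED_PERMANENTLY   # lookup by numeric value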
This diagram details these state transitions: - - (null) - | - | HTTPConnection() - v - Idle - | - | putrequest() - v - Request-started - | - | ( putheader() )* endheaders() - v - Request-sent - |\_____________________________ - | | getresponse() raises - | response = getresponse() | ConnectionError - v v - Unread-response Idle - [Response-headers-read] - |\____________________ - | | - | response.read() | putrequest() - v v - Idle Req-started-unread-response - ______/| - / | - response.read() | | ( putheader() )* endheaders() - v v - Request-started Req-sent-unread-response - | - | response.read() - v - Request-sent - -This diagram presents the following rules: - -- a second request may not be started until {response-headers-read} - -- a response [object] cannot be retrieved until {request-sent} - -- there is no differentiation between an unread response body and a - partially read response body - -Note: this enforcement is applied by the HTTPConnection class. The - HTTPResponse class does not enforce this state machine, which - implies sophisticated clients may accelerate the request/response - pipeline. Caution should be taken, though: accelerating the states - beyond the above pattern may imply knowledge of the server's - connection-close behavior for certain requests. For example, it - is impossible to tell whether the server will close the connection - UNTIL the response headers have been read; this means that further - requests cannot be placed into the pipeline until it is known that - the server will NOT be closing the connection. - -Logical State __state __response -------------- ------- ---------- -Idle _CS_IDLE None -Request-started _CS_REQ_STARTED None -Request-sent _CS_REQ_SENT None -Unread-response _CS_IDLE -Req-started-unread-response _CS_REQ_STARTED -Req-sent-unread-response _CS_REQ_SENT -""" - -import email.parser -import email.message -import io -import re -import collections -from urllib.parse import urlsplit - -from eventlet.green import http, os, socket - -# HTTPMessage, parse_headers(), and the HTTP status code constants are -# intentionally omitted for simplicity -__all__ = ["HTTPResponse", "HTTPConnection", - "HTTPException", "NotConnected", "UnknownProtocol", - "UnknownTransferEncoding", "UnimplementedFileMode", - "IncompleteRead", "InvalidURL", "ImproperConnectionState", - "CannotSendRequest", "CannotSendHeader", "ResponseNotReady", - "BadStatusLine", "LineTooLong", "RemoteDisconnected", "error", - "responses"] - -HTTP_PORT = 80 -HTTPS_PORT = 443 - -_UNKNOWN = 'UNKNOWN' - -# connection states -_CS_IDLE = 'Idle' -_CS_REQ_STARTED = 'Request-started' -_CS_REQ_SENT = 'Request-sent' - - -# hack to maintain backwards compatibility -globals().update(http.HTTPStatus.__members__) - -# another hack to maintain backwards compatibility -# Mapping status codes to official W3C names -responses = {v: v.phrase for v in http.HTTPStatus.__members__.values()} - -# maximal amount of data to read at one time in _safe_read -MAXAMOUNT = 1048576 - -# maximal line length when calling readline(). 
-_MAXLINE = 65536 -_MAXHEADERS = 100 - -# Header name/value ABNF (http://tools.ietf.org/html/rfc7230#section-3.2) -# -# VCHAR = %x21-7E -# obs-text = %x80-FF -# header-field = field-name ":" OWS field-value OWS -# field-name = token -# field-value = *( field-content / obs-fold ) -# field-content = field-vchar [ 1*( SP / HTAB ) field-vchar ] -# field-vchar = VCHAR / obs-text -# -# obs-fold = CRLF 1*( SP / HTAB ) -# ; obsolete line folding -# ; see Section 3.2.4 - -# token = 1*tchar -# -# tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*" -# / "+" / "-" / "." / "^" / "_" / "`" / "|" / "~" -# / DIGIT / ALPHA -# ; any VCHAR, except delimiters -# -# VCHAR defined in http://tools.ietf.org/html/rfc5234#appendix-B.1 - -# the patterns for both name and value are more leniant than RFC -# definitions to allow for backwards compatibility -# Eventlet change: match used instead of fullmatch for Python 3.3 compatibility -_is_legal_header_name = re.compile(rb'[^:\s][^:\r\n]*\Z').match -_is_illegal_header_value = re.compile(rb'\n(?![ \t])|\r(?![ \t\n])').search - -# We always set the Content-Length header for these methods because some -# servers will otherwise respond with a 411 -_METHODS_EXPECTING_BODY = {'PATCH', 'POST', 'PUT'} - - -def _encode(data, name='data'): - """Call data.encode("latin-1") but show a better error message.""" - try: - return data.encode("latin-1") - except UnicodeEncodeError as err: - raise UnicodeEncodeError( - err.encoding, - err.object, - err.start, - err.end, - "%s (%.20r) is not valid Latin-1. Use %s.encode('utf-8') " - "if you want to send it encoded in UTF-8." % - (name.title(), data[err.start:err.end], name)) from None - - -class HTTPMessage(email.message.Message): - # XXX The only usage of this method is in - # http.server.CGIHTTPRequestHandler. Maybe move the code there so - # that it doesn't need to be part of the public API. The API has - # never been defined so this could cause backwards compatibility - # issues. - - def getallmatchingheaders(self, name): - """Find all header lines matching a given header name. - - Look through the list of headers and find all lines matching a given - header name (and their continuation lines). A list of the lines is - returned, without interpretation. If the header does not occur, an - empty list is returned. If the header occurs multiple times, all - occurrences are returned. Case is not important in the header name. - - """ - name = name.lower() + ':' - n = len(name) - lst = [] - hit = 0 - for line in self.keys(): - if line[:n].lower() == name: - hit = 1 - elif not line[:1].isspace(): - hit = 0 - if hit: - lst.append(line) - return lst - -def parse_headers(fp, _class=HTTPMessage): - """Parses only RFC2822 headers from a file pointer. - - email Parser wants to see strings rather than bytes. - But a TextIOWrapper around self.rfile would buffer too many bytes - from the stream, bytes which we later need to read as bytes. - So we read the correct bytes here, as bytes, for email Parser - to parse. - - """ - headers = [] - while True: - line = fp.readline(_MAXLINE + 1) - if len(line) > _MAXLINE: - raise LineTooLong("header line") - headers.append(line) - if len(headers) > _MAXHEADERS: - raise HTTPException("got more than %d headers" % _MAXHEADERS) - if line in (b'\r\n', b'\n', b''): - break - hstring = b''.join(headers).decode('iso-8859-1') - return email.parser.Parser(_class=_class).parsestr(hstring) - - -class HTTPResponse(io.BufferedIOBase): - - # See RFC 2616 sec 19.6 and RFC 1945 sec 6 for details. 
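Editor's note: parse_headers() above reads header bytes line by line, bounded by _MAXLINE and _MAXHEADERS, and hands the decoded block to the email parser. A tiny sketch of what it returns, using an in-memory buffer; the same function exists in the stdlib http.client if the green copy is unavailable.

import io
from eventlet.green.http.client import parse_headers

raw = io.BytesIO(b'Host: example.com\r\nContent-Length: 0\r\n\r\n')
headers = parse_headers(raw)              # an HTTPMessage (email.message.Message)
print(headers['Host'])                    # example.com
print(headers.get('Content-Length'))      # 0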
- - # The bytes from the socket object are iso-8859-1 strings. - # See RFC 2616 sec 2.2 which notes an exception for MIME-encoded - # text following RFC 2047. The basic status line parsing only - # accepts iso-8859-1. - - def __init__(self, sock, debuglevel=0, method=None, url=None): - # If the response includes a content-length header, we need to - # make sure that the client doesn't read more than the - # specified number of bytes. If it does, it will block until - # the server times out and closes the connection. This will - # happen if a self.fp.read() is done (without a size) whether - # self.fp is buffered or not. So, no self.fp.read() by - # clients unless they know what they are doing. - self.fp = sock.makefile("rb") - self.debuglevel = debuglevel - self._method = method - - # The HTTPResponse object is returned via urllib. The clients - # of http and urllib expect different attributes for the - # headers. headers is used here and supports urllib. msg is - # provided as a backwards compatibility layer for http - # clients. - - self.headers = self.msg = None - - # from the Status-Line of the response - self.version = _UNKNOWN # HTTP-Version - self.status = _UNKNOWN # Status-Code - self.reason = _UNKNOWN # Reason-Phrase - - self.chunked = _UNKNOWN # is "chunked" being used? - self.chunk_left = _UNKNOWN # bytes left to read in current chunk - self.length = _UNKNOWN # number of bytes left in response - self.will_close = _UNKNOWN # conn will close at end of response - - def _read_status(self): - line = str(self.fp.readline(_MAXLINE + 1), "iso-8859-1") - if len(line) > _MAXLINE: - raise LineTooLong("status line") - if self.debuglevel > 0: - print("reply:", repr(line)) - if not line: - # Presumably, the server closed the connection before - # sending a valid response. - raise RemoteDisconnected("Remote end closed connection without" - " response") - try: - version, status, reason = line.split(None, 2) - except ValueError: - try: - version, status = line.split(None, 1) - reason = "" - except ValueError: - # empty version will cause next test to fail. - version = "" - if not version.startswith("HTTP/"): - self._close_conn() - raise BadStatusLine(line) - - # The status code is a three-digit number - try: - status = int(status) - if status < 100 or status > 999: - raise BadStatusLine(line) - except ValueError: - raise BadStatusLine(line) - return version, status, reason - - def begin(self): - if self.headers is not None: - # we've already started reading the response - return - - # read until we get a non-100 response - while True: - version, status, reason = self._read_status() - if status != CONTINUE: - break - # skip the header from the 100 response - while True: - skip = self.fp.readline(_MAXLINE + 1) - if len(skip) > _MAXLINE: - raise LineTooLong("header line") - skip = skip.strip() - if not skip: - break - if self.debuglevel > 0: - print("header:", skip) - - self.code = self.status = status - self.reason = reason.strip() - if version in ("HTTP/1.0", "HTTP/0.9"): - # Some servers might still return "0.9", treat it as 1.0 anyway - self.version = 10 - elif version.startswith("HTTP/1."): - self.version = 11 # use HTTP/1.1 code for HTTP/1.x where x>=1 - else: - raise UnknownProtocol(version) - - self.headers = self.msg = parse_headers(self.fp) - - if self.debuglevel > 0: - for hdr in self.headers: - print("header:", hdr, end=" ") - - # are we using the chunked-style of transfer encoding? 
- tr_enc = self.headers.get("transfer-encoding") - if tr_enc and tr_enc.lower() == "chunked": - self.chunked = True - self.chunk_left = None - else: - self.chunked = False - - # will the connection close at the end of the response? - self.will_close = self._check_close() - - # do we have a Content-Length? - # NOTE: RFC 2616, S4.4, #3 says we ignore this if tr_enc is "chunked" - self.length = None - length = self.headers.get("content-length") - - # are we using the chunked-style of transfer encoding? - tr_enc = self.headers.get("transfer-encoding") - if length and not self.chunked: - try: - self.length = int(length) - except ValueError: - self.length = None - else: - if self.length < 0: # ignore nonsensical negative lengths - self.length = None - else: - self.length = None - - # does the body have a fixed length? (of zero) - if (status == NO_CONTENT or status == NOT_MODIFIED or - 100 <= status < 200 or # 1xx codes - self._method == "HEAD"): - self.length = 0 - - # if the connection remains open, and we aren't using chunked, and - # a content-length was not provided, then assume that the connection - # WILL close. - if (not self.will_close and - not self.chunked and - self.length is None): - self.will_close = True - - def _check_close(self): - conn = self.headers.get("connection") - if self.version == 11: - # An HTTP/1.1 proxy is assumed to stay open unless - # explicitly closed. - conn = self.headers.get("connection") - if conn and "close" in conn.lower(): - return True - return False - - # Some HTTP/1.0 implementations have support for persistent - # connections, using rules different than HTTP/1.1. - - # For older HTTP, Keep-Alive indicates persistent connection. - if self.headers.get("keep-alive"): - return False - - # At least Akamai returns a "Connection: Keep-Alive" header, - # which was supposed to be sent by the client. - if conn and "keep-alive" in conn.lower(): - return False - - # Proxy-Connection is a netscape hack. - pconn = self.headers.get("proxy-connection") - if pconn and "keep-alive" in pconn.lower(): - return False - - # otherwise, assume it will close - return True - - def _close_conn(self): - fp = self.fp - self.fp = None - fp.close() - - def close(self): - try: - super().close() # set "closed" flag - finally: - if self.fp: - self._close_conn() - - # These implementations are for the benefit of io.BufferedReader. - - # XXX This class should probably be revised to act more like - # the "raw stream" that BufferedReader expects. - - def flush(self): - super().flush() - if self.fp: - self.fp.flush() - - def readable(self): - """Always returns True""" - return True - - # End of "raw stream" methods - - def isclosed(self): - """True if the connection is closed.""" - # NOTE: it is possible that we will not ever call self.close(). This - # case occurs when will_close is TRUE, length is None, and we - # read up to the last byte, but NOT past it. - # - # IMPLIES: if will_close is FALSE, then self.close() will ALWAYS be - # called, meaning self.isclosed() is meaningful. 
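Editor's note: the remainder of this hunk is the response-reading machinery of the green http.client copy. For orientation, the intended use is as a drop-in for the stdlib client; a sketch assuming the pip-installed eventlet keeps shipping eventlet.green.http.client on Python 3, with example.com standing in for a real host.

from eventlet.green.http import client

conn = client.HTTPConnection('example.com', 80, timeout=10)
conn.request('GET', '/')
resp = conn.getresponse()            # an HTTPResponse as defined in this hunk
print(resp.status, resp.reason)      # e.g. 200 OK
print(resp.getheader('Content-Type'))
body = resp.read()                   # honours Content-Length and chunked encoding
conn.close()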
- return self.fp is None - - def read(self, amt=None): - if self.fp is None: - return b"" - - if self._method == "HEAD": - self._close_conn() - return b"" - - if amt is not None: - # Amount is given, implement using readinto - b = bytearray(amt) - n = self.readinto(b) - return memoryview(b)[:n].tobytes() - else: - # Amount is not given (unbounded read) so we must check self.length - # and self.chunked - - if self.chunked: - return self._readall_chunked() - - if self.length is None: - s = self.fp.read() - else: - try: - s = self._safe_read(self.length) - except IncompleteRead: - self._close_conn() - raise - self.length = 0 - self._close_conn() # we read everything - return s - - def readinto(self, b): - """Read up to len(b) bytes into bytearray b and return the number - of bytes read. - """ - - if self.fp is None: - return 0 - - if self._method == "HEAD": - self._close_conn() - return 0 - - if self.chunked: - return self._readinto_chunked(b) - - if self.length is not None: - if len(b) > self.length: - # clip the read to the "end of response" - b = memoryview(b)[0:self.length] - - # we do not use _safe_read() here because this may be a .will_close - # connection, and the user is reading more bytes than will be provided - # (for example, reading in 1k chunks) - n = self.fp.readinto(b) - if not n and b: - # Ideally, we would raise IncompleteRead if the content-length - # wasn't satisfied, but it might break compatibility. - self._close_conn() - elif self.length is not None: - self.length -= n - if not self.length: - self._close_conn() - return n - - def _read_next_chunk_size(self): - # Read the next chunk size from the file - line = self.fp.readline(_MAXLINE + 1) - if len(line) > _MAXLINE: - raise LineTooLong("chunk size") - i = line.find(b";") - if i >= 0: - line = line[:i] # strip chunk-extensions - try: - return int(line, 16) - except ValueError: - # close the connection as protocol synchronisation is - # probably lost - self._close_conn() - raise - - def _read_and_discard_trailer(self): - # read and discard trailer up to the CRLF terminator - ### note: we shouldn't have any trailers! - while True: - line = self.fp.readline(_MAXLINE + 1) - if len(line) > _MAXLINE: - raise LineTooLong("trailer line") - if not line: - # a vanishingly small number of sites EOF without - # sending the trailer - break - if line in (b'\r\n', b'\n', b''): - break - - def _get_chunk_left(self): - # return self.chunk_left, reading a new chunk if necessary. - # chunk_left == 0: at the end of the current chunk, need to close it - # chunk_left == None: No current chunk, should read next. - # This function returns non-zero or None if the last chunk has - # been read. - chunk_left = self.chunk_left - if not chunk_left: # Can be 0 or None - if chunk_left is not None: - # We are at the end of chunk. 
dicard chunk end - self._safe_read(2) # toss the CRLF at the end of the chunk - try: - chunk_left = self._read_next_chunk_size() - except ValueError: - raise IncompleteRead(b'') - if chunk_left == 0: - # last chunk: 1*("0") [ chunk-extension ] CRLF - self._read_and_discard_trailer() - # we read everything; close the "file" - self._close_conn() - chunk_left = None - self.chunk_left = chunk_left - return chunk_left - - def _readall_chunked(self): - assert self.chunked != _UNKNOWN - value = [] - try: - while True: - chunk_left = self._get_chunk_left() - if chunk_left is None: - break - value.append(self._safe_read(chunk_left)) - self.chunk_left = 0 - return b''.join(value) - except IncompleteRead: - raise IncompleteRead(b''.join(value)) - - def _readinto_chunked(self, b): - assert self.chunked != _UNKNOWN - total_bytes = 0 - mvb = memoryview(b) - try: - while True: - chunk_left = self._get_chunk_left() - if chunk_left is None: - return total_bytes - - if len(mvb) <= chunk_left: - n = self._safe_readinto(mvb) - self.chunk_left = chunk_left - n - return total_bytes + n - - temp_mvb = mvb[:chunk_left] - n = self._safe_readinto(temp_mvb) - mvb = mvb[n:] - total_bytes += n - self.chunk_left = 0 - - except IncompleteRead: - raise IncompleteRead(bytes(b[0:total_bytes])) - - def _safe_read(self, amt): - """Read the number of bytes requested, compensating for partial reads. - - Normally, we have a blocking socket, but a read() can be interrupted - by a signal (resulting in a partial read). - - Note that we cannot distinguish between EOF and an interrupt when zero - bytes have been read. IncompleteRead() will be raised in this - situation. - - This function should be used when bytes "should" be present for - reading. If the bytes are truly not available (due to EOF), then the - IncompleteRead exception can be used to detect the problem. - """ - s = [] - while amt > 0: - chunk = self.fp.read(min(amt, MAXAMOUNT)) - if not chunk: - raise IncompleteRead(b''.join(s), amt) - s.append(chunk) - amt -= len(chunk) - return b"".join(s) - - def _safe_readinto(self, b): - """Same as _safe_read, but for reading into a buffer.""" - total_bytes = 0 - mvb = memoryview(b) - while total_bytes < len(b): - if MAXAMOUNT < len(mvb): - temp_mvb = mvb[0:MAXAMOUNT] - n = self.fp.readinto(temp_mvb) - else: - n = self.fp.readinto(mvb) - if not n: - raise IncompleteRead(bytes(mvb[0:total_bytes]), len(b)) - mvb = mvb[n:] - total_bytes += n - return total_bytes - - def read1(self, n=-1): - """Read with at most one underlying system call. If at least one - byte is buffered, return that instead. - """ - if self.fp is None or self._method == "HEAD": - return b"" - if self.chunked: - return self._read1_chunked(n) - if self.length is not None and (n < 0 or n > self.length): - n = self.length - try: - result = self.fp.read1(n) - except ValueError: - if n >= 0: - raise - # some implementations, like BufferedReader, don't support -1 - # Read an arbitrarily selected largeish chunk. 
- result = self.fp.read1(16*1024) - if not result and n: - self._close_conn() - elif self.length is not None: - self.length -= len(result) - return result - - def peek(self, n=-1): - # Having this enables IOBase.readline() to read more than one - # byte at a time - if self.fp is None or self._method == "HEAD": - return b"" - if self.chunked: - return self._peek_chunked(n) - return self.fp.peek(n) - - def readline(self, limit=-1): - if self.fp is None or self._method == "HEAD": - return b"" - if self.chunked: - # Fallback to IOBase readline which uses peek() and read() - return super().readline(limit) - if self.length is not None and (limit < 0 or limit > self.length): - limit = self.length - result = self.fp.readline(limit) - if not result and limit: - self._close_conn() - elif self.length is not None: - self.length -= len(result) - return result - - def _read1_chunked(self, n): - # Strictly speaking, _get_chunk_left() may cause more than one read, - # but that is ok, since that is to satisfy the chunked protocol. - chunk_left = self._get_chunk_left() - if chunk_left is None or n == 0: - return b'' - if not (0 <= n <= chunk_left): - n = chunk_left # if n is negative or larger than chunk_left - read = self.fp.read1(n) - self.chunk_left -= len(read) - if not read: - raise IncompleteRead(b"") - return read - - def _peek_chunked(self, n): - # Strictly speaking, _get_chunk_left() may cause more than one read, - # but that is ok, since that is to satisfy the chunked protocol. - try: - chunk_left = self._get_chunk_left() - except IncompleteRead: - return b'' # peek doesn't worry about protocol - if chunk_left is None: - return b'' # eof - # peek is allowed to return more than requested. Just request the - # entire chunk, and truncate what we get. - return self.fp.peek(chunk_left)[:chunk_left] - - def fileno(self): - return self.fp.fileno() - - def getheader(self, name, default=None): - '''Returns the value of the header matching *name*. - - If there are multiple matching headers, the values are - combined into a single string separated by commas and spaces. - - If no matching header is found, returns *default* or None if - the *default* is not specified. - - If the headers are unknown, raises http.client.ResponseNotReady. - - ''' - if self.headers is None: - raise ResponseNotReady() - headers = self.headers.get_all(name) or default - if isinstance(headers, str) or not hasattr(headers, '__iter__'): - return headers - else: - return ', '.join(headers) - - def getheaders(self): - """Return list of (header, value) tuples.""" - if self.headers is None: - raise ResponseNotReady() - return list(self.headers.items()) - - # We override IOBase.__iter__ so that it doesn't check for closed-ness - - def __iter__(self): - return self - - # For compatibility with old-style urllib responses. - - def info(self): - '''Returns an instance of the class mimetools.Message containing - meta-information associated with the URL. - - When the method is HTTP, these headers are those returned by - the server at the head of the retrieved HTML page (including - Content-Length and Content-Type). - - When the method is FTP, a Content-Length header will be - present if (as is now usual) the server passed back a file - length in response to the FTP retrieval request. A - Content-Type header will be present if the MIME type can be - guessed. 
- - When the method is local-file, returned headers will include - a Date representing the file's last-modified time, a - Content-Length giving file size, and a Content-Type - containing a guess at the file's type. See also the - description of the mimetools module. - - ''' - return self.headers - - def geturl(self): - '''Return the real URL of the page. - - In some cases, the HTTP server redirects a client to another - URL. The urlopen() function handles this transparently, but in - some cases the caller needs to know which URL the client was - redirected to. The geturl() method can be used to get at this - redirected URL. - - ''' - return self.url - - def getcode(self): - '''Return the HTTP status code that was sent with the response, - or None if the URL is not an HTTP URL. - - ''' - return self.status - -class HTTPConnection: - - _http_vsn = 11 - _http_vsn_str = 'HTTP/1.1' - - response_class = HTTPResponse - default_port = HTTP_PORT - auto_open = 1 - debuglevel = 0 - - @staticmethod - def _is_textIO(stream): - """Test whether a file-like object is a text or a binary stream. - """ - return isinstance(stream, io.TextIOBase) - - @staticmethod - def _get_content_length(body, method): - """Get the content-length based on the body. - - If the body is None, we set Content-Length: 0 for methods that expect - a body (RFC 7230, Section 3.3.2). We also set the Content-Length for - any method if the body is a str or bytes-like object and not a file. - """ - if body is None: - # do an explicit check for not None here to distinguish - # between unset and set but empty - if method.upper() in _METHODS_EXPECTING_BODY: - return 0 - else: - return None - - if hasattr(body, 'read'): - # file-like object. - return None - - try: - # does it implement the buffer protocol (bytes, bytearray, array)? - mv = memoryview(body) - return mv.nbytes - except TypeError: - pass - - if isinstance(body, str): - return len(body) - - return None - - def __init__(self, host, port=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, - source_address=None): - self.timeout = timeout - self.source_address = source_address - self.sock = None - self._buffer = [] - self.__response = None - self.__state = _CS_IDLE - self._method = None - self._tunnel_host = None - self._tunnel_port = None - self._tunnel_headers = {} - - (self.host, self.port) = self._get_hostport(host, port) - - # This is stored as an instance variable to allow unit - # tests to replace it with a suitable mockup - self._create_connection = socket.create_connection - - def set_tunnel(self, host, port=None, headers=None): - """Set up host and port for HTTP CONNECT tunnelling. - - In a connection that uses HTTP CONNECT tunneling, the host passed to the - constructor is used as a proxy server that relays all communication to - the endpoint passed to `set_tunnel`. This done by sending an HTTP - CONNECT request to the proxy server when the connection is established. - - This method must be called before the HTML connection has been - established. - - The headers argument should be a mapping of extra HTTP headers to send - with the CONNECT request. - """ - - if self.sock: - raise RuntimeError("Can't set up tunnel for established connection") - - self._tunnel_host, self._tunnel_port = self._get_hostport(host, port) - if headers: - self._tunnel_headers = headers - else: - self._tunnel_headers.clear() - - def _get_hostport(self, host, port): - if port is None: - i = host.rfind(':') - j = host.rfind(']') # ipv6 addresses have [...] 
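A usage sketch for the CONNECT tunnelling configured by set_tunnel() above; the proxy host and port are placeholders and assume a reachable HTTP proxy that permits CONNECT.

    import http.client

    conn = http.client.HTTPSConnection("proxy.example.net", 3128, timeout=10)
    conn.set_tunnel("example.org", 443)   # must be called before the connection is established
    conn.request("GET", "/")
    print(conn.getresponse().status)
    conn.close()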
- if i > j: - try: - port = int(host[i+1:]) - except ValueError: - if host[i+1:] == "": # http://foo.com:/ == http://foo.com/ - port = self.default_port - else: - raise InvalidURL("nonnumeric port: '%s'" % host[i+1:]) - host = host[:i] - else: - port = self.default_port - if host and host[0] == '[' and host[-1] == ']': - host = host[1:-1] - - return (host, port) - - def set_debuglevel(self, level): - self.debuglevel = level - - def _tunnel(self): - connect_str = "CONNECT %s:%d HTTP/1.0\r\n" % (self._tunnel_host, - self._tunnel_port) - connect_bytes = connect_str.encode("ascii") - self.send(connect_bytes) - for header, value in self._tunnel_headers.items(): - header_str = "%s: %s\r\n" % (header, value) - header_bytes = header_str.encode("latin-1") - self.send(header_bytes) - self.send(b'\r\n') - - response = self.response_class(self.sock, method=self._method) - (version, code, message) = response._read_status() - - if code != http.HTTPStatus.OK: - self.close() - raise OSError("Tunnel connection failed: %d %s" % (code, - message.strip())) - while True: - line = response.fp.readline(_MAXLINE + 1) - if len(line) > _MAXLINE: - raise LineTooLong("header line") - if not line: - # for sites which EOF without sending a trailer - break - if line in (b'\r\n', b'\n', b''): - break - - if self.debuglevel > 0: - print('header:', line.decode()) - - def connect(self): - """Connect to the host and port specified in __init__.""" - self.sock = self._create_connection( - (self.host,self.port), self.timeout, self.source_address) - self.sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) - - if self._tunnel_host: - self._tunnel() - - def close(self): - """Close the connection to the HTTP server.""" - self.__state = _CS_IDLE - try: - sock = self.sock - if sock: - self.sock = None - sock.close() # close it manually... there may be other refs - finally: - response = self.__response - if response: - self.__response = None - response.close() - - def send(self, data): - """Send `data' to the server. - ``data`` can be a string object, a bytes object, an array object, a - file-like object that supports a .read() method, or an iterable object. - """ - - if self.sock is None: - if self.auto_open: - self.connect() - else: - raise NotConnected() - - if self.debuglevel > 0: - print("send:", repr(data)) - blocksize = 8192 - if hasattr(data, "read") : - if self.debuglevel > 0: - print("sendIng a read()able") - encode = False - try: - mode = data.mode - except AttributeError: - # io.BytesIO and other file-like objects don't have a `mode` - # attribute. - pass - else: - if "b" not in mode: - encode = True - if self.debuglevel > 0: - print("encoding file using iso-8859-1") - while 1: - datablock = data.read(blocksize) - if not datablock: - break - if encode: - datablock = datablock.encode("iso-8859-1") - self.sock.sendall(datablock) - return - try: - self.sock.sendall(data) - except TypeError: - if isinstance(data, collections.Iterable): - for d in data: - self.sock.sendall(d) - else: - raise TypeError("data should be a bytes-like object " - "or an iterable, got %r" % type(data)) - - def _output(self, s): - """Add a line of output to the current request buffer. - - Assumes that the line does *not* end with \\r\\n. 
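The host/port splitting that _get_hostport performs above can be shown in isolation; split_hostport is an illustrative helper (the original additionally rejects non-numeric ports with InvalidURL).

    def split_hostport(host, default_port=80):
        """Split 'host', 'host:port' or '[v6]:port' into a (host, port) pair."""
        i = host.rfind(":")
        j = host.rfind("]")                 # IPv6 literals are bracketed
        if i > j:
            port = int(host[i + 1:]) if host[i + 1:] else default_port
            host = host[:i]
        else:
            port = default_port
        if host.startswith("[") and host.endswith("]"):
            host = host[1:-1]               # strip the IPv6 brackets
        return host, port

    print(split_hostport("example.com:8080"))   # ('example.com', 8080)
    print(split_hostport("[::1]:8443"))         # ('::1', 8443)
    print(split_hostport("example.com"))        # ('example.com', 80)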
- """ - self._buffer.append(s) - - def _read_readable(self, readable): - blocksize = 8192 - if self.debuglevel > 0: - print("sendIng a read()able") - encode = self._is_textIO(readable) - if encode and self.debuglevel > 0: - print("encoding file using iso-8859-1") - while True: - datablock = readable.read(blocksize) - if not datablock: - break - if encode: - datablock = datablock.encode("iso-8859-1") - yield datablock - - def _send_output(self, message_body=None, encode_chunked=False): - """Send the currently buffered request and clear the buffer. - - Appends an extra \\r\\n to the buffer. - A message_body may be specified, to be appended to the request. - """ - self._buffer.extend((b"", b"")) - msg = b"\r\n".join(self._buffer) - del self._buffer[:] - self.send(msg) - - if message_body is not None: - - # create a consistent interface to message_body - if hasattr(message_body, 'read'): - # Let file-like take precedence over byte-like. This - # is needed to allow the current position of mmap'ed - # files to be taken into account. - chunks = self._read_readable(message_body) - else: - try: - # this is solely to check to see if message_body - # implements the buffer API. it /would/ be easier - # to capture if PyObject_CheckBuffer was exposed - # to Python. - memoryview(message_body) - except TypeError: - try: - chunks = iter(message_body) - except TypeError: - raise TypeError("message_body should be a bytes-like " - "object or an iterable, got %r" - % type(message_body)) - else: - # the object implements the buffer interface and - # can be passed directly into socket methods - chunks = (message_body,) - - for chunk in chunks: - if not chunk: - if self.debuglevel > 0: - print('Zero length chunk ignored') - continue - - if encode_chunked and self._http_vsn == 11: - # chunked encoding - chunk = '{0:X}\r\n'.format(len(chunk)).encode('ascii') + chunk + b'\r\n' - self.send(chunk) - - if encode_chunked and self._http_vsn == 11: - # end chunked transfer - self.send(b'0\r\n\r\n') - - def putrequest(self, method, url, skip_host=0, skip_accept_encoding=0): - """Send a request to the server. - - `method' specifies an HTTP request method, e.g. 'GET'. - `url' specifies the object being requested, e.g. '/index.html'. - `skip_host' if True does not add automatically a 'Host:' header - `skip_accept_encoding' if True does not add automatically an - 'Accept-Encoding:' header - """ - - # if a prior response has been completed, then forget about it. - if self.__response and self.__response.isclosed(): - self.__response = None - - - # in certain cases, we cannot issue another request on this connection. - # this occurs when: - # 1) we are in the process of sending a request. (_CS_REQ_STARTED) - # 2) a response to a previous request has signalled that it is going - # to close the connection upon completion. - # 3) the headers for the previous response have not been read, thus - # we cannot determine whether point (2) is true. (_CS_REQ_SENT) - # - # if there is no prior response, then we can request at will. - # - # if point (2) is true, then we will have passed the socket to the - # response (effectively meaning, "there is no prior response"), and - # will open a new one when a new request is made. - # - # Note: if a prior response exists, then we *can* start a new request. - # We are not allowed to begin fetching the response to this new - # request, however, until that prior response is complete. 
- # - if self.__state == _CS_IDLE: - self.__state = _CS_REQ_STARTED - else: - raise CannotSendRequest(self.__state) - - # Save the method we use, we need it later in the response phase - self._method = method - if not url: - url = '/' - request = '%s %s %s' % (method, url, self._http_vsn_str) - - # Non-ASCII characters should have been eliminated earlier - self._output(request.encode('ascii')) - - if self._http_vsn == 11: - # Issue some standard headers for better HTTP/1.1 compliance - - if not skip_host: - # this header is issued *only* for HTTP/1.1 - # connections. more specifically, this means it is - # only issued when the client uses the new - # HTTPConnection() class. backwards-compat clients - # will be using HTTP/1.0 and those clients may be - # issuing this header themselves. we should NOT issue - # it twice; some web servers (such as Apache) barf - # when they see two Host: headers - - # If we need a non-standard port,include it in the - # header. If the request is going through a proxy, - # but the host of the actual URL, not the host of the - # proxy. - - netloc = '' - if url.startswith('http'): - nil, netloc, nil, nil, nil = urlsplit(url) - - if netloc: - try: - netloc_enc = netloc.encode("ascii") - except UnicodeEncodeError: - netloc_enc = netloc.encode("idna") - self.putheader('Host', netloc_enc) - else: - if self._tunnel_host: - host = self._tunnel_host - port = self._tunnel_port - else: - host = self.host - port = self.port - - try: - host_enc = host.encode("ascii") - except UnicodeEncodeError: - host_enc = host.encode("idna") - - # As per RFC 273, IPv6 address should be wrapped with [] - # when used as Host header - - if host.find(':') >= 0: - host_enc = b'[' + host_enc + b']' - - if port == self.default_port: - self.putheader('Host', host_enc) - else: - host_enc = host_enc.decode("ascii") - self.putheader('Host', "%s:%s" % (host_enc, port)) - - # note: we are assuming that clients will not attempt to set these - # headers since *this* library must deal with the - # consequences. this also means that when the supporting - # libraries are updated to recognize other forms, then this - # code should be changed (removed or updated). - - # we only want a Content-Encoding of "identity" since we don't - # support encodings such as x-gzip or x-deflate. - if not skip_accept_encoding: - self.putheader('Accept-Encoding', 'identity') - - # we can accept "chunked" Transfer-Encodings, but no others - # NOTE: no TE header implies *only* "chunked" - #self.putheader('TE', 'chunked') - - # if TE is supplied in the header, then it must appear in a - # Connection header. - #self.putheader('Connection', 'TE') - - else: - # For HTTP/1.0, the server will assume "not chunked" - pass - - def putheader(self, header, *values): - """Send a request header line to the server. 
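The Host and Accept-Encoding handling above belongs to putrequest(); together with putheader() and endheaders() (defined just below) it forms the manual counterpart of the one-shot request(). A hedged usage sketch with a placeholder host:

    import http.client

    conn = http.client.HTTPConnection("example.com")
    conn.putrequest("GET", "/index.html")              # request line
    conn.putheader("Accept", "text/html")
    conn.putheader("User-Agent", "example-client/0.1")
    conn.endheaders()                                  # flush headers (optionally with a body)
    resp = conn.getresponse()
    print(resp.status, resp.reason)
    conn.close()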
- - For example: h.putheader('Accept', 'text/html') - """ - if self.__state != _CS_REQ_STARTED: - raise CannotSendHeader() - - if hasattr(header, 'encode'): - header = header.encode('ascii') - - if not _is_legal_header_name(header): - raise ValueError('Invalid header name %r' % (header,)) - - values = list(values) - for i, one_value in enumerate(values): - if hasattr(one_value, 'encode'): - values[i] = one_value.encode('latin-1') - elif isinstance(one_value, int): - values[i] = str(one_value).encode('ascii') - - if _is_illegal_header_value(values[i]): - raise ValueError('Invalid header value %r' % (values[i],)) - - value = b'\r\n\t'.join(values) - header = header + b': ' + value - self._output(header) - - def endheaders(self, message_body=None, **kwds): - """Indicate that the last header line has been sent to the server. - - This method sends the request to the server. The optional message_body - argument can be used to pass a message body associated with the - request. - """ - encode_chunked = kwds.pop('encode_chunked', False) - if kwds: - # mimic interpreter error for unrecognized keyword - raise TypeError("endheaders() got an unexpected keyword argument '{0}'" - .format(kwds.popitem()[0])) - - if self.__state == _CS_REQ_STARTED: - self.__state = _CS_REQ_SENT - else: - raise CannotSendHeader() - self._send_output(message_body, encode_chunked=encode_chunked) - - def request(self, method, url, body=None, headers={}, **kwds): - """Send a complete request to the server.""" - encode_chunked = kwds.pop('encode_chunked', False) - if kwds: - # mimic interpreter error for unrecognized keyword - raise TypeError("request() got an unexpected keyword argument '{0}'" - .format(kwds.popitem()[0])) - self._send_request(method, url, body, headers, encode_chunked) - - def _set_content_length(self, body, method): - # Set the content-length based on the body. If the body is "empty", we - # set Content-Length: 0 for methods that expect a body (RFC 7230, - # Section 3.3.2). If the body is set for other methods, we set the - # header provided we can figure out what the length is. - thelen = None - method_expects_body = method.upper() in _METHODS_EXPECTING_BODY - if body is None and method_expects_body: - thelen = '0' - elif body is not None: - try: - thelen = str(len(body)) - except TypeError: - # If this is a file-like object, try to - # fstat its file descriptor - try: - thelen = str(os.fstat(body.fileno()).st_size) - except (AttributeError, OSError): - # Don't send a length if this failed - if self.debuglevel > 0: print("Cannot stat!!") - - if thelen is not None: - self.putheader('Content-Length', thelen) - - def _send_request(self, method, url, body, headers, encode_chunked): - # Honor explicitly requested Host: and Accept-Encoding: headers. - header_names = frozenset(k.lower() for k in headers) - skips = {} - if 'host' in header_names: - skips['skip_host'] = 1 - if 'accept-encoding' in header_names: - skips['skip_accept_encoding'] = 1 - - self.putrequest(method, url, **skips) - - # chunked encoding will happen if HTTP/1.1 is used and either - # the caller passes encode_chunked=True or the following - # conditions hold: - # 1. content-length has not been explicitly set - # 2. the body is a file or iterable, but not a str or bytes-like - # 3. 
Transfer-Encoding has NOT been explicitly set by the caller - - if 'content-length' not in header_names: - # only chunk body if not explicitly set for backwards - # compatibility, assuming the client code is already handling the - # chunking - if 'transfer-encoding' not in header_names: - # if content-length cannot be automatically determined, fall - # back to chunked encoding - encode_chunked = False - content_length = self._get_content_length(body, method) - if content_length is None: - if body is not None: - if self.debuglevel > 0: - print('Unable to determine size of %r' % body) - encode_chunked = True - self.putheader('Transfer-Encoding', 'chunked') - else: - self.putheader('Content-Length', str(content_length)) - else: - encode_chunked = False - - for hdr, value in headers.items(): - self.putheader(hdr, value) - if isinstance(body, str): - # RFC 2616 Section 3.7.1 says that text default has a - # default charset of iso-8859-1. - body = _encode(body, 'body') - self.endheaders(body, encode_chunked=encode_chunked) - - def getresponse(self): - """Get the response from the server. - - If the HTTPConnection is in the correct state, returns an - instance of HTTPResponse or of whatever object is returned by - the response_class variable. - - If a request has not been sent or if a previous response has - not be handled, ResponseNotReady is raised. If the HTTP - response indicates that the connection should be closed, then - it will be closed before the response is returned. When the - connection is closed, the underlying socket is closed. - """ - - # if a prior response has been completed, then forget about it. - if self.__response and self.__response.isclosed(): - self.__response = None - - # if a prior response exists, then it must be completed (otherwise, we - # cannot read this response's header to determine the connection-close - # behavior) - # - # note: if a prior response existed, but was connection-close, then the - # socket and response were made independent of this HTTPConnection - # object since a new request requires that we open a whole new - # connection - # - # this means the prior response had one of two states: - # 1) will_close: this connection was reset and the prior socket and - # response operate independently - # 2) persistent: the response was retained and we await its - # isclosed() status to become true. - # - if self.__state != _CS_REQ_SENT or self.__response: - raise ResponseNotReady(self.__state) - - if self.debuglevel > 0: - response = self.response_class(self.sock, self.debuglevel, - method=self._method) - else: - response = self.response_class(self.sock, method=self._method) - - try: - try: - response.begin() - except ConnectionError: - self.close() - raise - assert response.will_close != _UNKNOWN - self.__state = _CS_IDLE - - if response.will_close: - # this effectively passes the connection to the response - self.close() - else: - # remember this, so we can tell when it is complete - self.__response = response - - return response - except: - response.close() - raise - -try: - import ssl -except ImportError: - pass -else: - class HTTPSConnection(HTTPConnection): - "This class allows communication via SSL." - - default_port = HTTPS_PORT - - # XXX Should key_file and cert_file be deprecated in favour of context? 
- - def __init__(self, host, port=None, key_file=None, cert_file=None, - timeout=socket._GLOBAL_DEFAULT_TIMEOUT, - source_address=None, *, context=None, - check_hostname=None): - super(HTTPSConnection, self).__init__(host, port, timeout, - source_address) - self.key_file = key_file - self.cert_file = cert_file - if context is None: - context = ssl._create_default_https_context() - will_verify = context.verify_mode != ssl.CERT_NONE - if check_hostname is None: - check_hostname = context.check_hostname - if check_hostname and not will_verify: - raise ValueError("check_hostname needs a SSL context with " - "either CERT_OPTIONAL or CERT_REQUIRED") - if key_file or cert_file: - context.load_cert_chain(cert_file, key_file) - self._context = context - self._check_hostname = check_hostname - - def connect(self): - "Connect to a host on a given (SSL) port." - - super().connect() - - if self._tunnel_host: - server_hostname = self._tunnel_host - else: - server_hostname = self.host - - self.sock = self._context.wrap_socket(self.sock, - server_hostname=server_hostname) - if not self._context.check_hostname and self._check_hostname: - try: - ssl.match_hostname(self.sock.getpeercert(), server_hostname) - except Exception: - self.sock.shutdown(socket.SHUT_RDWR) - self.sock.close() - raise - - __all__.append("HTTPSConnection") - -class HTTPException(Exception): - # Subclasses that define an __init__ must call Exception.__init__ - # or define self.args. Otherwise, str() will fail. - pass - -class NotConnected(HTTPException): - pass - -class InvalidURL(HTTPException): - pass - -class UnknownProtocol(HTTPException): - def __init__(self, version): - self.args = version, - self.version = version - -class UnknownTransferEncoding(HTTPException): - pass - -class UnimplementedFileMode(HTTPException): - pass - -class IncompleteRead(HTTPException): - def __init__(self, partial, expected=None): - self.args = partial, - self.partial = partial - self.expected = expected - def __repr__(self): - if self.expected is not None: - e = ', %i more expected' % self.expected - else: - e = '' - return '%s(%i bytes read%s)' % (self.__class__.__name__, - len(self.partial), e) - def __str__(self): - return repr(self) - -class ImproperConnectionState(HTTPException): - pass - -class CannotSendRequest(ImproperConnectionState): - pass - -class CannotSendHeader(ImproperConnectionState): - pass - -class ResponseNotReady(ImproperConnectionState): - pass - -class BadStatusLine(HTTPException): - def __init__(self, line): - if not line: - line = repr(line) - self.args = line, - self.line = line - -class LineTooLong(HTTPException): - def __init__(self, line_type): - HTTPException.__init__(self, "got more than %d bytes when reading %s" - % (_MAXLINE, line_type)) - -class RemoteDisconnected(ConnectionResetError, BadStatusLine): - def __init__(self, *pos, **kw): - BadStatusLine.__init__(self, "") - ConnectionResetError.__init__(self, *pos, **kw) - -# for backwards compatibility -error = HTTPException diff --git a/venv/lib/python3.6/site-packages/eventlet/green/http/cookiejar.py b/venv/lib/python3.6/site-packages/eventlet/green/http/cookiejar.py deleted file mode 100644 index 9c884e9..0000000 --- a/venv/lib/python3.6/site-packages/eventlet/green/http/cookiejar.py +++ /dev/null @@ -1,2152 +0,0 @@ -# This is part of Python source code with Eventlet-specific modifications. 
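A usage sketch for the HTTPSConnection subclass removed above, passing an explicit SSL context so certificate and hostname verification are enabled; the host is a placeholder.

    import http.client
    import ssl

    context = ssl.create_default_context()           # CERT_REQUIRED plus check_hostname
    conn = http.client.HTTPSConnection("example.org", 443, context=context, timeout=10)
    conn.request("GET", "/")
    resp = conn.getresponse()
    print(resp.status, resp.getheader("Server"))
    conn.close()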
-# -# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, -# 2011, 2012, 2013, 2014, 2015, 2016 Python Software Foundation; All Rights -# Reserved -# -# PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 -# -------------------------------------------- -# -# 1. This LICENSE AGREEMENT is between the Python Software Foundation -# ("PSF"), and the Individual or Organization ("Licensee") accessing and -# otherwise using this software ("Python") in source or binary form and -# its associated documentation. -# -# 2. Subject to the terms and conditions of this License Agreement, PSF hereby -# grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, -# analyze, test, perform and/or display publicly, prepare derivative works, -# distribute, and otherwise use Python alone or in any derivative version, -# provided, however, that PSF's License Agreement and PSF's notice of copyright, -# i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, -# 2011, 2012, 2013, 2014, 2015, 2016 Python Software Foundation; All Rights -# Reserved" are retained in Python alone or in any derivative version prepared by -# Licensee. -# -# 3. In the event Licensee prepares a derivative work that is based on -# or incorporates Python or any part thereof, and wants to make -# the derivative work available to others as provided herein, then -# Licensee hereby agrees to include in any such work a brief summary of -# the changes made to Python. -# -# 4. PSF is making Python available to Licensee on an "AS IS" -# basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR -# IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND -# DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS -# FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT -# INFRINGE ANY THIRD PARTY RIGHTS. -# -# 5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON -# FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS -# A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, -# OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. -# -# 6. This License Agreement will automatically terminate upon a material -# breach of its terms and conditions. -# -# 7. Nothing in this License Agreement shall be deemed to create any -# relationship of agency, partnership, or joint venture between PSF and -# Licensee. This License Agreement does not grant permission to use PSF -# trademarks or trade name in a trademark sense to endorse or promote -# products or services of Licensee, or any third party. -# -# 8. By copying, installing or otherwise using Python, Licensee -# agrees to be bound by the terms and conditions of this License -# Agreement. -r"""HTTP cookie handling for web clients. - -This module has (now fairly distant) origins in Gisle Aas' Perl module -HTTP::Cookies, from the libwww-perl library. - -Docstrings, comments and debug strings in this code refer to the -attributes of the HTTP cookie system as cookie-attributes, to distinguish -them clearly from Python attributes. 
- -Class diagram (note that BSDDBCookieJar and the MSIE* classes are not -distributed with the Python standard library, but are available from -http://wwwsearch.sf.net/): - - CookieJar____ - / \ \ - FileCookieJar \ \ - / | \ \ \ - MozillaCookieJar | LWPCookieJar \ \ - | | \ - | ---MSIEBase | \ - | / | | \ - | / MSIEDBCookieJar BSDDBCookieJar - |/ - MSIECookieJar - -""" - -__all__ = ['Cookie', 'CookieJar', 'CookiePolicy', 'DefaultCookiePolicy', - 'FileCookieJar', 'LWPCookieJar', 'LoadError', 'MozillaCookieJar'] - -import copy -import datetime -import re -import time -# Eventlet change: urllib.request used to be imported here but it's not used, -# removed for clarity -import urllib.parse -from calendar import timegm - -from eventlet.green import threading as _threading, time -from eventlet.green.http import client as http_client # only for the default HTTP port - -debug = False # set to True to enable debugging via the logging module -logger = None - -def _debug(*args): - if not debug: - return - global logger - if not logger: - import logging - logger = logging.getLogger("http.cookiejar") - return logger.debug(*args) - - -DEFAULT_HTTP_PORT = str(http_client.HTTP_PORT) -MISSING_FILENAME_TEXT = ("a filename was not supplied (nor was the CookieJar " - "instance initialised with one)") - -def _warn_unhandled_exception(): - # There are a few catch-all except: statements in this module, for - # catching input that's bad in unexpected ways. Warn if any - # exceptions are caught there. - import io, warnings, traceback - f = io.StringIO() - traceback.print_exc(None, f) - msg = f.getvalue() - warnings.warn("http.cookiejar bug!\n%s" % msg, stacklevel=2) - - -# Date/time conversion -# ----------------------------------------------------------------------------- - -EPOCH_YEAR = 1970 -def _timegm(tt): - year, month, mday, hour, min, sec = tt[:6] - if ((year >= EPOCH_YEAR) and (1 <= month <= 12) and (1 <= mday <= 31) and - (0 <= hour <= 24) and (0 <= min <= 59) and (0 <= sec <= 61)): - return timegm(tt) - else: - return None - -DAYS = ["Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun"] -MONTHS = ["Jan", "Feb", "Mar", "Apr", "May", "Jun", - "Jul", "Aug", "Sep", "Oct", "Nov", "Dec"] -MONTHS_LOWER = [] -for month in MONTHS: MONTHS_LOWER.append(month.lower()) - -def time2isoz(t=None): - """Return a string representing time in seconds since epoch, t. - - If the function is called without an argument, it will use the current - time. - - The format of the returned string is like "YYYY-MM-DD hh:mm:ssZ", - representing Universal Time (UTC, aka GMT). An example of this format is: - - 1994-11-24 08:49:37Z - - """ - if t is None: - dt = datetime.datetime.utcnow() - else: - dt = datetime.datetime.utcfromtimestamp(t) - return "%04d-%02d-%02d %02d:%02d:%02dZ" % ( - dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second) - -def time2netscape(t=None): - """Return a string representing time in seconds since epoch, t. - - If the function is called without an argument, it will use the current - time. 
- - The format of the returned string is like this: - - Wed, DD-Mon-YYYY HH:MM:SS GMT - - """ - if t is None: - dt = datetime.datetime.utcnow() - else: - dt = datetime.datetime.utcfromtimestamp(t) - return "%s %02d-%s-%04d %02d:%02d:%02d GMT" % ( - DAYS[dt.weekday()], dt.day, MONTHS[dt.month-1], - dt.year, dt.hour, dt.minute, dt.second) - - -UTC_ZONES = {"GMT": None, "UTC": None, "UT": None, "Z": None} - -TIMEZONE_RE = re.compile(r"^([-+])?(\d\d?):?(\d\d)?$", re.ASCII) -def offset_from_tz_string(tz): - offset = None - if tz in UTC_ZONES: - offset = 0 - else: - m = TIMEZONE_RE.search(tz) - if m: - offset = 3600 * int(m.group(2)) - if m.group(3): - offset = offset + 60 * int(m.group(3)) - if m.group(1) == '-': - offset = -offset - return offset - -def _str2time(day, mon, yr, hr, min, sec, tz): - yr = int(yr) - if yr > datetime.MAXYEAR: - return None - - # translate month name to number - # month numbers start with 1 (January) - try: - mon = MONTHS_LOWER.index(mon.lower())+1 - except ValueError: - # maybe it's already a number - try: - imon = int(mon) - except ValueError: - return None - if 1 <= imon <= 12: - mon = imon - else: - return None - - # make sure clock elements are defined - if hr is None: hr = 0 - if min is None: min = 0 - if sec is None: sec = 0 - - day = int(day) - hr = int(hr) - min = int(min) - sec = int(sec) - - if yr < 1000: - # find "obvious" year - cur_yr = time.localtime(time.time())[0] - m = cur_yr % 100 - tmp = yr - yr = yr + cur_yr - m - m = m - tmp - if abs(m) > 50: - if m > 0: yr = yr + 100 - else: yr = yr - 100 - - # convert UTC time tuple to seconds since epoch (not timezone-adjusted) - t = _timegm((yr, mon, day, hr, min, sec, tz)) - - if t is not None: - # adjust time using timezone string, to get absolute time since epoch - if tz is None: - tz = "UTC" - tz = tz.upper() - offset = offset_from_tz_string(tz) - if offset is None: - return None - t = t - offset - - return t - -STRICT_DATE_RE = re.compile( - r"^[SMTWF][a-z][a-z], (\d\d) ([JFMASOND][a-z][a-z]) " - "(\d\d\d\d) (\d\d):(\d\d):(\d\d) GMT$", re.ASCII) -WEEKDAY_RE = re.compile( - r"^(?:Sun|Mon|Tue|Wed|Thu|Fri|Sat)[a-z]*,?\s*", re.I | re.ASCII) -LOOSE_HTTP_DATE_RE = re.compile( - r"""^ - (\d\d?) # day - (?:\s+|[-\/]) - (\w+) # month - (?:\s+|[-\/]) - (\d+) # year - (?: - (?:\s+|:) # separator before clock - (\d\d?):(\d\d) # hour:min - (?::(\d\d))? # optional seconds - )? # optional clock - \s* - ([-+]?\d{2,4}|(?![APap][Mm]\b)[A-Za-z]+)? # timezone - \s* - (?:\(\w+\))? # ASCII representation of timezone in parens. - \s*$""", re.X | re.ASCII) -def http2time(text): - """Returns time in seconds since epoch of time represented by a string. - - Return value is an integer. - - None is returned if the format of str is unrecognized, the time is outside - the representable range, or the timezone string is not recognized. If the - string contains no timezone, UTC is assumed. - - The timezone in the string may be numerical (like "-0800" or "+0100") or a - string timezone (like "UTC", "GMT", "BST" or "EST"). Currently, only the - timezone strings equivalent to UTC (zero offset) are known to the function. 
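The date helpers in this module behave the same as their stdlib http.cookiejar counterparts, which makes them easy to try directly; a small hedged example.

    from http.cookiejar import http2time, iso2time, time2isoz

    t = http2time("Wed, 09 Feb 1994 22:23:32 GMT")   # seconds since the epoch, or None
    print(time2isoz(t))                              # '1994-02-09 22:23:32Z'
    print(iso2time("1994-02-09 22:23:32Z") == t)     # True: both parse to the same instant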
- - The function loosely parses the following formats: - - Wed, 09 Feb 1994 22:23:32 GMT -- HTTP format - Tuesday, 08-Feb-94 14:15:29 GMT -- old rfc850 HTTP format - Tuesday, 08-Feb-1994 14:15:29 GMT -- broken rfc850 HTTP format - 09 Feb 1994 22:23:32 GMT -- HTTP format (no weekday) - 08-Feb-94 14:15:29 GMT -- rfc850 format (no weekday) - 08-Feb-1994 14:15:29 GMT -- broken rfc850 format (no weekday) - - The parser ignores leading and trailing whitespace. The time may be - absent. - - If the year is given with only 2 digits, the function will select the - century that makes the year closest to the current date. - - """ - # fast exit for strictly conforming string - m = STRICT_DATE_RE.search(text) - if m: - g = m.groups() - mon = MONTHS_LOWER.index(g[1].lower()) + 1 - tt = (int(g[2]), mon, int(g[0]), - int(g[3]), int(g[4]), float(g[5])) - return _timegm(tt) - - # No, we need some messy parsing... - - # clean up - text = text.lstrip() - text = WEEKDAY_RE.sub("", text, 1) # Useless weekday - - # tz is time zone specifier string - day, mon, yr, hr, min, sec, tz = [None]*7 - - # loose regexp parse - m = LOOSE_HTTP_DATE_RE.search(text) - if m is not None: - day, mon, yr, hr, min, sec, tz = m.groups() - else: - return None # bad format - - return _str2time(day, mon, yr, hr, min, sec, tz) - -ISO_DATE_RE = re.compile( - """^ - (\d{4}) # year - [-\/]? - (\d\d?) # numerical month - [-\/]? - (\d\d?) # day - (?: - (?:\s+|[-:Tt]) # separator before clock - (\d\d?):?(\d\d) # hour:min - (?::?(\d\d(?:\.\d*)?))? # optional seconds (and fractional) - )? # optional clock - \s* - ([-+]?\d\d?:?(:?\d\d)? - |Z|z)? # timezone (Z is "zero meridian", i.e. GMT) - \s*$""", re.X | re. ASCII) -def iso2time(text): - """ - As for http2time, but parses the ISO 8601 formats: - - 1994-02-03 14:15:29 -0100 -- ISO 8601 format - 1994-02-03 14:15:29 -- zone is optional - 1994-02-03 -- only date - 1994-02-03T14:15:29 -- Use T as separator - 19940203T141529Z -- ISO 8601 compact format - 19940203 -- only date - - """ - # clean up - text = text.lstrip() - - # tz is time zone specifier string - day, mon, yr, hr, min, sec, tz = [None]*7 - - # loose regexp parse - m = ISO_DATE_RE.search(text) - if m is not None: - # XXX there's an extra bit of the timezone I'm ignoring here: is - # this the right thing to do? - yr, mon, day, hr, min, sec, tz, _ = m.groups() - else: - return None # bad format - - return _str2time(day, mon, yr, hr, min, sec, tz) - - -# Header parsing -# ----------------------------------------------------------------------------- - -def unmatched(match): - """Return unmatched part of re.Match object.""" - start, end = match.span(0) - return match.string[:start]+match.string[end:] - -HEADER_TOKEN_RE = re.compile(r"^\s*([^=\s;,]+)") -HEADER_QUOTED_VALUE_RE = re.compile(r"^\s*=\s*\"([^\"\\]*(?:\\.[^\"\\]*)*)\"") -HEADER_VALUE_RE = re.compile(r"^\s*=\s*([^\s;,]*)") -HEADER_ESCAPE_RE = re.compile(r"\\(.)") -def split_header_words(header_values): - r"""Parse header values into a list of lists containing key,value pairs. - - The function knows how to deal with ",", ";" and "=" as well as quoted - values after "=". A list of space separated tokens are parsed as if they - were separated by ";". - - If the header_values passed as argument contains multiple values, then they - are treated as if they were a single value separated by comma ",". - - This means that this function is useful for parsing header fields that - follow this syntax (BNF as from the HTTP/1.1 specification, but we relax - the requirement for tokens). 
- - headers = #header - header = (token | parameter) *( [";"] (token | parameter)) - - token = 1*<any CHAR except CTLs or separators> - separators = "(" | ")" | "<" | ">" | "@" - | "," | ";" | ":" | "\" | <"> - | "/" | "[" | "]" | "?" | "=" - | "{" | "}" | SP | HT - - quoted-string = ( <"> *(qdtext | quoted-pair ) <"> ) - qdtext = <any TEXT except <">> - quoted-pair = "\" CHAR - - parameter = attribute "=" value - attribute = token - value = token | quoted-string - - Each header is represented by a list of key/value pairs. The value for a - simple token (not part of a parameter) is None. Syntactically incorrect - headers will not necessarily be parsed as you would want. - - This is easier to describe with some examples: - - >>> split_header_words(['foo="bar"; port="80,81"; discard, bar=baz']) - [[('foo', 'bar'), ('port', '80,81'), ('discard', None)], [('bar', 'baz')]] - >>> split_header_words(['text/html; charset="iso-8859-1"']) - [[('text/html', None), ('charset', 'iso-8859-1')]] - >>> split_header_words([r'Basic realm="\"foo\bar\""']) - [[('Basic', None), ('realm', '"foobar"')]] - - """ - assert not isinstance(header_values, str) - result = [] - for text in header_values: - orig_text = text - pairs = [] - while text: - m = HEADER_TOKEN_RE.search(text) - if m: - text = unmatched(m) - name = m.group(1) - m = HEADER_QUOTED_VALUE_RE.search(text) - if m: # quoted value - text = unmatched(m) - value = m.group(1) - value = HEADER_ESCAPE_RE.sub(r"\1", value) - else: - m = HEADER_VALUE_RE.search(text) - if m: # unquoted value - text = unmatched(m) - value = m.group(1) - value = value.rstrip() - else: - # no value, a lone token - value = None - pairs.append((name, value)) - elif text.lstrip().startswith(","): - # concatenated headers, as per RFC 2616 section 4.2 - text = text.lstrip()[1:] - if pairs: result.append(pairs) - pairs = [] - else: - # skip junk - non_junk, nr_junk_chars = re.subn("^[=\s;]*", "", text) - assert nr_junk_chars > 0, ( - "split_header_words bug: '%s', '%s', %s" % - (orig_text, text, pairs)) - text = non_junk - if pairs: result.append(pairs) - return result - -HEADER_JOIN_ESCAPE_RE = re.compile(r"([\"\\])") -def join_header_words(lists): - """Do the inverse (almost) of the conversion done by split_header_words. - - Takes a list of lists of (key, value) pairs and produces a single header - value. Attribute values are quoted if needed. - - >>> join_header_words([[("text/plain", None), ("charset", "iso-8859-1")]]) - 'text/plain; charset="iso-8859-1"' - >>> join_header_words([[("text/plain", None)], [("charset", "iso-8859-1")]]) - 'text/plain, charset="iso-8859-1"' - - """ - headers = [] - for pairs in lists: - attr = [] - for k, v in pairs: - if v is not None: - if not re.search(r"^\w+$", v): - v = HEADER_JOIN_ESCAPE_RE.sub(r"\\\1", v) # escape " and \ - v = '"%s"' % v - k = "%s=%s" % (k, v) - attr.append(k) - if attr: headers.append("; ".join(attr)) - return ", ".join(headers) - -def strip_quotes(text): - if text.startswith('"'): - text = text[1:] - if text.endswith('"'): - text = text[:-1] - return text - -def parse_ns_headers(ns_headers): - """Ad-hoc parser for Netscape protocol cookie-attributes. - - The old Netscape cookie format for Set-Cookie can for instance contain - an unquoted "," in the expires field, so we have to use this ad-hoc - parser instead of split_header_words. - - XXX This may not make the best possible effort to parse all the crap - that Netscape Cookie headers contain. Ronald Tschalar's HTTPClient - parser is probably better, so could do worse than following that if - this ever gives any trouble.
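parse_ns_headers (whose body follows below) accepts this loose Netscape Set-Cookie syntax; a hedged example using the stdlib http.cookiejar equivalent, which mirrors this copy.

    from http.cookiejar import parse_ns_headers

    print(parse_ns_headers(['SID=abc123; Path=/; secure']))
    # [[('SID', 'abc123'), ('path', '/'), ('secure', None), ('version', '0')]]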
- - Currently, this is also used for parsing RFC 2109 cookies. - - """ - known_attrs = ("expires", "domain", "path", "secure", - # RFC 2109 attrs (may turn up in Netscape cookies, too) - "version", "port", "max-age") - - result = [] - for ns_header in ns_headers: - pairs = [] - version_set = False - - # XXX: The following does not strictly adhere to RFCs in that empty - # names and values are legal (the former will only appear once and will - # be overwritten if multiple occurrences are present). This is - # mostly to deal with backwards compatibility. - for ii, param in enumerate(ns_header.split(';')): - param = param.strip() - - key, sep, val = param.partition('=') - key = key.strip() - - if not key: - if ii == 0: - break - else: - continue - - # allow for a distinction between present and empty and missing - # altogether - val = val.strip() if sep else None - - if ii != 0: - lc = key.lower() - if lc in known_attrs: - key = lc - - if key == "version": - # This is an RFC 2109 cookie. - if val is not None: - val = strip_quotes(val) - version_set = True - elif key == "expires": - # convert expires date to seconds since epoch - if val is not None: - val = http2time(strip_quotes(val)) # None if invalid - pairs.append((key, val)) - - if pairs: - if not version_set: - pairs.append(("version", "0")) - result.append(pairs) - - return result - - -IPV4_RE = re.compile(r"\.\d+$", re.ASCII) -def is_HDN(text): - """Return True if text is a host domain name.""" - # XXX - # This may well be wrong. Which RFC is HDN defined in, if any (for - # the purposes of RFC 2965)? - # For the current implementation, what about IPv6? Remember to look - # at other uses of IPV4_RE also, if change this. - if IPV4_RE.search(text): - return False - if text == "": - return False - if text[0] == "." or text[-1] == ".": - return False - return True - -def domain_match(A, B): - """Return True if domain A domain-matches domain B, according to RFC 2965. - - A and B may be host domain names or IP addresses. - - RFC 2965, section 1: - - Host names can be specified either as an IP address or a HDN string. - Sometimes we compare one host name with another. (Such comparisons SHALL - be case-insensitive.) Host A's name domain-matches host B's if - - * their host name strings string-compare equal; or - - * A is a HDN string and has the form NB, where N is a non-empty - name string, B has the form .B', and B' is a HDN string. (So, - x.y.com domain-matches .Y.com but not Y.com.) - - Note that domain-match is not a commutative operation: a.b.c.com - domain-matches .c.com, but not the reverse. - - """ - # Note that, if A or B are IP addresses, the only relevant part of the - # definition of the domain-match algorithm is the direct string-compare. - A = A.lower() - B = B.lower() - if A == B: - return True - if not is_HDN(A): - return False - i = A.rfind(B) - if i == -1 or i == 0: - # A does not have form NB, or N is the empty string - return False - if not B.startswith("."): - return False - if not is_HDN(B[1:]): - return False - return True - -def liberal_is_HDN(text): - """Return True if text is a sort-of-like a host domain name. - - For accepting/blocking domains. - - """ - if IPV4_RE.search(text): - return False - return True - -def user_domain_match(A, B): - """For blocking/accepting domains. - - A and B may be host domain names or IP addresses. 
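domain_match() above implements the RFC 2965 matching rules quoted in its docstring; a few hedged spot checks via the stdlib http.cookiejar equivalent.

    from http.cookiejar import domain_match

    print(domain_match("a.b.c.com", ".c.com"))          # True
    print(domain_match(".c.com", "a.b.c.com"))          # False: matching is not commutative
    print(domain_match("192.168.1.5", "192.168.1.5"))   # True: IP addresses only string-compare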
- - """ - A = A.lower() - B = B.lower() - if not (liberal_is_HDN(A) and liberal_is_HDN(B)): - if A == B: - # equal IP addresses - return True - return False - initial_dot = B.startswith(".") - if initial_dot and A.endswith(B): - return True - if not initial_dot and A == B: - return True - return False - -cut_port_re = re.compile(r":\d+$", re.ASCII) -def request_host(request): - """Return request-host, as defined by RFC 2965. - - Variation from RFC: returned value is lowercased, for convenient - comparison. - - """ - url = request.get_full_url() - host = urllib.parse.urlparse(url)[1] - if host == "": - host = request.get_header("Host", "") - - # remove port, if present - host = cut_port_re.sub("", host, 1) - return host.lower() - -def eff_request_host(request): - """Return a tuple (request-host, effective request-host name). - - As defined by RFC 2965, except both are lowercased. - - """ - erhn = req_host = request_host(request) - if req_host.find(".") == -1 and not IPV4_RE.search(req_host): - erhn = req_host + ".local" - return req_host, erhn - -def request_path(request): - """Path component of request-URI, as defined by RFC 2965.""" - url = request.get_full_url() - parts = urllib.parse.urlsplit(url) - path = escape_path(parts.path) - if not path.startswith("/"): - # fix bad RFC 2396 absoluteURI - path = "/" + path - return path - -def request_port(request): - host = request.host - i = host.find(':') - if i >= 0: - port = host[i+1:] - try: - int(port) - except ValueError: - _debug("nonnumeric port: '%s'", port) - return None - else: - port = DEFAULT_HTTP_PORT - return port - -# Characters in addition to A-Z, a-z, 0-9, '_', '.', and '-' that don't -# need to be escaped to form a valid HTTP URL (RFCs 2396 and 1738). -HTTP_PATH_SAFE = "%/;:@&=+$,!~*'()" -ESCAPED_CHAR_RE = re.compile(r"%([0-9a-fA-F][0-9a-fA-F])") -def uppercase_escaped_char(match): - return "%%%s" % match.group(1).upper() -def escape_path(path): - """Escape any invalid characters in HTTP URL, and uppercase all escapes.""" - # There's no knowing what character encoding was used to create URLs - # containing %-escapes, but since we have to pick one to escape invalid - # path characters, we pick UTF-8, as recommended in the HTML 4.0 - # specification: - # http://www.w3.org/TR/REC-html40/appendix/notes.html#h-B.2.1 - # And here, kind of: draft-fielding-uri-rfc2396bis-03 - # (And in draft IRI specification: draft-duerst-iri-05) - # (And here, for new URI schemes: RFC 2718) - path = urllib.parse.quote(path, HTTP_PATH_SAFE) - path = ESCAPED_CHAR_RE.sub(uppercase_escaped_char, path) - return path - -def reach(h): - """Return reach of host h, as defined by RFC 2965, section 1. - - The reach R of a host name H is defined as follows: - - * If - - - H is the host domain name of a host; and, - - - H has the form A.B; and - - - A has no embedded (that is, interior) dots; and - - - B has at least one embedded dot, or B is the string "local". - then the reach of H is .B. - - * Otherwise, the reach of H is H. 
- - >>> reach("www.acme.com") - '.acme.com' - >>> reach("acme.com") - 'acme.com' - >>> reach("acme.local") - '.local' - - """ - i = h.find(".") - if i >= 0: - #a = h[:i] # this line is only here to show what a is - b = h[i+1:] - i = b.find(".") - if is_HDN(h) and (i >= 0 or b == "local"): - return "."+b - return h - -def is_third_party(request): - """ - - RFC 2965, section 3.3.6: - - An unverifiable transaction is to a third-party host if its request- - host U does not domain-match the reach R of the request-host O in the - origin transaction. - - """ - req_host = request_host(request) - if not domain_match(req_host, reach(request.origin_req_host)): - return True - else: - return False - - -class Cookie: - """HTTP Cookie. - - This class represents both Netscape and RFC 2965 cookies. - - This is deliberately a very simple class. It just holds attributes. It's - possible to construct Cookie instances that don't comply with the cookie - standards. CookieJar.make_cookies is the factory function for Cookie - objects -- it deals with cookie parsing, supplying defaults, and - normalising to the representation used in this class. CookiePolicy is - responsible for checking them to see whether they should be accepted from - and returned to the server. - - Note that the port may be present in the headers, but unspecified ("Port" - rather than"Port=80", for example); if this is the case, port is None. - - """ - - def __init__(self, version, name, value, - port, port_specified, - domain, domain_specified, domain_initial_dot, - path, path_specified, - secure, - expires, - discard, - comment, - comment_url, - rest, - rfc2109=False, - ): - - if version is not None: version = int(version) - if expires is not None: expires = int(float(expires)) - if port is None and port_specified is True: - raise ValueError("if port is None, port_specified must be false") - - self.version = version - self.name = name - self.value = value - self.port = port - self.port_specified = port_specified - # normalise case, as per RFC 2965 section 3.3.3 - self.domain = domain.lower() - self.domain_specified = domain_specified - # Sigh. We need to know whether the domain given in the - # cookie-attribute had an initial dot, in order to follow RFC 2965 - # (as clarified in draft errata). Needed for the returned $Domain - # value. 
- self.domain_initial_dot = domain_initial_dot - self.path = path - self.path_specified = path_specified - self.secure = secure - self.expires = expires - self.discard = discard - self.comment = comment - self.comment_url = comment_url - self.rfc2109 = rfc2109 - - self._rest = copy.copy(rest) - - def has_nonstandard_attr(self, name): - return name in self._rest - def get_nonstandard_attr(self, name, default=None): - return self._rest.get(name, default) - def set_nonstandard_attr(self, name, value): - self._rest[name] = value - - def is_expired(self, now=None): - if now is None: now = time.time() - if (self.expires is not None) and (self.expires <= now): - return True - return False - - def __str__(self): - if self.port is None: p = "" - else: p = ":"+self.port - limit = self.domain + p + self.path - if self.value is not None: - namevalue = "%s=%s" % (self.name, self.value) - else: - namevalue = self.name - return "<Cookie %s for %s>" % (namevalue, limit) - - def __repr__(self): - args = [] - for name in ("version", "name", "value", - "port", "port_specified", - "domain", "domain_specified", "domain_initial_dot", - "path", "path_specified", - "secure", "expires", "discard", "comment", "comment_url", - ): - attr = getattr(self, name) - args.append("%s=%s" % (name, repr(attr))) - args.append("rest=%s" % repr(self._rest)) - args.append("rfc2109=%s" % repr(self.rfc2109)) - return "%s(%s)" % (self.__class__.__name__, ", ".join(args)) - - -class CookiePolicy: - """Defines which cookies get accepted from and returned to server. - - May also modify cookies, though this is probably a bad idea. - - The subclass DefaultCookiePolicy defines the standard rules for Netscape - and RFC 2965 cookies -- override that if you want a customised policy. - - """ - def set_ok(self, cookie, request): - """Return true if (and only if) cookie should be accepted from server. - - Currently, pre-expired cookies never get this far -- the CookieJar - class deletes such cookies itself. - - """ - raise NotImplementedError() - - def return_ok(self, cookie, request): - """Return true if (and only if) cookie should be returned to server.""" - raise NotImplementedError() - - def domain_return_ok(self, domain, request): - """Return false if cookies should not be returned, given cookie domain. - """ - return True - - def path_return_ok(self, path, request): - """Return false if cookies should not be returned, given cookie path.
- """ - return True - - -class DefaultCookiePolicy(CookiePolicy): - """Implements the standard rules for accepting and returning cookies.""" - - DomainStrictNoDots = 1 - DomainStrictNonDomain = 2 - DomainRFC2965Match = 4 - - DomainLiberal = 0 - DomainStrict = DomainStrictNoDots|DomainStrictNonDomain - - def __init__(self, - blocked_domains=None, allowed_domains=None, - netscape=True, rfc2965=False, - rfc2109_as_netscape=None, - hide_cookie2=False, - strict_domain=False, - strict_rfc2965_unverifiable=True, - strict_ns_unverifiable=False, - strict_ns_domain=DomainLiberal, - strict_ns_set_initial_dollar=False, - strict_ns_set_path=False, - ): - """Constructor arguments should be passed as keyword arguments only.""" - self.netscape = netscape - self.rfc2965 = rfc2965 - self.rfc2109_as_netscape = rfc2109_as_netscape - self.hide_cookie2 = hide_cookie2 - self.strict_domain = strict_domain - self.strict_rfc2965_unverifiable = strict_rfc2965_unverifiable - self.strict_ns_unverifiable = strict_ns_unverifiable - self.strict_ns_domain = strict_ns_domain - self.strict_ns_set_initial_dollar = strict_ns_set_initial_dollar - self.strict_ns_set_path = strict_ns_set_path - - if blocked_domains is not None: - self._blocked_domains = tuple(blocked_domains) - else: - self._blocked_domains = () - - if allowed_domains is not None: - allowed_domains = tuple(allowed_domains) - self._allowed_domains = allowed_domains - - def blocked_domains(self): - """Return the sequence of blocked domains (as a tuple).""" - return self._blocked_domains - def set_blocked_domains(self, blocked_domains): - """Set the sequence of blocked domains.""" - self._blocked_domains = tuple(blocked_domains) - - def is_blocked(self, domain): - for blocked_domain in self._blocked_domains: - if user_domain_match(domain, blocked_domain): - return True - return False - - def allowed_domains(self): - """Return None, or the sequence of allowed domains (as a tuple).""" - return self._allowed_domains - def set_allowed_domains(self, allowed_domains): - """Set the sequence of allowed domains, or None.""" - if allowed_domains is not None: - allowed_domains = tuple(allowed_domains) - self._allowed_domains = allowed_domains - - def is_not_allowed(self, domain): - if self._allowed_domains is None: - return False - for allowed_domain in self._allowed_domains: - if user_domain_match(domain, allowed_domain): - return False - return True - - def set_ok(self, cookie, request): - """ - If you override .set_ok(), be sure to call this method. If it returns - false, so should your subclass (assuming your subclass wants to be more - strict about which cookies to accept). - - """ - _debug(" - checking cookie %s=%s", cookie.name, cookie.value) - - assert cookie.name is not None - - for n in "version", "verifiability", "name", "path", "domain", "port": - fn_name = "set_ok_"+n - fn = getattr(self, fn_name) - if not fn(cookie, request): - return False - - return True - - def set_ok_version(self, cookie, request): - if cookie.version is None: - # Version is always set to 0 by parse_ns_headers if it's a Netscape - # cookie, so this must be an invalid RFC 2965 cookie. 
- _debug(" Set-Cookie2 without version attribute (%s=%s)", - cookie.name, cookie.value) - return False - if cookie.version > 0 and not self.rfc2965: - _debug(" RFC 2965 cookies are switched off") - return False - elif cookie.version == 0 and not self.netscape: - _debug(" Netscape cookies are switched off") - return False - return True - - def set_ok_verifiability(self, cookie, request): - if request.unverifiable and is_third_party(request): - if cookie.version > 0 and self.strict_rfc2965_unverifiable: - _debug(" third-party RFC 2965 cookie during " - "unverifiable transaction") - return False - elif cookie.version == 0 and self.strict_ns_unverifiable: - _debug(" third-party Netscape cookie during " - "unverifiable transaction") - return False - return True - - def set_ok_name(self, cookie, request): - # Try and stop servers setting V0 cookies designed to hack other - # servers that know both V0 and V1 protocols. - if (cookie.version == 0 and self.strict_ns_set_initial_dollar and - cookie.name.startswith("$")): - _debug(" illegal name (starts with '$'): '%s'", cookie.name) - return False - return True - - def set_ok_path(self, cookie, request): - if cookie.path_specified: - req_path = request_path(request) - if ((cookie.version > 0 or - (cookie.version == 0 and self.strict_ns_set_path)) and - not req_path.startswith(cookie.path)): - _debug(" path attribute %s is not a prefix of request " - "path %s", cookie.path, req_path) - return False - return True - - def set_ok_domain(self, cookie, request): - if self.is_blocked(cookie.domain): - _debug(" domain %s is in user block-list", cookie.domain) - return False - if self.is_not_allowed(cookie.domain): - _debug(" domain %s is not in user allow-list", cookie.domain) - return False - if cookie.domain_specified: - req_host, erhn = eff_request_host(request) - domain = cookie.domain - if self.strict_domain and (domain.count(".") >= 2): - # XXX This should probably be compared with the Konqueror - # (kcookiejar.cpp) and Mozilla implementations, but it's a - # losing battle. 
- i = domain.rfind(".") - j = domain.rfind(".", 0, i) - if j == 0: # domain like .foo.bar - tld = domain[i+1:] - sld = domain[j+1:i] - if sld.lower() in ("co", "ac", "com", "edu", "org", "net", - "gov", "mil", "int", "aero", "biz", "cat", "coop", - "info", "jobs", "mobi", "museum", "name", "pro", - "travel", "eu") and len(tld) == 2: - # domain like .co.uk - _debug(" country-code second level domain %s", domain) - return False - if domain.startswith("."): - undotted_domain = domain[1:] - else: - undotted_domain = domain - embedded_dots = (undotted_domain.find(".") >= 0) - if not embedded_dots and domain != ".local": - _debug(" non-local domain %s contains no embedded dot", - domain) - return False - if cookie.version == 0: - if (not erhn.endswith(domain) and - (not erhn.startswith(".") and - not ("."+erhn).endswith(domain))): - _debug(" effective request-host %s (even with added " - "initial dot) does not end with %s", - erhn, domain) - return False - if (cookie.version > 0 or - (self.strict_ns_domain & self.DomainRFC2965Match)): - if not domain_match(erhn, domain): - _debug(" effective request-host %s does not domain-match " - "%s", erhn, domain) - return False - if (cookie.version > 0 or - (self.strict_ns_domain & self.DomainStrictNoDots)): - host_prefix = req_host[:-len(domain)] - if (host_prefix.find(".") >= 0 and - not IPV4_RE.search(req_host)): - _debug(" host prefix %s for domain %s contains a dot", - host_prefix, domain) - return False - return True - - def set_ok_port(self, cookie, request): - if cookie.port_specified: - req_port = request_port(request) - if req_port is None: - req_port = "80" - else: - req_port = str(req_port) - for p in cookie.port.split(","): - try: - int(p) - except ValueError: - _debug(" bad port %s (not numeric)", p) - return False - if p == req_port: - break - else: - _debug(" request port (%s) not found in %s", - req_port, cookie.port) - return False - return True - - def return_ok(self, cookie, request): - """ - If you override .return_ok(), be sure to call this method. If it - returns false, so should your subclass (assuming your subclass wants to - be more strict about which cookies to return). - - """ - # Path has already been checked by .path_return_ok(), and domain - # blocking done by .domain_return_ok(). 
- _debug(" - checking cookie %s=%s", cookie.name, cookie.value) - - for n in "version", "verifiability", "secure", "expires", "port", "domain": - fn_name = "return_ok_"+n - fn = getattr(self, fn_name) - if not fn(cookie, request): - return False - return True - - def return_ok_version(self, cookie, request): - if cookie.version > 0 and not self.rfc2965: - _debug(" RFC 2965 cookies are switched off") - return False - elif cookie.version == 0 and not self.netscape: - _debug(" Netscape cookies are switched off") - return False - return True - - def return_ok_verifiability(self, cookie, request): - if request.unverifiable and is_third_party(request): - if cookie.version > 0 and self.strict_rfc2965_unverifiable: - _debug(" third-party RFC 2965 cookie during unverifiable " - "transaction") - return False - elif cookie.version == 0 and self.strict_ns_unverifiable: - _debug(" third-party Netscape cookie during unverifiable " - "transaction") - return False - return True - - def return_ok_secure(self, cookie, request): - if cookie.secure and request.type != "https": - _debug(" secure cookie with non-secure request") - return False - return True - - def return_ok_expires(self, cookie, request): - if cookie.is_expired(self._now): - _debug(" cookie expired") - return False - return True - - def return_ok_port(self, cookie, request): - if cookie.port: - req_port = request_port(request) - if req_port is None: - req_port = "80" - for p in cookie.port.split(","): - if p == req_port: - break - else: - _debug(" request port %s does not match cookie port %s", - req_port, cookie.port) - return False - return True - - def return_ok_domain(self, cookie, request): - req_host, erhn = eff_request_host(request) - domain = cookie.domain - - # strict check of non-domain cookies: Mozilla does this, MSIE5 doesn't - if (cookie.version == 0 and - (self.strict_ns_domain & self.DomainStrictNonDomain) and - not cookie.domain_specified and domain != erhn): - _debug(" cookie with unspecified domain does not string-compare " - "equal to request domain") - return False - - if cookie.version > 0 and not domain_match(erhn, domain): - _debug(" effective request-host name %s does not domain-match " - "RFC 2965 cookie domain %s", erhn, domain) - return False - if cookie.version == 0 and not ("."+erhn).endswith(domain): - _debug(" request-host %s does not match Netscape cookie domain " - "%s", req_host, domain) - return False - return True - - def domain_return_ok(self, domain, request): - # Liberal check of. This is here as an optimization to avoid - # having to load lots of MSIE cookie files unless necessary. 
- req_host, erhn = eff_request_host(request) - if not req_host.startswith("."): - req_host = "."+req_host - if not erhn.startswith("."): - erhn = "."+erhn - if not (req_host.endswith(domain) or erhn.endswith(domain)): - #_debug(" request domain %s does not match cookie domain %s", - # req_host, domain) - return False - - if self.is_blocked(domain): - _debug(" domain %s is in user block-list", domain) - return False - if self.is_not_allowed(domain): - _debug(" domain %s is not in user allow-list", domain) - return False - - return True - - def path_return_ok(self, path, request): - _debug("- checking cookie path=%s", path) - req_path = request_path(request) - if not req_path.startswith(path): - _debug(" %s does not path-match %s", req_path, path) - return False - return True - - -def vals_sorted_by_key(adict): - keys = sorted(adict.keys()) - return map(adict.get, keys) - -def deepvalues(mapping): - """Iterates over nested mapping, depth-first, in sorted order by key.""" - values = vals_sorted_by_key(mapping) - for obj in values: - mapping = False - try: - obj.items - except AttributeError: - pass - else: - mapping = True - yield from deepvalues(obj) - if not mapping: - yield obj - - -# Used as second parameter to dict.get() method, to distinguish absent -# dict key from one with a None value. -class Absent: pass - -class CookieJar: - """Collection of HTTP cookies. - - You may not need to know about this class: try - urllib.request.build_opener(HTTPCookieProcessor).open(url). - """ - - non_word_re = re.compile(r"\W") - quote_re = re.compile(r"([\"\\])") - strict_domain_re = re.compile(r"\.?[^.]*") - domain_re = re.compile(r"[^.]*") - dots_re = re.compile(r"^\.+") - - magic_re = re.compile(r"^\#LWP-Cookies-(\d+\.\d+)", re.ASCII) - - def __init__(self, policy=None): - if policy is None: - policy = DefaultCookiePolicy() - self._policy = policy - - self._cookies_lock = _threading.RLock() - self._cookies = {} - - def set_policy(self, policy): - self._policy = policy - - def _cookies_for_domain(self, domain, request): - cookies = [] - if not self._policy.domain_return_ok(domain, request): - return [] - _debug("Checking %s for cookies to return", domain) - cookies_by_path = self._cookies[domain] - for path in cookies_by_path.keys(): - if not self._policy.path_return_ok(path, request): - continue - cookies_by_name = cookies_by_path[path] - for cookie in cookies_by_name.values(): - if not self._policy.return_ok(cookie, request): - _debug(" not returning cookie") - continue - _debug(" it's a match") - cookies.append(cookie) - return cookies - - def _cookies_for_request(self, request): - """Return a list of cookies to be returned to server.""" - cookies = [] - for domain in self._cookies.keys(): - cookies.extend(self._cookies_for_domain(domain, request)) - return cookies - - def _cookie_attrs(self, cookies): - """Return a list of cookie-attributes to be returned to server. - - like ['foo="bar"; $Path="/"', ...] - - The $Version attribute is also added when appropriate (currently only - once per request). - - """ - # add cookies in order of most specific (ie. longest) path first - cookies.sort(key=lambda a: len(a.path), reverse=True) - - version_set = False - - attrs = [] - for cookie in cookies: - # set version of Cookie header - # XXX - # What should it be if multiple matching Set-Cookie headers have - # different versions themselves? - # Answer: there is no answer; was supposed to be settled by - # RFC 2965 errata, but that may never appear... 
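# --- Editor's sketch (not part of the deleted module) ---------------------
# The CookieJar docstring above points at
# urllib.request.build_opener(HTTPCookieProcessor).open(url); a minimal
# round trip with the standard library.  The URL is a placeholder.
import urllib.request
from http.cookiejar import CookieJar

jar = CookieJar()
opener = urllib.request.build_opener(urllib.request.HTTPCookieProcessor(jar))
opener.open("http://example.com/")        # any Set-Cookie headers land in jar
for cookie in jar:
    print(cookie.name, cookie.value)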
- version = cookie.version - if not version_set: - version_set = True - if version > 0: - attrs.append("$Version=%s" % version) - - # quote cookie value if necessary - # (not for Netscape protocol, which already has any quotes - # intact, due to the poorly-specified Netscape Cookie: syntax) - if ((cookie.value is not None) and - self.non_word_re.search(cookie.value) and version > 0): - value = self.quote_re.sub(r"\\\1", cookie.value) - else: - value = cookie.value - - # add cookie-attributes to be returned in Cookie header - if cookie.value is None: - attrs.append(cookie.name) - else: - attrs.append("%s=%s" % (cookie.name, value)) - if version > 0: - if cookie.path_specified: - attrs.append('$Path="%s"' % cookie.path) - if cookie.domain.startswith("."): - domain = cookie.domain - if (not cookie.domain_initial_dot and - domain.startswith(".")): - domain = domain[1:] - attrs.append('$Domain="%s"' % domain) - if cookie.port is not None: - p = "$Port" - if cookie.port_specified: - p = p + ('="%s"' % cookie.port) - attrs.append(p) - - return attrs - - def add_cookie_header(self, request): - """Add correct Cookie: header to request (urllib.request.Request object). - - The Cookie2 header is also added unless policy.hide_cookie2 is true. - - """ - _debug("add_cookie_header") - self._cookies_lock.acquire() - try: - - self._policy._now = self._now = int(time.time()) - - cookies = self._cookies_for_request(request) - - attrs = self._cookie_attrs(cookies) - if attrs: - if not request.has_header("Cookie"): - request.add_unredirected_header( - "Cookie", "; ".join(attrs)) - - # if necessary, advertise that we know RFC 2965 - if (self._policy.rfc2965 and not self._policy.hide_cookie2 and - not request.has_header("Cookie2")): - for cookie in cookies: - if cookie.version != 1: - request.add_unredirected_header("Cookie2", '$Version="1"') - break - - finally: - self._cookies_lock.release() - - self.clear_expired_cookies() - - def _normalized_cookie_tuples(self, attrs_set): - """Return list of tuples containing normalised cookie information. - - attrs_set is the list of lists of key,value pairs extracted from - the Set-Cookie or Set-Cookie2 headers. - - Tuples are name, value, standard, rest, where name and value are the - cookie name and value, standard is a dictionary containing the standard - cookie-attributes (discard, secure, version, expires or max-age, - domain, path and port) and rest is a dictionary containing the rest of - the cookie-attributes. - - """ - cookie_tuples = [] - - boolean_attrs = "discard", "secure" - value_attrs = ("version", - "expires", "max-age", - "domain", "path", "port", - "comment", "commenturl") - - for cookie_attrs in attrs_set: - name, value = cookie_attrs[0] - - # Build dictionary of standard cookie-attributes (standard) and - # dictionary of other cookie-attributes (rest). - - # Note: expiry time is normalised to seconds since epoch. V0 - # cookies should have the Expires cookie-attribute, and V1 cookies - # should have Max-Age, but since V1 includes RFC 2109 cookies (and - # since V0 cookies may be a mish-mash of Netscape and RFC 2109), we - # accept either (but prefer Max-Age). 
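# --- Editor's sketch (not part of the deleted module) ---------------------
# The comment above notes that both Expires and Max-Age are accepted; the
# code that follows converts a relative Max-Age into an absolute expiry via
# self._now + v.  The same conversion as a hypothetical standalone helper:
import time

def max_age_to_expires(max_age, now=None):
    # seconds-from-now -> seconds since the epoch
    now = int(time.time()) if now is None else now
    return now + int(max_age)

print(max_age_to_expires(3600))   # one hour from now; Max-Age=0 means "discard"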
- max_age_set = False - - bad_cookie = False - - standard = {} - rest = {} - for k, v in cookie_attrs[1:]: - lc = k.lower() - # don't lose case distinction for unknown fields - if lc in value_attrs or lc in boolean_attrs: - k = lc - if k in boolean_attrs and v is None: - # boolean cookie-attribute is present, but has no value - # (like "discard", rather than "port=80") - v = True - if k in standard: - # only first value is significant - continue - if k == "domain": - if v is None: - _debug(" missing value for domain attribute") - bad_cookie = True - break - # RFC 2965 section 3.3.3 - v = v.lower() - if k == "expires": - if max_age_set: - # Prefer max-age to expires (like Mozilla) - continue - if v is None: - _debug(" missing or invalid value for expires " - "attribute: treating as session cookie") - continue - if k == "max-age": - max_age_set = True - try: - v = int(v) - except ValueError: - _debug(" missing or invalid (non-numeric) value for " - "max-age attribute") - bad_cookie = True - break - # convert RFC 2965 Max-Age to seconds since epoch - # XXX Strictly you're supposed to follow RFC 2616 - # age-calculation rules. Remember that zero Max-Age - # is a request to discard (old and new) cookie, though. - k = "expires" - v = self._now + v - if (k in value_attrs) or (k in boolean_attrs): - if (v is None and - k not in ("port", "comment", "commenturl")): - _debug(" missing value for %s attribute" % k) - bad_cookie = True - break - standard[k] = v - else: - rest[k] = v - - if bad_cookie: - continue - - cookie_tuples.append((name, value, standard, rest)) - - return cookie_tuples - - def _cookie_from_cookie_tuple(self, tup, request): - # standard is dict of standard cookie-attributes, rest is dict of the - # rest of them - name, value, standard, rest = tup - - domain = standard.get("domain", Absent) - path = standard.get("path", Absent) - port = standard.get("port", Absent) - expires = standard.get("expires", Absent) - - # set the easy defaults - version = standard.get("version", None) - if version is not None: - try: - version = int(version) - except ValueError: - return None # invalid version, ignore cookie - secure = standard.get("secure", False) - # (discard is also set if expires is Absent) - discard = standard.get("discard", False) - comment = standard.get("comment", None) - comment_url = standard.get("commenturl", None) - - # set default path - if path is not Absent and path != "": - path_specified = True - path = escape_path(path) - else: - path_specified = False - path = request_path(request) - i = path.rfind("/") - if i != -1: - if version == 0: - # Netscape spec parts company from reality here - path = path[:i] - else: - path = path[:i+1] - if len(path) == 0: path = "/" - - # set default domain - domain_specified = domain is not Absent - # but first we have to remember whether it starts with a dot - domain_initial_dot = False - if domain_specified: - domain_initial_dot = bool(domain.startswith(".")) - if domain is Absent: - req_host, erhn = eff_request_host(request) - domain = erhn - elif not domain.startswith("."): - domain = "."+domain - - # set default port - port_specified = False - if port is not Absent: - if port is None: - # Port attr present, but has no value: default to request port. - # Cookie should then only be sent back on that port. - port = request_port(request) - else: - port_specified = True - port = re.sub(r"\s+", "", port) - else: - # No port attr present. Cookie can be sent back on any port. 
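# --- Editor's sketch (not part of the deleted module) ---------------------
# The default-path rule applied in _cookie_from_cookie_tuple() above, as a
# hypothetical standalone function: version 0 (Netscape) cookies cut the
# request path before the last "/", V1 cookies after it.
def default_cookie_path(req_path, version):
    i = req_path.rfind("/")
    if i != -1:
        req_path = req_path[:i] if version == 0 else req_path[:i + 1]
    return req_path or "/"

print(default_cookie_path("/acme/login", 0))   # "/acme"
print(default_cookie_path("/acme/login", 1))   # "/acme/"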
- port = None - - # set default expires and discard - if expires is Absent: - expires = None - discard = True - elif expires <= self._now: - # Expiry date in past is request to delete cookie. This can't be - # in DefaultCookiePolicy, because can't delete cookies there. - try: - self.clear(domain, path, name) - except KeyError: - pass - _debug("Expiring cookie, domain='%s', path='%s', name='%s'", - domain, path, name) - return None - - return Cookie(version, - name, value, - port, port_specified, - domain, domain_specified, domain_initial_dot, - path, path_specified, - secure, - expires, - discard, - comment, - comment_url, - rest) - - def _cookies_from_attrs_set(self, attrs_set, request): - cookie_tuples = self._normalized_cookie_tuples(attrs_set) - - cookies = [] - for tup in cookie_tuples: - cookie = self._cookie_from_cookie_tuple(tup, request) - if cookie: cookies.append(cookie) - return cookies - - def _process_rfc2109_cookies(self, cookies): - rfc2109_as_ns = getattr(self._policy, 'rfc2109_as_netscape', None) - if rfc2109_as_ns is None: - rfc2109_as_ns = not self._policy.rfc2965 - for cookie in cookies: - if cookie.version == 1: - cookie.rfc2109 = True - if rfc2109_as_ns: - # treat 2109 cookies as Netscape cookies rather than - # as RFC2965 cookies - cookie.version = 0 - - def make_cookies(self, response, request): - """Return sequence of Cookie objects extracted from response object.""" - # get cookie-attributes for RFC 2965 and Netscape protocols - headers = response.info() - rfc2965_hdrs = headers.get_all("Set-Cookie2", []) - ns_hdrs = headers.get_all("Set-Cookie", []) - - rfc2965 = self._policy.rfc2965 - netscape = self._policy.netscape - - if ((not rfc2965_hdrs and not ns_hdrs) or - (not ns_hdrs and not rfc2965) or - (not rfc2965_hdrs and not netscape) or - (not netscape and not rfc2965)): - return [] # no relevant cookie headers: quick exit - - try: - cookies = self._cookies_from_attrs_set( - split_header_words(rfc2965_hdrs), request) - except Exception: - _warn_unhandled_exception() - cookies = [] - - if ns_hdrs and netscape: - try: - # RFC 2109 and Netscape cookies - ns_cookies = self._cookies_from_attrs_set( - parse_ns_headers(ns_hdrs), request) - except Exception: - _warn_unhandled_exception() - ns_cookies = [] - self._process_rfc2109_cookies(ns_cookies) - - # Look for Netscape cookies (from Set-Cookie headers) that match - # corresponding RFC 2965 cookies (from Set-Cookie2 headers). - # For each match, keep the RFC 2965 cookie and ignore the Netscape - # cookie (RFC 2965 section 9.1). Actually, RFC 2109 cookies are - # bundled in with the Netscape cookies for this purpose, which is - # reasonable behaviour. 
- if rfc2965: - lookup = {} - for cookie in cookies: - lookup[(cookie.domain, cookie.path, cookie.name)] = None - - def no_matching_rfc2965(ns_cookie, lookup=lookup): - key = ns_cookie.domain, ns_cookie.path, ns_cookie.name - return key not in lookup - ns_cookies = filter(no_matching_rfc2965, ns_cookies) - - if ns_cookies: - cookies.extend(ns_cookies) - - return cookies - - def set_cookie_if_ok(self, cookie, request): - """Set a cookie if policy says it's OK to do so.""" - self._cookies_lock.acquire() - try: - self._policy._now = self._now = int(time.time()) - - if self._policy.set_ok(cookie, request): - self.set_cookie(cookie) - - - finally: - self._cookies_lock.release() - - def set_cookie(self, cookie): - """Set a cookie, without checking whether or not it should be set.""" - c = self._cookies - self._cookies_lock.acquire() - try: - if cookie.domain not in c: c[cookie.domain] = {} - c2 = c[cookie.domain] - if cookie.path not in c2: c2[cookie.path] = {} - c3 = c2[cookie.path] - c3[cookie.name] = cookie - finally: - self._cookies_lock.release() - - def extract_cookies(self, response, request): - """Extract cookies from response, where allowable given the request.""" - _debug("extract_cookies: %s", response.info()) - self._cookies_lock.acquire() - try: - self._policy._now = self._now = int(time.time()) - - for cookie in self.make_cookies(response, request): - if self._policy.set_ok(cookie, request): - _debug(" setting cookie: %s", cookie) - self.set_cookie(cookie) - finally: - self._cookies_lock.release() - - def clear(self, domain=None, path=None, name=None): - """Clear some cookies. - - Invoking this method without arguments will clear all cookies. If - given a single argument, only cookies belonging to that domain will be - removed. If given two arguments, cookies belonging to the specified - path within that domain are removed. If given three arguments, then - the cookie with the specified name, path and domain is removed. - - Raises KeyError if no matching cookie exists. - - """ - if name is not None: - if (domain is None) or (path is None): - raise ValueError( - "domain and path must be given to remove a cookie by name") - del self._cookies[domain][path][name] - elif path is not None: - if domain is None: - raise ValueError( - "domain must be given to remove cookies by path") - del self._cookies[domain][path] - elif domain is not None: - del self._cookies[domain] - else: - self._cookies = {} - - def clear_session_cookies(self): - """Discard all session cookies. - - Note that the .save() method won't save session cookies anyway, unless - you ask otherwise by passing a true ignore_discard argument. - - """ - self._cookies_lock.acquire() - try: - for cookie in self: - if cookie.discard: - self.clear(cookie.domain, cookie.path, cookie.name) - finally: - self._cookies_lock.release() - - def clear_expired_cookies(self): - """Discard all expired cookies. - - You probably don't need to call this method: expired cookies are never - sent back to the server (provided you're using DefaultCookiePolicy), - this method is called by CookieJar itself every so often, and the - .save() method won't save expired cookies anyway (unless you ask - otherwise by passing a true ignore_expires argument). 
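# --- Editor's sketch (not part of the deleted module) ---------------------
# The clear() signatures documented above, exercised against a fresh jar;
# the domain is a placeholder.
from http.cookiejar import CookieJar

jar = CookieJar()
jar.clear()                      # no arguments: remove every cookie
try:
    jar.clear("example.com")     # unknown domain raises KeyError, as documented
except KeyError:
    pass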
- - """ - self._cookies_lock.acquire() - try: - now = time.time() - for cookie in self: - if cookie.is_expired(now): - self.clear(cookie.domain, cookie.path, cookie.name) - finally: - self._cookies_lock.release() - - def __iter__(self): - return deepvalues(self._cookies) - - def __len__(self): - """Return number of contained cookies.""" - i = 0 - for cookie in self: i = i + 1 - return i - - def __repr__(self): - r = [] - for cookie in self: r.append(repr(cookie)) - return "<%s[%s]>" % (self.__class__.__name__, ", ".join(r)) - - def __str__(self): - r = [] - for cookie in self: r.append(str(cookie)) - return "<%s[%s]>" % (self.__class__.__name__, ", ".join(r)) - - -# derives from OSError for backwards-compatibility with Python 2.4.0 -class LoadError(OSError): pass - -class FileCookieJar(CookieJar): - """CookieJar that can be loaded from and saved to a file.""" - - def __init__(self, filename=None, delayload=False, policy=None): - """ - Cookies are NOT loaded from the named file until either the .load() or - .revert() method is called. - - """ - CookieJar.__init__(self, policy) - if filename is not None: - try: - filename+"" - except: - raise ValueError("filename must be string-like") - self.filename = filename - self.delayload = bool(delayload) - - def save(self, filename=None, ignore_discard=False, ignore_expires=False): - """Save cookies to a file.""" - raise NotImplementedError() - - def load(self, filename=None, ignore_discard=False, ignore_expires=False): - """Load cookies from a file.""" - if filename is None: - if self.filename is not None: filename = self.filename - else: raise ValueError(MISSING_FILENAME_TEXT) - - with open(filename) as f: - self._really_load(f, filename, ignore_discard, ignore_expires) - - def revert(self, filename=None, - ignore_discard=False, ignore_expires=False): - """Clear all cookies and reload cookies from a saved file. - - Raises LoadError (or OSError) if reversion is not successful; the - object's state will not be altered if this happens. - - """ - if filename is None: - if self.filename is not None: filename = self.filename - else: raise ValueError(MISSING_FILENAME_TEXT) - - self._cookies_lock.acquire() - try: - - old_state = copy.deepcopy(self._cookies) - self._cookies = {} - try: - self.load(filename, ignore_discard, ignore_expires) - except OSError: - self._cookies = old_state - raise - - finally: - self._cookies_lock.release() - - -def lwp_cookie_str(cookie): - """Return string representation of Cookie in the LWP cookie file format. - - Actually, the format is extended a bit -- see module docstring. - - """ - h = [(cookie.name, cookie.value), - ("path", cookie.path), - ("domain", cookie.domain)] - if cookie.port is not None: h.append(("port", cookie.port)) - if cookie.path_specified: h.append(("path_spec", None)) - if cookie.port_specified: h.append(("port_spec", None)) - if cookie.domain_initial_dot: h.append(("domain_dot", None)) - if cookie.secure: h.append(("secure", None)) - if cookie.expires: h.append(("expires", - time2isoz(float(cookie.expires)))) - if cookie.discard: h.append(("discard", None)) - if cookie.comment: h.append(("comment", cookie.comment)) - if cookie.comment_url: h.append(("commenturl", cookie.comment_url)) - - keys = sorted(cookie._rest.keys()) - for k in keys: - h.append((k, str(cookie._rest[k]))) - - h.append(("version", str(cookie.version))) - - return join_header_words([h]) - -class LWPCookieJar(FileCookieJar): - """ - The LWPCookieJar saves a sequence of "Set-Cookie3" lines. 
- "Set-Cookie3" is the format used by the libwww-perl library, not known - to be compatible with any browser, but which is easy to read and - doesn't lose information about RFC 2965 cookies. - - Additional methods - - as_lwp_str(ignore_discard=True, ignore_expired=True) - - """ - - def as_lwp_str(self, ignore_discard=True, ignore_expires=True): - """Return cookies as a string of "\\n"-separated "Set-Cookie3" headers. - - ignore_discard and ignore_expires: see docstring for FileCookieJar.save - - """ - now = time.time() - r = [] - for cookie in self: - if not ignore_discard and cookie.discard: - continue - if not ignore_expires and cookie.is_expired(now): - continue - r.append("Set-Cookie3: %s" % lwp_cookie_str(cookie)) - return "\n".join(r+[""]) - - def save(self, filename=None, ignore_discard=False, ignore_expires=False): - if filename is None: - if self.filename is not None: filename = self.filename - else: raise ValueError(MISSING_FILENAME_TEXT) - - with open(filename, "w") as f: - # There really isn't an LWP Cookies 2.0 format, but this indicates - # that there is extra information in here (domain_dot and - # port_spec) while still being compatible with libwww-perl, I hope. - f.write("#LWP-Cookies-2.0\n") - f.write(self.as_lwp_str(ignore_discard, ignore_expires)) - - def _really_load(self, f, filename, ignore_discard, ignore_expires): - magic = f.readline() - if not self.magic_re.search(magic): - msg = ("%r does not look like a Set-Cookie3 (LWP) format " - "file" % filename) - raise LoadError(msg) - - now = time.time() - - header = "Set-Cookie3:" - boolean_attrs = ("port_spec", "path_spec", "domain_dot", - "secure", "discard") - value_attrs = ("version", - "port", "path", "domain", - "expires", - "comment", "commenturl") - - try: - while 1: - line = f.readline() - if line == "": break - if not line.startswith(header): - continue - line = line[len(header):].strip() - - for data in split_header_words([line]): - name, value = data[0] - standard = {} - rest = {} - for k in boolean_attrs: - standard[k] = False - for k, v in data[1:]: - if k is not None: - lc = k.lower() - else: - lc = None - # don't lose case distinction for unknown fields - if (lc in value_attrs) or (lc in boolean_attrs): - k = lc - if k in boolean_attrs: - if v is None: v = True - standard[k] = v - elif k in value_attrs: - standard[k] = v - else: - rest[k] = v - - h = standard.get - expires = h("expires") - discard = h("discard") - if expires is not None: - expires = iso2time(expires) - if expires is None: - discard = True - domain = h("domain") - domain_specified = domain.startswith(".") - c = Cookie(h("version"), name, value, - h("port"), h("port_spec"), - domain, domain_specified, h("domain_dot"), - h("path"), h("path_spec"), - h("secure"), - expires, - discard, - h("comment"), - h("commenturl"), - rest) - if not ignore_discard and c.discard: - continue - if not ignore_expires and c.is_expired(now): - continue - self.set_cookie(c) - except OSError: - raise - except Exception: - _warn_unhandled_exception() - raise LoadError("invalid Set-Cookie3 format file %r: %r" % - (filename, line)) - - -class MozillaCookieJar(FileCookieJar): - """ - - WARNING: you may want to backup your browser's cookies file if you use - this class to save cookies. I *think* it works, but there have been - bugs in the past! - - This class differs from CookieJar only in the format it uses to save and - load cookies to and from a file. This class uses the Mozilla/Netscape - `cookies.txt' format. lynx uses this file format, too. 
- - Don't expect cookies saved while the browser is running to be noticed by - the browser (in fact, Mozilla on unix will overwrite your saved cookies if - you change them on disk while it's running; on Windows, you probably can't - save at all while the browser is running). - - Note that the Mozilla/Netscape format will downgrade RFC2965 cookies to - Netscape cookies on saving. - - In particular, the cookie version and port number information is lost, - together with information about whether or not Path, Port and Discard were - specified by the Set-Cookie2 (or Set-Cookie) header, and whether or not the - domain as set in the HTTP header started with a dot (yes, I'm aware some - domains in Netscape files start with a dot and some don't -- trust me, you - really don't want to know any more about this). - - Note that though Mozilla and Netscape use the same format, they use - slightly different headers. The class saves cookies using the Netscape - header by default (Mozilla can cope with that). - - """ - magic_re = re.compile("#( Netscape)? HTTP Cookie File") - header = """\ -# Netscape HTTP Cookie File -# http://curl.haxx.se/rfc/cookie_spec.html -# This is a generated file! Do not edit. - -""" - - def _really_load(self, f, filename, ignore_discard, ignore_expires): - now = time.time() - - magic = f.readline() - if not self.magic_re.search(magic): - raise LoadError( - "%r does not look like a Netscape format cookies file" % - filename) - - try: - while 1: - line = f.readline() - if line == "": break - - # last field may be absent, so keep any trailing tab - if line.endswith("\n"): line = line[:-1] - - # skip comments and blank lines XXX what is $ for? - if (line.strip().startswith(("#", "$")) or - line.strip() == ""): - continue - - domain, domain_specified, path, secure, expires, name, value = \ - line.split("\t") - secure = (secure == "TRUE") - domain_specified = (domain_specified == "TRUE") - if name == "": - # cookies.txt regards 'Set-Cookie: foo' as a cookie - # with no name, whereas http.cookiejar regards it as a - # cookie with no value. 
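# --- Editor's sketch (not part of the deleted module) ---------------------
# One cookies.txt record in the tab-separated layout parsed above --
# domain, domain_specified, path, secure, expires, name, value.  The field
# values are hypothetical.
record = "\t".join([".example.com", "TRUE", "/", "FALSE",
                    "2147483647", "session", "abc123"])
print(record)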
- name = value - value = None - - initial_dot = domain.startswith(".") - assert domain_specified == initial_dot - - discard = False - if expires == "": - expires = None - discard = True - - # assume path_specified is false - c = Cookie(0, name, value, - None, False, - domain, domain_specified, initial_dot, - path, False, - secure, - expires, - discard, - None, - None, - {}) - if not ignore_discard and c.discard: - continue - if not ignore_expires and c.is_expired(now): - continue - self.set_cookie(c) - - except OSError: - raise - except Exception: - _warn_unhandled_exception() - raise LoadError("invalid Netscape format cookies file %r: %r" % - (filename, line)) - - def save(self, filename=None, ignore_discard=False, ignore_expires=False): - if filename is None: - if self.filename is not None: filename = self.filename - else: raise ValueError(MISSING_FILENAME_TEXT) - - with open(filename, "w") as f: - f.write(self.header) - now = time.time() - for cookie in self: - if not ignore_discard and cookie.discard: - continue - if not ignore_expires and cookie.is_expired(now): - continue - if cookie.secure: secure = "TRUE" - else: secure = "FALSE" - if cookie.domain.startswith("."): initial_dot = "TRUE" - else: initial_dot = "FALSE" - if cookie.expires is not None: - expires = str(cookie.expires) - else: - expires = "" - if cookie.value is None: - # cookies.txt regards 'Set-Cookie: foo' as a cookie - # with no name, whereas http.cookiejar regards it as a - # cookie with no value. - name = "" - value = cookie.name - else: - name = cookie.name - value = cookie.value - f.write( - "\t".join([cookie.domain, initial_dot, cookie.path, - secure, expires, name, value])+ - "\n") diff --git a/venv/lib/python3.6/site-packages/eventlet/green/http/cookies.py b/venv/lib/python3.6/site-packages/eventlet/green/http/cookies.py deleted file mode 100644 index 0a0a150..0000000 --- a/venv/lib/python3.6/site-packages/eventlet/green/http/cookies.py +++ /dev/null @@ -1,691 +0,0 @@ -# This is part of Python source code with Eventlet-specific modifications. -# -# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, -# 2011, 2012, 2013, 2014, 2015, 2016 Python Software Foundation; All Rights -# Reserved -# -# PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 -# -------------------------------------------- -# -# 1. This LICENSE AGREEMENT is between the Python Software Foundation -# ("PSF"), and the Individual or Organization ("Licensee") accessing and -# otherwise using this software ("Python") in source or binary form and -# its associated documentation. -# -# 2. Subject to the terms and conditions of this License Agreement, PSF hereby -# grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, -# analyze, test, perform and/or display publicly, prepare derivative works, -# distribute, and otherwise use Python alone or in any derivative version, -# provided, however, that PSF's License Agreement and PSF's notice of copyright, -# i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, -# 2011, 2012, 2013, 2014, 2015, 2016 Python Software Foundation; All Rights -# Reserved" are retained in Python alone or in any derivative version prepared by -# Licensee. -# -# 3. In the event Licensee prepares a derivative work that is based on -# or incorporates Python or any part thereof, and wants to make -# the derivative work available to others as provided herein, then -# Licensee hereby agrees to include in any such work a brief summary of -# the changes made to Python. 
-# -# 4. PSF is making Python available to Licensee on an "AS IS" -# basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR -# IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND -# DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS -# FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT -# INFRINGE ANY THIRD PARTY RIGHTS. -# -# 5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON -# FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS -# A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, -# OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. -# -# 6. This License Agreement will automatically terminate upon a material -# breach of its terms and conditions. -# -# 7. Nothing in this License Agreement shall be deemed to create any -# relationship of agency, partnership, or joint venture between PSF and -# Licensee. This License Agreement does not grant permission to use PSF -# trademarks or trade name in a trademark sense to endorse or promote -# products or services of Licensee, or any third party. -# -# 8. By copying, installing or otherwise using Python, Licensee -# agrees to be bound by the terms and conditions of this License -# Agreement. -#### -# Copyright 2000 by Timothy O'Malley -# -# All Rights Reserved -# -# Permission to use, copy, modify, and distribute this software -# and its documentation for any purpose and without fee is hereby -# granted, provided that the above copyright notice appear in all -# copies and that both that copyright notice and this permission -# notice appear in supporting documentation, and that the name of -# Timothy O'Malley not be used in advertising or publicity -# pertaining to distribution of the software without specific, written -# prior permission. -# -# Timothy O'Malley DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS -# SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -# AND FITNESS, IN NO EVENT SHALL Timothy O'Malley BE LIABLE FOR -# ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, -# WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS -# ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -# PERFORMANCE OF THIS SOFTWARE. -# -#### -# -# Id: Cookie.py,v 2.29 2000/08/23 05:28:49 timo Exp -# by Timothy O'Malley -# -# Cookie.py is a Python module for the handling of HTTP -# cookies as a Python dictionary. See RFC 2109 for more -# information on cookies. -# -# The original idea to treat Cookies as a dictionary came from -# Dave Mitchell (davem@magnet.com) in 1995, when he released the -# first version of nscookie.py. -# -#### - -r""" -Here's a sample session to show how to use this module. -At the moment, this is the only documentation. - -The Basics ----------- - -Importing is easy... - - >>> from http import cookies - -Most of the time you start by creating a cookie. - - >>> C = cookies.SimpleCookie() - -Once you've created your Cookie, you can add values just as if it were -a dictionary. - - >>> C = cookies.SimpleCookie() - >>> C["fig"] = "newton" - >>> C["sugar"] = "wafer" - >>> C.output() - 'Set-Cookie: fig=newton\r\nSet-Cookie: sugar=wafer' - -Notice that the printable representation of a Cookie is the -appropriate format for a Set-Cookie: header. This is the -default behavior. 
You can change the header and printed -attributes by using the .output() function - - >>> C = cookies.SimpleCookie() - >>> C["rocky"] = "road" - >>> C["rocky"]["path"] = "/cookie" - >>> print(C.output(header="Cookie:")) - Cookie: rocky=road; Path=/cookie - >>> print(C.output(attrs=[], header="Cookie:")) - Cookie: rocky=road - -The load() method of a Cookie extracts cookies from a string. In a -CGI script, you would use this method to extract the cookies from the -HTTP_COOKIE environment variable. - - >>> C = cookies.SimpleCookie() - >>> C.load("chips=ahoy; vienna=finger") - >>> C.output() - 'Set-Cookie: chips=ahoy\r\nSet-Cookie: vienna=finger' - -The load() method is darn-tootin smart about identifying cookies -within a string. Escaped quotation marks, nested semicolons, and other -such trickeries do not confuse it. - - >>> C = cookies.SimpleCookie() - >>> C.load('keebler="E=everybody; L=\\"Loves\\"; fudge=\\012;";') - >>> print(C) - Set-Cookie: keebler="E=everybody; L=\"Loves\"; fudge=\012;" - -Each element of the Cookie also supports all of the RFC 2109 -Cookie attributes. Here's an example which sets the Path -attribute. - - >>> C = cookies.SimpleCookie() - >>> C["oreo"] = "doublestuff" - >>> C["oreo"]["path"] = "/" - >>> print(C) - Set-Cookie: oreo=doublestuff; Path=/ - -Each dictionary element has a 'value' attribute, which gives you -back the value associated with the key. - - >>> C = cookies.SimpleCookie() - >>> C["twix"] = "none for you" - >>> C["twix"].value - 'none for you' - -The SimpleCookie expects that all values should be standard strings. -Just to be sure, SimpleCookie invokes the str() builtin to convert -the value to a string, when the values are set dictionary-style. - - >>> C = cookies.SimpleCookie() - >>> C["number"] = 7 - >>> C["string"] = "seven" - >>> C["number"].value - '7' - >>> C["string"].value - 'seven' - >>> C.output() - 'Set-Cookie: number=7\r\nSet-Cookie: string=seven' - -Finis. -""" - -# -# Import our required modules -# -import re -import string - -__all__ = ["CookieError", "BaseCookie", "SimpleCookie"] - -_nulljoin = ''.join -_semispacejoin = '; '.join -_spacejoin = ' '.join - -def _warn_deprecated_setter(setter): - import warnings - msg = ('The .%s setter is deprecated. The attribute will be read-only in ' - 'future releases. Please use the set() method instead.' % setter) - warnings.warn(msg, DeprecationWarning, stacklevel=3) - -# -# Define an exception visible to External modules -# -class CookieError(Exception): - pass - - -# These quoting routines conform to the RFC2109 specification, which in -# turn references the character definitions from RFC2068. They provide -# a two-way quoting algorithm. Any non-text character is translated -# into a 4 character sequence: a forward-slash followed by the -# three-digit octal equivalent of the character. Any '\' or '"' is -# quoted with a preceding '\' slash. -# Because of the way browsers really handle cookies (as opposed to what -# the RFC says) we also encode "," and ";". -# -# These are taken from RFC2068 and RFC2109. 
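# --- Editor's sketch (not part of the deleted module) ---------------------
# The quoting scheme described above, observed through the stock http.cookies
# module: characters outside the legal set become \ooo octal escapes, and
# '"' and '\' are backslash-escaped.
from http import cookies

c = cookies.SimpleCookie()
c["keebler"] = 'E=mc2; "Loves"'
print(c.output())
# expected, roughly: Set-Cookie: keebler="E=mc2\073 \"Loves\""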
-# _LegalChars is the list of chars which don't require "'s -# _Translator hash-table for fast quoting -# -_LegalChars = string.ascii_letters + string.digits + "!#$%&'*+-.^_`|~:" -_UnescapedChars = _LegalChars + ' ()/<=>?@[]{}' - -_Translator = {n: '\\%03o' % n - for n in set(range(256)) - set(map(ord, _UnescapedChars))} -_Translator.update({ - ord('"'): '\\"', - ord('\\'): '\\\\', -}) - -# Eventlet change: match used instead of fullmatch for Python 3.3 compatibility -_is_legal_key = re.compile(r'[%s]+\Z' % re.escape(_LegalChars)).match - -def _quote(str): - r"""Quote a string for use in a cookie header. - - If the string does not need to be double-quoted, then just return the - string. Otherwise, surround the string in doublequotes and quote - (with a \) special characters. - """ - if str is None or _is_legal_key(str): - return str - else: - return '"' + str.translate(_Translator) + '"' - - -_OctalPatt = re.compile(r"\\[0-3][0-7][0-7]") -_QuotePatt = re.compile(r"[\\].") - -def _unquote(str): - # If there aren't any doublequotes, - # then there can't be any special characters. See RFC 2109. - if str is None or len(str) < 2: - return str - if str[0] != '"' or str[-1] != '"': - return str - - # We have to assume that we must decode this string. - # Down to work. - - # Remove the "s - str = str[1:-1] - - # Check for special sequences. Examples: - # \012 --> \n - # \" --> " - # - i = 0 - n = len(str) - res = [] - while 0 <= i < n: - o_match = _OctalPatt.search(str, i) - q_match = _QuotePatt.search(str, i) - if not o_match and not q_match: # Neither matched - res.append(str[i:]) - break - # else: - j = k = -1 - if o_match: - j = o_match.start(0) - if q_match: - k = q_match.start(0) - if q_match and (not o_match or k < j): # QuotePatt matched - res.append(str[i:k]) - res.append(str[k+1]) - i = k + 2 - else: # OctalPatt matched - res.append(str[i:j]) - res.append(chr(int(str[j+1:j+4], 8))) - i = j + 4 - return _nulljoin(res) - -# The _getdate() routine is used to set the expiration time in the cookie's HTTP -# header. By default, _getdate() returns the current time in the appropriate -# "expires" format for a Set-Cookie header. The one optional argument is an -# offset from now, in seconds. For example, an offset of -3600 means "one hour -# ago". The offset may be a floating point number. -# - -_weekdayname = ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun'] - -_monthname = [None, - 'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', - 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'] - -def _getdate(future=0, weekdayname=_weekdayname, monthname=_monthname): - from eventlet.green.time import gmtime, time - now = time() - year, month, day, hh, mm, ss, wd, y, z = gmtime(now + future) - return "%s, %02d %3s %4d %02d:%02d:%02d GMT" % \ - (weekdayname[wd], day, monthname[month], year, hh, mm, ss) - - -class Morsel(dict): - """A class to hold ONE (key, value) pair. - - In a cookie, each such pair may have several attributes, so this class is - used to keep the attributes associated with the appropriate key,value pair. - This class also includes a coded_value attribute, which is used to hold - the network representation of the value. This is most useful when Python - objects are pickled for network transit. 
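# --- Editor's sketch (not part of the deleted module) ---------------------
# The "expires" date string built by _getdate() above, reproduced with
# time.strftime.  Note the real helper uses fixed English day/month tables to
# stay locale-independent; %a/%b only match under an English locale.
import time

def http_expires(offset=0):
    return time.strftime("%a, %d %b %Y %H:%M:%S GMT",
                         time.gmtime(time.time() + offset))

print(http_expires(3600))   # e.g. "Sat, 01 Jan 2022 13:45:00 GMT"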
- """ - # RFC 2109 lists these attributes as reserved: - # path comment domain - # max-age secure version - # - # For historical reasons, these attributes are also reserved: - # expires - # - # This is an extension from Microsoft: - # httponly - # - # This dictionary provides a mapping from the lowercase - # variant on the left to the appropriate traditional - # formatting on the right. - _reserved = { - "expires" : "expires", - "path" : "Path", - "comment" : "Comment", - "domain" : "Domain", - "max-age" : "Max-Age", - "secure" : "Secure", - "httponly" : "HttpOnly", - "version" : "Version", - } - - _flags = {'secure', 'httponly'} - - def __init__(self): - # Set defaults - self._key = self._value = self._coded_value = None - - # Set default attributes - for key in self._reserved: - dict.__setitem__(self, key, "") - - @property - def key(self): - return self._key - - @key.setter - def key(self, key): - _warn_deprecated_setter('key') - self._key = key - - @property - def value(self): - return self._value - - @value.setter - def value(self, value): - _warn_deprecated_setter('value') - self._value = value - - @property - def coded_value(self): - return self._coded_value - - @coded_value.setter - def coded_value(self, coded_value): - _warn_deprecated_setter('coded_value') - self._coded_value = coded_value - - def __setitem__(self, K, V): - K = K.lower() - if not K in self._reserved: - raise CookieError("Invalid attribute %r" % (K,)) - dict.__setitem__(self, K, V) - - def setdefault(self, key, val=None): - key = key.lower() - if key not in self._reserved: - raise CookieError("Invalid attribute %r" % (key,)) - return dict.setdefault(self, key, val) - - def __eq__(self, morsel): - if not isinstance(morsel, Morsel): - return NotImplemented - return (dict.__eq__(self, morsel) and - self._value == morsel._value and - self._key == morsel._key and - self._coded_value == morsel._coded_value) - - __ne__ = object.__ne__ - - def copy(self): - morsel = Morsel() - dict.update(morsel, self) - morsel.__dict__.update(self.__dict__) - return morsel - - def update(self, values): - data = {} - for key, val in dict(values).items(): - key = key.lower() - if key not in self._reserved: - raise CookieError("Invalid attribute %r" % (key,)) - data[key] = val - dict.update(self, data) - - def isReservedKey(self, K): - return K.lower() in self._reserved - - def set(self, key, val, coded_val, LegalChars=_LegalChars): - if LegalChars != _LegalChars: - import warnings - warnings.warn( - 'LegalChars parameter is deprecated, ignored and will ' - 'be removed in future versions.', DeprecationWarning, - stacklevel=2) - - if key.lower() in self._reserved: - raise CookieError('Attempt to set a reserved key %r' % (key,)) - if not _is_legal_key(key): - raise CookieError('Illegal key %r' % (key,)) - - # It's a good key, so save it. 
- self._key = key - self._value = val - self._coded_value = coded_val - - def __getstate__(self): - return { - 'key': self._key, - 'value': self._value, - 'coded_value': self._coded_value, - } - - def __setstate__(self, state): - self._key = state['key'] - self._value = state['value'] - self._coded_value = state['coded_value'] - - def output(self, attrs=None, header="Set-Cookie:"): - return "%s %s" % (header, self.OutputString(attrs)) - - __str__ = output - - def __repr__(self): - return '<%s: %s>' % (self.__class__.__name__, self.OutputString()) - - def js_output(self, attrs=None): - # Print javascript - return """ - - """ % (self.OutputString(attrs).replace('"', r'\"')) - - def OutputString(self, attrs=None): - # Build up our result - # - result = [] - append = result.append - - # First, the key=value pair - append("%s=%s" % (self.key, self.coded_value)) - - # Now add any defined attributes - if attrs is None: - attrs = self._reserved - items = sorted(self.items()) - for key, value in items: - if value == "": - continue - if key not in attrs: - continue - if key == "expires" and isinstance(value, int): - append("%s=%s" % (self._reserved[key], _getdate(value))) - elif key == "max-age" and isinstance(value, int): - append("%s=%d" % (self._reserved[key], value)) - elif key in self._flags: - if value: - append(str(self._reserved[key])) - else: - append("%s=%s" % (self._reserved[key], value)) - - # Return the result - return _semispacejoin(result) - - -# -# Pattern for finding cookie -# -# This used to be strict parsing based on the RFC2109 and RFC2068 -# specifications. I have since discovered that MSIE 3.0x doesn't -# follow the character rules outlined in those specs. As a -# result, the parsing rules here are less strict. -# - -_LegalKeyChars = r"\w\d!#%&'~_`><@,:/\$\*\+\-\.\^\|\)\(\?\}\{\=" -_LegalValueChars = _LegalKeyChars + '\[\]' -_CookiePattern = re.compile(r""" - (?x) # This is a verbose pattern - \s* # Optional whitespace at start of cookie - (?P # Start of group 'key' - [""" + _LegalKeyChars + r"""]+? # Any word of at least one letter - ) # End of group 'key' - ( # Optional group: there may not be a value. - \s*=\s* # Equal Sign - (?P # Start of group 'val' - "(?:[^\\"]|\\.)*" # Any doublequoted string - | # or - \w{3},\s[\w\d\s-]{9,11}\s[\d:]{8}\sGMT # Special case for "expires" attr - | # or - [""" + _LegalValueChars + r"""]* # Any word or empty string - ) # End of group 'val' - )? # End of optional value group - \s* # Any number of spaces. - (\s+|;|$) # Ending either at space, semicolon, or EOS. - """, re.ASCII) # May be removed if safe. - - -# At long last, here is the cookie class. Using this class is almost just like -# using a dictionary. See this module's docstring for example usage. -# -class BaseCookie(dict): - """A container class for a set of Morsels.""" - - def value_decode(self, val): - """real_value, coded_value = value_decode(STRING) - Called prior to setting a cookie's value from the network - representation. The VALUE is the value read from HTTP - header. - Override this function to modify the behavior of cookies. - """ - return val, val - - def value_encode(self, val): - """real_value, coded_value = value_encode(VALUE) - Called prior to setting a cookie's value from the dictionary - representation. The VALUE is the value being assigned. - Override this function to modify the behavior of cookies. 
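# --- Editor's sketch (not part of the deleted module) ---------------------
# A Morsel holds one name/value pair plus its attributes; OutputString()
# above renders them (attributes sorted by name) into a Set-Cookie value.
from http import cookies

c = cookies.SimpleCookie()
c["session"] = "abc123"
c["session"]["path"] = "/"
c["session"]["max-age"] = 3600
c["session"]["httponly"] = True
print(c["session"].OutputString())
# expected, roughly: session=abc123; HttpOnly; Max-Age=3600; Path=/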
- """ - strval = str(val) - return strval, strval - - def __init__(self, input=None): - if input: - self.load(input) - - def __set(self, key, real_value, coded_value): - """Private method for setting a cookie's value""" - M = self.get(key, Morsel()) - M.set(key, real_value, coded_value) - dict.__setitem__(self, key, M) - - def __setitem__(self, key, value): - """Dictionary style assignment.""" - if isinstance(value, Morsel): - # allow assignment of constructed Morsels (e.g. for pickling) - dict.__setitem__(self, key, value) - else: - rval, cval = self.value_encode(value) - self.__set(key, rval, cval) - - def output(self, attrs=None, header="Set-Cookie:", sep="\015\012"): - """Return a string suitable for HTTP.""" - result = [] - items = sorted(self.items()) - for key, value in items: - result.append(value.output(attrs, header)) - return sep.join(result) - - __str__ = output - - def __repr__(self): - l = [] - items = sorted(self.items()) - for key, value in items: - l.append('%s=%s' % (key, repr(value.value))) - return '<%s: %s>' % (self.__class__.__name__, _spacejoin(l)) - - def js_output(self, attrs=None): - """Return a string suitable for JavaScript.""" - result = [] - items = sorted(self.items()) - for key, value in items: - result.append(value.js_output(attrs)) - return _nulljoin(result) - - def load(self, rawdata): - """Load cookies from a string (presumably HTTP_COOKIE) or - from a dictionary. Loading cookies from a dictionary 'd' - is equivalent to calling: - map(Cookie.__setitem__, d.keys(), d.values()) - """ - if isinstance(rawdata, str): - self.__parse_string(rawdata) - else: - # self.update() wouldn't call our custom __setitem__ - for key, value in rawdata.items(): - self[key] = value - return - - def __parse_string(self, str, patt=_CookiePattern): - i = 0 # Our starting point - n = len(str) # Length of string - parsed_items = [] # Parsed (type, key, value) triples - morsel_seen = False # A key=value pair was previously encountered - - TYPE_ATTRIBUTE = 1 - TYPE_KEYVALUE = 2 - - # We first parse the whole cookie string and reject it if it's - # syntactically invalid (this helps avoid some classes of injection - # attacks). - while 0 <= i < n: - # Start looking for a cookie - match = patt.match(str, i) - if not match: - # No more cookies - break - - key, value = match.group("key"), match.group("val") - i = match.end(0) - - if key[0] == "$": - if not morsel_seen: - # We ignore attributes which pertain to the cookie - # mechanism as a whole, such as "$Version". - # See RFC 2965. (Does anyone care?) - continue - parsed_items.append((TYPE_ATTRIBUTE, key[1:], value)) - elif key.lower() in Morsel._reserved: - if not morsel_seen: - # Invalid cookie string - return - if value is None: - if key.lower() in Morsel._flags: - parsed_items.append((TYPE_ATTRIBUTE, key, True)) - else: - # Invalid cookie string - return - else: - parsed_items.append((TYPE_ATTRIBUTE, key, _unquote(value))) - elif value is not None: - parsed_items.append((TYPE_KEYVALUE, key, self.value_decode(value))) - morsel_seen = True - else: - # Invalid cookie string - return - - # The cookie string is valid, apply it. - M = None # current morsel - for tp, key, value in parsed_items: - if tp == TYPE_ATTRIBUTE: - assert M is not None - M[key] = value - else: - assert tp == TYPE_KEYVALUE - rval, cval = value - self.__set(key, rval, cval) - M = self[key] - - -class SimpleCookie(BaseCookie): - """ - SimpleCookie supports strings as cookie values. 
When setting - the value using the dictionary assignment notation, SimpleCookie - calls the builtin str() to convert the value to a string. Values - received from HTTP are kept as strings. - """ - def value_decode(self, val): - return _unquote(val), val - - def value_encode(self, val): - strval = str(val) - return strval, _quote(strval) diff --git a/venv/lib/python3.6/site-packages/eventlet/green/http/server.py b/venv/lib/python3.6/site-packages/eventlet/green/http/server.py deleted file mode 100644 index 190bdb9..0000000 --- a/venv/lib/python3.6/site-packages/eventlet/green/http/server.py +++ /dev/null @@ -1,1266 +0,0 @@ -# This is part of Python source code with Eventlet-specific modifications. -# -# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, -# 2011, 2012, 2013, 2014, 2015, 2016 Python Software Foundation; All Rights -# Reserved -# -# PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 -# -------------------------------------------- -# -# 1. This LICENSE AGREEMENT is between the Python Software Foundation -# ("PSF"), and the Individual or Organization ("Licensee") accessing and -# otherwise using this software ("Python") in source or binary form and -# its associated documentation. -# -# 2. Subject to the terms and conditions of this License Agreement, PSF hereby -# grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, -# analyze, test, perform and/or display publicly, prepare derivative works, -# distribute, and otherwise use Python alone or in any derivative version, -# provided, however, that PSF's License Agreement and PSF's notice of copyright, -# i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, -# 2011, 2012, 2013, 2014, 2015, 2016 Python Software Foundation; All Rights -# Reserved" are retained in Python alone or in any derivative version prepared by -# Licensee. -# -# 3. In the event Licensee prepares a derivative work that is based on -# or incorporates Python or any part thereof, and wants to make -# the derivative work available to others as provided herein, then -# Licensee hereby agrees to include in any such work a brief summary of -# the changes made to Python. -# -# 4. PSF is making Python available to Licensee on an "AS IS" -# basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR -# IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND -# DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS -# FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT -# INFRINGE ANY THIRD PARTY RIGHTS. -# -# 5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON -# FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS -# A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, -# OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. -# -# 6. This License Agreement will automatically terminate upon a material -# breach of its terms and conditions. -# -# 7. Nothing in this License Agreement shall be deemed to create any -# relationship of agency, partnership, or joint venture between PSF and -# Licensee. This License Agreement does not grant permission to use PSF -# trademarks or trade name in a trademark sense to endorse or promote -# products or services of Licensee, or any third party. -# -# 8. By copying, installing or otherwise using Python, Licensee -# agrees to be bound by the terms and conditions of this License -# Agreement. -"""HTTP server classes. 
- -Note: BaseHTTPRequestHandler doesn't implement any HTTP request; see -SimpleHTTPRequestHandler for simple implementations of GET, HEAD and POST, -and CGIHTTPRequestHandler for CGI scripts. - -It does, however, optionally implement HTTP/1.1 persistent connections, -as of version 0.3. - -Notes on CGIHTTPRequestHandler ------------------------------- - -This class implements GET and POST requests to cgi-bin scripts. - -If the os.fork() function is not present (e.g. on Windows), -subprocess.Popen() is used as a fallback, with slightly altered semantics. - -In all cases, the implementation is intentionally naive -- all -requests are executed synchronously. - -SECURITY WARNING: DON'T USE THIS CODE UNLESS YOU ARE INSIDE A FIREWALL --- it may execute arbitrary Python code or external programs. - -Note that status code 200 is sent prior to execution of a CGI script, so -scripts cannot send other status codes such as 302 (redirect). - -XXX To do: - -- log requests even later (to capture byte count) -- log user-agent header and other interesting goodies -- send error log to separate file -""" - - -# See also: -# -# HTTP Working Group T. Berners-Lee -# INTERNET-DRAFT R. T. Fielding -# H. Frystyk Nielsen -# Expires September 8, 1995 March 8, 1995 -# -# URL: http://www.ics.uci.edu/pub/ietf/http/draft-ietf-http-v10-spec-00.txt -# -# and -# -# Network Working Group R. Fielding -# Request for Comments: 2616 et al -# Obsoletes: 2068 June 1999 -# Category: Standards Track -# -# URL: http://www.faqs.org/rfcs/rfc2616.html - -# Log files -# --------- -# -# Here's a quote from the NCSA httpd docs about log file format. -# -# | The logfile format is as follows. Each line consists of: -# | -# | host rfc931 authuser [DD/Mon/YYYY:hh:mm:ss] "request" ddd bbbb -# | -# | host: Either the DNS name or the IP number of the remote client -# | rfc931: Any information returned by identd for this person, -# | - otherwise. -# | authuser: If user sent a userid for authentication, the user name, -# | - otherwise. -# | DD: Day -# | Mon: Month (calendar name) -# | YYYY: Year -# | hh: hour (24-hour format, the machine's timezone) -# | mm: minutes -# | ss: seconds -# | request: The first line of the HTTP request as sent by the client. -# | ddd: the status code returned by the server, - if not available. -# | bbbb: the total number of bytes sent, -# | *not including the HTTP/1.0 header*, - if not available -# | -# | You can determine the name of the file accessed through request. -# -# (Actually, the latter is only true if you know the server configuration -# at the time the request was made!) - -__version__ = "0.6" - -__all__ = [ - "HTTPServer", "BaseHTTPRequestHandler", - "SimpleHTTPRequestHandler", "CGIHTTPRequestHandler", -] - -import email.utils -import html -import io -import mimetypes -import posixpath -import shutil -import sys -import urllib.parse -import copy -import argparse - -from eventlet.green import ( - os, - time, - select, - socket, - SocketServer as socketserver, - subprocess, -) -from eventlet.green.http import client as http_client, HTTPStatus - - -# Default error message template -DEFAULT_ERROR_MESSAGE = """\ - - - - - Error response - - -

-        <h1>Error response</h1>
-        <p>Error code: %(code)d</p>
-        <p>Message: %(message)s.</p>
-        <p>Error code explanation: %(code)s - %(explain)s.</p>
- - -""" - -DEFAULT_ERROR_CONTENT_TYPE = "text/html;charset=utf-8" - -class HTTPServer(socketserver.TCPServer): - - allow_reuse_address = 1 # Seems to make sense in testing environment - - def server_bind(self): - """Override server_bind to store the server name.""" - socketserver.TCPServer.server_bind(self) - host, port = self.server_address[:2] - self.server_name = socket.getfqdn(host) - self.server_port = port - - -class BaseHTTPRequestHandler(socketserver.StreamRequestHandler): - - """HTTP request handler base class. - - The following explanation of HTTP serves to guide you through the - code as well as to expose any misunderstandings I may have about - HTTP (so you don't need to read the code to figure out I'm wrong - :-). - - HTTP (HyperText Transfer Protocol) is an extensible protocol on - top of a reliable stream transport (e.g. TCP/IP). The protocol - recognizes three parts to a request: - - 1. One line identifying the request type and path - 2. An optional set of RFC-822-style headers - 3. An optional data part - - The headers and data are separated by a blank line. - - The first line of the request has the form - - - - where is a (case-sensitive) keyword such as GET or POST, - is a string containing path information for the request, - and should be the string "HTTP/1.0" or "HTTP/1.1". - is encoded using the URL encoding scheme (using %xx to signify - the ASCII character with hex code xx). - - The specification specifies that lines are separated by CRLF but - for compatibility with the widest range of clients recommends - servers also handle LF. Similarly, whitespace in the request line - is treated sensibly (allowing multiple spaces between components - and allowing trailing whitespace). - - Similarly, for output, lines ought to be separated by CRLF pairs - but most clients grok LF characters just fine. - - If the first line of the request has the form - - - - (i.e. is left out) then this is assumed to be an HTTP - 0.9 request; this form has no optional headers and data part and - the reply consists of just the data. - - The reply form of the HTTP 1.x protocol again has three parts: - - 1. One line giving the response code - 2. An optional set of RFC-822-style headers - 3. The data - - Again, the headers and data are separated by a blank line. - - The response code line has the form - - - - where is the protocol version ("HTTP/1.0" or "HTTP/1.1"), - is a 3-digit response code indicating success or - failure of the request, and is an optional - human-readable string explaining what the response code means. - - This server parses the request and the headers, and then calls a - function specific to the request type (). Specifically, - a request SPAM will be handled by a method do_SPAM(). If no - such method exists the server sends an error response to the - client. If it exists, it is called with no arguments: - - do_SPAM() - - Note that the request name is case sensitive (i.e. SPAM and spam - are different requests). - - The various request details are stored in instance variables: - - - client_address is the client IP address in the form (host, - port); - - - command, path and version are the broken-down request line; - - - headers is an instance of email.message.Message (or a derived - class) containing the header information; - - - rfile is a file object open for reading positioned at the - start of the optional input data part; - - - wfile is a file object open for writing. - - IT IS IMPORTANT TO ADHERE TO THE PROTOCOL FOR WRITING! 
- - The first thing to be written must be the response line. Then - follow 0 or more header lines, then a blank line, and then the - actual data (if any). The meaning of the header lines depends on - the command executed by the server; in most cases, when data is - returned, there should be at least one header line of the form - - Content-type: / - - where and should be registered MIME types, - e.g. "text/html" or "text/plain". - - """ - - # The Python system version, truncated to its first component. - sys_version = "Python/" + sys.version.split()[0] - - # The server software version. You may want to override this. - # The format is multiple whitespace-separated strings, - # where each string is of the form name[/version]. - server_version = "BaseHTTP/" + __version__ - - error_message_format = DEFAULT_ERROR_MESSAGE - error_content_type = DEFAULT_ERROR_CONTENT_TYPE - - # The default request version. This only affects responses up until - # the point where the request line is parsed, so it mainly decides what - # the client gets back when sending a malformed request line. - # Most web servers default to HTTP 0.9, i.e. don't send a status line. - default_request_version = "HTTP/0.9" - - def parse_request(self): - """Parse a request (internal). - - The request should be stored in self.raw_requestline; the results - are in self.command, self.path, self.request_version and - self.headers. - - Return True for success, False for failure; on failure, an - error is sent back. - - """ - self.command = None # set in case of error on the first line - self.request_version = version = self.default_request_version - self.close_connection = True - requestline = str(self.raw_requestline, 'iso-8859-1') - requestline = requestline.rstrip('\r\n') - self.requestline = requestline - words = requestline.split() - if len(words) == 3: - command, path, version = words - try: - if version[:5] != 'HTTP/': - raise ValueError - base_version_number = version.split('/', 1)[1] - version_number = base_version_number.split(".") - # RFC 2145 section 3.1 says there can be only one "." and - # - major and minor numbers MUST be treated as - # separate integers; - # - HTTP/2.4 is a lower version than HTTP/2.13, which in - # turn is lower than HTTP/12.3; - # - Leading zeros MUST be ignored by recipients. - if len(version_number) != 2: - raise ValueError - version_number = int(version_number[0]), int(version_number[1]) - except (ValueError, IndexError): - self.send_error( - HTTPStatus.BAD_REQUEST, - "Bad request version (%r)" % version) - return False - if version_number >= (1, 1) and self.protocol_version >= "HTTP/1.1": - self.close_connection = False - if version_number >= (2, 0): - self.send_error( - HTTPStatus.HTTP_VERSION_NOT_SUPPORTED, - "Invalid HTTP version (%s)" % base_version_number) - return False - elif len(words) == 2: - command, path = words - self.close_connection = True - if command != 'GET': - self.send_error( - HTTPStatus.BAD_REQUEST, - "Bad HTTP/0.9 request type (%r)" % command) - return False - elif not words: - return False - else: - self.send_error( - HTTPStatus.BAD_REQUEST, - "Bad request syntax (%r)" % requestline) - return False - self.command, self.path, self.request_version = command, path, version - - # Examine the headers and look for a Connection directive. 
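A quick aside on the version check above: the RFC 2145 rule is easiest to see with a concrete comparison. This is an illustrative snippet, not part of the deleted file:

    # String comparison gets HTTP versions wrong; (major, minor) int tuples
    # are what parse_request builds and compares.
    assert "HTTP/2.4" > "HTTP/2.13"     # lexicographic -- wrong for versions
    assert (2, 4) < (2, 13)             # integer tuples -- correct (RFC 2145)

    version = "HTTP/1.1"
    major, minor = version.split('/', 1)[1].split('.')
    assert (int(major), int(minor)) >= (1, 1)   # allows persistent connections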
- try: - self.headers = http_client.parse_headers(self.rfile, - _class=self.MessageClass) - except http_client.LineTooLong as err: - self.send_error( - HTTPStatus.REQUEST_HEADER_FIELDS_TOO_LARGE, - "Line too long", - str(err)) - return False - except http_client.HTTPException as err: - self.send_error( - HTTPStatus.REQUEST_HEADER_FIELDS_TOO_LARGE, - "Too many headers", - str(err) - ) - return False - - conntype = self.headers.get('Connection', "") - if conntype.lower() == 'close': - self.close_connection = True - elif (conntype.lower() == 'keep-alive' and - self.protocol_version >= "HTTP/1.1"): - self.close_connection = False - # Examine the headers and look for an Expect directive - expect = self.headers.get('Expect', "") - if (expect.lower() == "100-continue" and - self.protocol_version >= "HTTP/1.1" and - self.request_version >= "HTTP/1.1"): - if not self.handle_expect_100(): - return False - return True - - def handle_expect_100(self): - """Decide what to do with an "Expect: 100-continue" header. - - If the client is expecting a 100 Continue response, we must - respond with either a 100 Continue or a final response before - waiting for the request body. The default is to always respond - with a 100 Continue. You can behave differently (for example, - reject unauthorized requests) by overriding this method. - - This method should either return True (possibly after sending - a 100 Continue response) or send an error response and return - False. - - """ - self.send_response_only(HTTPStatus.CONTINUE) - self.end_headers() - return True - - def handle_one_request(self): - """Handle a single HTTP request. - - You normally don't need to override this method; see the class - __doc__ string for information on how to handle specific HTTP - commands such as GET and POST. - - """ - try: - self.raw_requestline = self.rfile.readline(65537) - if len(self.raw_requestline) > 65536: - self.requestline = '' - self.request_version = '' - self.command = '' - self.send_error(HTTPStatus.REQUEST_URI_TOO_LONG) - return - if not self.raw_requestline: - self.close_connection = True - return - if not self.parse_request(): - # An error code has been sent, just exit - return - mname = 'do_' + self.command - if not hasattr(self, mname): - self.send_error( - HTTPStatus.NOT_IMPLEMENTED, - "Unsupported method (%r)" % self.command) - return - method = getattr(self, mname) - method() - self.wfile.flush() #actually send the response if not already done. - except socket.timeout as e: - #a read or a write timed out. Discard this connection - self.log_error("Request timed out: %r", e) - self.close_connection = True - return - - def handle(self): - """Handle multiple requests if necessary.""" - self.close_connection = True - - self.handle_one_request() - while not self.close_connection: - self.handle_one_request() - - def send_error(self, code, message=None, explain=None): - """Send and log an error reply. - - Arguments are - * code: an HTTP error code - 3 digits - * message: a simple optional 1 line reason phrase. - *( HTAB / SP / VCHAR / %x80-FF ) - defaults to short entry matching the response code - * explain: a detailed message defaults to the long entry - matching the response code. - - This sends an error response (so it must be called before any - output has been generated), logs the error, and finally sends - a piece of HTML explaining the error to the user. - - """ - - try: - shortmsg, longmsg = self.responses[code] - except KeyError: - shortmsg, longmsg = '???', '???' 
- if message is None: - message = shortmsg - if explain is None: - explain = longmsg - self.log_error("code %d, message %s", code, message) - self.send_response(code, message) - self.send_header('Connection', 'close') - - # Message body is omitted for cases described in: - # - RFC7230: 3.3. 1xx, 204(No Content), 304(Not Modified) - # - RFC7231: 6.3.6. 205(Reset Content) - body = None - if (code >= 200 and - code not in (HTTPStatus.NO_CONTENT, - HTTPStatus.RESET_CONTENT, - HTTPStatus.NOT_MODIFIED)): - # HTML encode to prevent Cross Site Scripting attacks - # (see bug #1100201) - content = (self.error_message_format % { - 'code': code, - 'message': html.escape(message, quote=False), - 'explain': html.escape(explain, quote=False) - }) - body = content.encode('UTF-8', 'replace') - self.send_header("Content-Type", self.error_content_type) - self.send_header('Content-Length', int(len(body))) - self.end_headers() - - if self.command != 'HEAD' and body: - self.wfile.write(body) - - def send_response(self, code, message=None): - """Add the response header to the headers buffer and log the - response code. - - Also send two standard headers with the server software - version and the current date. - - """ - self.log_request(code) - self.send_response_only(code, message) - self.send_header('Server', self.version_string()) - self.send_header('Date', self.date_time_string()) - - def send_response_only(self, code, message=None): - """Send the response header only.""" - if self.request_version != 'HTTP/0.9': - if message is None: - if code in self.responses: - message = self.responses[code][0] - else: - message = '' - if not hasattr(self, '_headers_buffer'): - self._headers_buffer = [] - self._headers_buffer.append(("%s %d %s\r\n" % - (self.protocol_version, code, message)).encode( - 'latin-1', 'strict')) - - def send_header(self, keyword, value): - """Send a MIME header to the headers buffer.""" - if self.request_version != 'HTTP/0.9': - if not hasattr(self, '_headers_buffer'): - self._headers_buffer = [] - self._headers_buffer.append( - ("%s: %s\r\n" % (keyword, value)).encode('latin-1', 'strict')) - - if keyword.lower() == 'connection': - if value.lower() == 'close': - self.close_connection = True - elif value.lower() == 'keep-alive': - self.close_connection = False - - def end_headers(self): - """Send the blank line ending the MIME headers.""" - if self.request_version != 'HTTP/0.9': - self._headers_buffer.append(b"\r\n") - self.flush_headers() - - def flush_headers(self): - if hasattr(self, '_headers_buffer'): - self.wfile.write(b"".join(self._headers_buffer)) - self._headers_buffer = [] - - def log_request(self, code='-', size='-'): - """Log an accepted request. - - This is called by send_response(). - - """ - if isinstance(code, HTTPStatus): - code = code.value - self.log_message('"%s" %s %s', - self.requestline, str(code), str(size)) - - def log_error(self, format, *args): - """Log an error. - - This is called when a request cannot be fulfilled. By - default it passes the message on to log_message(). - - Arguments are the same as for log_message(). - - XXX This should go to the separate error log. - - """ - - self.log_message(format, *args) - - def log_message(self, format, *args): - """Log an arbitrary message. - - This is used by all other logging functions. Override - it if you have specific logging wishes. - - The first argument, FORMAT, is a format string for the - message to be logged. 
If the format string contains - any % escapes requiring parameters, they should be - specified as subsequent arguments (it's just like - printf!). - - The client ip and current date/time are prefixed to - every message. - - """ - - sys.stderr.write("%s - - [%s] %s\n" % - (self.address_string(), - self.log_date_time_string(), - format%args)) - - def version_string(self): - """Return the server software version string.""" - return self.server_version + ' ' + self.sys_version - - def date_time_string(self, timestamp=None): - """Return the current date and time formatted for a message header.""" - if timestamp is None: - timestamp = time.time() - return email.utils.formatdate(timestamp, usegmt=True) - - def log_date_time_string(self): - """Return the current time formatted for logging.""" - now = time.time() - year, month, day, hh, mm, ss, x, y, z = time.localtime(now) - s = "%02d/%3s/%04d %02d:%02d:%02d" % ( - day, self.monthname[month], year, hh, mm, ss) - return s - - weekdayname = ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun'] - - monthname = [None, - 'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', - 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'] - - def address_string(self): - """Return the client address.""" - - return self.client_address[0] - - # Essentially static class variables - - # The version of the HTTP protocol we support. - # Set this to HTTP/1.1 to enable automatic keepalive - protocol_version = "HTTP/1.0" - - # MessageClass used to parse headers - MessageClass = http_client.HTTPMessage - - # hack to maintain backwards compatibility - responses = { - v: (v.phrase, v.description) - for v in HTTPStatus.__members__.values() - } - - -class SimpleHTTPRequestHandler(BaseHTTPRequestHandler): - - """Simple HTTP request handler with GET and HEAD commands. - - This serves files from the current directory and any of its - subdirectories. The MIME type for files is determined by - calling the .guess_type() method. - - The GET and HEAD requests are identical except that the HEAD - request omits the actual contents of the file. - - """ - - server_version = "SimpleHTTP/" + __version__ - - def do_GET(self): - """Serve a GET request.""" - f = self.send_head() - if f: - try: - self.copyfile(f, self.wfile) - finally: - f.close() - - def do_HEAD(self): - """Serve a HEAD request.""" - f = self.send_head() - if f: - f.close() - - def send_head(self): - """Common code for GET and HEAD commands. - - This sends the response code and MIME headers. - - Return value is either a file object (which has to be copied - to the outputfile by the caller unless the command was HEAD, - and must be closed by the caller under all circumstances), or - None, in which case the caller has nothing further to do. 
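As an aside, the GET/HEAD split above (send_head writes the status line and headers, do_GET additionally copies the body) follows the same contract as the stdlib http.server module this vendored copy tracks. A minimal usage sketch against the stdlib, not against this file:

    from http.server import HTTPServer, SimpleHTTPRequestHandler

    # Serves files from the current working directory; a HEAD request gets
    # the same headers as GET, just without the body.
    httpd = HTTPServer(('127.0.0.1', 8000), SimpleHTTPRequestHandler)
    httpd.serve_forever()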
- - """ - path = self.translate_path(self.path) - f = None - if os.path.isdir(path): - parts = urllib.parse.urlsplit(self.path) - if not parts.path.endswith('/'): - # redirect browser - doing basically what apache does - self.send_response(HTTPStatus.MOVED_PERMANENTLY) - new_parts = (parts[0], parts[1], parts[2] + '/', - parts[3], parts[4]) - new_url = urllib.parse.urlunsplit(new_parts) - self.send_header("Location", new_url) - self.end_headers() - return None - for index in "index.html", "index.htm": - index = os.path.join(path, index) - if os.path.exists(index): - path = index - break - else: - return self.list_directory(path) - ctype = self.guess_type(path) - try: - f = open(path, 'rb') - except OSError: - self.send_error(HTTPStatus.NOT_FOUND, "File not found") - return None - try: - self.send_response(HTTPStatus.OK) - self.send_header("Content-type", ctype) - fs = os.fstat(f.fileno()) - self.send_header("Content-Length", str(fs[6])) - self.send_header("Last-Modified", self.date_time_string(fs.st_mtime)) - self.end_headers() - return f - except: - f.close() - raise - - def list_directory(self, path): - """Helper to produce a directory listing (absent index.html). - - Return value is either a file object, or None (indicating an - error). In either case, the headers are sent, making the - interface the same as for send_head(). - - """ - try: - list = os.listdir(path) - except OSError: - self.send_error( - HTTPStatus.NOT_FOUND, - "No permission to list directory") - return None - list.sort(key=lambda a: a.lower()) - r = [] - try: - displaypath = urllib.parse.unquote(self.path, - errors='surrogatepass') - except UnicodeDecodeError: - displaypath = urllib.parse.unquote(path) - displaypath = html.escape(displaypath, quote=False) - enc = sys.getfilesystemencoding() - title = 'Directory listing for %s' % displaypath - r.append('') - r.append('\n') - r.append('' % enc) - r.append('%s\n' % title) - r.append('\n

<h1>%s</h1>' % title)
-        r.append('<hr>\n<ul>')
-        for name in list:
-            fullname = os.path.join(path, name)
-            displayname = linkname = name
-            # Append / for directories or @ for symbolic links
-            if os.path.isdir(fullname):
-                displayname = name + "/"
-                linkname = name + "/"
-            if os.path.islink(fullname):
-                displayname = name + "@"
-                # Note: a link to a directory displays with @ and links with /
-            r.append('<li><a href="%s">%s</a></li>'
-                    % (urllib.parse.quote(linkname,
-                                          errors='surrogatepass'),
-                       html.escape(displayname, quote=False)))
-        r.append('</ul>\n<hr>\n</body>\n</html>
\n\n\n') - encoded = '\n'.join(r).encode(enc, 'surrogateescape') - f = io.BytesIO() - f.write(encoded) - f.seek(0) - self.send_response(HTTPStatus.OK) - self.send_header("Content-type", "text/html; charset=%s" % enc) - self.send_header("Content-Length", str(len(encoded))) - self.end_headers() - return f - - def translate_path(self, path): - """Translate a /-separated PATH to the local filename syntax. - - Components that mean special things to the local file system - (e.g. drive or directory names) are ignored. (XXX They should - probably be diagnosed.) - - """ - # abandon query parameters - path = path.split('?',1)[0] - path = path.split('#',1)[0] - # Don't forget explicit trailing slash when normalizing. Issue17324 - trailing_slash = path.rstrip().endswith('/') - try: - path = urllib.parse.unquote(path, errors='surrogatepass') - except UnicodeDecodeError: - path = urllib.parse.unquote(path) - path = posixpath.normpath(path) - words = path.split('/') - words = filter(None, words) - path = os.getcwd() - for word in words: - if os.path.dirname(word) or word in (os.curdir, os.pardir): - # Ignore components that are not a simple file/directory name - continue - path = os.path.join(path, word) - if trailing_slash: - path += '/' - return path - - def copyfile(self, source, outputfile): - """Copy all data between two file objects. - - The SOURCE argument is a file object open for reading - (or anything with a read() method) and the DESTINATION - argument is a file object open for writing (or - anything with a write() method). - - The only reason for overriding this would be to change - the block size or perhaps to replace newlines by CRLF - -- note however that this the default server uses this - to copy binary data as well. - - """ - shutil.copyfileobj(source, outputfile) - - def guess_type(self, path): - """Guess the type of a file. - - Argument is a PATH (a filename). - - Return value is a string of the form type/subtype, - usable for a MIME Content-type header. - - The default implementation looks the file's extension - up in the table self.extensions_map, using application/octet-stream - as a default; however it would be permissible (if - slow) to look inside the data to make a better guess. - - """ - - base, ext = posixpath.splitext(path) - if ext in self.extensions_map: - return self.extensions_map[ext] - ext = ext.lower() - if ext in self.extensions_map: - return self.extensions_map[ext] - else: - return self.extensions_map[''] - - if not mimetypes.inited: - mimetypes.init() # try to read system mime.types - extensions_map = mimetypes.types_map.copy() - extensions_map.update({ - '': 'application/octet-stream', # Default - '.py': 'text/plain', - '.c': 'text/plain', - '.h': 'text/plain', - }) - - -# Utilities for CGIHTTPRequestHandler - -def _url_collapse_path(path): - """ - Given a URL path, remove extra '/'s and '.' path elements and collapse - any '..' references and returns a collapsed path. - - Implements something akin to RFC-2396 5.2 step 6 to parse relative paths. - The utility of this function is limited to is_cgi method and helps - preventing some security attacks. - - Returns: The reconstituted URL, which will always start with a '/'. - - Raises: IndexError if too many '..' occur within the path. - - """ - # Query component should not be involved. - path, _, query = path.partition('?') - path = urllib.parse.unquote(path) - - # Similar to os.path.split(os.path.normpath(path)) but specific to URL - # path semantics rather than local operating system semantics. 
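To make the comment above concrete: the collapsing behaviour the helper aims for looks roughly like this (expected outputs inferred from the code below, shown as an illustrative sketch):

    # '.' segments and duplicate slashes disappear, '..' consumes the previous
    # segment, the query string is preserved, and results always start with '/'.
    assert _url_collapse_path('/a/b/../c') == '/a/c'
    assert _url_collapse_path('//a/./b') == '/a/b'
    assert _url_collapse_path('/cgi-bin/subdir/../test.py?x=1') == '/cgi-bin/test.py?x=1'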
- path_parts = path.split('/') - head_parts = [] - for part in path_parts[:-1]: - if part == '..': - head_parts.pop() # IndexError if more '..' than prior parts - elif part and part != '.': - head_parts.append( part ) - if path_parts: - tail_part = path_parts.pop() - if tail_part: - if tail_part == '..': - head_parts.pop() - tail_part = '' - elif tail_part == '.': - tail_part = '' - else: - tail_part = '' - - if query: - tail_part = '?'.join((tail_part, query)) - - splitpath = ('/' + '/'.join(head_parts), tail_part) - collapsed_path = "/".join(splitpath) - - return collapsed_path - - - -nobody = None - -def nobody_uid(): - """Internal routine to get nobody's uid""" - global nobody - if nobody: - return nobody - try: - import pwd - except ImportError: - return -1 - try: - nobody = pwd.getpwnam('nobody')[2] - except KeyError: - nobody = 1 + max(x[2] for x in pwd.getpwall()) - return nobody - - -def executable(path): - """Test for executable file.""" - return os.access(path, os.X_OK) - - -class CGIHTTPRequestHandler(SimpleHTTPRequestHandler): - - """Complete HTTP server with GET, HEAD and POST commands. - - GET and HEAD also support running CGI scripts. - - The POST command is *only* implemented for CGI scripts. - - """ - - # Determine platform specifics - have_fork = hasattr(os, 'fork') - - # Make rfile unbuffered -- we need to read one line and then pass - # the rest to a subprocess, so we can't use buffered input. - rbufsize = 0 - - def do_POST(self): - """Serve a POST request. - - This is only implemented for CGI scripts. - - """ - - if self.is_cgi(): - self.run_cgi() - else: - self.send_error( - HTTPStatus.NOT_IMPLEMENTED, - "Can only POST to CGI scripts") - - def send_head(self): - """Version of send_head that support CGI scripts""" - if self.is_cgi(): - return self.run_cgi() - else: - return SimpleHTTPRequestHandler.send_head(self) - - def is_cgi(self): - """Test whether self.path corresponds to a CGI script. - - Returns True and updates the cgi_info attribute to the tuple - (dir, rest) if self.path requires running a CGI script. - Returns False otherwise. - - If any exception is raised, the caller should assume that - self.path was rejected as invalid and act accordingly. - - The default implementation tests whether the normalized url - path begins with one of the strings in self.cgi_directories - (and the next character is a '/' or the end of the string). - - """ - collapsed_path = _url_collapse_path(self.path) - dir_sep = collapsed_path.find('/', 1) - head, tail = collapsed_path[:dir_sep], collapsed_path[dir_sep+1:] - if head in self.cgi_directories: - self.cgi_info = head, tail - return True - return False - - - cgi_directories = ['/cgi-bin', '/htbin'] - - def is_executable(self, path): - """Test whether argument path is an executable file.""" - return executable(path) - - def is_python(self, path): - """Test whether argument path is a Python script.""" - head, tail = os.path.splitext(path) - return tail.lower() in (".py", ".pyw") - - def run_cgi(self): - """Execute a CGI script.""" - dir, rest = self.cgi_info - path = dir + '/' + rest - i = path.find('/', len(dir)+1) - while i >= 0: - nextdir = path[:i] - nextrest = path[i+1:] - - scriptdir = self.translate_path(nextdir) - if os.path.isdir(scriptdir): - dir, rest = nextdir, nextrest - i = path.find('/', len(dir)+1) - else: - break - - # find an explicit query string, if present. 
- rest, _, query = rest.partition('?') - - # dissect the part after the directory name into a script name & - # a possible additional path, to be stored in PATH_INFO. - i = rest.find('/') - if i >= 0: - script, rest = rest[:i], rest[i:] - else: - script, rest = rest, '' - - scriptname = dir + '/' + script - scriptfile = self.translate_path(scriptname) - if not os.path.exists(scriptfile): - self.send_error( - HTTPStatus.NOT_FOUND, - "No such CGI script (%r)" % scriptname) - return - if not os.path.isfile(scriptfile): - self.send_error( - HTTPStatus.FORBIDDEN, - "CGI script is not a plain file (%r)" % scriptname) - return - ispy = self.is_python(scriptname) - if self.have_fork or not ispy: - if not self.is_executable(scriptfile): - self.send_error( - HTTPStatus.FORBIDDEN, - "CGI script is not executable (%r)" % scriptname) - return - - # Reference: http://hoohoo.ncsa.uiuc.edu/cgi/env.html - # XXX Much of the following could be prepared ahead of time! - env = copy.deepcopy(os.environ) - env['SERVER_SOFTWARE'] = self.version_string() - env['SERVER_NAME'] = self.server.server_name - env['GATEWAY_INTERFACE'] = 'CGI/1.1' - env['SERVER_PROTOCOL'] = self.protocol_version - env['SERVER_PORT'] = str(self.server.server_port) - env['REQUEST_METHOD'] = self.command - uqrest = urllib.parse.unquote(rest) - env['PATH_INFO'] = uqrest - env['PATH_TRANSLATED'] = self.translate_path(uqrest) - env['SCRIPT_NAME'] = scriptname - if query: - env['QUERY_STRING'] = query - env['REMOTE_ADDR'] = self.client_address[0] - authorization = self.headers.get("authorization") - if authorization: - authorization = authorization.split() - if len(authorization) == 2: - import base64, binascii - env['AUTH_TYPE'] = authorization[0] - if authorization[0].lower() == "basic": - try: - authorization = authorization[1].encode('ascii') - authorization = base64.decodebytes(authorization).\ - decode('ascii') - except (binascii.Error, UnicodeError): - pass - else: - authorization = authorization.split(':') - if len(authorization) == 2: - env['REMOTE_USER'] = authorization[0] - # XXX REMOTE_IDENT - if self.headers.get('content-type') is None: - env['CONTENT_TYPE'] = self.headers.get_content_type() - else: - env['CONTENT_TYPE'] = self.headers['content-type'] - length = self.headers.get('content-length') - if length: - env['CONTENT_LENGTH'] = length - referer = self.headers.get('referer') - if referer: - env['HTTP_REFERER'] = referer - accept = [] - for line in self.headers.getallmatchingheaders('accept'): - if line[:1] in "\t\n\r ": - accept.append(line.strip()) - else: - accept = accept + line[7:].split(',') - env['HTTP_ACCEPT'] = ','.join(accept) - ua = self.headers.get('user-agent') - if ua: - env['HTTP_USER_AGENT'] = ua - co = filter(None, self.headers.get_all('cookie', [])) - cookie_str = ', '.join(co) - if cookie_str: - env['HTTP_COOKIE'] = cookie_str - # XXX Other HTTP_* headers - # Since we're setting the env in the parent, provide empty - # values to override previously set values - for k in ('QUERY_STRING', 'REMOTE_HOST', 'CONTENT_LENGTH', - 'HTTP_USER_AGENT', 'HTTP_COOKIE', 'HTTP_REFERER'): - env.setdefault(k, "") - - self.send_response(HTTPStatus.OK, "Script output follows") - self.flush_headers() - - decoded_query = query.replace('+', ' ') - - if self.have_fork: - # Unix -- fork as we should - args = [script] - if '=' not in decoded_query: - args.append(decoded_query) - nobody = nobody_uid() - self.wfile.flush() # Always flush before forking - pid = os.fork() - if pid != 0: - # Parent - pid, sts = os.waitpid(pid, 0) - # throw 
away additional data [see bug #427345] - while select.select([self.rfile], [], [], 0)[0]: - if not self.rfile.read(1): - break - if sts: - self.log_error("CGI script exit status %#x", sts) - return - # Child - try: - try: - os.setuid(nobody) - except OSError: - pass - os.dup2(self.rfile.fileno(), 0) - os.dup2(self.wfile.fileno(), 1) - os.execve(scriptfile, args, env) - except: - self.server.handle_error(self.request, self.client_address) - os._exit(127) - - else: - # Non-Unix -- use subprocess - cmdline = [scriptfile] - if self.is_python(scriptfile): - interp = sys.executable - if interp.lower().endswith("w.exe"): - # On Windows, use python.exe, not pythonw.exe - interp = interp[:-5] + interp[-4:] - cmdline = [interp, '-u'] + cmdline - if '=' not in query: - cmdline.append(query) - self.log_message("command: %s", subprocess.list2cmdline(cmdline)) - try: - nbytes = int(length) - except (TypeError, ValueError): - nbytes = 0 - p = subprocess.Popen(cmdline, - stdin=subprocess.PIPE, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - env = env - ) - if self.command.lower() == "post" and nbytes > 0: - data = self.rfile.read(nbytes) - else: - data = None - # throw away additional data [see bug #427345] - while select.select([self.rfile._sock], [], [], 0)[0]: - if not self.rfile._sock.recv(1): - break - stdout, stderr = p.communicate(data) - self.wfile.write(stdout) - if stderr: - self.log_error('%s', stderr) - p.stderr.close() - p.stdout.close() - status = p.returncode - if status: - self.log_error("CGI script exit status %#x", status) - else: - self.log_message("CGI script exited OK") - - -def test(HandlerClass=BaseHTTPRequestHandler, - ServerClass=HTTPServer, protocol="HTTP/1.0", port=8000, bind=""): - """Test the HTTP request handler class. - - This runs an HTTP server on port 8000 (or the port argument). - - """ - server_address = (bind, port) - - HandlerClass.protocol_version = protocol - with ServerClass(server_address, HandlerClass) as httpd: - sa = httpd.socket.getsockname() - serve_message = "Serving HTTP on {host} port {port} (http://{host}:{port}/) ..." 
- print(serve_message.format(host=sa[0], port=sa[1])) - try: - httpd.serve_forever() - except KeyboardInterrupt: - print("\nKeyboard interrupt received, exiting.") - sys.exit(0) - -if __name__ == '__main__': - parser = argparse.ArgumentParser() - parser.add_argument('--cgi', action='store_true', - help='Run as CGI Server') - parser.add_argument('--bind', '-b', default='', metavar='ADDRESS', - help='Specify alternate bind address ' - '[default: all interfaces]') - parser.add_argument('port', action='store', - default=8000, type=int, - nargs='?', - help='Specify alternate port [default: 8000]') - args = parser.parse_args() - if args.cgi: - handler_class = CGIHTTPRequestHandler - else: - handler_class = SimpleHTTPRequestHandler - test(HandlerClass=handler_class, port=args.port, bind=args.bind) diff --git a/venv/lib/python3.6/site-packages/eventlet/green/httplib.py b/venv/lib/python3.6/site-packages/eventlet/green/httplib.py deleted file mode 100644 index 6330efa..0000000 --- a/venv/lib/python3.6/site-packages/eventlet/green/httplib.py +++ /dev/null @@ -1,22 +0,0 @@ -from eventlet import patcher -from eventlet.green import socket -import six - -to_patch = [('socket', socket)] - -try: - from eventlet.green import ssl - to_patch.append(('ssl', ssl)) -except ImportError: - pass - -if six.PY2: - patcher.inject('httplib', globals(), *to_patch) -if six.PY3: - from eventlet.green.http import client - for name in dir(client): - if name not in patcher.__exclude: - globals()[name] = getattr(client, name) - -if __name__ == '__main__': - test() diff --git a/venv/lib/python3.6/site-packages/eventlet/green/os.py b/venv/lib/python3.6/site-packages/eventlet/green/os.py deleted file mode 100644 index 959d15f..0000000 --- a/venv/lib/python3.6/site-packages/eventlet/green/os.py +++ /dev/null @@ -1,111 +0,0 @@ -os_orig = __import__("os") -import errno -socket = __import__("socket") - -from eventlet import greenio -from eventlet.support import get_errno -from eventlet import greenthread -from eventlet import hubs -from eventlet.patcher import slurp_properties - -__all__ = os_orig.__all__ -__patched__ = ['fdopen', 'read', 'write', 'wait', 'waitpid', 'open'] - -slurp_properties( - os_orig, - globals(), - ignore=__patched__, - srckeys=dir(os_orig)) - - -def fdopen(fd, *args, **kw): - """fdopen(fd [, mode='r' [, bufsize]]) -> file_object - - Return an open file object connected to a file descriptor.""" - if not isinstance(fd, int): - raise TypeError('fd should be int, not %r' % fd) - try: - return greenio.GreenPipe(fd, *args, **kw) - except IOError as e: - raise OSError(*e.args) - -__original_read__ = os_orig.read - - -def read(fd, n): - """read(fd, buffersize) -> string - - Read a file descriptor.""" - while True: - try: - return __original_read__(fd, n) - except (OSError, IOError) as e: - if get_errno(e) != errno.EAGAIN: - raise - except socket.error as e: - if get_errno(e) == errno.EPIPE: - return '' - raise - try: - hubs.trampoline(fd, read=True) - except hubs.IOClosed: - return '' - -__original_write__ = os_orig.write - - -def write(fd, st): - """write(fd, string) -> byteswritten - - Write a string to a file descriptor. 
- """ - while True: - try: - return __original_write__(fd, st) - except (OSError, IOError) as e: - if get_errno(e) != errno.EAGAIN: - raise - except socket.error as e: - if get_errno(e) != errno.EPIPE: - raise - hubs.trampoline(fd, write=True) - - -def wait(): - """wait() -> (pid, status) - - Wait for completion of a child process.""" - return waitpid(0, 0) - -__original_waitpid__ = os_orig.waitpid - - -def waitpid(pid, options): - """waitpid(...) - waitpid(pid, options) -> (pid, status) - - Wait for completion of a given child process.""" - if options & os_orig.WNOHANG != 0: - return __original_waitpid__(pid, options) - else: - new_options = options | os_orig.WNOHANG - while True: - rpid, status = __original_waitpid__(pid, new_options) - if rpid and status >= 0: - return rpid, status - greenthread.sleep(0.01) - -__original_open__ = os_orig.open - - -def open(file, flags, mode=0o777, dir_fd=None): - """ Wrap os.open - This behaves identically, but collaborates with - the hub's notify_opened protocol. - """ - if dir_fd is not None: - fd = __original_open__(file, flags, mode, dir_fd=dir_fd) - else: - fd = __original_open__(file, flags, mode) - hubs.notify_opened(fd) - return fd diff --git a/venv/lib/python3.6/site-packages/eventlet/green/profile.py b/venv/lib/python3.6/site-packages/eventlet/green/profile.py deleted file mode 100644 index 9dfd750..0000000 --- a/venv/lib/python3.6/site-packages/eventlet/green/profile.py +++ /dev/null @@ -1,257 +0,0 @@ -# Copyright (c) 2010, CCP Games -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are met: -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in the -# documentation and/or other materials provided with the distribution. -# * Neither the name of CCP Games nor the -# names of its contributors may be used to endorse or promote products -# derived from this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY CCP GAMES ``AS IS'' AND ANY -# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -# DISCLAIMED. IN NO EVENT SHALL CCP GAMES BE LIABLE FOR ANY -# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND -# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -"""This module is API-equivalent to the standard library :mod:`profile` module -lbut it is greenthread-aware as well as thread-aware. Use this module -to profile Eventlet-based applications in preference to either :mod:`profile` or :mod:`cProfile`. -FIXME: No testcases for this module. 
-""" - -profile_orig = __import__('profile') -__all__ = profile_orig.__all__ - -from eventlet.patcher import slurp_properties -slurp_properties(profile_orig, globals(), srckeys=dir(profile_orig)) - -import sys -import functools - -from eventlet import greenthread -from eventlet import patcher -import six - -thread = patcher.original(six.moves._thread.__name__) # non-monkeypatched module needed - - -# This class provides the start() and stop() functions -class Profile(profile_orig.Profile): - base = profile_orig.Profile - - def __init__(self, timer=None, bias=None): - self.current_tasklet = greenthread.getcurrent() - self.thread_id = thread.get_ident() - self.base.__init__(self, timer, bias) - self.sleeping = {} - - def __call__(self, *args): - """make callable, allowing an instance to be the profiler""" - self.dispatcher(*args) - - def _setup(self): - self._has_setup = True - self.cur = None - self.timings = {} - self.current_tasklet = greenthread.getcurrent() - self.thread_id = thread.get_ident() - self.simulate_call("profiler") - - def start(self, name="start"): - if getattr(self, "running", False): - return - self._setup() - self.simulate_call("start") - self.running = True - sys.setprofile(self.dispatcher) - - def stop(self): - sys.setprofile(None) - self.running = False - self.TallyTimings() - - # special cases for the original run commands, makin sure to - # clear the timer context. - def runctx(self, cmd, globals, locals): - if not getattr(self, "_has_setup", False): - self._setup() - try: - return profile_orig.Profile.runctx(self, cmd, globals, locals) - finally: - self.TallyTimings() - - def runcall(self, func, *args, **kw): - if not getattr(self, "_has_setup", False): - self._setup() - try: - return profile_orig.Profile.runcall(self, func, *args, **kw) - finally: - self.TallyTimings() - - def trace_dispatch_return_extend_back(self, frame, t): - """A hack function to override error checking in parent class. It - allows invalid returns (where frames weren't preveiously entered into - the profiler) which can happen for all the tasklets that suddenly start - to get monitored. This means that the time will eventually be attributed - to a call high in the chain, when there is a tasklet switch - """ - if isinstance(self.cur[-2], Profile.fake_frame): - return False - self.trace_dispatch_call(frame, 0) - return self.trace_dispatch_return(frame, t) - - def trace_dispatch_c_return_extend_back(self, frame, t): - # same for c return - if isinstance(self.cur[-2], Profile.fake_frame): - return False # ignore bogus returns - self.trace_dispatch_c_call(frame, 0) - return self.trace_dispatch_return(frame, t) - - def SwitchTasklet(self, t0, t1, t): - # tally the time spent in the old tasklet - pt, it, et, fn, frame, rcur = self.cur - cur = (pt, it + t, et, fn, frame, rcur) - - # we are switching to a new tasklet, store the old - self.sleeping[t0] = cur, self.timings - self.current_tasklet = t1 - - # find the new one - try: - self.cur, self.timings = self.sleeping.pop(t1) - except KeyError: - self.cur, self.timings = None, {} - self.simulate_call("profiler") - self.simulate_call("new_tasklet") - - def TallyTimings(self): - oldtimings = self.sleeping - self.sleeping = {} - - # first, unwind the main "cur" - self.cur = self.Unwind(self.cur, self.timings) - - # we must keep the timings dicts separate for each tasklet, since it contains - # the 'ns' item, recursion count of each function in that tasklet. This is - # used in the Unwind dude. 
- for tasklet, (cur, timings) in six.iteritems(oldtimings): - self.Unwind(cur, timings) - - for k, v in six.iteritems(timings): - if k not in self.timings: - self.timings[k] = v - else: - # accumulate all to the self.timings - cc, ns, tt, ct, callers = self.timings[k] - # ns should be 0 after unwinding - cc += v[0] - tt += v[2] - ct += v[3] - for k1, v1 in six.iteritems(v[4]): - callers[k1] = callers.get(k1, 0) + v1 - self.timings[k] = cc, ns, tt, ct, callers - - def Unwind(self, cur, timings): - "A function to unwind a 'cur' frame and tally the results" - "see profile.trace_dispatch_return() for details" - # also see simulate_cmd_complete() - while(cur[-1]): - rpt, rit, ret, rfn, frame, rcur = cur - frame_total = rit + ret - - if rfn in timings: - cc, ns, tt, ct, callers = timings[rfn] - else: - cc, ns, tt, ct, callers = 0, 0, 0, 0, {} - - if not ns: - ct = ct + frame_total - cc = cc + 1 - - if rcur: - ppt, pit, pet, pfn, pframe, pcur = rcur - else: - pfn = None - - if pfn in callers: - callers[pfn] = callers[pfn] + 1 # hack: gather more - elif pfn: - callers[pfn] = 1 - - timings[rfn] = cc, ns - 1, tt + rit, ct, callers - - ppt, pit, pet, pfn, pframe, pcur = rcur - rcur = ppt, pit + rpt, pet + frame_total, pfn, pframe, pcur - cur = rcur - return cur - - -def ContextWrap(f): - @functools.wraps(f) - def ContextWrapper(self, arg, t): - current = greenthread.getcurrent() - if current != self.current_tasklet: - self.SwitchTasklet(self.current_tasklet, current, t) - t = 0.0 # the time was billed to the previous tasklet - return f(self, arg, t) - return ContextWrapper - - -# Add "return safety" to the dispatchers -Profile.dispatch = dict(profile_orig.Profile.dispatch, **{ - 'return': Profile.trace_dispatch_return_extend_back, - 'c_return': Profile.trace_dispatch_c_return_extend_back, -}) -# Add automatic tasklet detection to the callbacks. -Profile.dispatch = dict((k, ContextWrap(v)) for k, v in six.viewitems(Profile.dispatch)) - - -# run statements shamelessly stolen from profile.py -def run(statement, filename=None, sort=-1): - """Run statement under profiler optionally saving results in filename - - This function takes a single argument that can be passed to the - "exec" statement, and an optional file name. In all cases this - routine attempts to "exec" its first argument and gather profiling - statistics from the execution. If no file name is present, then this - function automatically prints a simple profiling report, sorted by the - standard name string (file/line/function-name) that is presented in - each line. - """ - prof = Profile() - try: - prof = prof.run(statement) - except SystemExit: - pass - if filename is not None: - prof.dump_stats(filename) - else: - return prof.print_stats(sort) - - -def runctx(statement, globals, locals, filename=None): - """Run statement under profiler, supplying your own globals and locals, - optionally saving results in filename. 
- - statement and filename have the same semantics as profile.run - """ - prof = Profile() - try: - prof = prof.runctx(statement, globals, locals) - except SystemExit: - pass - - if filename is not None: - prof.dump_stats(filename) - else: - return prof.print_stats() diff --git a/venv/lib/python3.6/site-packages/eventlet/green/select.py b/venv/lib/python3.6/site-packages/eventlet/green/select.py deleted file mode 100644 index e293f36..0000000 --- a/venv/lib/python3.6/site-packages/eventlet/green/select.py +++ /dev/null @@ -1,86 +0,0 @@ -import eventlet -from eventlet.hubs import get_hub -import six -__select = eventlet.patcher.original('select') -error = __select.error - - -__patched__ = ['select'] -__deleted__ = ['devpoll', 'poll', 'epoll', 'kqueue', 'kevent'] - - -def get_fileno(obj): - # The purpose of this function is to exactly replicate - # the behavior of the select module when confronted with - # abnormal filenos; the details are extensively tested in - # the stdlib test/test_select.py. - try: - f = obj.fileno - except AttributeError: - if not isinstance(obj, six.integer_types): - raise TypeError("Expected int or long, got %s" % type(obj)) - return obj - else: - rv = f() - if not isinstance(rv, six.integer_types): - raise TypeError("Expected int or long, got %s" % type(rv)) - return rv - - -def select(read_list, write_list, error_list, timeout=None): - # error checking like this is required by the stdlib unit tests - if timeout is not None: - try: - timeout = float(timeout) - except ValueError: - raise TypeError("Expected number for timeout") - hub = get_hub() - timers = [] - current = eventlet.getcurrent() - assert hub.greenlet is not current, 'do not call blocking functions from the mainloop' - ds = {} - for r in read_list: - ds[get_fileno(r)] = {'read': r} - for w in write_list: - ds.setdefault(get_fileno(w), {})['write'] = w - for e in error_list: - ds.setdefault(get_fileno(e), {})['error'] = e - - listeners = [] - - def on_read(d): - original = ds[get_fileno(d)]['read'] - current.switch(([original], [], [])) - - def on_write(d): - original = ds[get_fileno(d)]['write'] - current.switch(([], [original], [])) - - def on_timeout2(): - current.switch(([], [], [])) - - def on_timeout(): - # ensure that BaseHub.run() has a chance to call self.wait() - # at least once before timed out. otherwise the following code - # can time out erroneously. 
- # - # s1, s2 = socket.socketpair() - # print(select.select([], [s1], [], 0)) - timers.append(hub.schedule_call_global(0, on_timeout2)) - - if timeout is not None: - timers.append(hub.schedule_call_global(timeout, on_timeout)) - try: - for k, v in six.iteritems(ds): - if v.get('read'): - listeners.append(hub.add(hub.READ, k, on_read, current.throw, lambda: None)) - if v.get('write'): - listeners.append(hub.add(hub.WRITE, k, on_write, current.throw, lambda: None)) - try: - return hub.switch() - finally: - for l in listeners: - hub.remove(l) - finally: - for t in timers: - t.cancel() diff --git a/venv/lib/python3.6/site-packages/eventlet/green/selectors.py b/venv/lib/python3.6/site-packages/eventlet/green/selectors.py deleted file mode 100644 index 81fc862..0000000 --- a/venv/lib/python3.6/site-packages/eventlet/green/selectors.py +++ /dev/null @@ -1,34 +0,0 @@ -import sys - -from eventlet import patcher -from eventlet.green import select - -__patched__ = [ - 'DefaultSelector', - 'SelectSelector', -] - -# We only have green select so the options are: -# * leave it be and have selectors that block -# * try to pretend the "bad" selectors don't exist -# * replace all with SelectSelector for the price of possibly different -# performance characteristic and missing fileno() method (if someone -# uses it it'll result in a crash, we may want to implement it in the future) -# -# This module used to follow the third approach but just removing the offending -# selectors is less error prone and less confusing approach. -__deleted__ = [ - 'PollSelector', - 'EpollSelector', - 'DevpollSelector', - 'KqueueSelector', -] - -patcher.inject('selectors', globals(), ('select', select)) - -del patcher - -if sys.platform != 'win32': - SelectSelector._select = staticmethod(select.select) - -DefaultSelector = SelectSelector diff --git a/venv/lib/python3.6/site-packages/eventlet/green/socket.py b/venv/lib/python3.6/site-packages/eventlet/green/socket.py deleted file mode 100644 index 6a39caf..0000000 --- a/venv/lib/python3.6/site-packages/eventlet/green/socket.py +++ /dev/null @@ -1,63 +0,0 @@ -import os -import sys - -__import__('eventlet.green._socket_nodns') -__socket = sys.modules['eventlet.green._socket_nodns'] - -__all__ = __socket.__all__ -__patched__ = __socket.__patched__ + [ - 'create_connection', - 'getaddrinfo', - 'gethostbyname', - 'gethostbyname_ex', - 'getnameinfo', -] - -from eventlet.patcher import slurp_properties -slurp_properties(__socket, globals(), srckeys=dir(__socket)) - - -if os.environ.get("EVENTLET_NO_GREENDNS", '').lower() != 'yes': - from eventlet.support import greendns - gethostbyname = greendns.gethostbyname - getaddrinfo = greendns.getaddrinfo - gethostbyname_ex = greendns.gethostbyname_ex - getnameinfo = greendns.getnameinfo - del greendns - - -def create_connection(address, - timeout=_GLOBAL_DEFAULT_TIMEOUT, - source_address=None): - """Connect to *address* and return the socket object. - - Convenience function. Connect to *address* (a 2-tuple ``(host, - port)``) and return the socket object. Passing the optional - *timeout* parameter will set the timeout on the socket instance - before attempting to connect. If no *timeout* is supplied, the - global default timeout setting returned by :func:`getdefaulttimeout` - is used. 
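The create_connection wrapper described just above mirrors the stdlib socket.create_connection signature. A minimal usage sketch; host, port and timeout are placeholder values:

    from eventlet.green.socket import create_connection

    # Cooperative (green) DNS lookup and connect under eventlet.
    sock = create_connection(('example.com', 80), timeout=5.0)
    sock.sendall(b'HEAD / HTTP/1.0\r\nHost: example.com\r\n\r\n')
    print(sock.recv(1024))
    sock.close()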
- """ - - err = "getaddrinfo returns an empty list" - host, port = address - for res in getaddrinfo(host, port, 0, SOCK_STREAM): - af, socktype, proto, canonname, sa = res - sock = None - try: - sock = socket(af, socktype, proto) - if timeout is not _GLOBAL_DEFAULT_TIMEOUT: - sock.settimeout(timeout) - if source_address: - sock.bind(source_address) - sock.connect(sa) - return sock - - except error as e: - err = e - if sock is not None: - sock.close() - - if not isinstance(err, error): - err = error(err) - raise err diff --git a/venv/lib/python3.6/site-packages/eventlet/green/ssl.py b/venv/lib/python3.6/site-packages/eventlet/green/ssl.py deleted file mode 100644 index 9504aef..0000000 --- a/venv/lib/python3.6/site-packages/eventlet/green/ssl.py +++ /dev/null @@ -1,475 +0,0 @@ -__ssl = __import__('ssl') - -from eventlet.patcher import slurp_properties -slurp_properties(__ssl, globals(), srckeys=dir(__ssl)) - -import sys -from eventlet import greenio, hubs -from eventlet.greenio import ( - set_nonblocking, GreenSocket, CONNECT_ERR, CONNECT_SUCCESS, -) -from eventlet.hubs import trampoline, IOClosed -from eventlet.support import get_errno, PY33 -import six -from contextlib import contextmanager - -orig_socket = __import__('socket') -socket = orig_socket.socket -timeout_exc = SSLError - -__patched__ = [ - 'SSLSocket', 'SSLContext', 'wrap_socket', 'sslwrap_simple', - 'create_default_context', '_create_default_https_context'] - -_original_sslsocket = __ssl.SSLSocket -_original_wrap_socket = __ssl.wrap_socket -_original_sslcontext = getattr(__ssl, 'SSLContext', None) -_is_under_py_3_7 = sys.version_info < (3, 7) - - -@contextmanager -def _original_ssl_context(*args, **kwargs): - tmp_sslcontext = _original_wrap_socket.__globals__.get('SSLContext', None) - tmp_sslsocket = _original_sslsocket._create.__globals__.get('SSLSocket', None) - _original_sslsocket._create.__globals__['SSLSocket'] = _original_sslsocket - _original_wrap_socket.__globals__['SSLContext'] = _original_sslcontext - try: - yield - finally: - _original_wrap_socket.__globals__['SSLContext'] = tmp_sslcontext - _original_sslsocket._create.__globals__['SSLSocket'] = tmp_sslsocket - - -class GreenSSLSocket(_original_sslsocket): - """ This is a green version of the SSLSocket class from the ssl module added - in 2.6. For documentation on it, please see the Python standard - documentation. - - Python nonblocking ssl objects don't give errors when the other end - of the socket is closed (they do notice when the other end is shutdown, - though). Any write/read operations will simply hang if the socket is - closed from the other end. There is no obvious fix for this problem; - it appears to be a limitation of Python's ssl object implementation. - A workaround is to set a reasonable timeout on the socket using - settimeout(), and to close/reopen the connection when a timeout - occurs at an unexpected juncture in the code. 
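The timeout workaround that the GreenSSLSocket docstring recommends (set a timeout, then close and reopen the connection on an unexpected hang) looks roughly like this. A hedged sketch: sock is an existing green socket, and connect_again() is a hypothetical stand-in for the application's reconnection logic:

    from eventlet.green import ssl as green_ssl

    ssl_sock = green_ssl.wrap_socket(sock)
    ssl_sock.settimeout(30.0)
    try:
        data = ssl_sock.read(4096)
    except green_ssl.SSLError:        # timeouts surface here as SSLError('timed out')
        ssl_sock.close()
        ssl_sock = connect_again()    # hypothetical helper, not part of eventlet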
- """ - def __new__(cls, sock=None, keyfile=None, certfile=None, - server_side=False, cert_reqs=CERT_NONE, - ssl_version=PROTOCOL_SSLv23, ca_certs=None, - do_handshake_on_connect=True, *args, **kw): - if _is_under_py_3_7: - return super(GreenSSLSocket, cls).__new__(cls) - else: - if not isinstance(sock, GreenSocket): - sock = GreenSocket(sock) - with _original_ssl_context(): - ret = _original_wrap_socket( - sock=sock.fd, - keyfile=keyfile, - certfile=certfile, - server_side=server_side, - cert_reqs=cert_reqs, - ssl_version=ssl_version, - ca_certs=ca_certs, - do_handshake_on_connect=False, - *args, **kw - ) - ret.keyfile = keyfile - ret.certfile = certfile - ret.cert_reqs = cert_reqs - ret.ssl_version = ssl_version - ret.ca_certs = ca_certs - ret.__class__ = GreenSSLSocket - return ret - - # we are inheriting from SSLSocket because its constructor calls - # do_handshake whose behavior we wish to override - def __init__(self, sock, keyfile=None, certfile=None, - server_side=False, cert_reqs=CERT_NONE, - ssl_version=PROTOCOL_SSLv23, ca_certs=None, - do_handshake_on_connect=True, *args, **kw): - if not isinstance(sock, GreenSocket): - sock = GreenSocket(sock) - self.act_non_blocking = sock.act_non_blocking - - if six.PY2: - # On Python 2 SSLSocket constructor queries the timeout, it'd break without - # this assignment - self._timeout = sock.gettimeout() - - if _is_under_py_3_7: - # nonblocking socket handshaking on connect got disabled so let's pretend it's disabled - # even when it's on - super(GreenSSLSocket, self).__init__( - sock.fd, keyfile, certfile, server_side, cert_reqs, ssl_version, - ca_certs, do_handshake_on_connect and six.PY2, *args, **kw) - # the superclass initializer trashes the methods so we remove - # the local-object versions of them and let the actual class - # methods shine through - # Note: This for Python 2 - try: - for fn in orig_socket._delegate_methods: - delattr(self, fn) - except AttributeError: - pass - - if six.PY3: - # Python 3 SSLSocket construction process overwrites the timeout so restore it - self._timeout = sock.gettimeout() - - # it also sets timeout to None internally apparently (tested with 3.4.2) - _original_sslsocket.settimeout(self, 0.0) - assert _original_sslsocket.gettimeout(self) == 0.0 - - # see note above about handshaking - self.do_handshake_on_connect = do_handshake_on_connect - if do_handshake_on_connect and self._connected: - self.do_handshake() - - def settimeout(self, timeout): - self._timeout = timeout - - def gettimeout(self): - return self._timeout - - def setblocking(self, flag): - if flag: - self.act_non_blocking = False - self._timeout = None - else: - self.act_non_blocking = True - self._timeout = 0.0 - - def _call_trampolining(self, func, *a, **kw): - if self.act_non_blocking: - return func(*a, **kw) - else: - while True: - try: - return func(*a, **kw) - except SSLError as exc: - if get_errno(exc) == SSL_ERROR_WANT_READ: - trampoline(self, - read=True, - timeout=self.gettimeout(), - timeout_exc=timeout_exc('timed out')) - elif get_errno(exc) == SSL_ERROR_WANT_WRITE: - trampoline(self, - write=True, - timeout=self.gettimeout(), - timeout_exc=timeout_exc('timed out')) - else: - raise - - def write(self, data): - """Write DATA to the underlying SSL channel. Returns - number of bytes of DATA actually transmitted.""" - return self._call_trampolining( - super(GreenSSLSocket, self).write, data) - - def read(self, *args, **kwargs): - """Read up to LEN bytes and return them. 
- Return zero-length string on EOF.""" - try: - return self._call_trampolining( - super(GreenSSLSocket, self).read, *args, **kwargs) - except IOClosed: - return b'' - - def send(self, data, flags=0): - if self._sslobj: - return self._call_trampolining( - super(GreenSSLSocket, self).send, data, flags) - else: - trampoline(self, write=True, timeout_exc=timeout_exc('timed out')) - return socket.send(self, data, flags) - - def sendto(self, data, addr, flags=0): - # *NOTE: gross, copied code from ssl.py becase it's not factored well enough to be used as-is - if self._sslobj: - raise ValueError("sendto not allowed on instances of %s" % - self.__class__) - else: - trampoline(self, write=True, timeout_exc=timeout_exc('timed out')) - return socket.sendto(self, data, addr, flags) - - def sendall(self, data, flags=0): - # *NOTE: gross, copied code from ssl.py becase it's not factored well enough to be used as-is - if self._sslobj: - if flags != 0: - raise ValueError( - "non-zero flags not allowed in calls to sendall() on %s" % - self.__class__) - amount = len(data) - count = 0 - data_to_send = data - while (count < amount): - v = self.send(data_to_send) - count += v - if v == 0: - trampoline(self, write=True, timeout_exc=timeout_exc('timed out')) - else: - data_to_send = data[count:] - return amount - else: - while True: - try: - return socket.sendall(self, data, flags) - except orig_socket.error as e: - if self.act_non_blocking: - raise - erno = get_errno(e) - if erno in greenio.SOCKET_BLOCKING: - trampoline(self, write=True, - timeout=self.gettimeout(), timeout_exc=timeout_exc('timed out')) - elif erno in greenio.SOCKET_CLOSED: - return '' - raise - - def recv(self, buflen=1024, flags=0): - return self._base_recv(buflen, flags, into=False) - - def recv_into(self, buffer, nbytes=None, flags=0): - # Copied verbatim from CPython - if buffer and nbytes is None: - nbytes = len(buffer) - elif nbytes is None: - nbytes = 1024 - # end of CPython code - - return self._base_recv(nbytes, flags, into=True, buffer_=buffer) - - def _base_recv(self, nbytes, flags, into, buffer_=None): - if into: - plain_socket_function = socket.recv_into - else: - plain_socket_function = socket.recv - - # *NOTE: gross, copied code from ssl.py becase it's not factored well enough to be used as-is - if self._sslobj: - if flags != 0: - raise ValueError( - "non-zero flags not allowed in calls to %s() on %s" % - plain_socket_function.__name__, self.__class__) - if into: - read = self.read(nbytes, buffer_) - else: - read = self.read(nbytes) - return read - else: - while True: - try: - args = [self, nbytes, flags] - if into: - args.insert(1, buffer_) - return plain_socket_function(*args) - except orig_socket.error as e: - if self.act_non_blocking: - raise - erno = get_errno(e) - if erno in greenio.SOCKET_BLOCKING: - try: - trampoline( - self, read=True, - timeout=self.gettimeout(), timeout_exc=timeout_exc('timed out')) - except IOClosed: - return b'' - elif erno in greenio.SOCKET_CLOSED: - return b'' - raise - - def recvfrom(self, addr, buflen=1024, flags=0): - if not self.act_non_blocking: - trampoline(self, read=True, timeout=self.gettimeout(), - timeout_exc=timeout_exc('timed out')) - return super(GreenSSLSocket, self).recvfrom(addr, buflen, flags) - - def recvfrom_into(self, buffer, nbytes=None, flags=0): - if not self.act_non_blocking: - trampoline(self, read=True, timeout=self.gettimeout(), - timeout_exc=timeout_exc('timed out')) - return super(GreenSSLSocket, self).recvfrom_into(buffer, nbytes, flags) - - def unwrap(self): - 
return GreenSocket(self._call_trampolining( - super(GreenSSLSocket, self).unwrap)) - - def do_handshake(self): - """Perform a TLS/SSL handshake.""" - return self._call_trampolining( - super(GreenSSLSocket, self).do_handshake) - - def _socket_connect(self, addr): - real_connect = socket.connect - if self.act_non_blocking: - return real_connect(self, addr) - else: - clock = hubs.get_hub().clock - # *NOTE: gross, copied code from greenio because it's not factored - # well enough to reuse - if self.gettimeout() is None: - while True: - try: - return real_connect(self, addr) - except orig_socket.error as exc: - if get_errno(exc) in CONNECT_ERR: - trampoline(self, write=True) - elif get_errno(exc) in CONNECT_SUCCESS: - return - else: - raise - else: - end = clock() + self.gettimeout() - while True: - try: - real_connect(self, addr) - except orig_socket.error as exc: - if get_errno(exc) in CONNECT_ERR: - trampoline( - self, write=True, - timeout=end - clock(), timeout_exc=timeout_exc('timed out')) - elif get_errno(exc) in CONNECT_SUCCESS: - return - else: - raise - if clock() >= end: - raise timeout_exc('timed out') - - def connect(self, addr): - """Connects to remote ADDR, and then wraps the connection in - an SSL channel.""" - # *NOTE: grrrrr copied this code from ssl.py because of the reference - # to socket.connect which we don't want to call directly - if self._sslobj: - raise ValueError("attempt to connect already-connected SSLSocket!") - self._socket_connect(addr) - server_side = False - try: - sslwrap = _ssl.sslwrap - except AttributeError: - # sslwrap was removed in 3.x and later in 2.7.9 - if six.PY2: - sslobj = self._context._wrap_socket(self._sock, server_side, ssl_sock=self) - else: - context = self.context if PY33 else self._context - sslobj = context._wrap_socket(self, server_side) - else: - sslobj = sslwrap(self._sock, server_side, self.keyfile, self.certfile, - self.cert_reqs, self.ssl_version, - self.ca_certs, *self.ciphers) - - try: - # This is added in Python 3.5, http://bugs.python.org/issue21965 - SSLObject - except NameError: - self._sslobj = sslobj - else: - if _is_under_py_3_7: - self._sslobj = SSLObject(sslobj, owner=self) - else: - self._sslobj = sslobj - - if self.do_handshake_on_connect: - self.do_handshake() - - def accept(self): - """Accepts a new connection from a remote client, and returns - a tuple containing that new connection wrapped with a server-side - SSL channel, and the address of the remote client.""" - # RDW grr duplication of code from greenio - if self.act_non_blocking: - newsock, addr = socket.accept(self) - else: - while True: - try: - newsock, addr = socket.accept(self) - set_nonblocking(newsock) - break - except orig_socket.error as e: - if get_errno(e) not in greenio.SOCKET_BLOCKING: - raise - trampoline(self, read=True, timeout=self.gettimeout(), - timeout_exc=timeout_exc('timed out')) - - new_ssl = type(self)( - newsock, - keyfile=self.keyfile, - certfile=self.certfile, - server_side=True, - cert_reqs=self.cert_reqs, - ssl_version=self.ssl_version, - ca_certs=self.ca_certs, - do_handshake_on_connect=False, - suppress_ragged_eofs=self.suppress_ragged_eofs) - return (new_ssl, addr) - - def dup(self): - raise NotImplementedError("Can't dup an ssl object") - - -SSLSocket = GreenSSLSocket - - -def wrap_socket(sock, *a, **kw): - return GreenSSLSocket(sock, *a, **kw) - - -if hasattr(__ssl, 'sslwrap_simple'): - def sslwrap_simple(sock, keyfile=None, certfile=None): - """A replacement for the old socket.ssl function. 
Designed - for compatibility with Python 2.5 and earlier. Will disappear in - Python 3.0.""" - ssl_sock = GreenSSLSocket(sock, keyfile=keyfile, certfile=certfile, - server_side=False, - cert_reqs=CERT_NONE, - ssl_version=PROTOCOL_SSLv23, - ca_certs=None) - return ssl_sock - - -if hasattr(__ssl, 'SSLContext'): - _original_sslcontext = __ssl.SSLContext - - class GreenSSLContext(_original_sslcontext): - __slots__ = () - - def wrap_socket(self, sock, *a, **kw): - return GreenSSLSocket(sock, *a, _context=self, **kw) - - # https://github.com/eventlet/eventlet/issues/371 - # Thanks to Gevent developers for sharing patch to this problem. - if hasattr(_original_sslcontext.options, 'setter'): - # In 3.6, these became properties. They want to access the - # property __set__ method in the superclass, and they do so by using - # super(SSLContext, SSLContext). But we rebind SSLContext when we monkey - # patch, which causes infinite recursion. - # https://github.com/python/cpython/commit/328067c468f82e4ec1b5c510a4e84509e010f296 - @_original_sslcontext.options.setter - def options(self, value): - super(_original_sslcontext, _original_sslcontext).options.__set__(self, value) - - @_original_sslcontext.verify_flags.setter - def verify_flags(self, value): - super(_original_sslcontext, _original_sslcontext).verify_flags.__set__(self, value) - - @_original_sslcontext.verify_mode.setter - def verify_mode(self, value): - super(_original_sslcontext, _original_sslcontext).verify_mode.__set__(self, value) - - SSLContext = GreenSSLContext - - if hasattr(__ssl, 'create_default_context'): - _original_create_default_context = __ssl.create_default_context - - def green_create_default_context(*a, **kw): - # We can't just monkey-patch on the green version of `wrap_socket` - # on to SSLContext instances, but SSLContext.create_default_context - # does a bunch of work. Rather than re-implementing it all, just - # switch out the __class__ to get our `wrap_socket` implementation - context = _original_create_default_context(*a, **kw) - context.__class__ = GreenSSLContext - return context - - create_default_context = green_create_default_context - _create_default_https_context = green_create_default_context diff --git a/venv/lib/python3.6/site-packages/eventlet/green/subprocess.py b/venv/lib/python3.6/site-packages/eventlet/green/subprocess.py deleted file mode 100644 index f021ced..0000000 --- a/venv/lib/python3.6/site-packages/eventlet/green/subprocess.py +++ /dev/null @@ -1,143 +0,0 @@ -import errno -import sys -from types import FunctionType - -import eventlet -from eventlet import greenio -from eventlet import patcher -from eventlet.green import select, threading, time -import six - - -__patched__ = ['call', 'check_call', 'Popen'] -to_patch = [('select', select), ('threading', threading), ('time', time)] - -if sys.version_info > (3, 4): - from eventlet.green import selectors - to_patch.append(('selectors', selectors)) - -patcher.inject('subprocess', globals(), *to_patch) -subprocess_orig = patcher.original("subprocess") -subprocess_imported = sys.modules.get('subprocess', subprocess_orig) -mswindows = sys.platform == "win32" - - -if getattr(subprocess_orig, 'TimeoutExpired', None) is None: - # Backported from Python 3.3. - # https://bitbucket.org/eventlet/eventlet/issue/89 - class TimeoutExpired(Exception): - """This exception is raised when the timeout expires while waiting for - a child process. 
- """ - - def __init__(self, cmd, timeout, output=None): - self.cmd = cmd - self.timeout = timeout - self.output = output - - def __str__(self): - return ("Command '%s' timed out after %s seconds" % - (self.cmd, self.timeout)) -else: - TimeoutExpired = subprocess_imported.TimeoutExpired - - -# This is the meat of this module, the green version of Popen. -class Popen(subprocess_orig.Popen): - """eventlet-friendly version of subprocess.Popen""" - # We do not believe that Windows pipes support non-blocking I/O. At least, - # the Python file objects stored on our base-class object have no - # setblocking() method, and the Python fcntl module doesn't exist on - # Windows. (see eventlet.greenio.set_nonblocking()) As the sole purpose of - # this __init__() override is to wrap the pipes for eventlet-friendly - # non-blocking I/O, don't even bother overriding it on Windows. - if not mswindows: - def __init__(self, args, bufsize=0, *argss, **kwds): - self.args = args - # Forward the call to base-class constructor - subprocess_orig.Popen.__init__(self, args, 0, *argss, **kwds) - # Now wrap the pipes, if any. This logic is loosely borrowed from - # eventlet.processes.Process.run() method. - for attr in "stdin", "stdout", "stderr": - pipe = getattr(self, attr) - if pipe is not None and type(pipe) != greenio.GreenPipe: - # https://github.com/eventlet/eventlet/issues/243 - # AttributeError: '_io.TextIOWrapper' object has no attribute 'mode' - mode = getattr(pipe, 'mode', '') - if not mode: - if pipe.readable(): - mode += 'r' - if pipe.writable(): - mode += 'w' - # ValueError: can't have unbuffered text I/O - if bufsize == 0: - bufsize = -1 - wrapped_pipe = greenio.GreenPipe(pipe, mode, bufsize) - setattr(self, attr, wrapped_pipe) - __init__.__doc__ = subprocess_orig.Popen.__init__.__doc__ - - def wait(self, timeout=None, check_interval=0.01): - # Instead of a blocking OS call, this version of wait() uses logic - # borrowed from the eventlet 0.2 processes.Process.wait() method. - if timeout is not None: - endtime = time.time() + timeout - try: - while True: - status = self.poll() - if status is not None: - return status - if timeout is not None and time.time() > endtime: - raise TimeoutExpired(self.args, timeout) - eventlet.sleep(check_interval) - except OSError as e: - if e.errno == errno.ECHILD: - # no child process, this happens if the child process - # already died and has been cleaned up - return -1 - else: - raise - wait.__doc__ = subprocess_orig.Popen.wait.__doc__ - - if not mswindows: - # don't want to rewrite the original _communicate() method, we - # just want a version that uses eventlet.green.select.select() - # instead of select.select(). - _communicate = FunctionType( - six.get_function_code(six.get_unbound_function( - subprocess_orig.Popen._communicate)), - globals()) - try: - _communicate_with_select = FunctionType( - six.get_function_code(six.get_unbound_function( - subprocess_orig.Popen._communicate_with_select)), - globals()) - _communicate_with_poll = FunctionType( - six.get_function_code(six.get_unbound_function( - subprocess_orig.Popen._communicate_with_poll)), - globals()) - except AttributeError: - pass - - -# Borrow subprocess.call() and check_call(), but patch them so they reference -# OUR Popen class rather than subprocess.Popen. 
-def patched_function(function): - new_function = FunctionType(six.get_function_code(function), globals()) - if six.PY3: - new_function.__kwdefaults__ = function.__kwdefaults__ - new_function.__defaults__ = function.__defaults__ - return new_function - - -call = patched_function(subprocess_orig.call) -check_call = patched_function(subprocess_orig.check_call) -# check_output is Python 2.7+ -if hasattr(subprocess_orig, 'check_output'): - __patched__.append('check_output') - check_output = patched_function(subprocess_orig.check_output) -del patched_function - -# Keep exceptions identity. -# https://github.com/eventlet/eventlet/issues/413 -CalledProcessError = subprocess_imported.CalledProcessError -del subprocess_imported diff --git a/venv/lib/python3.6/site-packages/eventlet/green/thread.py b/venv/lib/python3.6/site-packages/eventlet/green/thread.py deleted file mode 100644 index 5800b30..0000000 --- a/venv/lib/python3.6/site-packages/eventlet/green/thread.py +++ /dev/null @@ -1,114 +0,0 @@ -"""Implements the standard thread module, using greenthreads.""" -from six.moves import _thread as __thread -import six -from eventlet.support import greenlets as greenlet -from eventlet import greenthread -from eventlet.semaphore import Semaphore as LockType -import sys - - -__patched__ = ['get_ident', 'start_new_thread', 'start_new', 'allocate_lock', - 'allocate', 'exit', 'interrupt_main', 'stack_size', '_local', - 'LockType', '_count'] - -error = __thread.error -__threadcount = 0 - - -if six.PY3: - def _set_sentinel(): - # TODO this is a dummy code, reimplementing this may be needed: - # https://hg.python.org/cpython/file/b5e9bc4352e1/Modules/_threadmodule.c#l1203 - return allocate_lock() - - TIMEOUT_MAX = __thread.TIMEOUT_MAX - - -def _count(): - return __threadcount - - -def get_ident(gr=None): - if gr is None: - return id(greenlet.getcurrent()) - else: - return id(gr) - - -def __thread_body(func, args, kwargs): - global __threadcount - __threadcount += 1 - try: - func(*args, **kwargs) - finally: - __threadcount -= 1 - - -def start_new_thread(function, args=(), kwargs=None): - if (sys.version_info >= (3, 4) - and getattr(function, '__module__', '') == 'threading' - and hasattr(function, '__self__')): - # Since Python 3.4, threading.Thread uses an internal lock - # automatically released when the python thread state is deleted. - # With monkey patching, eventlet uses green threads without python - # thread state, so the lock is not automatically released. - # - # Wrap _bootstrap_inner() to release explicitly the thread state lock - # when the thread completes. 
- thread = function.__self__ - bootstrap_inner = thread._bootstrap_inner - - def wrap_bootstrap_inner(): - try: - bootstrap_inner() - finally: - # The lock can be cleared (ex: by a fork()) - if thread._tstate_lock is not None: - thread._tstate_lock.release() - - thread._bootstrap_inner = wrap_bootstrap_inner - - kwargs = kwargs or {} - g = greenthread.spawn_n(__thread_body, function, args, kwargs) - return get_ident(g) - - -start_new = start_new_thread - - -def allocate_lock(*a): - return LockType(1) - - -allocate = allocate_lock - - -def exit(): - raise greenlet.GreenletExit - - -exit_thread = __thread.exit_thread - - -def interrupt_main(): - curr = greenlet.getcurrent() - if curr.parent and not curr.parent.dead: - curr.parent.throw(KeyboardInterrupt()) - else: - raise KeyboardInterrupt() - - -if hasattr(__thread, 'stack_size'): - __original_stack_size__ = __thread.stack_size - - def stack_size(size=None): - if size is None: - return __original_stack_size__() - if size > __original_stack_size__(): - return __original_stack_size__(size) - else: - pass - # not going to decrease stack_size, because otherwise other greenlets in - # this thread will suffer - -from eventlet.corolocal import local as _local diff --git a/venv/lib/python3.6/site-packages/eventlet/green/threading.py b/venv/lib/python3.6/site-packages/eventlet/green/threading.py deleted file mode 100644 index fce9bca..0000000 --- a/venv/lib/python3.6/site-packages/eventlet/green/threading.py +++ /dev/null @@ -1,134 +0,0 @@ -"""Implements the standard threading module, using greenthreads.""" -import eventlet -from eventlet.green import thread -from eventlet.green import time -from eventlet.support import greenlets as greenlet -import six - -__patched__ = ['_start_new_thread', '_allocate_lock', - '_sleep', 'local', 'stack_size', 'Lock', 'currentThread', - 'current_thread', '_after_fork', '_shutdown'] - -if six.PY2: - __patched__ += ['_get_ident'] -else: - __patched__ += ['get_ident', '_set_sentinel'] - -__orig_threading = eventlet.patcher.original('threading') -__threadlocal = __orig_threading.local() -__patched_enumerate = None - - -eventlet.patcher.inject( - 'threading', - globals(), - ('thread' if six.PY2 else '_thread', thread), - ('time', time)) - - -_count = 1 - - -class _GreenThread(object): - """Wrapper for GreenThread objects to provide Thread-like attributes - and methods""" - - def __init__(self, g): - global _count - self._g = g - self._name = 'GreenThread-%d' % _count - _count += 1 - - def __repr__(self): - return '<_GreenThread(%s, %r)>' % (self._name, self._g) - - def join(self, timeout=None): - return self._g.wait() - - def getName(self): - return self._name - get_name = getName - - def setName(self, name): - self._name = str(name) - set_name = setName - - name = property(getName, setName) - - ident = property(lambda self: id(self._g)) - - def isAlive(self): - return True - is_alive = isAlive - - daemon = property(lambda self: True) - - def isDaemon(self): - return self.daemon - is_daemon = isDaemon - - -__threading = None - - -def _fixup_thread(t): - # Some third-party packages (lockfile) will try to patch the - # threading.Thread class with a get_name attribute if it doesn't - # exist. Since we might return Thread objects from the original - # threading package that won't get patched, let's make sure each - # individual object gets patched too our patched threading.Thread - # class has been patched. This is why monkey patching can be bad... 
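As a rough usage sketch (assuming eventlet's usual eventlet.monkey_patch() entry point, which is not shown in this hunk): once the interpreter is monkey-patched, stdlib threading.Thread objects run on the green thread/threading modules deleted here, so start() spawns a greenthread rather than an OS thread and join() relies on the _tstate_lock handling described above.

    import eventlet
    eventlet.monkey_patch()            # thread/threading now refer to the green modules

    import threading

    def worker():
        eventlet.sleep(0.01)           # cooperative sleep; other greenthreads may run

    t = threading.Thread(target=worker)
    t.start()
    t.join()                           # works because _tstate_lock is released as above
    print(threading.active_count())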
- global __threading - if not __threading: - __threading = __import__('threading') - - if (hasattr(__threading.Thread, 'get_name') and - not hasattr(t, 'get_name')): - t.get_name = t.getName - return t - - -def current_thread(): - global __patched_enumerate - g = greenlet.getcurrent() - if not g: - # Not currently in a greenthread, fall back to standard function - return _fixup_thread(__orig_threading.current_thread()) - - try: - active = __threadlocal.active - except AttributeError: - active = __threadlocal.active = {} - - g_id = id(g) - t = active.get(g_id) - if t is not None: - return t - - # FIXME: move import from function body to top - # (jaketesler@github) Furthermore, I was unable to have the current_thread() return correct results from - # threading.enumerate() unless the enumerate() function was a) imported at runtime using the gross __import__() call - # and b) was hot-patched using patch_function(). - # https://github.com/eventlet/eventlet/issues/172#issuecomment-379421165 - if __patched_enumerate is None: - __patched_enumerate = eventlet.patcher.patch_function(__import__('threading').enumerate) - found = [th for th in __patched_enumerate() if th.ident == g_id] - if found: - return found[0] - - # Add green thread to active if we can clean it up on exit - def cleanup(g): - del active[g_id] - try: - g.link(cleanup) - except AttributeError: - # Not a GreenThread type, so there's no way to hook into - # the green thread exiting. Fall back to the standard - # function then. - t = _fixup_thread(__orig_threading.current_thread()) - else: - t = active[g_id] = _GreenThread(g) - - return t - -currentThread = current_thread diff --git a/venv/lib/python3.6/site-packages/eventlet/green/time.py b/venv/lib/python3.6/site-packages/eventlet/green/time.py deleted file mode 100644 index 0fbe30e..0000000 --- a/venv/lib/python3.6/site-packages/eventlet/green/time.py +++ /dev/null @@ -1,6 +0,0 @@ -__time = __import__('time') -from eventlet.patcher import slurp_properties -__patched__ = ['sleep'] -slurp_properties(__time, globals(), ignore=__patched__, srckeys=dir(__time)) -from eventlet.greenthread import sleep -sleep # silence pyflakes diff --git a/venv/lib/python3.6/site-packages/eventlet/green/urllib/__init__.py b/venv/lib/python3.6/site-packages/eventlet/green/urllib/__init__.py deleted file mode 100644 index bcfc349..0000000 --- a/venv/lib/python3.6/site-packages/eventlet/green/urllib/__init__.py +++ /dev/null @@ -1,40 +0,0 @@ -from eventlet import patcher -from eventlet.green import socket -from eventlet.green import time -from eventlet.green import httplib -from eventlet.green import ftplib -import six - -if six.PY2: - to_patch = [('socket', socket), ('httplib', httplib), - ('time', time), ('ftplib', ftplib)] - try: - from eventlet.green import ssl - to_patch.append(('ssl', ssl)) - except ImportError: - pass - - patcher.inject('urllib', globals(), *to_patch) - try: - URLopener - except NameError: - patcher.inject('urllib.request', globals(), *to_patch) - - - # patch a bunch of things that have imports inside the - # function body; this is lame and hacky but I don't feel - # too bad because urllib is a hacky pile of junk that no - # one should be using anyhow - URLopener.open_http = patcher.patch_function(URLopener.open_http, ('httplib', httplib)) - if hasattr(URLopener, 'open_https'): - URLopener.open_https = patcher.patch_function(URLopener.open_https, ('httplib', httplib)) - - URLopener.open_ftp = patcher.patch_function(URLopener.open_ftp, ('ftplib', ftplib)) - ftpwrapper.init = 
patcher.patch_function(ftpwrapper.init, ('ftplib', ftplib)) - ftpwrapper.retrfile = patcher.patch_function(ftpwrapper.retrfile, ('ftplib', ftplib)) - - del patcher - - # Run test program when run as a script - if __name__ == '__main__': - main() diff --git a/venv/lib/python3.6/site-packages/eventlet/green/urllib/error.py b/venv/lib/python3.6/site-packages/eventlet/green/urllib/error.py deleted file mode 100644 index 6913813..0000000 --- a/venv/lib/python3.6/site-packages/eventlet/green/urllib/error.py +++ /dev/null @@ -1,4 +0,0 @@ -from eventlet import patcher -from eventlet.green.urllib import response -patcher.inject('urllib.error', globals(), ('urllib.response', response)) -del patcher diff --git a/venv/lib/python3.6/site-packages/eventlet/green/urllib/parse.py b/venv/lib/python3.6/site-packages/eventlet/green/urllib/parse.py deleted file mode 100644 index f3a8924..0000000 --- a/venv/lib/python3.6/site-packages/eventlet/green/urllib/parse.py +++ /dev/null @@ -1,3 +0,0 @@ -from eventlet import patcher -patcher.inject('urllib.parse', globals()) -del patcher diff --git a/venv/lib/python3.6/site-packages/eventlet/green/urllib/request.py b/venv/lib/python3.6/site-packages/eventlet/green/urllib/request.py deleted file mode 100644 index dca7863..0000000 --- a/venv/lib/python3.6/site-packages/eventlet/green/urllib/request.py +++ /dev/null @@ -1,50 +0,0 @@ -from eventlet import patcher -from eventlet.green import ftplib, http, os, socket, time -from eventlet.green.http import client as http_client -from eventlet.green.urllib import error, parse, response - -# TODO should we also have green email version? -# import email - - -to_patch = [ - # This (http module) is needed here, otherwise test__greenness hangs - # forever on Python 3 because parts of non-green http (including - # http.client) leak into our patched urllib.request. There may be a nicer - # way to handle this (I didn't dig too deep) but this does the job. 
Jakub - ('http', http), - - ('http.client', http_client), - ('os', os), - ('socket', socket), - ('time', time), - ('urllib.error', error), - ('urllib.parse', parse), - ('urllib.response', response), -] - -try: - from eventlet.green import ssl -except ImportError: - pass -else: - to_patch.append(('ssl', ssl)) - -patcher.inject('urllib.request', globals(), *to_patch) -del to_patch - -to_patch_in_functions = [('ftplib', ftplib)] -del ftplib - -FTPHandler.ftp_open = patcher.patch_function(FTPHandler.ftp_open, *to_patch_in_functions) -URLopener.open_ftp = patcher.patch_function(URLopener.open_ftp, *to_patch_in_functions) - -ftperrors = patcher.patch_function(ftperrors, *to_patch_in_functions) - -ftpwrapper.init = patcher.patch_function(ftpwrapper.init, *to_patch_in_functions) -ftpwrapper.retrfile = patcher.patch_function(ftpwrapper.retrfile, *to_patch_in_functions) - -del error -del parse -del response -del to_patch_in_functions diff --git a/venv/lib/python3.6/site-packages/eventlet/green/urllib/response.py b/venv/lib/python3.6/site-packages/eventlet/green/urllib/response.py deleted file mode 100644 index f9aaba5..0000000 --- a/venv/lib/python3.6/site-packages/eventlet/green/urllib/response.py +++ /dev/null @@ -1,3 +0,0 @@ -from eventlet import patcher -patcher.inject('urllib.response', globals()) -del patcher diff --git a/venv/lib/python3.6/site-packages/eventlet/green/urllib2.py b/venv/lib/python3.6/site-packages/eventlet/green/urllib2.py deleted file mode 100644 index c53ecbb..0000000 --- a/venv/lib/python3.6/site-packages/eventlet/green/urllib2.py +++ /dev/null @@ -1,20 +0,0 @@ -from eventlet import patcher -from eventlet.green import ftplib -from eventlet.green import httplib -from eventlet.green import socket -from eventlet.green import ssl -from eventlet.green import time -from eventlet.green import urllib - -patcher.inject( - 'urllib2', - globals(), - ('httplib', httplib), - ('socket', socket), - ('ssl', ssl), - ('time', time), - ('urllib', urllib)) - -FTPHandler.ftp_open = patcher.patch_function(FTPHandler.ftp_open, ('ftplib', ftplib)) - -del patcher diff --git a/venv/lib/python3.6/site-packages/eventlet/green/zmq.py b/venv/lib/python3.6/site-packages/eventlet/green/zmq.py deleted file mode 100644 index 1e41103..0000000 --- a/venv/lib/python3.6/site-packages/eventlet/green/zmq.py +++ /dev/null @@ -1,465 +0,0 @@ -# coding: utf-8 -"""The :mod:`zmq` module wraps the :class:`Socket` and :class:`Context` -found in :mod:`pyzmq ` to be non blocking. -""" -__zmq__ = __import__('zmq') -import eventlet.hubs -from eventlet.patcher import slurp_properties -from eventlet.support import greenlets as greenlet - -__patched__ = ['Context', 'Socket'] -slurp_properties(__zmq__, globals(), ignore=__patched__) - -from collections import deque - -try: - # alias XREQ/XREP to DEALER/ROUTER if available - if not hasattr(__zmq__, 'XREQ'): - XREQ = DEALER - if not hasattr(__zmq__, 'XREP'): - XREP = ROUTER -except NameError: - pass - - -class LockReleaseError(Exception): - pass - - -class _QueueLock(object): - """A Lock that can be acquired by at most one thread. Any other - thread calling acquire will be blocked in a queue. When release - is called, the threads are awoken in the order they blocked, - one at a time. 
This lock can be required recursively by the same - thread.""" - - def __init__(self): - self._waiters = deque() - self._count = 0 - self._holder = None - self._hub = eventlet.hubs.get_hub() - - def __nonzero__(self): - return bool(self._count) - - __bool__ = __nonzero__ - - def __enter__(self): - self.acquire() - - def __exit__(self, type, value, traceback): - self.release() - - def acquire(self): - current = greenlet.getcurrent() - if (self._waiters or self._count > 0) and self._holder is not current: - # block until lock is free - self._waiters.append(current) - self._hub.switch() - w = self._waiters.popleft() - - assert w is current, 'Waiting threads woken out of order' - assert self._count == 0, 'After waking a thread, the lock must be unacquired' - - self._holder = current - self._count += 1 - - def release(self): - if self._count <= 0: - raise LockReleaseError("Cannot release unacquired lock") - - self._count -= 1 - if self._count == 0: - self._holder = None - if self._waiters: - # wake next - self._hub.schedule_call_global(0, self._waiters[0].switch) - - -class _BlockedThread(object): - """Is either empty, or represents a single blocked thread that - blocked itself by calling the block() method. The thread can be - awoken by calling wake(). Wake() can be called multiple times and - all but the first call will have no effect.""" - - def __init__(self): - self._blocked_thread = None - self._wakeupper = None - self._hub = eventlet.hubs.get_hub() - - def __nonzero__(self): - return self._blocked_thread is not None - - __bool__ = __nonzero__ - - def block(self, deadline=None): - if self._blocked_thread is not None: - raise Exception("Cannot block more than one thread on one BlockedThread") - self._blocked_thread = greenlet.getcurrent() - - if deadline is not None: - self._hub.schedule_call_local(deadline - self._hub.clock(), self.wake) - - try: - self._hub.switch() - finally: - self._blocked_thread = None - # cleanup the wakeup task - if self._wakeupper is not None: - # Important to cancel the wakeup task so it doesn't - # spuriously wake this greenthread later on. - self._wakeupper.cancel() - self._wakeupper = None - - def wake(self): - """Schedules the blocked thread to be awoken and return - True. If wake has already been called or if there is no - blocked thread, then this call has no effect and returns - False.""" - if self._blocked_thread is not None and self._wakeupper is None: - self._wakeupper = self._hub.schedule_call_global(0, self._blocked_thread.switch) - return True - return False - - -class Context(__zmq__.Context): - """Subclass of :class:`zmq.Context` - """ - - def socket(self, socket_type): - """Overridden method to ensure that the green version of socket is used - - Behaves the same as :meth:`zmq.Context.socket`, but ensures - that a :class:`Socket` with all of its send and recv methods set to be - non-blocking is returned - """ - if self.closed: - raise ZMQError(ENOTSUP) - return Socket(self, socket_type) - - -def _wraps(source_fn): - """A decorator that copies the __name__ and __doc__ from the given - function - """ - def wrapper(dest_fn): - dest_fn.__name__ = source_fn.__name__ - dest_fn.__doc__ = source_fn.__doc__ - return dest_fn - return wrapper - -# Implementation notes: Each socket in 0mq contains a pipe that the -# background IO threads use to communicate with the socket. These -# events are important because they tell the socket when it is able to -# send and when it has messages waiting to be received. 
The read end -# of the events pipe is the same FD that getsockopt(zmq.FD) returns. -# -# Events are read from the socket's event pipe only on the thread that -# the 0mq context is associated with, which is the native thread the -# greenthreads are running on, and the only operations that cause the -# events to be read and processed are send(), recv() and -# getsockopt(zmq.EVENTS). This means that after doing any of these -# three operations, the ability of the socket to send or receive a -# message without blocking may have changed, but after the events are -# read the FD is no longer readable so the hub may not signal our -# listener. -# -# If we understand that after calling send() a message might be ready -# to be received and that after calling recv() a message might be able -# to be sent, what should we do next? There are two approaches: -# -# 1. Always wake the other thread if there is one waiting. This -# wakeup may be spurious because the socket might not actually be -# ready for a send() or recv(). However, if a thread is in a -# tight-loop successfully calling send() or recv() then the wakeups -# are naturally batched and there's very little cost added to each -# send/recv call. -# -# or -# -# 2. Call getsockopt(zmq.EVENTS) and explicitly check if the other -# thread should be woken up. This avoids spurious wake-ups but may -# add overhead because getsockopt will cause all events to be -# processed, whereas send and recv throttle processing -# events. Admittedly, all of the events will need to be processed -# eventually, but it is likely faster to batch the processing. -# -# Which approach is better? I have no idea. -# -# TODO: -# - Support MessageTrackers and make MessageTracker.wait green - -_Socket = __zmq__.Socket -_Socket_recv = _Socket.recv -_Socket_send = _Socket.send -_Socket_send_multipart = _Socket.send_multipart -_Socket_recv_multipart = _Socket.recv_multipart -_Socket_send_string = _Socket.send_string -_Socket_recv_string = _Socket.recv_string -_Socket_send_pyobj = _Socket.send_pyobj -_Socket_recv_pyobj = _Socket.recv_pyobj -_Socket_send_json = _Socket.send_json -_Socket_recv_json = _Socket.recv_json -_Socket_getsockopt = _Socket.getsockopt - - -class Socket(_Socket): - """Green version of :class:`zmq.core.socket.Socket - - The following three methods are always overridden: - * send - * recv - * getsockopt - To ensure that the ``zmq.NOBLOCK`` flag is set and that sending or receiving - is deferred to the hub (using :func:`eventlet.hubs.trampoline`) if a - ``zmq.EAGAIN`` (retry) error is raised - - For some socket types, the following methods are also overridden: - * send_multipart - * recv_multipart - """ - - def __init__(self, context, socket_type): - super(Socket, self).__init__(context, socket_type) - - self.__dict__['_eventlet_send_event'] = _BlockedThread() - self.__dict__['_eventlet_recv_event'] = _BlockedThread() - self.__dict__['_eventlet_send_lock'] = _QueueLock() - self.__dict__['_eventlet_recv_lock'] = _QueueLock() - - def event(fd): - # Some events arrived at the zmq socket. This may mean - # there's a message that can be read or there's space for - # a message to be written. 
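As an illustrative usage sketch (not taken from this module; the endpoint name and timing values are arbitrary): the green Context and Socket defined here let two greenthreads in one process exchange messages, with recv() yielding to the hub instead of blocking.

    import eventlet
    from eventlet.green import zmq

    CTX = zmq.Context()

    def consumer():
        sock = CTX.socket(zmq.PULL)
        sock.bind('inproc://example')
        print(sock.recv())             # green-blocks until the producer sends

    def producer():
        sock = CTX.socket(zmq.PUSH)
        sock.connect('inproc://example')
        sock.send(b'hello')

    eventlet.spawn(consumer)
    eventlet.spawn(producer)
    eventlet.sleep(0.1)                # give both greenthreads a chance to run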
- send_wake = self._eventlet_send_event.wake() - recv_wake = self._eventlet_recv_event.wake() - if not send_wake and not recv_wake: - # if no waiting send or recv thread was woken up, then - # force the zmq socket's events to be processed to - # avoid repeated wakeups - _Socket_getsockopt(self, EVENTS) - - hub = eventlet.hubs.get_hub() - self.__dict__['_eventlet_listener'] = hub.add(hub.READ, - self.getsockopt(FD), - event, - lambda _: None, - lambda: None) - self.__dict__['_eventlet_clock'] = hub.clock - - @_wraps(_Socket.close) - def close(self, linger=None): - super(Socket, self).close(linger) - if self._eventlet_listener is not None: - eventlet.hubs.get_hub().remove(self._eventlet_listener) - self.__dict__['_eventlet_listener'] = None - # wake any blocked threads - self._eventlet_send_event.wake() - self._eventlet_recv_event.wake() - - @_wraps(_Socket.getsockopt) - def getsockopt(self, option): - result = _Socket_getsockopt(self, option) - if option == EVENTS: - # Getting the events causes the zmq socket to process - # events which may mean a msg can be sent or received. If - # there is a greenthread blocked and waiting for events, - # it will miss the edge-triggered read event, so wake it - # up. - if (result & POLLOUT): - self._eventlet_send_event.wake() - if (result & POLLIN): - self._eventlet_recv_event.wake() - return result - - @_wraps(_Socket.send) - def send(self, msg, flags=0, copy=True, track=False): - """A send method that's safe to use when multiple greenthreads - are calling send, send_multipart, recv and recv_multipart on - the same socket. - """ - if flags & NOBLOCK: - result = _Socket_send(self, msg, flags, copy, track) - # Instead of calling both wake methods, could call - # self.getsockopt(EVENTS) which would trigger wakeups if - # needed. - self._eventlet_send_event.wake() - self._eventlet_recv_event.wake() - return result - - # TODO: pyzmq will copy the message buffer and create Message - # objects under some circumstances. We could do that work here - # once to avoid doing it every time the send is retried. - flags |= NOBLOCK - with self._eventlet_send_lock: - while True: - try: - return _Socket_send(self, msg, flags, copy, track) - except ZMQError as e: - if e.errno == EAGAIN: - self._eventlet_send_event.block() - else: - raise - finally: - # The call to send processes 0mq events and may - # make the socket ready to recv. Wake the next - # receiver. (Could check EVENTS for POLLIN here) - self._eventlet_recv_event.wake() - - @_wraps(_Socket.send_multipart) - def send_multipart(self, msg_parts, flags=0, copy=True, track=False): - """A send_multipart method that's safe to use when multiple - greenthreads are calling send, send_multipart, recv and - recv_multipart on the same socket. - """ - if flags & NOBLOCK: - return _Socket_send_multipart(self, msg_parts, flags, copy, track) - - # acquire lock here so the subsequent calls to send for the - # message parts after the first don't block - with self._eventlet_send_lock: - return _Socket_send_multipart(self, msg_parts, flags, copy, track) - - @_wraps(_Socket.send_string) - def send_string(self, u, flags=0, copy=True, encoding='utf-8'): - """A send_string method that's safe to use when multiple - greenthreads are calling send, send_string, recv and - recv_string on the same socket. 
- """ - if flags & NOBLOCK: - return _Socket_send_string(self, u, flags, copy, encoding) - - # acquire lock here so the subsequent calls to send for the - # message parts after the first don't block - with self._eventlet_send_lock: - return _Socket_send_string(self, u, flags, copy, encoding) - - @_wraps(_Socket.send_pyobj) - def send_pyobj(self, obj, flags=0, protocol=2): - """A send_pyobj method that's safe to use when multiple - greenthreads are calling send, send_pyobj, recv and - recv_pyobj on the same socket. - """ - if flags & NOBLOCK: - return _Socket_send_pyobj(self, obj, flags, protocol) - - # acquire lock here so the subsequent calls to send for the - # message parts after the first don't block - with self._eventlet_send_lock: - return _Socket_send_pyobj(self, obj, flags, protocol) - - @_wraps(_Socket.send_json) - def send_json(self, obj, flags=0, **kwargs): - """A send_json method that's safe to use when multiple - greenthreads are calling send, send_json, recv and - recv_json on the same socket. - """ - if flags & NOBLOCK: - return _Socket_send_json(self, obj, flags, **kwargs) - - # acquire lock here so the subsequent calls to send for the - # message parts after the first don't block - with self._eventlet_send_lock: - return _Socket_send_json(self, obj, flags, **kwargs) - - @_wraps(_Socket.recv) - def recv(self, flags=0, copy=True, track=False): - """A recv method that's safe to use when multiple greenthreads - are calling send, send_multipart, recv and recv_multipart on - the same socket. - """ - if flags & NOBLOCK: - msg = _Socket_recv(self, flags, copy, track) - # Instead of calling both wake methods, could call - # self.getsockopt(EVENTS) which would trigger wakeups if - # needed. - self._eventlet_send_event.wake() - self._eventlet_recv_event.wake() - return msg - - deadline = None - if hasattr(__zmq__, 'RCVTIMEO'): - sock_timeout = self.getsockopt(__zmq__.RCVTIMEO) - if sock_timeout == -1: - pass - elif sock_timeout > 0: - deadline = self._eventlet_clock() + sock_timeout / 1000.0 - else: - raise ValueError(sock_timeout) - - flags |= NOBLOCK - with self._eventlet_recv_lock: - while True: - try: - return _Socket_recv(self, flags, copy, track) - except ZMQError as e: - if e.errno == EAGAIN: - # zmq in its wisdom decided to reuse EAGAIN for timeouts - if deadline is not None and self._eventlet_clock() > deadline: - e.is_timeout = True - raise - - self._eventlet_recv_event.block(deadline=deadline) - else: - raise - finally: - # The call to recv processes 0mq events and may - # make the socket ready to send. Wake the next - # receiver. (Could check EVENTS for POLLOUT here) - self._eventlet_send_event.wake() - - @_wraps(_Socket.recv_multipart) - def recv_multipart(self, flags=0, copy=True, track=False): - """A recv_multipart method that's safe to use when multiple - greenthreads are calling send, send_multipart, recv and - recv_multipart on the same socket. - """ - if flags & NOBLOCK: - return _Socket_recv_multipart(self, flags, copy, track) - - # acquire lock here so the subsequent calls to recv for the - # message parts after the first don't block - with self._eventlet_recv_lock: - return _Socket_recv_multipart(self, flags, copy, track) - - @_wraps(_Socket.recv_string) - def recv_string(self, flags=0, encoding='utf-8'): - """A recv_string method that's safe to use when multiple - greenthreads are calling send, send_string, recv and - recv_string on the same socket. 
- """ - if flags & NOBLOCK: - return _Socket_recv_string(self, flags, encoding) - - # acquire lock here so the subsequent calls to recv for the - # message parts after the first don't block - with self._eventlet_recv_lock: - return _Socket_recv_string(self, flags, encoding) - - @_wraps(_Socket.recv_json) - def recv_json(self, flags=0, **kwargs): - """A recv_json method that's safe to use when multiple - greenthreads are calling send, send_json, recv and - recv_json on the same socket. - """ - if flags & NOBLOCK: - return _Socket_recv_json(self, flags, **kwargs) - - # acquire lock here so the subsequent calls to recv for the - # message parts after the first don't block - with self._eventlet_recv_lock: - return _Socket_recv_json(self, flags, **kwargs) - - @_wraps(_Socket.recv_pyobj) - def recv_pyobj(self, flags=0): - """A recv_pyobj method that's safe to use when multiple - greenthreads are calling send, send_pyobj, recv and - recv_pyobj on the same socket. - """ - if flags & NOBLOCK: - return _Socket_recv_pyobj(self, flags) - - # acquire lock here so the subsequent calls to recv for the - # message parts after the first don't block - with self._eventlet_recv_lock: - return _Socket_recv_pyobj(self, flags) diff --git a/venv/lib/python3.6/site-packages/eventlet/greenio/__init__.py b/venv/lib/python3.6/site-packages/eventlet/greenio/__init__.py deleted file mode 100644 index f6c5247..0000000 --- a/venv/lib/python3.6/site-packages/eventlet/greenio/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -import six - -from eventlet.greenio.base import * # noqa - -if six.PY2: - from eventlet.greenio.py2 import * # noqa -else: - from eventlet.greenio.py3 import * # noqa diff --git a/venv/lib/python3.6/site-packages/eventlet/greenio/base.py b/venv/lib/python3.6/site-packages/eventlet/greenio/base.py deleted file mode 100644 index ce53b54..0000000 --- a/venv/lib/python3.6/site-packages/eventlet/greenio/base.py +++ /dev/null @@ -1,506 +0,0 @@ -import errno -import os -import socket -import sys -import time -import warnings - -import eventlet -from eventlet.hubs import trampoline, notify_opened, IOClosed -from eventlet.support import get_errno -import six - -__all__ = [ - 'GreenSocket', '_GLOBAL_DEFAULT_TIMEOUT', 'set_nonblocking', - 'SOCKET_BLOCKING', 'SOCKET_CLOSED', 'CONNECT_ERR', 'CONNECT_SUCCESS', - 'shutdown_safe', 'SSL', - 'socket_timeout', -] - -BUFFER_SIZE = 4096 -CONNECT_ERR = set((errno.EINPROGRESS, errno.EALREADY, errno.EWOULDBLOCK)) -CONNECT_SUCCESS = set((0, errno.EISCONN)) -if sys.platform[:3] == "win": - CONNECT_ERR.add(errno.WSAEINVAL) # Bug 67 - -if six.PY2: - _python2_fileobject = socket._fileobject - -_original_socket = eventlet.patcher.original('socket').socket - - -socket_timeout = eventlet.timeout.wrap_is_timeout(socket.timeout) - - -def socket_connect(descriptor, address): - """ - Attempts to connect to the address, returns the descriptor if it succeeds, - returns None if it needs to trampoline, and raises any exceptions. - """ - err = descriptor.connect_ex(address) - if err in CONNECT_ERR: - return None - if err not in CONNECT_SUCCESS: - raise socket.error(err, errno.errorcode[err]) - return descriptor - - -def socket_checkerr(descriptor): - err = descriptor.getsockopt(socket.SOL_SOCKET, socket.SO_ERROR) - if err not in CONNECT_SUCCESS: - raise socket.error(err, errno.errorcode[err]) - - -def socket_accept(descriptor): - """ - Attempts to accept() on the descriptor, returns a client,address tuple - if it succeeds; returns None if it needs to trampoline, and raises - any exceptions. 
- """ - try: - return descriptor.accept() - except socket.error as e: - if get_errno(e) == errno.EWOULDBLOCK: - return None - raise - - -if sys.platform[:3] == "win": - # winsock sometimes throws ENOTCONN - SOCKET_BLOCKING = set((errno.EAGAIN, errno.EWOULDBLOCK,)) - SOCKET_CLOSED = set((errno.ECONNRESET, errno.ENOTCONN, errno.ESHUTDOWN)) -else: - # oddly, on linux/darwin, an unconnected socket is expected to block, - # so we treat ENOTCONN the same as EWOULDBLOCK - SOCKET_BLOCKING = set((errno.EAGAIN, errno.EWOULDBLOCK, errno.ENOTCONN)) - SOCKET_CLOSED = set((errno.ECONNRESET, errno.ESHUTDOWN, errno.EPIPE)) - - -def set_nonblocking(fd): - """ - Sets the descriptor to be nonblocking. Works on many file-like - objects as well as sockets. Only sockets can be nonblocking on - Windows, however. - """ - try: - setblocking = fd.setblocking - except AttributeError: - # fd has no setblocking() method. It could be that this version of - # Python predates socket.setblocking(). In that case, we can still set - # the flag "by hand" on the underlying OS fileno using the fcntl - # module. - try: - import fcntl - except ImportError: - # Whoops, Windows has no fcntl module. This might not be a socket - # at all, but rather a file-like object with no setblocking() - # method. In particular, on Windows, pipes don't support - # non-blocking I/O and therefore don't have that method. Which - # means fcntl wouldn't help even if we could load it. - raise NotImplementedError("set_nonblocking() on a file object " - "with no setblocking() method " - "(Windows pipes don't support non-blocking I/O)") - # We managed to import fcntl. - fileno = fd.fileno() - orig_flags = fcntl.fcntl(fileno, fcntl.F_GETFL) - new_flags = orig_flags | os.O_NONBLOCK - if new_flags != orig_flags: - fcntl.fcntl(fileno, fcntl.F_SETFL, new_flags) - else: - # socket supports setblocking() - setblocking(0) - - -try: - from socket import _GLOBAL_DEFAULT_TIMEOUT -except ImportError: - _GLOBAL_DEFAULT_TIMEOUT = object() - - -class GreenSocket(object): - """ - Green version of socket.socket class, that is intended to be 100% - API-compatible. - - It also recognizes the keyword parameter, 'set_nonblocking=True'. - Pass False to indicate that socket is already in non-blocking mode - to save syscalls. - """ - - # This placeholder is to prevent __getattr__ from creating an infinite call loop - fd = None - - def __init__(self, family=socket.AF_INET, *args, **kwargs): - should_set_nonblocking = kwargs.pop('set_nonblocking', True) - if isinstance(family, six.integer_types): - fd = _original_socket(family, *args, **kwargs) - # Notify the hub that this is a newly-opened socket. - notify_opened(fd.fileno()) - else: - fd = family - - # import timeout from other socket, if it was there - try: - self._timeout = fd.gettimeout() or socket.getdefaulttimeout() - except AttributeError: - self._timeout = socket.getdefaulttimeout() - - # Filter fd.fileno() != -1 so that won't call set non-blocking on - # closed socket - if should_set_nonblocking and fd.fileno() != -1: - set_nonblocking(fd) - self.fd = fd - # when client calls setblocking(0) or settimeout(0) the socket must - # act non-blocking - self.act_non_blocking = False - - # Copy some attributes from underlying real socket. - # This is the easiest way that i found to fix - # https://bitbucket.org/eventlet/eventlet/issue/136 - # Only `getsockopt` is required to fix that issue, others - # are just premature optimization to save __getattr__ call. 
- self.bind = fd.bind - self.close = fd.close - self.fileno = fd.fileno - self.getsockname = fd.getsockname - self.getsockopt = fd.getsockopt - self.listen = fd.listen - self.setsockopt = fd.setsockopt - self.shutdown = fd.shutdown - self._closed = False - - @property - def _sock(self): - return self - - if six.PY3: - def _get_io_refs(self): - return self.fd._io_refs - - def _set_io_refs(self, value): - self.fd._io_refs = value - - _io_refs = property(_get_io_refs, _set_io_refs) - - # Forward unknown attributes to fd, cache the value for future use. - # I do not see any simple attribute which could be changed - # so caching everything in self is fine. - # If we find such attributes - only attributes having __get__ might be cached. - # For now - I do not want to complicate it. - def __getattr__(self, name): - if self.fd is None: - raise AttributeError(name) - attr = getattr(self.fd, name) - setattr(self, name, attr) - return attr - - def _trampoline(self, fd, read=False, write=False, timeout=None, timeout_exc=None): - """ We need to trampoline via the event hub. - We catch any signal back from the hub indicating that the operation we - were waiting on was associated with a filehandle that's since been - invalidated. - """ - if self._closed: - # If we did any logging, alerting to a second trampoline attempt on a closed - # socket here would be useful. - raise IOClosed() - try: - return trampoline(fd, read=read, write=write, timeout=timeout, - timeout_exc=timeout_exc, - mark_as_closed=self._mark_as_closed) - except IOClosed: - # This socket's been obsoleted. De-fang it. - self._mark_as_closed() - raise - - def accept(self): - if self.act_non_blocking: - res = self.fd.accept() - notify_opened(res[0].fileno()) - return res - fd = self.fd - _timeout_exc = socket_timeout('timed out') - while True: - res = socket_accept(fd) - if res is not None: - client, addr = res - notify_opened(client.fileno()) - set_nonblocking(client) - return type(self)(client), addr - self._trampoline(fd, read=True, timeout=self.gettimeout(), timeout_exc=_timeout_exc) - - def _mark_as_closed(self): - """ Mark this socket as being closed """ - self._closed = True - - def __del__(self): - # This is in case self.close is not assigned yet (currently the constructor does it) - close = getattr(self, 'close', None) - if close is not None: - close() - - def connect(self, address): - if self.act_non_blocking: - return self.fd.connect(address) - fd = self.fd - _timeout_exc = socket_timeout('timed out') - if self.gettimeout() is None: - while not socket_connect(fd, address): - try: - self._trampoline(fd, write=True) - except IOClosed: - raise socket.error(errno.EBADFD) - socket_checkerr(fd) - else: - end = time.time() + self.gettimeout() - while True: - if socket_connect(fd, address): - return - if time.time() >= end: - raise _timeout_exc - timeout = end - time.time() - try: - self._trampoline(fd, write=True, timeout=timeout, timeout_exc=_timeout_exc) - except IOClosed: - # ... we need some workable errno here. 
- raise socket.error(errno.EBADFD) - socket_checkerr(fd) - - def connect_ex(self, address): - if self.act_non_blocking: - return self.fd.connect_ex(address) - fd = self.fd - if self.gettimeout() is None: - while not socket_connect(fd, address): - try: - self._trampoline(fd, write=True) - socket_checkerr(fd) - except socket.error as ex: - return get_errno(ex) - except IOClosed: - return errno.EBADFD - else: - end = time.time() + self.gettimeout() - timeout_exc = socket.timeout(errno.EAGAIN) - while True: - try: - if socket_connect(fd, address): - return 0 - if time.time() >= end: - raise timeout_exc - self._trampoline(fd, write=True, timeout=end - time.time(), - timeout_exc=timeout_exc) - socket_checkerr(fd) - except socket.error as ex: - return get_errno(ex) - except IOClosed: - return errno.EBADFD - - def dup(self, *args, **kw): - sock = self.fd.dup(*args, **kw) - newsock = type(self)(sock, set_nonblocking=False) - newsock.settimeout(self.gettimeout()) - return newsock - - if six.PY3: - def makefile(self, *args, **kwargs): - return _original_socket.makefile(self, *args, **kwargs) - else: - def makefile(self, *args, **kwargs): - dupped = self.dup() - res = _python2_fileobject(dupped, *args, **kwargs) - if hasattr(dupped, "_drop"): - dupped._drop() - # Making the close function of dupped None so that when garbage collector - # kicks in and tries to call del, which will ultimately call close, _drop - # doesn't get called on dupped twice as it has been already explicitly called in - # previous line - dupped.close = None - return res - - def makeGreenFile(self, *args, **kw): - warnings.warn("makeGreenFile has been deprecated, please use " - "makefile instead", DeprecationWarning, stacklevel=2) - return self.makefile(*args, **kw) - - def _read_trampoline(self): - self._trampoline( - self.fd, - read=True, - timeout=self.gettimeout(), - timeout_exc=socket_timeout('timed out')) - - def _recv_loop(self, recv_meth, empty_val, *args): - fd = self.fd - if self.act_non_blocking: - return recv_meth(*args) - - while True: - try: - # recv: bufsize=0? - # recv_into: buffer is empty? - # This is needed because behind the scenes we use sockets in - # nonblocking mode and builtin recv* methods. Attempting to read - # 0 bytes from a nonblocking socket using a builtin recv* method - # does not raise a timeout exception. Since we're simulating - # a blocking socket here we need to produce a timeout exception - # if needed, hence the call to trampoline. - if not args[0]: - self._read_trampoline() - return recv_meth(*args) - except socket.error as e: - if get_errno(e) in SOCKET_BLOCKING: - pass - elif get_errno(e) in SOCKET_CLOSED: - return empty_val - else: - raise - - try: - self._read_trampoline() - except IOClosed as e: - # Perhaps we should return '' instead? 
- raise EOFError() - - def recv(self, bufsize, flags=0): - return self._recv_loop(self.fd.recv, b'', bufsize, flags) - - def recvfrom(self, bufsize, flags=0): - return self._recv_loop(self.fd.recvfrom, b'', bufsize, flags) - - def recv_into(self, buffer, nbytes=0, flags=0): - return self._recv_loop(self.fd.recv_into, 0, buffer, nbytes, flags) - - def recvfrom_into(self, buffer, nbytes=0, flags=0): - return self._recv_loop(self.fd.recvfrom_into, 0, buffer, nbytes, flags) - - def _send_loop(self, send_method, data, *args): - if self.act_non_blocking: - return send_method(data, *args) - - _timeout_exc = socket_timeout('timed out') - while True: - try: - return send_method(data, *args) - except socket.error as e: - eno = get_errno(e) - if eno == errno.ENOTCONN or eno not in SOCKET_BLOCKING: - raise - - try: - self._trampoline(self.fd, write=True, timeout=self.gettimeout(), - timeout_exc=_timeout_exc) - except IOClosed: - raise socket.error(errno.ECONNRESET, 'Connection closed by another thread') - - def send(self, data, flags=0): - return self._send_loop(self.fd.send, data, flags) - - def sendto(self, data, *args): - return self._send_loop(self.fd.sendto, data, *args) - - def sendall(self, data, flags=0): - tail = self.send(data, flags) - len_data = len(data) - while tail < len_data: - tail += self.send(data[tail:], flags) - - def setblocking(self, flag): - if flag: - self.act_non_blocking = False - self._timeout = None - else: - self.act_non_blocking = True - self._timeout = 0.0 - - def settimeout(self, howlong): - if howlong is None or howlong == _GLOBAL_DEFAULT_TIMEOUT: - self.setblocking(True) - return - try: - f = howlong.__float__ - except AttributeError: - raise TypeError('a float is required') - howlong = f() - if howlong < 0.0: - raise ValueError('Timeout value out of range') - if howlong == 0.0: - self.act_non_blocking = True - self._timeout = 0.0 - else: - self.act_non_blocking = False - self._timeout = howlong - - def gettimeout(self): - return self._timeout - - def __enter__(self): - return self - - def __exit__(self, *args): - self.close() - - if "__pypy__" in sys.builtin_module_names: - def _reuse(self): - getattr(self.fd, '_sock', self.fd)._reuse() - - def _drop(self): - getattr(self.fd, '_sock', self.fd)._drop() - - -def _operation_on_closed_file(*args, **kwargs): - raise ValueError("I/O operation on closed file") - - -greenpipe_doc = """ - GreenPipe is a cooperative replacement for file class. - It will cooperate on pipes. It will block on regular file. - Differneces from file class: - - mode is r/w property. Should re r/o - - encoding property not implemented - - write/writelines will not raise TypeError exception when non-string data is written - it will write str(data) instead - - Universal new lines are not supported and newlines property not implementeded - - file argument can be descriptor, file name or file object. - """ - -# import SSL module here so we can refer to greenio.SSL.exceptionclass -try: - from OpenSSL import SSL -except ImportError: - # pyOpenSSL not installed, define exceptions anyway for convenience - class SSL(object): - class WantWriteError(Exception): - pass - - class WantReadError(Exception): - pass - - class ZeroReturnError(Exception): - pass - - class SysCallError(Exception): - pass - - -def shutdown_safe(sock): - """Shuts down the socket. This is a convenience method for - code that wants to gracefully handle regular sockets, SSL.Connection - sockets from PyOpenSSL and ssl.SSLSocket objects from Python 2.7 interchangeably. 
- Both types of ssl socket require a shutdown() before close, - but they have different arity on their shutdown method. - - Regular sockets don't need a shutdown before close, but it doesn't hurt. - """ - try: - try: - # socket, ssl.SSLSocket - return sock.shutdown(socket.SHUT_RDWR) - except TypeError: - # SSL.Connection - return sock.shutdown() - except socket.error as e: - # we don't care if the socket is already closed; - # this will often be the case in an http server context - if get_errno(e) not in (errno.ENOTCONN, errno.EBADF, errno.ENOTSOCK): - raise diff --git a/venv/lib/python3.6/site-packages/eventlet/greenio/py2.py b/venv/lib/python3.6/site-packages/eventlet/greenio/py2.py deleted file mode 100644 index 20338aa..0000000 --- a/venv/lib/python3.6/site-packages/eventlet/greenio/py2.py +++ /dev/null @@ -1,227 +0,0 @@ -import errno -import os - -from eventlet.greenio.base import ( - _operation_on_closed_file, - greenpipe_doc, - set_nonblocking, - socket, - SOCKET_BLOCKING, -) -from eventlet.hubs import trampoline, notify_close, notify_opened, IOClosed -from eventlet.support import get_errno -import six - -__all__ = ['_fileobject', 'GreenPipe'] - -_fileobject = socket._fileobject - - -class GreenPipe(_fileobject): - - __doc__ = greenpipe_doc - - def __init__(self, f, mode='r', bufsize=-1): - if not isinstance(f, six.string_types + (int, file)): - raise TypeError('f(ile) should be int, str, unicode or file, not %r' % f) - - if isinstance(f, six.string_types): - f = open(f, mode, 0) - - if isinstance(f, int): - fileno = f - self._name = "" % fileno - else: - fileno = os.dup(f.fileno()) - self._name = f.name - if f.mode != mode: - raise ValueError('file.mode %r does not match mode parameter %r' % (f.mode, mode)) - self._name = f.name - f.close() - - super(GreenPipe, self).__init__(_SocketDuckForFd(fileno), mode, bufsize) - set_nonblocking(self) - self.softspace = 0 - - @property - def name(self): - return self._name - - def __repr__(self): - return "<%s %s %r, mode %r at 0x%x>" % ( - self.closed and 'closed' or 'open', - self.__class__.__name__, - self.name, - self.mode, - (id(self) < 0) and (sys.maxint + id(self)) or id(self)) - - def close(self): - super(GreenPipe, self).close() - for method in [ - 'fileno', 'flush', 'isatty', 'next', 'read', 'readinto', - 'readline', 'readlines', 'seek', 'tell', 'truncate', - 'write', 'xreadlines', '__iter__', '__next__', 'writelines']: - setattr(self, method, _operation_on_closed_file) - - def __enter__(self): - return self - - def __exit__(self, *args): - self.close() - - def _get_readahead_len(self): - return len(self._rbuf.getvalue()) - - def _clear_readahead_buf(self): - len = self._get_readahead_len() - if len > 0: - self.read(len) - - def tell(self): - self.flush() - try: - return os.lseek(self.fileno(), 0, 1) - self._get_readahead_len() - except OSError as e: - raise IOError(*e.args) - - def seek(self, offset, whence=0): - self.flush() - if whence == 1 and offset == 0: # tell synonym - return self.tell() - if whence == 1: # adjust offset by what is read ahead - offset -= self._get_readahead_len() - try: - rv = os.lseek(self.fileno(), offset, whence) - except OSError as e: - raise IOError(*e.args) - else: - self._clear_readahead_buf() - return rv - - if getattr(file, "truncate", None): # not all OSes implement truncate - def truncate(self, size=-1): - self.flush() - if size == -1: - size = self.tell() - try: - rv = os.ftruncate(self.fileno(), size) - except OSError as e: - raise IOError(*e.args) - else: - self.seek(size) # move position&clear 
buffer - return rv - - def isatty(self): - try: - return os.isatty(self.fileno()) - except OSError as e: - raise IOError(*e.args) - - -class _SocketDuckForFd(object): - """Class implementing all socket method used by _fileobject - in cooperative manner using low level os I/O calls. - """ - _refcount = 0 - - def __init__(self, fileno): - self._fileno = fileno - notify_opened(fileno) - self._closed = False - - def _trampoline(self, fd, read=False, write=False, timeout=None, timeout_exc=None): - if self._closed: - # Don't trampoline if we're already closed. - raise IOClosed() - try: - return trampoline(fd, read=read, write=write, timeout=timeout, - timeout_exc=timeout_exc, - mark_as_closed=self._mark_as_closed) - except IOClosed: - # Our fileno has been obsoleted. Defang ourselves to - # prevent spurious closes. - self._mark_as_closed() - raise - - def _mark_as_closed(self): - current = self._closed - self._closed = True - return current - - @property - def _sock(self): - return self - - def fileno(self): - return self._fileno - - def recv(self, buflen): - while True: - try: - data = os.read(self._fileno, buflen) - return data - except OSError as e: - if get_errno(e) not in SOCKET_BLOCKING: - raise IOError(*e.args) - self._trampoline(self, read=True) - - def recv_into(self, buf, nbytes=0, flags=0): - if nbytes == 0: - nbytes = len(buf) - data = self.recv(nbytes) - buf[:nbytes] = data - return len(data) - - def send(self, data): - while True: - try: - return os.write(self._fileno, data) - except OSError as e: - if get_errno(e) not in SOCKET_BLOCKING: - raise IOError(*e.args) - else: - trampoline(self, write=True) - - def sendall(self, data): - len_data = len(data) - os_write = os.write - fileno = self._fileno - try: - total_sent = os_write(fileno, data) - except OSError as e: - if get_errno(e) != errno.EAGAIN: - raise IOError(*e.args) - total_sent = 0 - while total_sent < len_data: - self._trampoline(self, write=True) - try: - total_sent += os_write(fileno, data[total_sent:]) - except OSError as e: - if get_errno(e) != errno. 
EAGAIN: - raise IOError(*e.args) - - def __del__(self): - self._close() - - def _close(self): - was_closed = self._mark_as_closed() - if was_closed: - return - notify_close(self._fileno) - try: - os.close(self._fileno) - except: - # os.close may fail if __init__ didn't complete - # (i.e file dscriptor passed to popen was invalid - pass - - def __repr__(self): - return "%s:%d" % (self.__class__.__name__, self._fileno) - - def _reuse(self): - self._refcount += 1 - - def _drop(self): - self._refcount -= 1 - if self._refcount == 0: - self._close() diff --git a/venv/lib/python3.6/site-packages/eventlet/greenio/py3.py b/venv/lib/python3.6/site-packages/eventlet/greenio/py3.py deleted file mode 100644 index 7a75b52..0000000 --- a/venv/lib/python3.6/site-packages/eventlet/greenio/py3.py +++ /dev/null @@ -1,214 +0,0 @@ -import _pyio as _original_pyio -import errno -import os as _original_os -import socket as _original_socket -from io import ( - BufferedRandom as _OriginalBufferedRandom, - BufferedReader as _OriginalBufferedReader, - BufferedWriter as _OriginalBufferedWriter, - DEFAULT_BUFFER_SIZE, - TextIOWrapper as _OriginalTextIOWrapper, - IOBase as _OriginalIOBase, -) -from types import FunctionType - -from eventlet.greenio.base import ( - _operation_on_closed_file, - greenpipe_doc, - set_nonblocking, - SOCKET_BLOCKING, -) -from eventlet.hubs import notify_close, notify_opened, IOClosed, trampoline -from eventlet.support import get_errno -import six - -__all__ = ['_fileobject', 'GreenPipe'] - -# TODO get rid of this, it only seems like the original _fileobject -_fileobject = _original_socket.SocketIO - -# Large part of the following code is copied from the original -# eventlet.greenio module - - -class GreenFileIO(_OriginalIOBase): - def __init__(self, name, mode='r', closefd=True, opener=None): - if isinstance(name, int): - fileno = name - self._name = "" % fileno - else: - assert isinstance(name, six.string_types) - with open(name, mode) as fd: - self._name = fd.name - fileno = _original_os.dup(fd.fileno()) - - notify_opened(fileno) - self._fileno = fileno - self._mode = mode - self._closed = False - set_nonblocking(self) - self._seekable = None - - @property - def closed(self): - return self._closed - - def seekable(self): - if self._seekable is None: - try: - _original_os.lseek(self._fileno, 0, _original_os.SEEK_CUR) - except IOError as e: - if get_errno(e) == errno.ESPIPE: - self._seekable = False - else: - raise - else: - self._seekable = True - - return self._seekable - - def readable(self): - return 'r' in self._mode or '+' in self._mode - - def writable(self): - return 'w' in self._mode or '+' in self._mode - - def fileno(self): - return self._fileno - - def read(self, size=-1): - if size == -1: - return self.readall() - - while True: - try: - return _original_os.read(self._fileno, size) - except OSError as e: - if get_errno(e) not in SOCKET_BLOCKING: - raise IOError(*e.args) - self._trampoline(self, read=True) - - def readall(self): - buf = [] - while True: - try: - chunk = _original_os.read(self._fileno, DEFAULT_BUFFER_SIZE) - if chunk == b'': - return b''.join(buf) - buf.append(chunk) - except OSError as e: - if get_errno(e) not in SOCKET_BLOCKING: - raise IOError(*e.args) - self._trampoline(self, read=True) - - def readinto(self, b): - up_to = len(b) - data = self.read(up_to) - bytes_read = len(data) - b[:bytes_read] = data - return bytes_read - - def isatty(self): - try: - return _original_os.isatty(self.fileno()) - except OSError as e: - raise IOError(*e.args) - - def 
_trampoline(self, fd, read=False, write=False, timeout=None, timeout_exc=None): - if self._closed: - # Don't trampoline if we're already closed. - raise IOClosed() - try: - return trampoline(fd, read=read, write=write, timeout=timeout, - timeout_exc=timeout_exc, - mark_as_closed=self._mark_as_closed) - except IOClosed: - # Our fileno has been obsoleted. Defang ourselves to - # prevent spurious closes. - self._mark_as_closed() - raise - - def _mark_as_closed(self): - """ Mark this socket as being closed """ - self._closed = True - - def write(self, data): - view = memoryview(data) - datalen = len(data) - offset = 0 - while offset < datalen: - try: - written = _original_os.write(self._fileno, view[offset:]) - except OSError as e: - if get_errno(e) not in SOCKET_BLOCKING: - raise IOError(*e.args) - trampoline(self, write=True) - else: - offset += written - return offset - - def close(self): - if not self._closed: - self._closed = True - _original_os.close(self._fileno) - notify_close(self._fileno) - for method in [ - 'fileno', 'flush', 'isatty', 'next', 'read', 'readinto', - 'readline', 'readlines', 'seek', 'tell', 'truncate', - 'write', 'xreadlines', '__iter__', '__next__', 'writelines']: - setattr(self, method, _operation_on_closed_file) - - def truncate(self, size=-1): - if size == -1: - size = self.tell() - try: - rv = _original_os.ftruncate(self._fileno, size) - except OSError as e: - raise IOError(*e.args) - else: - self.seek(size) # move position&clear buffer - return rv - - def seek(self, offset, whence=_original_os.SEEK_SET): - try: - return _original_os.lseek(self._fileno, offset, whence) - except OSError as e: - raise IOError(*e.args) - - def __enter__(self): - return self - - def __exit__(self, *args): - self.close() - - -_open_environment = dict(globals()) -_open_environment.update(dict( - BufferedRandom=_OriginalBufferedRandom, - BufferedWriter=_OriginalBufferedWriter, - BufferedReader=_OriginalBufferedReader, - TextIOWrapper=_OriginalTextIOWrapper, - FileIO=GreenFileIO, - os=_original_os, -)) - -_open = FunctionType( - six.get_function_code(_original_pyio.open), - _open_environment, -) - - -def GreenPipe(name, mode="r", buffering=-1, encoding=None, errors=None, - newline=None, closefd=True, opener=None): - try: - fileno = name.fileno() - except AttributeError: - pass - else: - fileno = _original_os.dup(fileno) - name.close() - name = fileno - - return _open(name, mode, buffering, encoding, errors, newline, closefd, opener) - -GreenPipe.__doc__ = greenpipe_doc diff --git a/venv/lib/python3.6/site-packages/eventlet/greenpool.py b/venv/lib/python3.6/site-packages/eventlet/greenpool.py deleted file mode 100644 index 952659c..0000000 --- a/venv/lib/python3.6/site-packages/eventlet/greenpool.py +++ /dev/null @@ -1,257 +0,0 @@ -import traceback - -import eventlet -from eventlet import queue -from eventlet.support import greenlets as greenlet -import six - -__all__ = ['GreenPool', 'GreenPile'] - -DEBUG = True - - -class GreenPool(object): - """The GreenPool class is a pool of green threads. 
- """ - - def __init__(self, size=1000): - try: - size = int(size) - except ValueError as e: - msg = 'GreenPool() expect size :: int, actual: {0} {1}'.format(type(size), str(e)) - raise TypeError(msg) - if size < 0: - msg = 'GreenPool() expect size >= 0, actual: {0}'.format(repr(size)) - raise ValueError(msg) - self.size = size - self.coroutines_running = set() - self.sem = eventlet.Semaphore(size) - self.no_coros_running = eventlet.Event() - - def resize(self, new_size): - """ Change the max number of greenthreads doing work at any given time. - - If resize is called when there are more than *new_size* greenthreads - already working on tasks, they will be allowed to complete but no new - tasks will be allowed to get launched until enough greenthreads finish - their tasks to drop the overall quantity below *new_size*. Until - then, the return value of free() will be negative. - """ - size_delta = new_size - self.size - self.sem.counter += size_delta - self.size = new_size - - def running(self): - """ Returns the number of greenthreads that are currently executing - functions in the GreenPool.""" - return len(self.coroutines_running) - - def free(self): - """ Returns the number of greenthreads available for use. - - If zero or less, the next call to :meth:`spawn` or :meth:`spawn_n` will - block the calling greenthread until a slot becomes available.""" - return self.sem.counter - - def spawn(self, function, *args, **kwargs): - """Run the *function* with its arguments in its own green thread. - Returns the :class:`GreenThread ` - object that is running the function, which can be used to retrieve the - results. - - If the pool is currently at capacity, ``spawn`` will block until one of - the running greenthreads completes its task and frees up a slot. - - This function is reentrant; *function* can call ``spawn`` on the same - pool without risk of deadlocking the whole thing. - """ - # if reentering an empty pool, don't try to wait on a coroutine freeing - # itself -- instead, just execute in the current coroutine - current = eventlet.getcurrent() - if self.sem.locked() and current in self.coroutines_running: - # a bit hacky to use the GT without switching to it - gt = eventlet.greenthread.GreenThread(current) - gt.main(function, args, kwargs) - return gt - else: - self.sem.acquire() - gt = eventlet.spawn(function, *args, **kwargs) - if not self.coroutines_running: - self.no_coros_running = eventlet.Event() - self.coroutines_running.add(gt) - gt.link(self._spawn_done) - return gt - - def _spawn_n_impl(self, func, args, kwargs, coro): - try: - try: - func(*args, **kwargs) - except (KeyboardInterrupt, SystemExit, greenlet.GreenletExit): - raise - except: - if DEBUG: - traceback.print_exc() - finally: - if coro is None: - return - else: - coro = eventlet.getcurrent() - self._spawn_done(coro) - - def spawn_n(self, function, *args, **kwargs): - """Create a greenthread to run the *function*, the same as - :meth:`spawn`. The difference is that :meth:`spawn_n` returns - None; the results of *function* are not retrievable. 
- """ - # if reentering an empty pool, don't try to wait on a coroutine freeing - # itself -- instead, just execute in the current coroutine - current = eventlet.getcurrent() - if self.sem.locked() and current in self.coroutines_running: - self._spawn_n_impl(function, args, kwargs, None) - else: - self.sem.acquire() - g = eventlet.spawn_n( - self._spawn_n_impl, - function, args, kwargs, True) - if not self.coroutines_running: - self.no_coros_running = eventlet.Event() - self.coroutines_running.add(g) - - def waitall(self): - """Waits until all greenthreads in the pool are finished working.""" - assert eventlet.getcurrent() not in self.coroutines_running, \ - "Calling waitall() from within one of the " \ - "GreenPool's greenthreads will never terminate." - if self.running(): - self.no_coros_running.wait() - - def _spawn_done(self, coro): - self.sem.release() - if coro is not None: - self.coroutines_running.remove(coro) - # if done processing (no more work is waiting for processing), - # we can finish off any waitall() calls that might be pending - if self.sem.balance == self.size: - self.no_coros_running.send(None) - - def waiting(self): - """Return the number of greenthreads waiting to spawn. - """ - if self.sem.balance < 0: - return -self.sem.balance - else: - return 0 - - def _do_map(self, func, it, gi): - for args in it: - gi.spawn(func, *args) - gi.done_spawning() - - def starmap(self, function, iterable): - """This is the same as :func:`itertools.starmap`, except that *func* is - executed in a separate green thread for each item, with the concurrency - limited by the pool's size. In operation, starmap consumes a constant - amount of memory, proportional to the size of the pool, and is thus - suited for iterating over extremely long input lists. - """ - if function is None: - function = lambda *a: a - # We use a whole separate greenthread so its spawn() calls can block - # without blocking OUR caller. On the other hand, we must assume that - # our caller will immediately start trying to iterate over whatever we - # return. If that were a GreenPile, our caller would always see an - # empty sequence because the hub hasn't even entered _do_map() yet -- - # _do_map() hasn't had a chance to spawn a single greenthread on this - # GreenPool! A GreenMap is safe to use with different producer and - # consumer greenthreads, because it doesn't raise StopIteration until - # the producer has explicitly called done_spawning(). - gi = GreenMap(self.size) - eventlet.spawn_n(self._do_map, function, iterable, gi) - return gi - - def imap(self, function, *iterables): - """This is the same as :func:`itertools.imap`, and has the same - concurrency and memory behavior as :meth:`starmap`. - - It's quite convenient for, e.g., farming out jobs from a file:: - - def worker(line): - return do_something(line) - pool = GreenPool() - for result in pool.imap(worker, open("filename", 'r')): - print(result) - """ - return self.starmap(function, six.moves.zip(*iterables)) - - -class GreenPile(object): - """GreenPile is an abstraction representing a bunch of I/O-related tasks. - - Construct a GreenPile with an existing GreenPool object. The GreenPile will - then use that pool's concurrency as it processes its jobs. There can be - many GreenPiles associated with a single GreenPool. - - A GreenPile can also be constructed standalone, not associated with any - GreenPool. To do this, construct it with an integer size parameter instead - of a GreenPool. 
- - It is not advisable to iterate over a GreenPile in a different greenthread - than the one which is calling spawn. The iterator will exit early in that - situation. - """ - - def __init__(self, size_or_pool=1000): - if isinstance(size_or_pool, GreenPool): - self.pool = size_or_pool - else: - self.pool = GreenPool(size_or_pool) - self.waiters = queue.LightQueue() - self.counter = 0 - - def spawn(self, func, *args, **kw): - """Runs *func* in its own green thread, with the result available by - iterating over the GreenPile object.""" - self.counter += 1 - try: - gt = self.pool.spawn(func, *args, **kw) - self.waiters.put(gt) - except: - self.counter -= 1 - raise - - def __iter__(self): - return self - - def next(self): - """Wait for the next result, suspending the current greenthread until it - is available. Raises StopIteration when there are no more results.""" - if self.counter == 0: - raise StopIteration() - return self._next() - __next__ = next - - def _next(self): - try: - return self.waiters.get().wait() - finally: - self.counter -= 1 - - -# this is identical to GreenPile but it blocks on spawn if the results -# aren't consumed, and it doesn't generate its own StopIteration exception, -# instead relying on the spawning process to send one in when it's done -class GreenMap(GreenPile): - def __init__(self, size_or_pool): - super(GreenMap, self).__init__(size_or_pool) - self.waiters = queue.LightQueue(maxsize=self.pool.size) - - def done_spawning(self): - self.spawn(lambda: StopIteration()) - - def next(self): - val = self._next() - if isinstance(val, StopIteration): - raise val - else: - return val - __next__ = next diff --git a/venv/lib/python3.6/site-packages/eventlet/greenthread.py b/venv/lib/python3.6/site-packages/eventlet/greenthread.py deleted file mode 100644 index c26d5a1..0000000 --- a/venv/lib/python3.6/site-packages/eventlet/greenthread.py +++ /dev/null @@ -1,302 +0,0 @@ -from collections import deque -import sys - -from eventlet import event -from eventlet import hubs -from eventlet import support -from eventlet import timeout -from eventlet.hubs import timer -from eventlet.support import greenlets as greenlet -import six -import warnings - -__all__ = ['getcurrent', 'sleep', 'spawn', 'spawn_n', - 'kill', - 'spawn_after', 'spawn_after_local', 'GreenThread'] - -getcurrent = greenlet.getcurrent - - -def sleep(seconds=0): - """Yield control to another eligible coroutine until at least *seconds* have - elapsed. - - *seconds* may be specified as an integer, or a float if fractional seconds - are desired. Calling :func:`~greenthread.sleep` with *seconds* of 0 is the - canonical way of expressing a cooperative yield. For example, if one is - looping over a large list performing an expensive calculation without - calling any socket methods, it's a good idea to call ``sleep(0)`` - occasionally; otherwise nothing else will run. - """ - hub = hubs.get_hub() - current = getcurrent() - assert hub.greenlet is not current, 'do not call blocking functions from the mainloop' - timer = hub.schedule_call_global(seconds, current.switch) - try: - hub.switch() - finally: - timer.cancel() - - -def spawn(func, *args, **kwargs): - """Create a greenthread to run ``func(*args, **kwargs)``. Returns a - :class:`GreenThread` object which you can use to get the results of the - call. - - Execution control returns immediately to the caller; the created greenthread - is merely scheduled to be run at the next available opportunity. 
- Use :func:`spawn_after` to arrange for greenthreads to be spawned - after a finite delay. - """ - hub = hubs.get_hub() - g = GreenThread(hub.greenlet) - hub.schedule_call_global(0, g.switch, func, args, kwargs) - return g - - -def spawn_n(func, *args, **kwargs): - """Same as :func:`spawn`, but returns a ``greenlet`` object from - which it is not possible to retrieve either a return value or - whether it raised any exceptions. This is faster than - :func:`spawn`; it is fastest if there are no keyword arguments. - - If an exception is raised in the function, spawn_n prints a stack - trace; the print can be disabled by calling - :func:`eventlet.debug.hub_exceptions` with False. - """ - return _spawn_n(0, func, args, kwargs)[1] - - -def spawn_after(seconds, func, *args, **kwargs): - """Spawns *func* after *seconds* have elapsed. It runs as scheduled even if - the current greenthread has completed. - - *seconds* may be specified as an integer, or a float if fractional seconds - are desired. The *func* will be called with the given *args* and - keyword arguments *kwargs*, and will be executed within its own greenthread. - - The return value of :func:`spawn_after` is a :class:`GreenThread` object, - which can be used to retrieve the results of the call. - - To cancel the spawn and prevent *func* from being called, - call :meth:`GreenThread.cancel` on the return value of :func:`spawn_after`. - This will not abort the function if it's already started running, which is - generally the desired behavior. If terminating *func* regardless of whether - it's started or not is the desired behavior, call :meth:`GreenThread.kill`. - """ - hub = hubs.get_hub() - g = GreenThread(hub.greenlet) - hub.schedule_call_global(seconds, g.switch, func, args, kwargs) - return g - - -def spawn_after_local(seconds, func, *args, **kwargs): - """Spawns *func* after *seconds* have elapsed. The function will NOT be - called if the current greenthread has exited. - - *seconds* may be specified as an integer, or a float if fractional seconds - are desired. The *func* will be called with the given *args* and - keyword arguments *kwargs*, and will be executed within its own greenthread. - - The return value of :func:`spawn_after` is a :class:`GreenThread` object, - which can be used to retrieve the results of the call. - - To cancel the spawn and prevent *func* from being called, - call :meth:`GreenThread.cancel` on the return value. This will not abort the - function if it's already started running. If terminating *func* regardless - of whether it's started or not is the desired behavior, call - :meth:`GreenThread.kill`. - """ - hub = hubs.get_hub() - g = GreenThread(hub.greenlet) - hub.schedule_call_local(seconds, g.switch, func, args, kwargs) - return g - - -def call_after_global(seconds, func, *args, **kwargs): - warnings.warn( - "call_after_global is renamed to spawn_after, which" - "has the same signature and semantics (plus a bit extra). 
Please do a" - " quick search-and-replace on your codebase, thanks!", - DeprecationWarning, stacklevel=2) - return _spawn_n(seconds, func, args, kwargs)[0] - - -def call_after_local(seconds, function, *args, **kwargs): - warnings.warn( - "call_after_local is renamed to spawn_after_local, which" - "has the same signature and semantics (plus a bit extra).", - DeprecationWarning, stacklevel=2) - hub = hubs.get_hub() - g = greenlet.greenlet(function, parent=hub.greenlet) - t = hub.schedule_call_local(seconds, g.switch, *args, **kwargs) - return t - - -call_after = call_after_local - - -def exc_after(seconds, *throw_args): - warnings.warn("Instead of exc_after, which is deprecated, use " - "Timeout(seconds, exception)", - DeprecationWarning, stacklevel=2) - if seconds is None: # dummy argument, do nothing - return timer.Timer(seconds, lambda: None) - hub = hubs.get_hub() - return hub.schedule_call_local(seconds, getcurrent().throw, *throw_args) - -# deprecate, remove -TimeoutError, with_timeout = ( - support.wrap_deprecated(old, new)(fun) for old, new, fun in ( - ('greenthread.TimeoutError', 'Timeout', timeout.Timeout), - ('greenthread.with_timeout', 'with_timeout', timeout.with_timeout), - )) - - -def _spawn_n(seconds, func, args, kwargs): - hub = hubs.get_hub() - g = greenlet.greenlet(func, parent=hub.greenlet) - t = hub.schedule_call_global(seconds, g.switch, *args, **kwargs) - return t, g - - -class GreenThread(greenlet.greenlet): - """The GreenThread class is a type of Greenlet which has the additional - property of being able to retrieve the return value of the main function. - Do not construct GreenThread objects directly; call :func:`spawn` to get one. - """ - - def __init__(self, parent): - greenlet.greenlet.__init__(self, self.main, parent) - self._exit_event = event.Event() - self._resolving_links = False - self._exit_funcs = None - - def wait(self): - """ Returns the result of the main function of this GreenThread. If the - result is a normal return value, :meth:`wait` returns it. If it raised - an exception, :meth:`wait` will raise the same exception (though the - stack trace will unavoidably contain some frames from within the - greenthread module).""" - return self._exit_event.wait() - - def link(self, func, *curried_args, **curried_kwargs): - """ Set up a function to be called with the results of the GreenThread. - - The function must have the following signature:: - - def func(gt, [curried args/kwargs]): - - When the GreenThread finishes its run, it calls *func* with itself - and with the `curried arguments `_ supplied - at link-time. If the function wants to retrieve the result of the GreenThread, - it should call wait() on its first argument. - - Note that *func* is called within execution context of - the GreenThread, so it is possible to interfere with other linked - functions by doing things like switching explicitly to another - greenthread. 
- """ - if self._exit_funcs is None: - self._exit_funcs = deque() - self._exit_funcs.append((func, curried_args, curried_kwargs)) - if self._exit_event.ready(): - self._resolve_links() - - def unlink(self, func, *curried_args, **curried_kwargs): - """ remove linked function set by :meth:`link` - - Remove successfully return True, otherwise False - """ - if not self._exit_funcs: - return False - try: - self._exit_funcs.remove((func, curried_args, curried_kwargs)) - return True - except ValueError: - return False - - def main(self, function, args, kwargs): - try: - result = function(*args, **kwargs) - except: - self._exit_event.send_exception(*sys.exc_info()) - self._resolve_links() - raise - else: - self._exit_event.send(result) - self._resolve_links() - - def _resolve_links(self): - # ca and ckw are the curried function arguments - if self._resolving_links: - return - if not self._exit_funcs: - return - self._resolving_links = True - try: - while self._exit_funcs: - f, ca, ckw = self._exit_funcs.popleft() - f(self, *ca, **ckw) - finally: - self._resolving_links = False - - def kill(self, *throw_args): - """Kills the greenthread using :func:`kill`. After being killed - all calls to :meth:`wait` will raise *throw_args* (which default - to :class:`greenlet.GreenletExit`).""" - return kill(self, *throw_args) - - def cancel(self, *throw_args): - """Kills the greenthread using :func:`kill`, but only if it hasn't - already started running. After being canceled, - all calls to :meth:`wait` will raise *throw_args* (which default - to :class:`greenlet.GreenletExit`).""" - return cancel(self, *throw_args) - - -def cancel(g, *throw_args): - """Like :func:`kill`, but only terminates the greenthread if it hasn't - already started execution. If the grenthread has already started - execution, :func:`cancel` has no effect.""" - if not g: - kill(g, *throw_args) - - -def kill(g, *throw_args): - """Terminates the target greenthread by raising an exception into it. - Whatever that greenthread might be doing; be it waiting for I/O or another - primitive, it sees an exception right away. - - By default, this exception is GreenletExit, but a specific exception - may be specified. *throw_args* should be the same as the arguments to - raise; either an exception instance or an exc_info tuple. - - Calling :func:`kill` causes the calling greenthread to cooperatively yield. 
- """ - if g.dead: - return - hub = hubs.get_hub() - if not g: - # greenlet hasn't started yet and therefore throw won't work - # on its own; semantically we want it to be as though the main - # method never got called - def just_raise(*a, **kw): - if throw_args: - six.reraise(throw_args[0], throw_args[1], throw_args[2]) - else: - raise greenlet.GreenletExit() - g.run = just_raise - if isinstance(g, GreenThread): - # it's a GreenThread object, so we want to call its main - # method to take advantage of the notification - try: - g.main(just_raise, (), {}) - except: - pass - current = getcurrent() - if current is not hub.greenlet: - # arrange to wake the caller back up immediately - hub.ensure_greenlet() - hub.schedule_call_global(0, current.switch) - g.throw(*throw_args) diff --git a/venv/lib/python3.6/site-packages/eventlet/hubs/__init__.py b/venv/lib/python3.6/site-packages/eventlet/hubs/__init__.py deleted file mode 100644 index 867628c..0000000 --- a/venv/lib/python3.6/site-packages/eventlet/hubs/__init__.py +++ /dev/null @@ -1,190 +0,0 @@ -import importlib -import inspect -import os -import warnings - -from eventlet import patcher -from eventlet.support import greenlets as greenlet -import six - - -__all__ = ["use_hub", "get_hub", "get_default_hub", "trampoline"] - -threading = patcher.original('threading') -_threadlocal = threading.local() - - -# order is important, get_default_hub returns first available from here -builtin_hub_names = ('epolls', 'kqueue', 'poll', 'selects') -builtin_hub_modules = tuple(importlib.import_module('eventlet.hubs.' + name) for name in builtin_hub_names) - - -class HubError(Exception): - pass - - -def get_default_hub(): - """Select the default hub implementation based on what multiplexing - libraries are installed. The order that the hubs are tried is: - - * epoll - * kqueue - * poll - * select - - It won't automatically select the pyevent hub, because it's not - python-thread-safe. - - .. include:: ../doc/common.txt - .. note :: |internal| - """ - for mod in builtin_hub_modules: - if mod.is_available(): - return mod - - raise HubError('no built-in hubs are available: {}'.format(builtin_hub_modules)) - - -def use_hub(mod=None): - """Use the module *mod*, containing a class called Hub, as the - event hub. Usually not required; the default hub is usually fine. - - `mod` can be an actual hub class, a module, a string, or None. - - If `mod` is a class, use it directly. - If `mod` is a module, use `module.Hub` class - If `mod` is a string and contains either '.' or ':' - then `use_hub` uses 'package.subpackage.module:Class' convention, - otherwise imports `eventlet.hubs.mod`. - If `mod` is None, `use_hub` uses the default hub. - - Only call use_hub during application initialization, - because it resets the hub's state and any existing - timers or listeners will never be resumed. - - These two threadlocal attributes are not part of Eventlet public API: - - `threadlocal.Hub` (capital H) is hub constructor, used when no hub is currently active - - `threadlocal.hub` (lowercase h) is active hub instance - """ - if mod is None: - mod = os.environ.get('EVENTLET_HUB', None) - if mod is None: - mod = get_default_hub() - if hasattr(_threadlocal, 'hub'): - del _threadlocal.hub - - classname = '' - if isinstance(mod, six.string_types): - assert mod.strip(), "Need to specify a hub" - if '.' in mod or ':' in mod: - modulename, _, classname = mod.strip().partition(':') - else: - modulename = 'eventlet.hubs.' 
+ mod - mod = importlib.import_module(modulename) - - if hasattr(mod, 'is_available'): - if not mod.is_available(): - raise Exception('selected hub is not available on this system mod={}'.format(mod)) - else: - msg = '''Please provide `is_available()` function in your custom Eventlet hub {mod}. -It must return bool: whether hub supports current platform. See eventlet/hubs/{{epoll,kqueue}} for example. -'''.format(mod=mod) - warnings.warn(msg, DeprecationWarning, stacklevel=3) - - hubclass = mod - if not inspect.isclass(mod): - hubclass = getattr(mod, classname or 'Hub') - - _threadlocal.Hub = hubclass - - -def get_hub(): - """Get the current event hub singleton object. - - .. note :: |internal| - """ - try: - hub = _threadlocal.hub - except AttributeError: - try: - _threadlocal.Hub - except AttributeError: - use_hub() - hub = _threadlocal.hub = _threadlocal.Hub() - return hub - - -# Lame middle file import because complex dependencies in import graph -from eventlet import timeout - - -def trampoline(fd, read=None, write=None, timeout=None, - timeout_exc=timeout.Timeout, - mark_as_closed=None): - """Suspend the current coroutine until the given socket object or file - descriptor is ready to *read*, ready to *write*, or the specified - *timeout* elapses, depending on arguments specified. - - To wait for *fd* to be ready to read, pass *read* ``=True``; ready to - write, pass *write* ``=True``. To specify a timeout, pass the *timeout* - argument in seconds. - - If the specified *timeout* elapses before the socket is ready to read or - write, *timeout_exc* will be raised instead of ``trampoline()`` - returning normally. - - .. note :: |internal| - """ - t = None - hub = get_hub() - current = greenlet.getcurrent() - assert hub.greenlet is not current, 'do not call blocking functions from the mainloop' - assert not ( - read and write), 'not allowed to trampoline for reading and writing' - try: - fileno = fd.fileno() - except AttributeError: - fileno = fd - if timeout is not None: - def _timeout(exc): - # This is only useful to insert debugging - current.throw(exc) - t = hub.schedule_call_global(timeout, _timeout, timeout_exc) - try: - if read: - listener = hub.add(hub.READ, fileno, current.switch, current.throw, mark_as_closed) - elif write: - listener = hub.add(hub.WRITE, fileno, current.switch, current.throw, mark_as_closed) - try: - return hub.switch() - finally: - hub.remove(listener) - finally: - if t is not None: - t.cancel() - - -def notify_close(fd): - """ - A particular file descriptor has been explicitly closed. Register for any - waiting listeners to be notified on the next run loop. - """ - hub = get_hub() - hub.notify_close(fd) - - -def notify_opened(fd): - """ - Some file descriptors may be closed 'silently' - that is, by the garbage - collector, by an external library, etc. When the OS returns a file descriptor - from an open call (or something similar), this may be the only indication we - have that the FD has been closed and then recycled. - We let the hub know that the old file descriptor is dead; any stuck listeners - will be disabled and notified in turn. 
- """ - hub = get_hub() - hub.mark_as_reopened(fd) - - -class IOClosed(IOError): - pass diff --git a/venv/lib/python3.6/site-packages/eventlet/hubs/epolls.py b/venv/lib/python3.6/site-packages/eventlet/hubs/epolls.py deleted file mode 100644 index 07fec14..0000000 --- a/venv/lib/python3.6/site-packages/eventlet/hubs/epolls.py +++ /dev/null @@ -1,31 +0,0 @@ -import errno -from eventlet import patcher, support -from eventlet.hubs import hub, poll -select = patcher.original('select') - - -def is_available(): - return hasattr(select, 'epoll') - - -# NOTE: we rely on the fact that the epoll flag constants -# are identical in value to the poll constants -class Hub(poll.Hub): - def __init__(self, clock=None): - super(Hub, self).__init__(clock=clock) - self.poll = select.epoll() - - def add(self, evtype, fileno, cb, tb, mac): - oldlisteners = bool(self.listeners[self.READ].get(fileno) or - self.listeners[self.WRITE].get(fileno)) - # not super() to avoid double register() - listener = hub.BaseHub.add(self, evtype, fileno, cb, tb, mac) - try: - self.register(fileno, new=not oldlisteners) - except IOError as ex: # ignore EEXIST, #80 - if support.get_errno(ex) != errno.EEXIST: - raise - return listener - - def do_poll(self, seconds): - return self.poll.poll(seconds) diff --git a/venv/lib/python3.6/site-packages/eventlet/hubs/hub.py b/venv/lib/python3.6/site-packages/eventlet/hubs/hub.py deleted file mode 100644 index 8871082..0000000 --- a/venv/lib/python3.6/site-packages/eventlet/hubs/hub.py +++ /dev/null @@ -1,486 +0,0 @@ -import errno -import heapq -import math -import signal -import sys -import traceback - -arm_alarm = None -if hasattr(signal, 'setitimer'): - def alarm_itimer(seconds): - signal.setitimer(signal.ITIMER_REAL, seconds) - arm_alarm = alarm_itimer -else: - try: - import itimer - arm_alarm = itimer.alarm - except ImportError: - def alarm_signal(seconds): - signal.alarm(math.ceil(seconds)) - arm_alarm = alarm_signal - -import eventlet.hubs -from eventlet.hubs import timer -from eventlet.support import greenlets as greenlet, clear_sys_exc_info -import monotonic -import six - -g_prevent_multiple_readers = True - -READ = "read" -WRITE = "write" - - -def closed_callback(fileno): - """ Used to de-fang a callback that may be triggered by a loop in BaseHub.wait - """ - # No-op. - pass - - -class FdListener(object): - - def __init__(self, evtype, fileno, cb, tb, mark_as_closed): - """ The following are required: - cb - the standard callback, which will switch into the - listening greenlet to indicate that the event waited upon - is ready - tb - a 'throwback'. This is typically greenlet.throw, used - to raise a signal into the target greenlet indicating that - an event was obsoleted by its underlying filehandle being - repurposed. - mark_as_closed - if any listener is obsoleted, this is called - (in the context of some other client greenlet) to alert - underlying filehandle-wrapping objects that they've been - closed. 
- """ - assert (evtype is READ or evtype is WRITE) - self.evtype = evtype - self.fileno = fileno - self.cb = cb - self.tb = tb - self.mark_as_closed = mark_as_closed - self.spent = False - self.greenlet = greenlet.getcurrent() - - def __repr__(self): - return "%s(%r, %r, %r, %r)" % (type(self).__name__, self.evtype, self.fileno, - self.cb, self.tb) - __str__ = __repr__ - - def defang(self): - self.cb = closed_callback - if self.mark_as_closed is not None: - self.mark_as_closed() - self.spent = True - - -noop = FdListener(READ, 0, lambda x: None, lambda x: None, None) - - -# in debug mode, track the call site that created the listener - - -class DebugListener(FdListener): - - def __init__(self, evtype, fileno, cb, tb, mark_as_closed): - self.where_called = traceback.format_stack() - self.greenlet = greenlet.getcurrent() - super(DebugListener, self).__init__(evtype, fileno, cb, tb, mark_as_closed) - - def __repr__(self): - return "DebugListener(%r, %r, %r, %r, %r, %r)\n%sEndDebugFdListener" % ( - self.evtype, - self.fileno, - self.cb, - self.tb, - self.mark_as_closed, - self.greenlet, - ''.join(self.where_called)) - __str__ = __repr__ - - -def alarm_handler(signum, frame): - import inspect - raise RuntimeError("Blocking detector ALARMED at" + str(inspect.getframeinfo(frame))) - - -class BaseHub(object): - """ Base hub class for easing the implementation of subclasses that are - specific to a particular underlying event architecture. """ - - SYSTEM_EXCEPTIONS = (KeyboardInterrupt, SystemExit) - - READ = READ - WRITE = WRITE - - def __init__(self, clock=None): - self.listeners = {READ: {}, WRITE: {}} - self.secondaries = {READ: {}, WRITE: {}} - self.closed = [] - - if clock is None: - clock = monotonic.monotonic - self.clock = clock - - self.greenlet = greenlet.greenlet(self.run) - self.stopping = False - self.running = False - self.timers = [] - self.next_timers = [] - self.lclass = FdListener - self.timers_canceled = 0 - self.debug_exceptions = True - self.debug_blocking = False - self.debug_blocking_resolution = 1 - - def block_detect_pre(self): - # shortest alarm we can possibly raise is one second - tmp = signal.signal(signal.SIGALRM, alarm_handler) - if tmp != alarm_handler: - self._old_signal_handler = tmp - - arm_alarm(self.debug_blocking_resolution) - - def block_detect_post(self): - if (hasattr(self, "_old_signal_handler") and - self._old_signal_handler): - signal.signal(signal.SIGALRM, self._old_signal_handler) - signal.alarm(0) - - def add(self, evtype, fileno, cb, tb, mark_as_closed): - """ Signals an intent to or write a particular file descriptor. - - The *evtype* argument is either the constant READ or WRITE. - - The *fileno* argument is the file number of the file of interest. - - The *cb* argument is the callback which will be called when the file - is ready for reading/writing. - - The *tb* argument is the throwback used to signal (into the greenlet) - that the file was closed. - - The *mark_as_closed* is used in the context of the event hub to - prepare a Python object as being closed, pre-empting further - close operations from accidentally shutting down the wrong OS thread. - """ - listener = self.lclass(evtype, fileno, cb, tb, mark_as_closed) - bucket = self.listeners[evtype] - if fileno in bucket: - if g_prevent_multiple_readers: - raise RuntimeError( - "Second simultaneous %s on fileno %s " - "detected. Unless you really know what you're doing, " - "make sure that only one greenthread can %s any " - "particular socket. Consider using a pools.Pool. 
" - "If you do know what you're doing and want to disable " - "this error, call " - "eventlet.debug.hub_prevent_multiple_readers(False) - MY THREAD=%s; " - "THAT THREAD=%s" % ( - evtype, fileno, evtype, cb, bucket[fileno])) - # store off the second listener in another structure - self.secondaries[evtype].setdefault(fileno, []).append(listener) - else: - bucket[fileno] = listener - return listener - - def _obsolete(self, fileno): - """ We've received an indication that 'fileno' has been obsoleted. - Any current listeners must be defanged, and notifications to - their greenlets queued up to send. - """ - found = False - for evtype, bucket in six.iteritems(self.secondaries): - if fileno in bucket: - for listener in bucket[fileno]: - found = True - self.closed.append(listener) - listener.defang() - del bucket[fileno] - - # For the primary listeners, we actually need to call remove, - # which may modify the underlying OS polling objects. - for evtype, bucket in six.iteritems(self.listeners): - if fileno in bucket: - listener = bucket[fileno] - found = True - self.closed.append(listener) - self.remove(listener) - listener.defang() - - return found - - def notify_close(self, fileno): - """ We might want to do something when a fileno is closed. - However, currently it suffices to obsolete listeners only - when we detect an old fileno being recycled, on open. - """ - pass - - def remove(self, listener): - if listener.spent: - # trampoline may trigger this in its finally section. - return - - fileno = listener.fileno - evtype = listener.evtype - self.listeners[evtype].pop(fileno, None) - # migrate a secondary listener to be the primary listener - if fileno in self.secondaries[evtype]: - sec = self.secondaries[evtype].get(fileno, None) - if not sec: - return - self.listeners[evtype][fileno] = sec.pop(0) - if not sec: - del self.secondaries[evtype][fileno] - - def mark_as_reopened(self, fileno): - """ If a file descriptor is returned by the OS as the result of some - open call (or equivalent), that signals that it might be being - recycled. - - Catch the case where the fd was previously in use. - """ - self._obsolete(fileno) - - def remove_descriptor(self, fileno): - """ Completely remove all listeners for this fileno. For internal use - only.""" - listeners = [] - listeners.append(self.listeners[READ].pop(fileno, noop)) - listeners.append(self.listeners[WRITE].pop(fileno, noop)) - listeners.extend(self.secondaries[READ].pop(fileno, ())) - listeners.extend(self.secondaries[WRITE].pop(fileno, ())) - for listener in listeners: - try: - listener.cb(fileno) - except Exception: - self.squelch_generic_exception(sys.exc_info()) - - def close_one(self): - """ Triggered from the main run loop. If a listener's underlying FD was - closed somehow, throw an exception back to the trampoline, which should - be able to manage it appropriately. - """ - listener = self.closed.pop() - if not listener.greenlet.dead: - # There's no point signalling a greenlet that's already dead. - listener.tb(eventlet.hubs.IOClosed(errno.ENOTCONN, "Operation on closed file")) - - def ensure_greenlet(self): - if self.greenlet.dead: - # create new greenlet sharing same parent as original - new = greenlet.greenlet(self.run, self.greenlet.parent) - # need to assign as parent of old greenlet - # for those greenlets that are currently - # children of the dead hub and may subsequently - # exit without further switching to hub. 
- self.greenlet.parent = new - self.greenlet = new - - def switch(self): - cur = greenlet.getcurrent() - assert cur is not self.greenlet, 'Cannot switch to MAINLOOP from MAINLOOP' - switch_out = getattr(cur, 'switch_out', None) - if switch_out is not None: - try: - switch_out() - except: - self.squelch_generic_exception(sys.exc_info()) - self.ensure_greenlet() - try: - if self.greenlet.parent is not cur: - cur.parent = self.greenlet - except ValueError: - pass # gets raised if there is a greenlet parent cycle - clear_sys_exc_info() - return self.greenlet.switch() - - def squelch_exception(self, fileno, exc_info): - traceback.print_exception(*exc_info) - sys.stderr.write("Removing descriptor: %r\n" % (fileno,)) - sys.stderr.flush() - try: - self.remove_descriptor(fileno) - except Exception as e: - sys.stderr.write("Exception while removing descriptor! %r\n" % (e,)) - sys.stderr.flush() - - def wait(self, seconds=None): - raise NotImplementedError("Implement this in a subclass") - - def default_sleep(self): - return 60.0 - - def sleep_until(self): - t = self.timers - if not t: - return None - return t[0][0] - - def run(self, *a, **kw): - """Run the runloop until abort is called. - """ - # accept and discard variable arguments because they will be - # supplied if other greenlets have run and exited before the - # hub's greenlet gets a chance to run - if self.running: - raise RuntimeError("Already running!") - try: - self.running = True - self.stopping = False - while not self.stopping: - while self.closed: - # We ditch all of these first. - self.close_one() - self.prepare_timers() - if self.debug_blocking: - self.block_detect_pre() - self.fire_timers(self.clock()) - if self.debug_blocking: - self.block_detect_post() - self.prepare_timers() - wakeup_when = self.sleep_until() - if wakeup_when is None: - sleep_time = self.default_sleep() - else: - sleep_time = wakeup_when - self.clock() - if sleep_time > 0: - self.wait(sleep_time) - else: - self.wait(0) - else: - self.timers_canceled = 0 - del self.timers[:] - del self.next_timers[:] - finally: - self.running = False - self.stopping = False - - def abort(self, wait=False): - """Stop the runloop. If run is executing, it will exit after - completing the next runloop iteration. - - Set *wait* to True to cause abort to switch to the hub immediately and - wait until it's finished processing. Waiting for the hub will only - work from the main greenthread; all other greenthreads will become - unreachable. - """ - if self.running: - self.stopping = True - if wait: - assert self.greenlet is not greenlet.getcurrent( - ), "Can't abort with wait from inside the hub's greenlet." 
- # schedule an immediate timer just so the hub doesn't sleep - self.schedule_call_global(0, lambda: None) - # switch to it; when done the hub will switch back to its parent, - # the main greenlet - self.switch() - - def squelch_generic_exception(self, exc_info): - if self.debug_exceptions: - traceback.print_exception(*exc_info) - sys.stderr.flush() - clear_sys_exc_info() - - def squelch_timer_exception(self, timer, exc_info): - if self.debug_exceptions: - traceback.print_exception(*exc_info) - sys.stderr.flush() - clear_sys_exc_info() - - def add_timer(self, timer): - scheduled_time = self.clock() + timer.seconds - self.next_timers.append((scheduled_time, timer)) - return scheduled_time - - def timer_canceled(self, timer): - self.timers_canceled += 1 - len_timers = len(self.timers) + len(self.next_timers) - if len_timers > 1000 and len_timers / 2 <= self.timers_canceled: - self.timers_canceled = 0 - self.timers = [t for t in self.timers if not t[1].called] - self.next_timers = [t for t in self.next_timers if not t[1].called] - heapq.heapify(self.timers) - - def prepare_timers(self): - heappush = heapq.heappush - t = self.timers - for item in self.next_timers: - if item[1].called: - self.timers_canceled -= 1 - else: - heappush(t, item) - del self.next_timers[:] - - def schedule_call_local(self, seconds, cb, *args, **kw): - """Schedule a callable to be called after 'seconds' seconds have - elapsed. Cancel the timer if greenlet has exited. - seconds: The number of seconds to wait. - cb: The callable to call after the given time. - *args: Arguments to pass to the callable when called. - **kw: Keyword arguments to pass to the callable when called. - """ - t = timer.LocalTimer(seconds, cb, *args, **kw) - self.add_timer(t) - return t - - def schedule_call_global(self, seconds, cb, *args, **kw): - """Schedule a callable to be called after 'seconds' seconds have - elapsed. The timer will NOT be canceled if the current greenlet has - exited before the timer fires. - seconds: The number of seconds to wait. - cb: The callable to call after the given time. - *args: Arguments to pass to the callable when called. - **kw: Keyword arguments to pass to the callable when called. 
- """ - t = timer.Timer(seconds, cb, *args, **kw) - self.add_timer(t) - return t - - def fire_timers(self, when): - t = self.timers - heappop = heapq.heappop - - while t: - next = t[0] - - exp = next[0] - timer = next[1] - - if when < exp: - break - - heappop(t) - - try: - if timer.called: - self.timers_canceled -= 1 - else: - timer() - except self.SYSTEM_EXCEPTIONS: - raise - except: - self.squelch_timer_exception(timer, sys.exc_info()) - clear_sys_exc_info() - - # for debugging: - - def get_readers(self): - return self.listeners[READ].values() - - def get_writers(self): - return self.listeners[WRITE].values() - - def get_timers_count(hub): - return len(hub.timers) + len(hub.next_timers) - - def set_debug_listeners(self, value): - if value: - self.lclass = DebugListener - else: - self.lclass = FdListener - - def set_timer_exceptions(self, value): - self.debug_exceptions = value diff --git a/venv/lib/python3.6/site-packages/eventlet/hubs/kqueue.py b/venv/lib/python3.6/site-packages/eventlet/hubs/kqueue.py deleted file mode 100644 index bad4a87..0000000 --- a/venv/lib/python3.6/site-packages/eventlet/hubs/kqueue.py +++ /dev/null @@ -1,112 +0,0 @@ -import os -import sys -from eventlet import patcher, support -from eventlet.hubs import hub -import six -select = patcher.original('select') -time = patcher.original('time') - - -def is_available(): - return hasattr(select, 'kqueue') - - -class Hub(hub.BaseHub): - MAX_EVENTS = 100 - - def __init__(self, clock=None): - self.FILTERS = { - hub.READ: select.KQ_FILTER_READ, - hub.WRITE: select.KQ_FILTER_WRITE, - } - super(Hub, self).__init__(clock) - self._events = {} - self._init_kqueue() - - def _init_kqueue(self): - self.kqueue = select.kqueue() - self._pid = os.getpid() - - def _reinit_kqueue(self): - self.kqueue.close() - self._init_kqueue() - events = [e for i in six.itervalues(self._events) - for e in six.itervalues(i)] - self.kqueue.control(events, 0, 0) - - def _control(self, events, max_events, timeout): - try: - return self.kqueue.control(events, max_events, timeout) - except (OSError, IOError): - # have we forked? 
- if os.getpid() != self._pid: - self._reinit_kqueue() - return self.kqueue.control(events, max_events, timeout) - raise - - def add(self, evtype, fileno, cb, tb, mac): - listener = super(Hub, self).add(evtype, fileno, cb, tb, mac) - events = self._events.setdefault(fileno, {}) - if evtype not in events: - try: - event = select.kevent(fileno, self.FILTERS.get(evtype), select.KQ_EV_ADD) - self._control([event], 0, 0) - events[evtype] = event - except ValueError: - super(Hub, self).remove(listener) - raise - return listener - - def _delete_events(self, events): - del_events = [ - select.kevent(e.ident, e.filter, select.KQ_EV_DELETE) - for e in events - ] - self._control(del_events, 0, 0) - - def remove(self, listener): - super(Hub, self).remove(listener) - evtype = listener.evtype - fileno = listener.fileno - if not self.listeners[evtype].get(fileno): - event = self._events[fileno].pop(evtype, None) - if event is None: - return - try: - self._delete_events((event,)) - except OSError: - pass - - def remove_descriptor(self, fileno): - super(Hub, self).remove_descriptor(fileno) - try: - events = self._events.pop(fileno).values() - self._delete_events(events) - except KeyError: - pass - except OSError: - pass - - def wait(self, seconds=None): - readers = self.listeners[self.READ] - writers = self.listeners[self.WRITE] - - if not readers and not writers: - if seconds: - time.sleep(seconds) - return - result = self._control([], self.MAX_EVENTS, seconds) - SYSTEM_EXCEPTIONS = self.SYSTEM_EXCEPTIONS - for event in result: - fileno = event.ident - evfilt = event.filter - try: - if evfilt == select.KQ_FILTER_READ: - readers.get(fileno, hub.noop).cb(fileno) - if evfilt == select.KQ_FILTER_WRITE: - writers.get(fileno, hub.noop).cb(fileno) - except SYSTEM_EXCEPTIONS: - raise - except: - self.squelch_exception(fileno, sys.exc_info()) - support.clear_sys_exc_info() diff --git a/venv/lib/python3.6/site-packages/eventlet/hubs/poll.py b/venv/lib/python3.6/site-packages/eventlet/hubs/poll.py deleted file mode 100644 index 1bbd401..0000000 --- a/venv/lib/python3.6/site-packages/eventlet/hubs/poll.py +++ /dev/null @@ -1,119 +0,0 @@ -import errno -import sys - -from eventlet import patcher, support -from eventlet.hubs import hub -select = patcher.original('select') -time = patcher.original('time') - - -def is_available(): - return hasattr(select, 'poll') - - -class Hub(hub.BaseHub): - def __init__(self, clock=None): - super(Hub, self).__init__(clock) - self.EXC_MASK = select.POLLERR | select.POLLHUP - self.READ_MASK = select.POLLIN | select.POLLPRI - self.WRITE_MASK = select.POLLOUT - self.poll = select.poll() - - def add(self, evtype, fileno, cb, tb, mac): - listener = super(Hub, self).add(evtype, fileno, cb, tb, mac) - self.register(fileno, new=True) - return listener - - def remove(self, listener): - super(Hub, self).remove(listener) - self.register(listener.fileno) - - def register(self, fileno, new=False): - mask = 0 - if self.listeners[self.READ].get(fileno): - mask |= self.READ_MASK | self.EXC_MASK - if self.listeners[self.WRITE].get(fileno): - mask |= self.WRITE_MASK | self.EXC_MASK - try: - if mask: - if new: - self.poll.register(fileno, mask) - else: - try: - self.poll.modify(fileno, mask) - except (IOError, OSError): - self.poll.register(fileno, mask) - else: - try: - self.poll.unregister(fileno) - except (KeyError, IOError, OSError): - # raised if we try to remove a fileno that was - # already removed/invalid - pass - except ValueError: - # fileno is bad, issue 74 - self.remove_descriptor(fileno) - 
raise - - def remove_descriptor(self, fileno): - super(Hub, self).remove_descriptor(fileno) - try: - self.poll.unregister(fileno) - except (KeyError, ValueError, IOError, OSError): - # raised if we try to remove a fileno that was - # already removed/invalid - pass - - def do_poll(self, seconds): - # poll.poll expects integral milliseconds - return self.poll.poll(int(seconds * 1000.0)) - - def wait(self, seconds=None): - readers = self.listeners[self.READ] - writers = self.listeners[self.WRITE] - - if not readers and not writers: - if seconds: - time.sleep(seconds) - return - try: - presult = self.do_poll(seconds) - except (IOError, select.error) as e: - if support.get_errno(e) == errno.EINTR: - return - raise - SYSTEM_EXCEPTIONS = self.SYSTEM_EXCEPTIONS - - if self.debug_blocking: - self.block_detect_pre() - - # Accumulate the listeners to call back to prior to - # triggering any of them. This is to keep the set - # of callbacks in sync with the events we've just - # polled for. It prevents one handler from invalidating - # another. - callbacks = set() - noop = hub.noop # shave getattr - for fileno, event in presult: - if event & self.READ_MASK: - callbacks.add((readers.get(fileno, noop), fileno)) - if event & self.WRITE_MASK: - callbacks.add((writers.get(fileno, noop), fileno)) - if event & select.POLLNVAL: - self.remove_descriptor(fileno) - continue - if event & self.EXC_MASK: - callbacks.add((readers.get(fileno, noop), fileno)) - callbacks.add((writers.get(fileno, noop), fileno)) - - for listener, fileno in callbacks: - try: - listener.cb(fileno) - except SYSTEM_EXCEPTIONS: - raise - except: - self.squelch_exception(fileno, sys.exc_info()) - support.clear_sys_exc_info() - - if self.debug_blocking: - self.block_detect_post() diff --git a/venv/lib/python3.6/site-packages/eventlet/hubs/pyevent.py b/venv/lib/python3.6/site-packages/eventlet/hubs/pyevent.py deleted file mode 100644 index 8367a65..0000000 --- a/venv/lib/python3.6/site-packages/eventlet/hubs/pyevent.py +++ /dev/null @@ -1,187 +0,0 @@ -import sys -import traceback -import types - -from eventlet.support import greenlets as greenlet -import six -from eventlet.hubs.hub import BaseHub, READ, WRITE - -try: - import event -except ImportError: - event = None - - -def is_available(): - return event is not None - - -class event_wrapper(object): - - def __init__(self, impl=None, seconds=None): - self.impl = impl - self.seconds = seconds - - def __repr__(self): - if self.impl is not None: - return repr(self.impl) - else: - return object.__repr__(self) - - def __str__(self): - if self.impl is not None: - return str(self.impl) - else: - return object.__str__(self) - - def cancel(self): - if self.impl is not None: - self.impl.delete() - self.impl = None - - @property - def pending(self): - return bool(self.impl and self.impl.pending()) - - -class Hub(BaseHub): - - SYSTEM_EXCEPTIONS = (KeyboardInterrupt, SystemExit) - - def __init__(self): - super(Hub, self).__init__() - event.init() - - self.signal_exc_info = None - self.signal( - 2, - lambda signalnum, frame: self.greenlet.parent.throw(KeyboardInterrupt)) - self.events_to_add = [] - - def dispatch(self): - loop = event.loop - while True: - for e in self.events_to_add: - if e is not None and e.impl is not None and e.seconds is not None: - e.impl.add(e.seconds) - e.seconds = None - self.events_to_add = [] - result = loop() - - if getattr(event, '__event_exc', None) is not None: - # only have to do this because of bug in event.loop - t = getattr(event, '__event_exc') - setattr(event, 
'__event_exc', None) - assert getattr(event, '__event_exc') is None - six.reraise(t[0], t[1], t[2]) - - if result != 0: - return result - - def run(self): - while True: - try: - self.dispatch() - except greenlet.GreenletExit: - break - except self.SYSTEM_EXCEPTIONS: - raise - except: - if self.signal_exc_info is not None: - self.schedule_call_global( - 0, greenlet.getcurrent().parent.throw, *self.signal_exc_info) - self.signal_exc_info = None - else: - self.squelch_timer_exception(None, sys.exc_info()) - - def abort(self, wait=True): - self.schedule_call_global(0, self.greenlet.throw, greenlet.GreenletExit) - if wait: - assert self.greenlet is not greenlet.getcurrent( - ), "Can't abort with wait from inside the hub's greenlet." - self.switch() - - def _getrunning(self): - return bool(self.greenlet) - - def _setrunning(self, value): - pass # exists for compatibility with BaseHub - running = property(_getrunning, _setrunning) - - def add(self, evtype, fileno, real_cb, real_tb, mac): - # this is stupid: pyevent won't call a callback unless it's a function, - # so we have to force it to be one here - if isinstance(real_cb, types.BuiltinMethodType): - def cb(_d): - real_cb(_d) - else: - cb = real_cb - - if evtype is READ: - evt = event.read(fileno, cb, fileno) - elif evtype is WRITE: - evt = event.write(fileno, cb, fileno) - - return super(Hub, self).add(evtype, fileno, evt, real_tb, mac) - - def signal(self, signalnum, handler): - def wrapper(): - try: - handler(signalnum, None) - except: - self.signal_exc_info = sys.exc_info() - event.abort() - return event_wrapper(event.signal(signalnum, wrapper)) - - def remove(self, listener): - super(Hub, self).remove(listener) - listener.cb.delete() - - def remove_descriptor(self, fileno): - for lcontainer in six.itervalues(self.listeners): - listener = lcontainer.pop(fileno, None) - if listener: - try: - listener.cb.delete() - except self.SYSTEM_EXCEPTIONS: - raise - except: - traceback.print_exc() - - def schedule_call_local(self, seconds, cb, *args, **kwargs): - current = greenlet.getcurrent() - if current is self.greenlet: - return self.schedule_call_global(seconds, cb, *args, **kwargs) - event_impl = event.event(_scheduled_call_local, (cb, args, kwargs, current)) - wrapper = event_wrapper(event_impl, seconds=seconds) - self.events_to_add.append(wrapper) - return wrapper - - schedule_call = schedule_call_local - - def schedule_call_global(self, seconds, cb, *args, **kwargs): - event_impl = event.event(_scheduled_call, (cb, args, kwargs)) - wrapper = event_wrapper(event_impl, seconds=seconds) - self.events_to_add.append(wrapper) - return wrapper - - def _version_info(self): - baseversion = event.__version__ - return baseversion - - -def _scheduled_call(event_impl, handle, evtype, arg): - cb, args, kwargs = arg - try: - cb(*args, **kwargs) - finally: - event_impl.delete() - - -def _scheduled_call_local(event_impl, handle, evtype, arg): - cb, args, kwargs, caller_greenlet = arg - try: - if not caller_greenlet.dead: - cb(*args, **kwargs) - finally: - event_impl.delete() diff --git a/venv/lib/python3.6/site-packages/eventlet/hubs/selects.py b/venv/lib/python3.6/site-packages/eventlet/hubs/selects.py deleted file mode 100644 index 0ead5b8..0000000 --- a/venv/lib/python3.6/site-packages/eventlet/hubs/selects.py +++ /dev/null @@ -1,64 +0,0 @@ -import errno -import sys -from eventlet import patcher, support -from eventlet.hubs import hub -select = patcher.original('select') -time = patcher.original('time') - -try: - BAD_SOCK = set((errno.EBADF, 
errno.WSAENOTSOCK)) -except AttributeError: - BAD_SOCK = set((errno.EBADF,)) - - -def is_available(): - return hasattr(select, 'select') - - -class Hub(hub.BaseHub): - def _remove_bad_fds(self): - """ Iterate through fds, removing the ones that are bad per the - operating system. - """ - all_fds = list(self.listeners[self.READ]) + list(self.listeners[self.WRITE]) - for fd in all_fds: - try: - select.select([fd], [], [], 0) - except select.error as e: - if support.get_errno(e) in BAD_SOCK: - self.remove_descriptor(fd) - - def wait(self, seconds=None): - readers = self.listeners[self.READ] - writers = self.listeners[self.WRITE] - if not readers and not writers: - if seconds: - time.sleep(seconds) - return - reader_fds = list(readers) - writer_fds = list(writers) - all_fds = reader_fds + writer_fds - try: - r, w, er = select.select(reader_fds, writer_fds, all_fds, seconds) - except select.error as e: - if support.get_errno(e) == errno.EINTR: - return - elif support.get_errno(e) in BAD_SOCK: - self._remove_bad_fds() - return - else: - raise - - for fileno in er: - readers.get(fileno, hub.noop).cb(fileno) - writers.get(fileno, hub.noop).cb(fileno) - - for listeners, events in ((readers, r), (writers, w)): - for fileno in events: - try: - listeners.get(fileno, hub.noop).cb(fileno) - except self.SYSTEM_EXCEPTIONS: - raise - except: - self.squelch_exception(fileno, sys.exc_info()) - support.clear_sys_exc_info() diff --git a/venv/lib/python3.6/site-packages/eventlet/hubs/timer.py b/venv/lib/python3.6/site-packages/eventlet/hubs/timer.py deleted file mode 100644 index 1dfd561..0000000 --- a/venv/lib/python3.6/site-packages/eventlet/hubs/timer.py +++ /dev/null @@ -1,106 +0,0 @@ -import traceback - -import eventlet.hubs -from eventlet.support import greenlets as greenlet -import six - -""" If true, captures a stack trace for each timer when constructed. This is -useful for debugging leaking timers, to find out where the timer was set up. """ -_g_debug = False - - -class Timer(object): - def __init__(self, seconds, cb, *args, **kw): - """Create a timer. - seconds: The minimum number of seconds to wait before calling - cb: The callback to call when the timer has expired - *args: The arguments to pass to cb - **kw: The keyword arguments to pass to cb - - This timer will not be run unless it is scheduled in a runloop by - calling timer.schedule() or runloop.add_timer(timer). - """ - self.seconds = seconds - self.tpl = cb, args, kw - self.called = False - if _g_debug: - self.traceback = six.StringIO() - traceback.print_stack(file=self.traceback) - - @property - def pending(self): - return not self.called - - def __repr__(self): - secs = getattr(self, 'seconds', None) - cb, args, kw = getattr(self, 'tpl', (None, None, None)) - retval = "Timer(%s, %s, *%s, **%s)" % ( - secs, cb, args, kw) - if _g_debug and hasattr(self, 'traceback'): - retval += '\n' + self.traceback.getvalue() - return retval - - def copy(self): - cb, args, kw = self.tpl - return self.__class__(self.seconds, cb, *args, **kw) - - def schedule(self): - """Schedule this timer to run in the current runloop. - """ - self.called = False - self.scheduled_time = eventlet.hubs.get_hub().add_timer(self) - return self - - def __call__(self, *args): - if not self.called: - self.called = True - cb, args, kw = self.tpl - try: - cb(*args, **kw) - finally: - try: - del self.tpl - except AttributeError: - pass - - def cancel(self): - """Prevent this timer from being called. If the timer has already - been called or canceled, has no effect. 
- """ - if not self.called: - self.called = True - eventlet.hubs.get_hub().timer_canceled(self) - try: - del self.tpl - except AttributeError: - pass - - # No default ordering in 3.x. heapq uses < - # FIXME should full set be added? - def __lt__(self, other): - return id(self) < id(other) - - -class LocalTimer(Timer): - - def __init__(self, *args, **kwargs): - self.greenlet = greenlet.getcurrent() - Timer.__init__(self, *args, **kwargs) - - @property - def pending(self): - if self.greenlet is None or self.greenlet.dead: - return False - return not self.called - - def __call__(self, *args): - if not self.called: - self.called = True - if self.greenlet is not None and self.greenlet.dead: - return - cb, args, kw = self.tpl - cb(*args, **kw) - - def cancel(self): - self.greenlet = None - Timer.cancel(self) diff --git a/venv/lib/python3.6/site-packages/eventlet/patcher.py b/venv/lib/python3.6/site-packages/eventlet/patcher.py deleted file mode 100644 index b8f8c16..0000000 --- a/venv/lib/python3.6/site-packages/eventlet/patcher.py +++ /dev/null @@ -1,485 +0,0 @@ -import imp -import sys - -import eventlet -import six - - -__all__ = ['inject', 'import_patched', 'monkey_patch', 'is_monkey_patched'] - -__exclude = set(('__builtins__', '__file__', '__name__')) - - -class SysModulesSaver(object): - """Class that captures some subset of the current state of - sys.modules. Pass in an iterator of module names to the - constructor.""" - - def __init__(self, module_names=()): - self._saved = {} - imp.acquire_lock() - self.save(*module_names) - - def save(self, *module_names): - """Saves the named modules to the object.""" - for modname in module_names: - self._saved[modname] = sys.modules.get(modname, None) - - def restore(self): - """Restores the modules that the saver knows about into - sys.modules. - """ - try: - for modname, mod in six.iteritems(self._saved): - if mod is not None: - sys.modules[modname] = mod - else: - try: - del sys.modules[modname] - except KeyError: - pass - finally: - imp.release_lock() - - -def inject(module_name, new_globals, *additional_modules): - """Base method for "injecting" greened modules into an imported module. It - imports the module specified in *module_name*, arranging things so - that the already-imported modules in *additional_modules* are used when - *module_name* makes its imports. - - **Note:** This function does not create or change any sys.modules item, so - if your greened module use code like 'sys.modules["your_module_name"]', you - need to update sys.modules by yourself. - - *new_globals* is either None or a globals dictionary that gets populated - with the contents of the *module_name* module. This is useful when creating - a "green" version of some other module. - - *additional_modules* should be a collection of two-element tuples, of the - form (, ). If it's not specified, a default selection of - name/module pairs is used, which should cover all use cases but may be - slower because there are inevitably redundant or unnecessary imports. 
- """ - patched_name = '__patched_module_' + module_name - if patched_name in sys.modules: - # returning already-patched module so as not to destroy existing - # references to patched modules - return sys.modules[patched_name] - - if not additional_modules: - # supply some defaults - additional_modules = ( - _green_os_modules() + - _green_select_modules() + - _green_socket_modules() + - _green_thread_modules() + - _green_time_modules()) - # _green_MySQLdb()) # enable this after a short baking-in period - - # after this we are gonna screw with sys.modules, so capture the - # state of all the modules we're going to mess with, and lock - saver = SysModulesSaver([name for name, m in additional_modules]) - saver.save(module_name) - - # Cover the target modules so that when you import the module it - # sees only the patched versions - for name, mod in additional_modules: - sys.modules[name] = mod - - # Remove the old module from sys.modules and reimport it while - # the specified modules are in place - sys.modules.pop(module_name, None) - # Also remove sub modules and reimport. Use copy the keys to list - # because of the pop operations will change the content of sys.modules - # within th loop - for imported_module_name in list(sys.modules.keys()): - if imported_module_name.startswith(module_name + '.'): - sys.modules.pop(imported_module_name, None) - try: - module = __import__(module_name, {}, {}, module_name.split('.')[:-1]) - - if new_globals is not None: - # Update the given globals dictionary with everything from this new module - for name in dir(module): - if name not in __exclude: - new_globals[name] = getattr(module, name) - - # Keep a reference to the new module to prevent it from dying - sys.modules[patched_name] = module - finally: - saver.restore() # Put the original modules back - - return module - - -def import_patched(module_name, *additional_modules, **kw_additional_modules): - """Imports a module in a way that ensures that the module uses "green" - versions of the standard library modules, so that everything works - nonblockingly. - - The only required argument is the name of the module to be imported. - """ - return inject( - module_name, - None, - *additional_modules + tuple(kw_additional_modules.items())) - - -def patch_function(func, *additional_modules): - """Decorator that returns a version of the function that patches - some modules for the duration of the function call. This is - deeply gross and should only be used for functions that import - network libraries within their function bodies that there is no - way of getting around.""" - if not additional_modules: - # supply some defaults - additional_modules = ( - _green_os_modules() + - _green_select_modules() + - _green_socket_modules() + - _green_thread_modules() + - _green_time_modules()) - - def patched(*args, **kw): - saver = SysModulesSaver() - for name, mod in additional_modules: - saver.save(name) - sys.modules[name] = mod - try: - return func(*args, **kw) - finally: - saver.restore() - return patched - - -def _original_patch_function(func, *module_names): - """Kind of the contrapositive of patch_function: decorates a - function such that when it's called, sys.modules is populated only - with the unpatched versions of the specified modules. Unlike - patch_function, only the names of the modules need be supplied, - and there are no defaults. 
This is a gross hack; tell your kids not - to import inside function bodies!""" - def patched(*args, **kw): - saver = SysModulesSaver(module_names) - for name in module_names: - sys.modules[name] = original(name) - try: - return func(*args, **kw) - finally: - saver.restore() - return patched - - -def original(modname): - """ This returns an unpatched version of a module; this is useful for - Eventlet itself (i.e. tpool).""" - # note that it's not necessary to temporarily install unpatched - # versions of all patchable modules during the import of the - # module; this is because none of them import each other, except - # for threading which imports thread - original_name = '__original_module_' + modname - if original_name in sys.modules: - return sys.modules.get(original_name) - - # re-import the "pure" module and store it in the global _originals - # dict; be sure to restore whatever module had that name already - saver = SysModulesSaver((modname,)) - sys.modules.pop(modname, None) - # some rudimentary dependency checking -- fortunately the modules - # we're working on don't have many dependencies so we can just do - # some special-casing here - if six.PY2: - deps = {'threading': 'thread', 'Queue': 'threading'} - if six.PY3: - deps = {'threading': '_thread', 'queue': 'threading'} - if modname in deps: - dependency = deps[modname] - saver.save(dependency) - sys.modules[dependency] = original(dependency) - try: - real_mod = __import__(modname, {}, {}, modname.split('.')[:-1]) - if modname in ('Queue', 'queue') and not hasattr(real_mod, '_threading'): - # tricky hack: Queue's constructor in <2.7 imports - # threading on every instantiation; therefore we wrap - # it so that it always gets the original threading - real_mod.Queue.__init__ = _original_patch_function( - real_mod.Queue.__init__, - 'threading') - # save a reference to the unpatched module so it doesn't get lost - sys.modules[original_name] = real_mod - finally: - saver.restore() - - return sys.modules[original_name] - -already_patched = {} - - -def monkey_patch(**on): - """Globally patches certain system modules to be greenthread-friendly. - - The keyword arguments afford some control over which modules are patched. - If no keyword arguments are supplied, all possible modules are patched. - If keywords are set to True, only the specified modules are patched. E.g., - ``monkey_patch(socket=True, select=True)`` patches only the select and - socket modules. Most arguments patch the single module of the same name - (os, time, select). The exceptions are socket, which also patches the ssl - module if present; and thread, which patches thread, threading, and Queue. - - It's safe to call monkey_patch multiple times. - """ - - # Workaround for import cycle observed as following in monotonic - # RuntimeError: no suitable implementation for this system - # see https://github.com/eventlet/eventlet/issues/401#issuecomment-325015989 - # - # Make sure the hub is completely imported before any - # monkey-patching, or we risk recursion if the process of importing - # the hub calls into monkey-patched modules. 
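A short sketch of the call pattern the monkey_patch() docstring above describes; it is typically issued once, as early as possible in a program::

    import eventlet

    eventlet.monkey_patch()                   # patch all supported modules
    # or patch selectively, as in the docstring above:
    # eventlet.monkey_patch(socket=True, select=True)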
- eventlet.hubs.get_hub() - - accepted_args = set(('os', 'select', 'socket', - 'thread', 'time', 'psycopg', 'MySQLdb', - 'builtins', 'subprocess')) - # To make sure only one of them is passed here - assert not ('__builtin__' in on and 'builtins' in on) - try: - b = on.pop('__builtin__') - except KeyError: - pass - else: - on['builtins'] = b - - default_on = on.pop("all", None) - - for k in six.iterkeys(on): - if k not in accepted_args: - raise TypeError("monkey_patch() got an unexpected " - "keyword argument %r" % k) - if default_on is None: - default_on = not (True in on.values()) - for modname in accepted_args: - if modname == 'MySQLdb': - # MySQLdb is only on when explicitly patched for the moment - on.setdefault(modname, False) - if modname == 'builtins': - on.setdefault(modname, False) - on.setdefault(modname, default_on) - - if on['thread'] and not already_patched.get('thread'): - _green_existing_locks() - - modules_to_patch = [] - for name, modules_function in [ - ('os', _green_os_modules), - ('select', _green_select_modules), - ('socket', _green_socket_modules), - ('thread', _green_thread_modules), - ('time', _green_time_modules), - ('MySQLdb', _green_MySQLdb), - ('builtins', _green_builtins), - ('subprocess', _green_subprocess_modules), - ]: - if on[name] and not already_patched.get(name): - modules_to_patch += modules_function() - already_patched[name] = True - - if on['psycopg'] and not already_patched.get('psycopg'): - try: - from eventlet.support import psycopg2_patcher - psycopg2_patcher.make_psycopg_green() - already_patched['psycopg'] = True - except ImportError: - # note that if we get an importerror from trying to - # monkeypatch psycopg, we will continually retry it - # whenever monkey_patch is called; this should not be a - # performance problem but it allows is_monkey_patched to - # tell us whether or not we succeeded - pass - - imp.acquire_lock() - try: - for name, mod in modules_to_patch: - orig_mod = sys.modules.get(name) - if orig_mod is None: - orig_mod = __import__(name) - for attr_name in mod.__patched__: - patched_attr = getattr(mod, attr_name, None) - if patched_attr is not None: - setattr(orig_mod, attr_name, patched_attr) - deleted = getattr(mod, '__deleted__', []) - for attr_name in deleted: - if hasattr(orig_mod, attr_name): - delattr(orig_mod, attr_name) - finally: - imp.release_lock() - - if sys.version_info >= (3, 3): - import importlib._bootstrap - thread = original('_thread') - # importlib must use real thread locks, not eventlet.Semaphore - importlib._bootstrap._thread = thread - - # Issue #185: Since Python 3.3, threading.RLock is implemented in C and - # so call a C function to get the thread identifier, instead of calling - # threading.get_ident(). Force the Python implementation of RLock which - # calls threading.get_ident() and so is compatible with eventlet. - import threading - threading.RLock = threading._PyRLock - - -def is_monkey_patched(module): - """Returns True if the given module is monkeypatched currently, False if - not. *module* can be either the module itself or its name. - - Based entirely off the name of the module, so if you import a - module some other way than with the import keyword (including - import_patched), this might not be correct about that particular - module.""" - return module in already_patched or \ - getattr(module, '__name__', None) in already_patched - - -def _green_existing_locks(): - """Make locks created before monkey-patching safe. 
- - RLocks rely on a Lock and on Python 2, if an unpatched Lock blocks, it - blocks the native thread. We need to replace these with green Locks. - - This was originally noticed in the stdlib logging module.""" - import gc - import threading - import eventlet.green.thread - lock_type = type(threading.Lock()) - rlock_type = type(threading.RLock()) - if sys.version_info[0] >= 3: - pyrlock_type = type(threading._PyRLock()) - # We're monkey-patching so there can't be any greenlets yet, ergo our thread - # ID is the only valid owner possible. - tid = eventlet.green.thread.get_ident() - for obj in gc.get_objects(): - if isinstance(obj, rlock_type): - if (sys.version_info[0] == 2 and - isinstance(obj._RLock__block, lock_type)): - _fix_py2_rlock(obj, tid) - elif (sys.version_info[0] >= 3 and - not isinstance(obj, pyrlock_type)): - _fix_py3_rlock(obj) - - -def _fix_py2_rlock(rlock, tid): - import eventlet.green.threading - old = rlock._RLock__block - new = eventlet.green.threading.Lock() - rlock._RLock__block = new - if old.locked(): - new.acquire() - rlock._RLock__owner = tid - - -def _fix_py3_rlock(old): - import gc - import threading - new = threading._PyRLock() - while old._is_owned(): - old.release() - new.acquire() - if old._is_owned(): - new.acquire() - gc.collect() - for ref in gc.get_referrers(old): - try: - ref_vars = vars(ref) - except TypeError: - pass - else: - for k, v in ref_vars.items(): - if v == old: - setattr(ref, k, new) - - -def _green_os_modules(): - from eventlet.green import os - return [('os', os)] - - -def _green_select_modules(): - from eventlet.green import select - modules = [('select', select)] - - if sys.version_info >= (3, 4): - from eventlet.green import selectors - modules.append(('selectors', selectors)) - - return modules - - -def _green_socket_modules(): - from eventlet.green import socket - try: - from eventlet.green import ssl - return [('socket', socket), ('ssl', ssl)] - except ImportError: - return [('socket', socket)] - - -def _green_subprocess_modules(): - from eventlet.green import subprocess - return [('subprocess', subprocess)] - - -def _green_thread_modules(): - from eventlet.green import Queue - from eventlet.green import thread - from eventlet.green import threading - if six.PY2: - return [('Queue', Queue), ('thread', thread), ('threading', threading)] - if six.PY3: - return [('queue', Queue), ('_thread', thread), ('threading', threading)] - - -def _green_time_modules(): - from eventlet.green import time - return [('time', time)] - - -def _green_MySQLdb(): - try: - from eventlet.green import MySQLdb - return [('MySQLdb', MySQLdb)] - except ImportError: - return [] - - -def _green_builtins(): - try: - from eventlet.green import builtin - return [('__builtin__' if six.PY2 else 'builtins', builtin)] - except ImportError: - return [] - - -def slurp_properties(source, destination, ignore=[], srckeys=None): - """Copy properties from *source* (assumed to be a module) to - *destination* (assumed to be a dict). - - *ignore* lists properties that should not be thusly copied. - *srckeys* is a list of keys to copy, if the source's __all__ is - untrustworthy. 
- """ - if srckeys is None: - srckeys = source.__all__ - destination.update(dict([ - (name, getattr(source, name)) - for name in srckeys - if not (name.startswith('__') or name in ignore) - ])) - - -if __name__ == "__main__": - sys.argv.pop(0) - monkey_patch() - with open(sys.argv[0]) as f: - code = compile(f.read(), sys.argv[0], 'exec') - exec(code) diff --git a/venv/lib/python3.6/site-packages/eventlet/pools.py b/venv/lib/python3.6/site-packages/eventlet/pools.py deleted file mode 100644 index ee9b77b..0000000 --- a/venv/lib/python3.6/site-packages/eventlet/pools.py +++ /dev/null @@ -1,186 +0,0 @@ -from __future__ import print_function - -import collections -from contextlib import contextmanager - -from eventlet import queue - - -__all__ = ['Pool', 'TokenPool'] - - -class Pool(object): - """ - Pool class implements resource limitation and construction. - - There are two ways of using Pool: passing a `create` argument or - subclassing. In either case you must provide a way to create - the resource. - - When using `create` argument, pass a function with no arguments:: - - http_pool = pools.Pool(create=httplib2.Http) - - If you need to pass arguments, build a nullary function with either - `lambda` expression:: - - http_pool = pools.Pool(create=lambda: httplib2.Http(timeout=90)) - - or :func:`functools.partial`:: - - from functools import partial - http_pool = pools.Pool(create=partial(httplib2.Http, timeout=90)) - - When subclassing, define only the :meth:`create` method - to implement the desired resource:: - - class MyPool(pools.Pool): - def create(self): - return MyObject() - - If using 2.5 or greater, the :meth:`item` method acts as a context manager; - that's the best way to use it:: - - with mypool.item() as thing: - thing.dostuff() - - The maximum size of the pool can be modified at runtime via - the :meth:`resize` method. - - Specifying a non-zero *min-size* argument pre-populates the pool with - *min_size* items. *max-size* sets a hard limit to the size of the pool -- - it cannot contain any more items than *max_size*, and if there are already - *max_size* items 'checked out' of the pool, the pool will cause any - greenthread calling :meth:`get` to cooperatively yield until an item - is :meth:`put` in. - """ - - def __init__(self, min_size=0, max_size=4, order_as_stack=False, create=None): - """*order_as_stack* governs the ordering of the items in the free pool. - If ``False`` (the default), the free items collection (of items that - were created and were put back in the pool) acts as a round-robin, - giving each item approximately equal utilization. If ``True``, the - free pool acts as a FILO stack, which preferentially re-uses items that - have most recently been used. - """ - self.min_size = min_size - self.max_size = max_size - self.order_as_stack = order_as_stack - self.current_size = 0 - self.channel = queue.LightQueue(0) - self.free_items = collections.deque() - if create is not None: - self.create = create - - for x in range(min_size): - self.current_size += 1 - self.free_items.append(self.create()) - - def get(self): - """Return an item from the pool, when one is available. This may - cause the calling greenthread to block. 
- """ - if self.free_items: - return self.free_items.popleft() - self.current_size += 1 - if self.current_size <= self.max_size: - try: - created = self.create() - except: - self.current_size -= 1 - raise - return created - self.current_size -= 1 # did not create - return self.channel.get() - - @contextmanager - def item(self): - """ Get an object out of the pool, for use with with statement. - - >>> from eventlet import pools - >>> pool = pools.TokenPool(max_size=4) - >>> with pool.item() as obj: - ... print("got token") - ... - got token - >>> pool.free() - 4 - """ - obj = self.get() - try: - yield obj - finally: - self.put(obj) - - def put(self, item): - """Put an item back into the pool, when done. This may - cause the putting greenthread to block. - """ - if self.current_size > self.max_size: - self.current_size -= 1 - return - - if self.waiting(): - try: - self.channel.put(item, block=False) - return - except queue.Full: - pass - - if self.order_as_stack: - self.free_items.appendleft(item) - else: - self.free_items.append(item) - - def resize(self, new_size): - """Resize the pool to *new_size*. - - Adjusting this number does not affect existing items checked out of - the pool, nor on any greenthreads who are waiting for an item to free - up. Some indeterminate number of :meth:`get`/:meth:`put` - cycles will be necessary before the new maximum size truly matches - the actual operation of the pool. - """ - self.max_size = new_size - - def free(self): - """Return the number of free items in the pool. This corresponds - to the number of :meth:`get` calls needed to empty the pool. - """ - return len(self.free_items) + self.max_size - self.current_size - - def waiting(self): - """Return the number of routines waiting for a pool item. - """ - return max(0, self.channel.getting() - self.channel.putting()) - - def create(self): - """Generate a new pool item. In order for the pool to - function, either this method must be overriden in a subclass - or the pool must be constructed with the `create` argument. - It accepts no arguments and returns a single instance of - whatever thing the pool is supposed to contain. - - In general, :meth:`create` is called whenever the pool exceeds its - previous high-water mark of concurrently-checked-out-items. In other - words, in a new pool with *min_size* of 0, the very first call - to :meth:`get` will result in a call to :meth:`create`. If the first - caller calls :meth:`put` before some other caller calls :meth:`get`, - then the first item will be returned, and :meth:`create` will not be - called a second time. - """ - raise NotImplementedError("Implement in subclass") - - -class Token(object): - pass - - -class TokenPool(Pool): - """A pool which gives out tokens (opaque unique objects), which indicate - that the coroutine which holds the token has a right to consume some - limited resource. 
- """ - - def create(self): - return Token() diff --git a/venv/lib/python3.6/site-packages/eventlet/queue.py b/venv/lib/python3.6/site-packages/eventlet/queue.py deleted file mode 100644 index b61c2f8..0000000 --- a/venv/lib/python3.6/site-packages/eventlet/queue.py +++ /dev/null @@ -1,492 +0,0 @@ -# Copyright (c) 2009 Denis Bilenko, denis.bilenko at gmail com -# Copyright (c) 2010 Eventlet Contributors (see AUTHORS) -# and licensed under the MIT license: -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the "Software"), to deal -# in the Software without restriction, including without limitation the rights -# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -# copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in -# all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -# THE SOFTWARE. - -"""Synchronized queues. - -The :mod:`eventlet.queue` module implements multi-producer, multi-consumer -queues that work across greenlets, with the API similar to the classes found in -the standard :mod:`Queue` and :class:`multiprocessing ` -modules. - -A major difference is that queues in this module operate as channels when -initialized with *maxsize* of zero. In such case, both :meth:`Queue.empty` -and :meth:`Queue.full` return ``True`` and :meth:`Queue.put` always blocks until -a call to :meth:`Queue.get` retrieves the item. - -An interesting difference, made possible because of greenthreads, is -that :meth:`Queue.qsize`, :meth:`Queue.empty`, and :meth:`Queue.full` *can* be -used as indicators of whether the subsequent :meth:`Queue.get` -or :meth:`Queue.put` will not block. The new methods :meth:`Queue.getting` -and :meth:`Queue.putting` report on the number of greenthreads blocking -in :meth:`put ` or :meth:`get ` respectively. -""" -from __future__ import print_function - -import sys -import heapq -import collections -import traceback - -from eventlet.event import Event -from eventlet.greenthread import getcurrent -from eventlet.hubs import get_hub -import six -from six.moves import queue as Stdlib_Queue -from eventlet.timeout import Timeout - - -__all__ = ['Queue', 'PriorityQueue', 'LifoQueue', 'LightQueue', 'Full', 'Empty'] - -_NONE = object() -Full = six.moves.queue.Full -Empty = six.moves.queue.Empty - - -class Waiter(object): - """A low level synchronization class. - - Wrapper around greenlet's ``switch()`` and ``throw()`` calls that makes them safe: - - * switching will occur only if the waiting greenlet is executing :meth:`wait` - method currently. Otherwise, :meth:`switch` and :meth:`throw` are no-ops. - * any error raised in the greenlet is handled inside :meth:`switch` and :meth:`throw` - - The :meth:`switch` and :meth:`throw` methods must only be called from the :class:`Hub` greenlet. - The :meth:`wait` method must be called from a greenlet other than :class:`Hub`. 
- """ - __slots__ = ['greenlet'] - - def __init__(self): - self.greenlet = None - - def __repr__(self): - if self.waiting: - waiting = ' waiting' - else: - waiting = '' - return '<%s at %s%s greenlet=%r>' % ( - type(self).__name__, hex(id(self)), waiting, self.greenlet, - ) - - def __str__(self): - """ - >>> print(Waiter()) - - """ - if self.waiting: - waiting = ' waiting' - else: - waiting = '' - return '<%s%s greenlet=%s>' % (type(self).__name__, waiting, self.greenlet) - - def __nonzero__(self): - return self.greenlet is not None - - __bool__ = __nonzero__ - - @property - def waiting(self): - return self.greenlet is not None - - def switch(self, value=None): - """Wake up the greenlet that is calling wait() currently (if there is one). - Can only be called from Hub's greenlet. - """ - assert getcurrent() is get_hub( - ).greenlet, "Can only use Waiter.switch method from the mainloop" - if self.greenlet is not None: - try: - self.greenlet.switch(value) - except Exception: - traceback.print_exc() - - def throw(self, *throw_args): - """Make greenlet calling wait() wake up (if there is a wait()). - Can only be called from Hub's greenlet. - """ - assert getcurrent() is get_hub( - ).greenlet, "Can only use Waiter.switch method from the mainloop" - if self.greenlet is not None: - try: - self.greenlet.throw(*throw_args) - except Exception: - traceback.print_exc() - - # XXX should be renamed to get() ? and the whole class is called Receiver? - def wait(self): - """Wait until switch() or throw() is called. - """ - assert self.greenlet is None, 'This Waiter is already used by %r' % (self.greenlet, ) - self.greenlet = getcurrent() - try: - return get_hub().switch() - finally: - self.greenlet = None - - -class LightQueue(object): - """ - This is a variant of Queue that behaves mostly like the standard - :class:`Stdlib_Queue`. It differs by not supporting the - :meth:`task_done ` or - :meth:`join ` methods, and is a little faster for - not having that overhead. - """ - - def __init__(self, maxsize=None): - if maxsize is None or maxsize < 0: # None is not comparable in 3.x - self.maxsize = None - else: - self.maxsize = maxsize - self.getters = set() - self.putters = set() - self._event_unlock = None - self._init(maxsize) - - # QQQ make maxsize into a property with setter that schedules unlock if necessary - - def _init(self, maxsize): - self.queue = collections.deque() - - def _get(self): - return self.queue.popleft() - - def _put(self, item): - self.queue.append(item) - - def __repr__(self): - return '<%s at %s %s>' % (type(self).__name__, hex(id(self)), self._format()) - - def __str__(self): - return '<%s %s>' % (type(self).__name__, self._format()) - - def _format(self): - result = 'maxsize=%r' % (self.maxsize, ) - if getattr(self, 'queue', None): - result += ' queue=%r' % self.queue - if self.getters: - result += ' getters[%s]' % len(self.getters) - if self.putters: - result += ' putters[%s]' % len(self.putters) - if self._event_unlock is not None: - result += ' unlocking' - return result - - def qsize(self): - """Return the size of the queue.""" - return len(self.queue) - - def resize(self, size): - """Resizes the queue's maximum size. 
- - If the size is increased, and there are putters waiting, they may be woken up.""" - # None is not comparable in 3.x - if self.maxsize is not None and (size is None or size > self.maxsize): - # Maybe wake some stuff up - self._schedule_unlock() - self.maxsize = size - - def putting(self): - """Returns the number of greenthreads that are blocked waiting to put - items into the queue.""" - return len(self.putters) - - def getting(self): - """Returns the number of greenthreads that are blocked waiting on an - empty queue.""" - return len(self.getters) - - def empty(self): - """Return ``True`` if the queue is empty, ``False`` otherwise.""" - return not self.qsize() - - def full(self): - """Return ``True`` if the queue is full, ``False`` otherwise. - - ``Queue(None)`` is never full. - """ - # None is not comparable in 3.x - return self.maxsize is not None and self.qsize() >= self.maxsize - - def put(self, item, block=True, timeout=None): - """Put an item into the queue. - - If optional arg *block* is true and *timeout* is ``None`` (the default), - block if necessary until a free slot is available. If *timeout* is - a positive number, it blocks at most *timeout* seconds and raises - the :class:`Full` exception if no free slot was available within that time. - Otherwise (*block* is false), put an item on the queue if a free slot - is immediately available, else raise the :class:`Full` exception (*timeout* - is ignored in that case). - """ - if self.maxsize is None or self.qsize() < self.maxsize: - # there's a free slot, put an item right away - self._put(item) - if self.getters: - self._schedule_unlock() - elif not block and get_hub().greenlet is getcurrent(): - # we're in the mainloop, so we cannot wait; we can switch() to other greenlets though - # find a getter and deliver an item to it - while self.getters: - getter = self.getters.pop() - if getter: - self._put(item) - item = self._get() - getter.switch(item) - return - raise Full - elif block: - waiter = ItemWaiter(item, block) - self.putters.add(waiter) - timeout = Timeout(timeout, Full) - try: - if self.getters: - self._schedule_unlock() - result = waiter.wait() - assert result is waiter, "Invalid switch into Queue.put: %r" % (result, ) - if waiter.item is not _NONE: - self._put(item) - finally: - timeout.cancel() - self.putters.discard(waiter) - elif self.getters: - waiter = ItemWaiter(item, block) - self.putters.add(waiter) - self._schedule_unlock() - result = waiter.wait() - assert result is waiter, "Invalid switch into Queue.put: %r" % (result, ) - if waiter.item is not _NONE: - raise Full - else: - raise Full - - def put_nowait(self, item): - """Put an item into the queue without blocking. - - Only enqueue the item if a free slot is immediately available. - Otherwise raise the :class:`Full` exception. - """ - self.put(item, False) - - def get(self, block=True, timeout=None): - """Remove and return an item from the queue. - - If optional args *block* is true and *timeout* is ``None`` (the default), - block if necessary until an item is available. If *timeout* is a positive number, - it blocks at most *timeout* seconds and raises the :class:`Empty` exception - if no item was available within that time. Otherwise (*block* is false), return - an item if one is immediately available, else raise the :class:`Empty` exception - (*timeout* is ignored in that case). 
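The blocking and timeout behaviour described in the get() docstring above, as a small sketch::

    from eventlet.queue import Queue, Empty

    q = Queue()
    try:
        item = q.get(timeout=0.5)     # waits at most 0.5 s, then raises Empty
    except Empty:
        item = None
    # q.get(block=False) / q.get_nowait() raise Empty immediately when nothing is queued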
- """ - if self.qsize(): - if self.putters: - self._schedule_unlock() - return self._get() - elif not block and get_hub().greenlet is getcurrent(): - # special case to make get_nowait() runnable in the mainloop greenlet - # there are no items in the queue; try to fix the situation by unlocking putters - while self.putters: - putter = self.putters.pop() - if putter: - putter.switch(putter) - if self.qsize(): - return self._get() - raise Empty - elif block: - waiter = Waiter() - timeout = Timeout(timeout, Empty) - try: - self.getters.add(waiter) - if self.putters: - self._schedule_unlock() - try: - return waiter.wait() - except: - self._schedule_unlock() - raise - finally: - self.getters.discard(waiter) - timeout.cancel() - else: - raise Empty - - def get_nowait(self): - """Remove and return an item from the queue without blocking. - - Only get an item if one is immediately available. Otherwise - raise the :class:`Empty` exception. - """ - return self.get(False) - - def _unlock(self): - try: - while True: - if self.qsize() and self.getters: - getter = self.getters.pop() - if getter: - try: - item = self._get() - except: - getter.throw(*sys.exc_info()) - else: - getter.switch(item) - elif self.putters and self.getters: - putter = self.putters.pop() - if putter: - getter = self.getters.pop() - if getter: - item = putter.item - # this makes greenlet calling put() not to call _put() again - putter.item = _NONE - self._put(item) - item = self._get() - getter.switch(item) - putter.switch(putter) - else: - self.putters.add(putter) - elif self.putters and (self.getters or - self.maxsize is None or - self.qsize() < self.maxsize): - putter = self.putters.pop() - putter.switch(putter) - elif self.putters and not self.getters: - full = [p for p in self.putters if not p.block] - if not full: - break - for putter in full: - self.putters.discard(putter) - get_hub().schedule_call_global( - 0, putter.greenlet.throw, Full) - else: - break - finally: - self._event_unlock = None # QQQ maybe it's possible to obtain this info from libevent? - # i.e. whether this event is pending _OR_ currently executing - # testcase: 2 greenlets: while True: q.put(q.get()) - nothing else has a change to execute - # to avoid this, schedule unlock with timer(0, ...) once in a while - - def _schedule_unlock(self): - if self._event_unlock is None: - self._event_unlock = get_hub().schedule_call_global(0, self._unlock) - - -class ItemWaiter(Waiter): - __slots__ = ['item', 'block'] - - def __init__(self, item, block): - Waiter.__init__(self) - self.item = item - self.block = block - - -class Queue(LightQueue): - '''Create a queue object with a given maximum size. - - If *maxsize* is less than zero or ``None``, the queue size is infinite. - - ``Queue(0)`` is a channel, that is, its :meth:`put` method always blocks - until the item is delivered. (This is unlike the standard - :class:`Stdlib_Queue`, where 0 means infinite size). - - In all other respects, this Queue class resembles the standard library, - :class:`Stdlib_Queue`. 
- ''' - - def __init__(self, maxsize=None): - LightQueue.__init__(self, maxsize) - self.unfinished_tasks = 0 - self._cond = Event() - - def _format(self): - result = LightQueue._format(self) - if self.unfinished_tasks: - result += ' tasks=%s _cond=%s' % (self.unfinished_tasks, self._cond) - return result - - def _put(self, item): - LightQueue._put(self, item) - self._put_bookkeeping() - - def _put_bookkeeping(self): - self.unfinished_tasks += 1 - if self._cond.ready(): - self._cond.reset() - - def task_done(self): - '''Indicate that a formerly enqueued task is complete. Used by queue consumer threads. - For each :meth:`get ` used to fetch a task, a subsequent call to - :meth:`task_done` tells the queue that the processing on the task is complete. - - If a :meth:`join` is currently blocking, it will resume when all items have been processed - (meaning that a :meth:`task_done` call was received for every item that had been - :meth:`put ` into the queue). - - Raises a :exc:`ValueError` if called more times than there were items placed in the queue. - ''' - - if self.unfinished_tasks <= 0: - raise ValueError('task_done() called too many times') - self.unfinished_tasks -= 1 - if self.unfinished_tasks == 0: - self._cond.send(None) - - def join(self): - '''Block until all items in the queue have been gotten and processed. - - The count of unfinished tasks goes up whenever an item is added to the queue. - The count goes down whenever a consumer thread calls :meth:`task_done` to indicate - that the item was retrieved and all work on it is complete. When the count of - unfinished tasks drops to zero, :meth:`join` unblocks. - ''' - if self.unfinished_tasks > 0: - self._cond.wait() - - -class PriorityQueue(Queue): - '''A subclass of :class:`Queue` that retrieves entries in priority order (lowest first). - - Entries are typically tuples of the form: ``(priority number, data)``. - ''' - - def _init(self, maxsize): - self.queue = [] - - def _put(self, item, heappush=heapq.heappush): - heappush(self.queue, item) - self._put_bookkeeping() - - def _get(self, heappop=heapq.heappop): - return heappop(self.queue) - - -class LifoQueue(Queue): - '''A subclass of :class:`Queue` that retrieves most recently added entries first.''' - - def _init(self, maxsize): - self.queue = [] - - def _put(self, item): - self.queue.append(item) - self._put_bookkeeping() - - def _get(self): - return self.queue.pop() diff --git a/venv/lib/python3.6/site-packages/eventlet/semaphore.py b/venv/lib/python3.6/site-packages/eventlet/semaphore.py deleted file mode 100644 index 18b5b05..0000000 --- a/venv/lib/python3.6/site-packages/eventlet/semaphore.py +++ /dev/null @@ -1,315 +0,0 @@ -import collections - -import eventlet -from eventlet import hubs - - -class Semaphore(object): - - """An unbounded semaphore. - Optionally initialize with a resource *count*, then :meth:`acquire` and - :meth:`release` resources as needed. Attempting to :meth:`acquire` when - *count* is zero suspends the calling greenthread until *count* becomes - nonzero again. - - This is API-compatible with :class:`threading.Semaphore`. - - It is a context manager, and thus can be used in a with block:: - - sem = Semaphore(2) - with sem: - do_some_stuff() - - If not specified, *value* defaults to 1. - - It is possible to limit acquire time:: - - sem = Semaphore() - ok = sem.acquire(timeout=0.1) - # True if acquired, False if timed out. 
- - """ - - def __init__(self, value=1): - try: - value = int(value) - except ValueError as e: - msg = 'Semaphore() expect value :: int, actual: {0} {1}'.format(type(value), str(e)) - raise TypeError(msg) - if value < 0: - msg = 'Semaphore() expect value >= 0, actual: {0}'.format(repr(value)) - raise ValueError(msg) - self.counter = value - self._waiters = collections.deque() - - def __repr__(self): - params = (self.__class__.__name__, hex(id(self)), - self.counter, len(self._waiters)) - return '<%s at %s c=%s _w[%s]>' % params - - def __str__(self): - params = (self.__class__.__name__, self.counter, len(self._waiters)) - return '<%s c=%s _w[%s]>' % params - - def locked(self): - """Returns true if a call to acquire would block. - """ - return self.counter <= 0 - - def bounded(self): - """Returns False; for consistency with - :class:`~eventlet.semaphore.CappedSemaphore`. - """ - return False - - def acquire(self, blocking=True, timeout=None): - """Acquire a semaphore. - - When invoked without arguments: if the internal counter is larger than - zero on entry, decrement it by one and return immediately. If it is zero - on entry, block, waiting until some other thread has called release() to - make it larger than zero. This is done with proper interlocking so that - if multiple acquire() calls are blocked, release() will wake exactly one - of them up. The implementation may pick one at random, so the order in - which blocked threads are awakened should not be relied on. There is no - return value in this case. - - When invoked with blocking set to true, do the same thing as when called - without arguments, and return true. - - When invoked with blocking set to false, do not block. If a call without - an argument would block, return false immediately; otherwise, do the - same thing as when called without arguments, and return true. - - Timeout value must be strictly positive. - """ - if timeout == -1: - timeout = None - if timeout is not None and timeout < 0: - raise ValueError("timeout value must be strictly positive") - if not blocking: - if timeout is not None: - raise ValueError("can't specify timeout for non-blocking acquire") - timeout = 0 - if not blocking and self.locked(): - return False - - current_thread = eventlet.getcurrent() - - if self.counter <= 0 or self._waiters: - if current_thread not in self._waiters: - self._waiters.append(current_thread) - try: - if timeout is not None: - ok = False - with eventlet.Timeout(timeout, False): - while self.counter <= 0: - hubs.get_hub().switch() - ok = True - if not ok: - return False - else: - # If someone else is already in this wait loop, give them - # a chance to get out. - while True: - hubs.get_hub().switch() - if self.counter > 0: - break - finally: - try: - self._waiters.remove(current_thread) - except ValueError: - # Fine if its already been dropped. - pass - - self.counter -= 1 - return True - - def __enter__(self): - self.acquire() - - def release(self, blocking=True): - """Release a semaphore, incrementing the internal counter by one. When - it was zero on entry and another thread is waiting for it to become - larger than zero again, wake up that thread. 
- - The *blocking* argument is for consistency with CappedSemaphore and is - ignored - """ - self.counter += 1 - if self._waiters: - hubs.get_hub().schedule_call_global(0, self._do_acquire) - return True - - def _do_acquire(self): - if self._waiters and self.counter > 0: - waiter = self._waiters.popleft() - waiter.switch() - - def __exit__(self, typ, val, tb): - self.release() - - @property - def balance(self): - """An integer value that represents how many new calls to - :meth:`acquire` or :meth:`release` would be needed to get the counter to - 0. If it is positive, then its value is the number of acquires that can - happen before the next acquire would block. If it is negative, it is - the negative of the number of releases that would be required in order - to make the counter 0 again (one more release would push the counter to - 1 and unblock acquirers). It takes into account how many greenthreads - are currently blocking in :meth:`acquire`. - """ - # positive means there are free items - # zero means there are no free items but nobody has requested one - # negative means there are requests for items, but no items - return self.counter - len(self._waiters) - - -class BoundedSemaphore(Semaphore): - - """A bounded semaphore checks to make sure its current value doesn't exceed - its initial value. If it does, ValueError is raised. In most situations - semaphores are used to guard resources with limited capacity. If the - semaphore is released too many times it's a sign of a bug. If not given, - *value* defaults to 1. - """ - - def __init__(self, value=1): - super(BoundedSemaphore, self).__init__(value) - self.original_counter = value - - def release(self, blocking=True): - """Release a semaphore, incrementing the internal counter by one. If - the counter would exceed the initial value, raises ValueError. When - it was zero on entry and another thread is waiting for it to become - larger than zero again, wake up that thread. - - The *blocking* argument is for consistency with :class:`CappedSemaphore` - and is ignored - """ - if self.counter >= self.original_counter: - raise ValueError("Semaphore released too many times") - return super(BoundedSemaphore, self).release(blocking) - - -class CappedSemaphore(object): - - """A blockingly bounded semaphore. - - Optionally initialize with a resource *count*, then :meth:`acquire` and - :meth:`release` resources as needed. Attempting to :meth:`acquire` when - *count* is zero suspends the calling greenthread until count becomes nonzero - again. Attempting to :meth:`release` after *count* has reached *limit* - suspends the calling greenthread until *count* becomes less than *limit* - again. - - This has the same API as :class:`threading.Semaphore`, though its - semantics and behavior differ subtly due to the upper limit on calls - to :meth:`release`. It is **not** compatible with - :class:`threading.BoundedSemaphore` because it blocks when reaching *limit* - instead of raising a ValueError. 
- - It is a context manager, and thus can be used in a with block:: - - sem = CappedSemaphore(2) - with sem: - do_some_stuff() - """ - - def __init__(self, count, limit): - if count < 0: - raise ValueError("CappedSemaphore must be initialized with a " - "positive number, got %s" % count) - if count > limit: - # accidentally, this also catches the case when limit is None - raise ValueError("'count' cannot be more than 'limit'") - self.lower_bound = Semaphore(count) - self.upper_bound = Semaphore(limit - count) - - def __repr__(self): - params = (self.__class__.__name__, hex(id(self)), - self.balance, self.lower_bound, self.upper_bound) - return '<%s at %s b=%s l=%s u=%s>' % params - - def __str__(self): - params = (self.__class__.__name__, self.balance, - self.lower_bound, self.upper_bound) - return '<%s b=%s l=%s u=%s>' % params - - def locked(self): - """Returns true if a call to acquire would block. - """ - return self.lower_bound.locked() - - def bounded(self): - """Returns true if a call to release would block. - """ - return self.upper_bound.locked() - - def acquire(self, blocking=True): - """Acquire a semaphore. - - When invoked without arguments: if the internal counter is larger than - zero on entry, decrement it by one and return immediately. If it is zero - on entry, block, waiting until some other thread has called release() to - make it larger than zero. This is done with proper interlocking so that - if multiple acquire() calls are blocked, release() will wake exactly one - of them up. The implementation may pick one at random, so the order in - which blocked threads are awakened should not be relied on. There is no - return value in this case. - - When invoked with blocking set to true, do the same thing as when called - without arguments, and return true. - - When invoked with blocking set to false, do not block. If a call without - an argument would block, return false immediately; otherwise, do the - same thing as when called without arguments, and return true. - """ - if not blocking and self.locked(): - return False - self.upper_bound.release() - try: - return self.lower_bound.acquire() - except: - self.upper_bound.counter -= 1 - # using counter directly means that it can be less than zero. - # however I certainly don't need to wait here and I don't seem to have - # a need to care about such inconsistency - raise - - def __enter__(self): - self.acquire() - - def release(self, blocking=True): - """Release a semaphore. In this class, this behaves very much like - an :meth:`acquire` but in the opposite direction. - - Imagine the docs of :meth:`acquire` here, but with every direction - reversed. When calling this method, it will block if the internal - counter is greater than or equal to *limit*. - """ - if not blocking and self.bounded(): - return False - self.lower_bound.release() - try: - return self.upper_bound.acquire() - except: - self.lower_bound.counter -= 1 - raise - - def __exit__(self, typ, val, tb): - self.release() - - @property - def balance(self): - """An integer value that represents how many new calls to - :meth:`acquire` or :meth:`release` would be needed to get the counter to - 0. If it is positive, then its value is the number of acquires that can - happen before the next acquire would block. If it is negative, it is - the negative of the number of releases that would be required in order - to make the counter 0 again (one more release would push the counter to - 1 and unblock acquirers). 
It takes into account how many greenthreads - are currently blocking in :meth:`acquire` and :meth:`release`. - """ - return self.lower_bound.balance - self.upper_bound.balance diff --git a/venv/lib/python3.6/site-packages/eventlet/support/__init__.py b/venv/lib/python3.6/site-packages/eventlet/support/__init__.py deleted file mode 100644 index 43bac91..0000000 --- a/venv/lib/python3.6/site-packages/eventlet/support/__init__.py +++ /dev/null @@ -1,78 +0,0 @@ -import inspect -import functools -import sys -import warnings - -from eventlet.support import greenlets - - -_MISSING = object() - - -def get_errno(exc): - """ Get the error code out of socket.error objects. - socket.error in <2.5 does not have errno attribute - socket.error in 3.x does not allow indexing access - e.args[0] works for all. - There are cases when args[0] is not errno. - i.e. http://bugs.python.org/issue6471 - Maybe there are cases when errno is set, but it is not the first argument? - """ - - try: - if exc.errno is not None: - return exc.errno - except AttributeError: - pass - try: - return exc.args[0] - except IndexError: - return None - - -if sys.version_info[0] < 3 and not greenlets.preserves_excinfo: - from sys import exc_clear as clear_sys_exc_info -else: - def clear_sys_exc_info(): - """No-op In py3k. - Exception information is not visible outside of except statements. - sys.exc_clear became obsolete and removed.""" - pass - -if sys.version_info[0] < 3: - def bytes_to_str(b, encoding='ascii'): - return b -else: - def bytes_to_str(b, encoding='ascii'): - return b.decode(encoding) - -PY33 = sys.version_info[:2] == (3, 3) - - -def wrap_deprecated(old, new): - def _resolve(s): - return 'eventlet.'+s if '.' not in s else s - msg = '''\ -{old} is deprecated and will be removed in next version. Use {new} instead. -Autoupgrade: fgrep -rl '{old}' . |xargs -t sed --in-place='' -e 's/{old}/{new}/' -'''.format(old=_resolve(old), new=_resolve(new)) - - def wrapper(base): - klass = None - if inspect.isclass(base): - class klass(base): - pass - klass.__name__ = base.__name__ - klass.__module__ = base.__module__ - - @functools.wraps(base) - def wrapped(*a, **kw): - warnings.warn(msg, DeprecationWarning, stacklevel=5) - return base(*a, **kw) - - if klass is not None: - klass.__init__ = wrapped - return klass - - return wrapped - return wrapper diff --git a/venv/lib/python3.6/site-packages/eventlet/support/greendns.py b/venv/lib/python3.6/site-packages/eventlet/support/greendns.py deleted file mode 100644 index 13968c2..0000000 --- a/venv/lib/python3.6/site-packages/eventlet/support/greendns.py +++ /dev/null @@ -1,841 +0,0 @@ -'''greendns - non-blocking DNS support for Eventlet -''' - -# Portions of this code taken from the gogreen project: -# http://github.com/slideinc/gogreen -# -# Copyright (c) 2005-2010 Slide, Inc. -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following -# disclaimer in the documentation and/or other materials provided -# with the distribution. -# * Neither the name of the author nor the names of other -# contributors may be used to endorse or promote products derived -# from this software without specific prior written permission. 
-# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -import re -import struct -import sys - -import eventlet -from eventlet import patcher -from eventlet.green import _socket_nodns -from eventlet.green import os -from eventlet.green import time -from eventlet.green import select -import six - - -def import_patched(module_name): - # Import cycle note: it's crucial to use _socket_nodns here because - # regular evenlet.green.socket imports *this* module and if we imported - # it back we'd end with an import cycle (socket -> greendns -> socket). - # We break this import cycle by providing a restricted socket module. - modules = { - 'select': select, - 'time': time, - 'os': os, - 'socket': _socket_nodns, - } - return patcher.import_patched(module_name, **modules) - - -dns = import_patched('dns') -for pkg in dns.__all__: - setattr(dns, pkg, import_patched('dns.' + pkg)) -dns.rdtypes.__all__.extend(['dnskeybase', 'dsbase', 'txtbase']) -for pkg in dns.rdtypes.__all__: - setattr(dns.rdtypes, pkg, import_patched('dns.rdtypes.' + pkg)) -for pkg in dns.rdtypes.IN.__all__: - setattr(dns.rdtypes.IN, pkg, import_patched('dns.rdtypes.IN.' + pkg)) -for pkg in dns.rdtypes.ANY.__all__: - setattr(dns.rdtypes.ANY, pkg, import_patched('dns.rdtypes.ANY.' + pkg)) -del import_patched - - -socket = _socket_nodns - -DNS_QUERY_TIMEOUT = 10.0 -HOSTS_TTL = 10.0 - -EAI_EAGAIN_ERROR = socket.gaierror(socket.EAI_AGAIN, 'Lookup timed out') -EAI_NONAME_ERROR = socket.gaierror(socket.EAI_NONAME, 'Name or service not known') -# EAI_NODATA was removed from RFC3493, it's now replaced with EAI_NONAME -# socket.EAI_NODATA is not defined on FreeBSD, probably on some other platforms too. 
-# https://lists.freebsd.org/pipermail/freebsd-ports/2003-October/005757.html -EAI_NODATA_ERROR = EAI_NONAME_ERROR -if (os.environ.get('EVENTLET_DEPRECATED_EAI_NODATA', '').lower() in ('1', 'y', 'yes') - and hasattr(socket, 'EAI_NODATA')): - EAI_NODATA_ERROR = socket.gaierror(socket.EAI_NODATA, 'No address associated with hostname') - - -def is_ipv4_addr(host): - """Return True if host is a valid IPv4 address""" - if not isinstance(host, six.string_types): - return False - try: - dns.ipv4.inet_aton(host) - except dns.exception.SyntaxError: - return False - else: - return True - - -def is_ipv6_addr(host): - """Return True if host is a valid IPv6 address""" - if not isinstance(host, six.string_types): - return False - host = host.split('%', 1)[0] - try: - dns.ipv6.inet_aton(host) - except dns.exception.SyntaxError: - return False - else: - return True - - -def is_ip_addr(host): - """Return True if host is a valid IPv4 or IPv6 address""" - return is_ipv4_addr(host) or is_ipv6_addr(host) - - -class HostsAnswer(dns.resolver.Answer): - """Answer class for HostsResolver object""" - - def __init__(self, qname, rdtype, rdclass, rrset, raise_on_no_answer=True): - """Create a new answer - - :qname: A dns.name.Name instance of the query name - :rdtype: The rdatatype of the query - :rdclass: The rdataclass of the query - :rrset: The dns.rrset.RRset with the response, must have ttl attribute - :raise_on_no_answer: Whether to raise dns.resolver.NoAnswer if no - answer. - """ - self.response = None - self.qname = qname - self.rdtype = rdtype - self.rdclass = rdclass - self.canonical_name = qname - if not rrset and raise_on_no_answer: - raise dns.resolver.NoAnswer() - self.rrset = rrset - self.expiration = (time.time() + - rrset.ttl if hasattr(rrset, 'ttl') else 0) - - -class HostsResolver(object): - """Class to parse the hosts file - - Attributes - ---------- - - :fname: The filename of the hosts file in use. - :interval: The time between checking for hosts file modification - """ - - LINES_RE = re.compile(r""" - \s* # Leading space - ([^\r\n#]*?) # The actual match, non-greedy so as not to include trailing space - \s* # Trailing space - (?:[#][^\r\n]+)? # Comments - (?:$|[\r\n]+) # EOF or newline - """, re.VERBOSE) - - def __init__(self, fname=None, interval=HOSTS_TTL): - self._v4 = {} # name -> ipv4 - self._v6 = {} # name -> ipv6 - self._aliases = {} # name -> canonical_name - self.interval = interval - self.fname = fname - if fname is None: - if os.name == 'posix': - self.fname = '/etc/hosts' - elif os.name == 'nt': - self.fname = os.path.expandvars( - r'%SystemRoot%\system32\drivers\etc\hosts') - self._last_load = 0 - if self.fname: - self._load() - - def _readlines(self): - """Read the contents of the hosts file - - Return list of lines, comment lines and empty lines are - excluded. - - Note that this performs disk I/O so can be blocking. - """ - try: - with open(self.fname, 'rb') as fp: - fdata = fp.read() - except (IOError, OSError): - return [] - - udata = fdata.decode(errors='ignore') - - return six.moves.filter(None, self.LINES_RE.findall(udata)) - - def _load(self): - """Load hosts file - - This will unconditionally (re)load the data from the hosts - file. 
- """ - lines = self._readlines() - self._v4.clear() - self._v6.clear() - self._aliases.clear() - for line in lines: - parts = line.split() - if len(parts) < 2: - continue - ip = parts.pop(0) - if is_ipv4_addr(ip): - ipmap = self._v4 - elif is_ipv6_addr(ip): - if ip.startswith('fe80'): - # Do not use link-local addresses, OSX stores these here - continue - ipmap = self._v6 - else: - continue - cname = parts.pop(0).lower() - ipmap[cname] = ip - for alias in parts: - alias = alias.lower() - ipmap[alias] = ip - self._aliases[alias] = cname - self._last_load = time.time() - - def query(self, qname, rdtype=dns.rdatatype.A, rdclass=dns.rdataclass.IN, - tcp=False, source=None, raise_on_no_answer=True): - """Query the hosts file - - The known rdtypes are dns.rdatatype.A, dns.rdatatype.AAAA and - dns.rdatatype.CNAME. - - The ``rdclass`` parameter must be dns.rdataclass.IN while the - ``tcp`` and ``source`` parameters are ignored. - - Return a HostAnswer instance or raise a dns.resolver.NoAnswer - exception. - """ - now = time.time() - if self._last_load + self.interval < now: - self._load() - rdclass = dns.rdataclass.IN - if isinstance(qname, six.string_types): - name = qname - qname = dns.name.from_text(qname) - else: - name = str(qname) - name = name.lower() - rrset = dns.rrset.RRset(qname, rdclass, rdtype) - rrset.ttl = self._last_load + self.interval - now - if rdclass == dns.rdataclass.IN and rdtype == dns.rdatatype.A: - addr = self._v4.get(name) - if not addr and qname.is_absolute(): - addr = self._v4.get(name[:-1]) - if addr: - rrset.add(dns.rdtypes.IN.A.A(rdclass, rdtype, addr)) - elif rdclass == dns.rdataclass.IN and rdtype == dns.rdatatype.AAAA: - addr = self._v6.get(name) - if not addr and qname.is_absolute(): - addr = self._v6.get(name[:-1]) - if addr: - rrset.add(dns.rdtypes.IN.AAAA.AAAA(rdclass, rdtype, addr)) - elif rdclass == dns.rdataclass.IN and rdtype == dns.rdatatype.CNAME: - cname = self._aliases.get(name) - if not cname and qname.is_absolute(): - cname = self._aliases.get(name[:-1]) - if cname: - rrset.add(dns.rdtypes.ANY.CNAME.CNAME( - rdclass, rdtype, dns.name.from_text(cname))) - return HostsAnswer(qname, rdtype, rdclass, rrset, raise_on_no_answer) - - def getaliases(self, hostname): - """Return a list of all the aliases of a given cname""" - # Due to the way store aliases this is a bit inefficient, this - # clearly was an afterthought. But this is only used by - # gethostbyname_ex so it's probably fine. - aliases = [] - if hostname in self._aliases: - cannon = self._aliases[hostname] - else: - cannon = hostname - aliases.append(cannon) - for alias, cname in six.iteritems(self._aliases): - if cannon == cname: - aliases.append(alias) - aliases.remove(hostname) - return aliases - - -class ResolverProxy(object): - """Resolver class which can also use /etc/hosts - - Initialise with a HostsResolver instance in order for it to also - use the hosts file. - """ - - def __init__(self, hosts_resolver=None, filename='/etc/resolv.conf'): - """Initialise the resolver proxy - - :param hosts_resolver: An instance of HostsResolver to use. - - :param filename: The filename containing the resolver - configuration. The default value is correct for both UNIX - and Windows, on Windows it will result in the configuration - being read from the Windows registry. 
- """ - self._hosts = hosts_resolver - self._filename = filename - self.clear() - - def clear(self): - self._resolver = dns.resolver.Resolver(filename=self._filename) - self._resolver.cache = dns.resolver.LRUCache() - - def query(self, qname, rdtype=dns.rdatatype.A, rdclass=dns.rdataclass.IN, - tcp=False, source=None, raise_on_no_answer=True, - _hosts_rdtypes=(dns.rdatatype.A, dns.rdatatype.AAAA), - use_network=True): - """Query the resolver, using /etc/hosts if enabled. - - Behavior: - 1. if hosts is enabled and contains answer, return it now - 2. query nameservers for qname if use_network is True - 3. if qname did not contain dots, pretend it was top-level domain, - query "foobar." and append to previous result - """ - result = [None, None, 0] - - if qname is None: - qname = '0.0.0.0' - if isinstance(qname, six.string_types): - qname = dns.name.from_text(qname, None) - - def step(fun, *args, **kwargs): - try: - a = fun(*args, **kwargs) - except Exception as e: - result[1] = e - return False - if a.rrset is not None and len(a.rrset): - if result[0] is None: - result[0] = a - else: - result[0].rrset.union_update(a.rrset) - result[2] += len(a.rrset) - return True - - def end(): - if result[0] is not None: - if raise_on_no_answer and result[2] == 0: - raise dns.resolver.NoAnswer - return result[0] - if result[1] is not None: - if raise_on_no_answer or not isinstance(result[1], dns.resolver.NoAnswer): - raise result[1] - raise dns.resolver.NXDOMAIN(qnames=(qname,)) - - if (self._hosts and (rdclass == dns.rdataclass.IN) and (rdtype in _hosts_rdtypes)): - if step(self._hosts.query, qname, rdtype, raise_on_no_answer=False): - if (result[0] is not None) or (result[1] is not None) or (not use_network): - return end() - - # Main query - step(self._resolver.query, qname, rdtype, rdclass, tcp, source, raise_on_no_answer=False) - - # `resolv.conf` docs say unqualified names must resolve from search (or local) domain. - # However, common OS `getaddrinfo()` implementations append trailing dot (e.g. `db -> db.`) - # and ask nameservers, as if top-level domain was queried. - # This step follows established practice. - # https://github.com/nameko/nameko/issues/392 - # https://github.com/eventlet/eventlet/issues/363 - if len(qname) == 1: - step(self._resolver.query, qname.concatenate(dns.name.root), - rdtype, rdclass, tcp, source, raise_on_no_answer=False) - - return end() - - def getaliases(self, hostname): - """Return a list of all the aliases of a given hostname""" - if self._hosts: - aliases = self._hosts.getaliases(hostname) - else: - aliases = [] - while True: - try: - ans = self._resolver.query(hostname, dns.rdatatype.CNAME) - except (dns.resolver.NoAnswer, dns.resolver.NXDOMAIN): - break - else: - aliases.extend(str(rr.target) for rr in ans.rrset) - hostname = ans[0].target - return aliases - - -resolver = ResolverProxy(hosts_resolver=HostsResolver()) - - -def resolve(name, family=socket.AF_INET, raises=True, _proxy=None, - use_network=True): - """Resolve a name for a given family using the global resolver proxy. - - This method is called by the global getaddrinfo() function. If use_network - is False, only resolution via hosts file will be performed. - - Return a dns.resolver.Answer instance. If there is no answer it's - rrset will be emtpy. 
- """ - if family == socket.AF_INET: - rdtype = dns.rdatatype.A - elif family == socket.AF_INET6: - rdtype = dns.rdatatype.AAAA - else: - raise socket.gaierror(socket.EAI_FAMILY, - 'Address family not supported') - - if _proxy is None: - _proxy = resolver - try: - try: - return _proxy.query(name, rdtype, raise_on_no_answer=raises, - use_network=use_network) - except dns.resolver.NXDOMAIN: - if not raises: - return HostsAnswer(dns.name.Name(name), - rdtype, dns.rdataclass.IN, None, False) - raise - except dns.exception.Timeout: - raise EAI_EAGAIN_ERROR - except dns.exception.DNSException: - raise EAI_NODATA_ERROR - - -def resolve_cname(host): - """Return the canonical name of a hostname""" - try: - ans = resolver.query(host, dns.rdatatype.CNAME) - except dns.resolver.NoAnswer: - return host - except dns.exception.Timeout: - raise EAI_EAGAIN_ERROR - except dns.exception.DNSException: - raise EAI_NODATA_ERROR - else: - return str(ans[0].target) - - -def getaliases(host): - """Return a list of for aliases for the given hostname - - This method does translate the dnspython exceptions into - socket.gaierror exceptions. If no aliases are available an empty - list will be returned. - """ - try: - return resolver.getaliases(host) - except dns.exception.Timeout: - raise EAI_EAGAIN_ERROR - except dns.exception.DNSException: - raise EAI_NODATA_ERROR - - -def _getaddrinfo_lookup(host, family, flags): - """Resolve a hostname to a list of addresses - - Helper function for getaddrinfo. - """ - if flags & socket.AI_NUMERICHOST: - raise EAI_NONAME_ERROR - addrs = [] - if family == socket.AF_UNSPEC: - err = None - for use_network in [False, True]: - for qfamily in [socket.AF_INET6, socket.AF_INET]: - try: - answer = resolve(host, qfamily, False, use_network=use_network) - except socket.gaierror as e: - if e.errno not in (socket.EAI_AGAIN, EAI_NONAME_ERROR.errno, EAI_NODATA_ERROR.errno): - raise - err = e - else: - if answer.rrset: - addrs.extend(rr.address for rr in answer.rrset) - if addrs: - break - if err is not None and not addrs: - raise err - elif family == socket.AF_INET6 and flags & socket.AI_V4MAPPED: - answer = resolve(host, socket.AF_INET6, False) - if answer.rrset: - addrs = [rr.address for rr in answer.rrset] - if not addrs or flags & socket.AI_ALL: - answer = resolve(host, socket.AF_INET, False) - if answer.rrset: - addrs = ['::ffff:' + rr.address for rr in answer.rrset] - else: - answer = resolve(host, family, False) - if answer.rrset: - addrs = [rr.address for rr in answer.rrset] - return str(answer.qname), addrs - - -def getaddrinfo(host, port, family=0, socktype=0, proto=0, flags=0): - """Replacement for Python's socket.getaddrinfo - - This does the A and AAAA lookups asynchronously after which it - calls the OS' getaddrinfo(3) using the AI_NUMERICHOST flag. This - flag ensures getaddrinfo(3) does not use the network itself and - allows us to respect all the other arguments like the native OS. 
- """ - if isinstance(host, six.string_types): - host = host.encode('idna').decode('ascii') - if host is not None and not is_ip_addr(host): - qname, addrs = _getaddrinfo_lookup(host, family, flags) - else: - qname = host - addrs = [host] - aiflags = (flags | socket.AI_NUMERICHOST) & (0xffff ^ socket.AI_CANONNAME) - res = [] - err = None - for addr in addrs: - try: - ai = socket.getaddrinfo(addr, port, family, - socktype, proto, aiflags) - except socket.error as e: - if flags & socket.AI_ADDRCONFIG: - err = e - continue - raise - res.extend(ai) - if not res: - if err: - raise err - raise socket.gaierror(socket.EAI_NONAME, 'No address found') - if flags & socket.AI_CANONNAME: - if not is_ip_addr(qname): - qname = resolve_cname(qname).encode('ascii').decode('idna') - ai = res[0] - res[0] = (ai[0], ai[1], ai[2], qname, ai[4]) - return res - - -def gethostbyname(hostname): - """Replacement for Python's socket.gethostbyname""" - if is_ipv4_addr(hostname): - return hostname - rrset = resolve(hostname) - return rrset[0].address - - -def gethostbyname_ex(hostname): - """Replacement for Python's socket.gethostbyname_ex""" - if is_ipv4_addr(hostname): - return (hostname, [], [hostname]) - ans = resolve(hostname) - aliases = getaliases(hostname) - addrs = [rr.address for rr in ans.rrset] - qname = str(ans.qname) - if qname[-1] == '.': - qname = qname[:-1] - return (qname, aliases, addrs) - - -def getnameinfo(sockaddr, flags): - """Replacement for Python's socket.getnameinfo. - - Currently only supports IPv4. - """ - try: - host, port = sockaddr - except (ValueError, TypeError): - if not isinstance(sockaddr, tuple): - del sockaddr # to pass a stdlib test that is - # hyper-careful about reference counts - raise TypeError('getnameinfo() argument 1 must be a tuple') - else: - # must be ipv6 sockaddr, pretending we don't know how to resolve it - raise EAI_NONAME_ERROR - - if (flags & socket.NI_NAMEREQD) and (flags & socket.NI_NUMERICHOST): - # Conflicting flags. Punt. - raise EAI_NONAME_ERROR - - if is_ipv4_addr(host): - try: - rrset = resolver.query( - dns.reversename.from_address(host), dns.rdatatype.PTR) - if len(rrset) > 1: - raise socket.error('sockaddr resolved to multiple addresses') - host = rrset[0].target.to_text(omit_final_dot=True) - except dns.exception.Timeout: - if flags & socket.NI_NAMEREQD: - raise EAI_EAGAIN_ERROR - except dns.exception.DNSException: - if flags & socket.NI_NAMEREQD: - raise EAI_NONAME_ERROR - else: - try: - rrset = resolver.query(host) - if len(rrset) > 1: - raise socket.error('sockaddr resolved to multiple addresses') - if flags & socket.NI_NUMERICHOST: - host = rrset[0].address - except dns.exception.Timeout: - raise EAI_EAGAIN_ERROR - except dns.exception.DNSException: - raise socket.gaierror( - (socket.EAI_NODATA, 'No address associated with hostname')) - - if not (flags & socket.NI_NUMERICSERV): - proto = (flags & socket.NI_DGRAM) and 'udp' or 'tcp' - port = socket.getservbyport(port, proto) - - return (host, port) - - -def _net_read(sock, count, expiration): - """coro friendly replacement for dns.query._net_read - Read the specified number of bytes from sock. Keep trying until we - either get the desired amount, or we hit EOF. - A Timeout exception will be raised if the operation is not completed - by the expiration time. - """ - s = bytearray() - while count > 0: - try: - n = sock.recv(count) - except socket.timeout: - # Q: Do we also need to catch coro.CoroutineSocketWake and pass? 
- if expiration - time.time() <= 0.0: - raise dns.exception.Timeout - eventlet.sleep(0.01) - continue - if n == b'': - raise EOFError - count = count - len(n) - s += n - return s - - -def _net_write(sock, data, expiration): - """coro friendly replacement for dns.query._net_write - Write the specified data to the socket. - A Timeout exception will be raised if the operation is not completed - by the expiration time. - """ - current = 0 - l = len(data) - while current < l: - try: - current += sock.send(data[current:]) - except socket.timeout: - # Q: Do we also need to catch coro.CoroutineSocketWake and pass? - if expiration - time.time() <= 0.0: - raise dns.exception.Timeout - - -def udp(q, where, timeout=DNS_QUERY_TIMEOUT, port=53, - af=None, source=None, source_port=0, ignore_unexpected=False): - """coro friendly replacement for dns.query.udp - Return the response obtained after sending a query via UDP. - - @param q: the query - @type q: dns.message.Message - @param where: where to send the message - @type where: string containing an IPv4 or IPv6 address - @param timeout: The number of seconds to wait before the query times out. - If None, the default, wait forever. - @type timeout: float - @param port: The port to which to send the message. The default is 53. - @type port: int - @param af: the address family to use. The default is None, which - causes the address family to use to be inferred from the form of of where. - If the inference attempt fails, AF_INET is used. - @type af: int - @rtype: dns.message.Message object - @param source: source address. The default is the IPv4 wildcard address. - @type source: string - @param source_port: The port from which to send the message. - The default is 0. - @type source_port: int - @param ignore_unexpected: If True, ignore responses from unexpected - sources. The default is False. - @type ignore_unexpected: bool""" - - wire = q.to_wire() - if af is None: - try: - af = dns.inet.af_for_address(where) - except: - af = dns.inet.AF_INET - if af == dns.inet.AF_INET: - destination = (where, port) - if source is not None: - source = (source, source_port) - elif af == dns.inet.AF_INET6: - # Purge any stray zeroes in source address. When doing the tuple comparison - # below, we need to always ensure both our target and where we receive replies - # from are compared with all zeroes removed so that we don't erroneously fail. - # e.g. ('00::1', 53, 0, 0) != ('::1', 53, 0, 0) - where_trunc = dns.ipv6.inet_ntoa(dns.ipv6.inet_aton(where)) - destination = (where_trunc, port, 0, 0) - if source is not None: - source = (source, source_port, 0, 0) - - s = socket.socket(af, socket.SOCK_DGRAM) - s.settimeout(timeout) - try: - expiration = dns.query._compute_expiration(timeout) - if source is not None: - s.bind(source) - while True: - try: - s.sendto(wire, destination) - break - except socket.timeout: - # Q: Do we also need to catch coro.CoroutineSocketWake and pass? - if expiration - time.time() <= 0.0: - raise dns.exception.Timeout - eventlet.sleep(0.01) - continue - - tried = False - while True: - # If we've tried to receive at least once, check to see if our - # timer expired - if tried and (expiration - time.time() <= 0.0): - raise dns.exception.Timeout - # Sleep if we are retrying the operation due to a bad source - # address or a socket timeout. - if tried: - eventlet.sleep(0.01) - tried = True - - try: - (wire, from_address) = s.recvfrom(65535) - except socket.timeout: - # Q: Do we also need to catch coro.CoroutineSocketWake and pass? 
- continue - if dns.inet.af_for_address(from_address[0]) == dns.inet.AF_INET6: - # Purge all possible zeroes for ipv6 to match above logic - addr = from_address[0] - addr = dns.ipv6.inet_ntoa(dns.ipv6.inet_aton(addr)) - from_address = (addr, from_address[1], from_address[2], from_address[3]) - if from_address == destination: - break - if not ignore_unexpected: - raise dns.query.UnexpectedSource( - 'got a response from %s instead of %s' - % (from_address, destination)) - finally: - s.close() - - r = dns.message.from_wire(wire, keyring=q.keyring, request_mac=q.mac) - if not q.is_response(r): - raise dns.query.BadResponse() - return r - - -def tcp(q, where, timeout=DNS_QUERY_TIMEOUT, port=53, - af=None, source=None, source_port=0): - """coro friendly replacement for dns.query.tcp - Return the response obtained after sending a query via TCP. - - @param q: the query - @type q: dns.message.Message object - @param where: where to send the message - @type where: string containing an IPv4 or IPv6 address - @param timeout: The number of seconds to wait before the query times out. - If None, the default, wait forever. - @type timeout: float - @param port: The port to which to send the message. The default is 53. - @type port: int - @param af: the address family to use. The default is None, which - causes the address family to use to be inferred from the form of of where. - If the inference attempt fails, AF_INET is used. - @type af: int - @rtype: dns.message.Message object - @param source: source address. The default is the IPv4 wildcard address. - @type source: string - @param source_port: The port from which to send the message. - The default is 0. - @type source_port: int""" - - wire = q.to_wire() - if af is None: - try: - af = dns.inet.af_for_address(where) - except: - af = dns.inet.AF_INET - if af == dns.inet.AF_INET: - destination = (where, port) - if source is not None: - source = (source, source_port) - elif af == dns.inet.AF_INET6: - destination = (where, port, 0, 0) - if source is not None: - source = (source, source_port, 0, 0) - s = socket.socket(af, socket.SOCK_STREAM) - s.settimeout(timeout) - try: - expiration = dns.query._compute_expiration(timeout) - if source is not None: - s.bind(source) - while True: - try: - s.connect(destination) - break - except socket.timeout: - # Q: Do we also need to catch coro.CoroutineSocketWake and pass? - if expiration - time.time() <= 0.0: - raise dns.exception.Timeout - eventlet.sleep(0.01) - continue - - l = len(wire) - # copying the wire into tcpmsg is inefficient, but lets us - # avoid writev() or doing a short write that would get pushed - # onto the net - tcpmsg = struct.pack("!H", l) + wire - _net_write(s, tcpmsg, expiration) - ldata = _net_read(s, 2, expiration) - (l,) = struct.unpack("!H", ldata) - wire = bytes(_net_read(s, l, expiration)) - finally: - s.close() - r = dns.message.from_wire(wire, keyring=q.keyring, request_mac=q.mac) - if not q.is_response(r): - raise dns.query.BadResponse() - return r - - -def reset(): - resolver.clear() - -# Install our coro-friendly replacements for the tcp and udp query methods. 
-dns.query.tcp = tcp -dns.query.udp = udp diff --git a/venv/lib/python3.6/site-packages/eventlet/support/greenlets.py b/venv/lib/python3.6/site-packages/eventlet/support/greenlets.py deleted file mode 100644 index d4e1793..0000000 --- a/venv/lib/python3.6/site-packages/eventlet/support/greenlets.py +++ /dev/null @@ -1,8 +0,0 @@ -import distutils.version - -import greenlet -getcurrent = greenlet.greenlet.getcurrent -GreenletExit = greenlet.greenlet.GreenletExit -preserves_excinfo = (distutils.version.LooseVersion(greenlet.__version__) - >= distutils.version.LooseVersion('0.3.2')) -greenlet = greenlet.greenlet diff --git a/venv/lib/python3.6/site-packages/eventlet/support/psycopg2_patcher.py b/venv/lib/python3.6/site-packages/eventlet/support/psycopg2_patcher.py deleted file mode 100644 index 2f4034a..0000000 --- a/venv/lib/python3.6/site-packages/eventlet/support/psycopg2_patcher.py +++ /dev/null @@ -1,55 +0,0 @@ -"""A wait callback to allow psycopg2 cooperation with eventlet. - -Use `make_psycopg_green()` to enable eventlet support in Psycopg. -""" - -# Copyright (C) 2010 Daniele Varrazzo -# and licensed under the MIT license: -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the "Software"), to deal -# in the Software without restriction, including without limitation the rights -# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -# copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in -# all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -# THE SOFTWARE. 
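# --- Editor's illustrative sketch; not part of the deleted psycopg2_patcher.py
# --- whose code follows. The DSN and ordering here are assumptions, only
# --- make_psycopg_green() and the psycopg2 calls come from the module itself.
# The module below registers a psycopg2 wait callback so database I/O yields
# to the eventlet hub instead of blocking the whole process. A minimal,
# hypothetical usage could look like:

from eventlet.support.psycopg2_patcher import make_psycopg_green

make_psycopg_green()                        # install the wait callback first
import psycopg2                             # later connections now cooperate
conn = psycopg2.connect("dbname=example")   # hypothetical DSN
cur = conn.cursor()
cur.execute("SELECT 1")                     # other greenthreads keep running
print(cur.fetchone())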
- -import psycopg2 -from psycopg2 import extensions - -import eventlet.hubs - - -def make_psycopg_green(): - """Configure Psycopg to be used with eventlet in non-blocking way.""" - if not hasattr(extensions, 'set_wait_callback'): - raise ImportError( - "support for coroutines not available in this Psycopg version (%s)" - % psycopg2.__version__) - - extensions.set_wait_callback(eventlet_wait_callback) - - -def eventlet_wait_callback(conn, timeout=-1): - """A wait callback useful to allow eventlet to work with Psycopg.""" - while 1: - state = conn.poll() - if state == extensions.POLL_OK: - break - elif state == extensions.POLL_READ: - eventlet.hubs.trampoline(conn.fileno(), read=True) - elif state == extensions.POLL_WRITE: - eventlet.hubs.trampoline(conn.fileno(), write=True) - else: - raise psycopg2.OperationalError( - "Bad result from poll: %r" % state) diff --git a/venv/lib/python3.6/site-packages/eventlet/support/pylib.py b/venv/lib/python3.6/site-packages/eventlet/support/pylib.py deleted file mode 100644 index fdb0682..0000000 --- a/venv/lib/python3.6/site-packages/eventlet/support/pylib.py +++ /dev/null @@ -1,12 +0,0 @@ -from py.magic import greenlet - -import sys -import types - - -def emulate(): - module = types.ModuleType('greenlet') - sys.modules['greenlet'] = module - module.greenlet = greenlet - module.getcurrent = greenlet.getcurrent - module.GreenletExit = greenlet.GreenletExit diff --git a/venv/lib/python3.6/site-packages/eventlet/support/stacklesspypys.py b/venv/lib/python3.6/site-packages/eventlet/support/stacklesspypys.py deleted file mode 100644 index fe3638a..0000000 --- a/venv/lib/python3.6/site-packages/eventlet/support/stacklesspypys.py +++ /dev/null @@ -1,12 +0,0 @@ -from stackless import greenlet - -import sys -import types - - -def emulate(): - module = types.ModuleType('greenlet') - sys.modules['greenlet'] = module - module.greenlet = greenlet - module.getcurrent = greenlet.getcurrent - module.GreenletExit = greenlet.GreenletExit diff --git a/venv/lib/python3.6/site-packages/eventlet/support/stacklesss.py b/venv/lib/python3.6/site-packages/eventlet/support/stacklesss.py deleted file mode 100644 index 4d19c5b..0000000 --- a/venv/lib/python3.6/site-packages/eventlet/support/stacklesss.py +++ /dev/null @@ -1,84 +0,0 @@ -""" -Support for using stackless python. Broken and riddled with print statements -at the moment. Please fix it! 
-""" - -import sys -import types - -import stackless - -caller = None -coro_args = {} -tasklet_to_greenlet = {} - - -def getcurrent(): - return tasklet_to_greenlet[stackless.getcurrent()] - - -class FirstSwitch(object): - def __init__(self, gr): - self.gr = gr - - def __call__(self, *args, **kw): - # print("first call", args, kw) - gr = self.gr - del gr.switch - run, gr.run = gr.run, None - t = stackless.tasklet(run) - gr.t = t - tasklet_to_greenlet[t] = gr - t.setup(*args, **kw) - t.run() - - -class greenlet(object): - def __init__(self, run=None, parent=None): - self.dead = False - if parent is None: - parent = getcurrent() - - self.parent = parent - if run is not None: - self.run = run - - self.switch = FirstSwitch(self) - - def switch(self, *args): - # print("switch", args) - global caller - caller = stackless.getcurrent() - coro_args[self] = args - self.t.insert() - stackless.schedule() - if caller is not self.t: - caller.remove() - rval = coro_args[self] - return rval - - def run(self): - pass - - def __bool__(self): - return self.run is None and not self.dead - - -class GreenletExit(Exception): - pass - - -def emulate(): - module = types.ModuleType('greenlet') - sys.modules['greenlet'] = module - module.greenlet = greenlet - module.getcurrent = getcurrent - module.GreenletExit = GreenletExit - - caller = stackless.getcurrent() - tasklet_to_greenlet[caller] = None - main_coro = greenlet() - tasklet_to_greenlet[caller] = main_coro - main_coro.t = caller - del main_coro.switch # It's already running - coro_args[main_coro] = None diff --git a/venv/lib/python3.6/site-packages/eventlet/timeout.py b/venv/lib/python3.6/site-packages/eventlet/timeout.py deleted file mode 100644 index 6e1e08f..0000000 --- a/venv/lib/python3.6/site-packages/eventlet/timeout.py +++ /dev/null @@ -1,179 +0,0 @@ -# Copyright (c) 2009-2010 Denis Bilenko, denis.bilenko at gmail com -# Copyright (c) 2010 Eventlet Contributors (see AUTHORS) -# and licensed under the MIT license: -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the "Software"), to deal -# in the Software without restriction, including without limitation the rights -# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -# copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in -# all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -# THE SOFTWARE. - -import functools -import inspect - -import eventlet -from eventlet.support import greenlets as greenlet -from eventlet.hubs import get_hub - -__all__ = ['Timeout', 'with_timeout', 'wrap_is_timeout', 'is_timeout'] - -_MISSING = object() - -# deriving from BaseException so that "except Exception as e" doesn't catch -# Timeout exceptions. - - -class Timeout(BaseException): - """Raises *exception* in the current greenthread after *timeout* seconds. 
- - When *exception* is omitted or ``None``, the :class:`Timeout` instance - itself is raised. If *seconds* is None, the timer is not scheduled, and is - only useful if you're planning to raise it directly. - - Timeout objects are context managers, and so can be used in with statements. - When used in a with statement, if *exception* is ``False``, the timeout is - still raised, but the context manager suppresses it, so the code outside the - with-block won't see it. - """ - - def __init__(self, seconds=None, exception=None): - self.seconds = seconds - self.exception = exception - self.timer = None - self.start() - - def start(self): - """Schedule the timeout. This is called on construction, so - it should not be called explicitly, unless the timer has been - canceled.""" - assert not self.pending, \ - '%r is already started; to restart it, cancel it first' % self - if self.seconds is None: # "fake" timeout (never expires) - self.timer = None - elif self.exception is None or isinstance(self.exception, bool): # timeout that raises self - self.timer = get_hub().schedule_call_global( - self.seconds, greenlet.getcurrent().throw, self) - else: # regular timeout with user-provided exception - self.timer = get_hub().schedule_call_global( - self.seconds, greenlet.getcurrent().throw, self.exception) - return self - - @property - def pending(self): - """True if the timeout is scheduled to be raised.""" - if self.timer is not None: - return self.timer.pending - else: - return False - - def cancel(self): - """If the timeout is pending, cancel it. If not using - Timeouts in ``with`` statements, always call cancel() in a - ``finally`` after the block of code that is getting timed out. - If not canceled, the timeout will be raised later on, in some - unexpected section of the application.""" - if self.timer is not None: - self.timer.cancel() - self.timer = None - - def __repr__(self): - classname = self.__class__.__name__ - if self.pending: - pending = ' pending' - else: - pending = '' - if self.exception is None: - exception = '' - else: - exception = ' exception=%r' % self.exception - return '<%s at %s seconds=%s%s%s>' % ( - classname, hex(id(self)), self.seconds, exception, pending) - - def __str__(self): - """ - >>> raise Timeout # doctest: +IGNORE_EXCEPTION_DETAIL - Traceback (most recent call last): - ... - Timeout - """ - if self.seconds is None: - return '' - if self.seconds == 1: - suffix = '' - else: - suffix = 's' - if self.exception is None or self.exception is True: - return '%s second%s' % (self.seconds, suffix) - elif self.exception is False: - return '%s second%s (silent)' % (self.seconds, suffix) - else: - return '%s second%s (%s)' % (self.seconds, suffix, self.exception) - - def __enter__(self): - if self.timer is None: - self.start() - return self - - def __exit__(self, typ, value, tb): - self.cancel() - if value is self and self.exception is False: - return True - - @property - def is_timeout(self): - return True - - -def with_timeout(seconds, function, *args, **kwds): - """Wrap a call to some (yielding) function with a timeout; if the called - function fails to return before the timeout, cancel it and return a flag - value. 
- """ - timeout_value = kwds.pop("timeout_value", _MISSING) - timeout = Timeout(seconds) - try: - try: - return function(*args, **kwds) - except Timeout as ex: - if ex is timeout and timeout_value is not _MISSING: - return timeout_value - raise - finally: - timeout.cancel() - - -def wrap_is_timeout(base): - '''Adds `.is_timeout=True` attribute to objects returned by `base()`. - - When `base` is class, attribute is added as read-only property. Returns `base`. - Otherwise, it returns a function that sets attribute on result of `base()` call. - - Wrappers make best effort to be transparent. - ''' - if inspect.isclass(base): - base.is_timeout = property(lambda _: True) - return base - - @functools.wraps(base) - def fun(*args, **kwargs): - ex = base(*args, **kwargs) - ex.is_timeout = True - return ex - return fun - - -def is_timeout(obj): - py3err = getattr(__builtins__, 'TimeoutError', Timeout) - return bool(getattr(obj, 'is_timeout', False)) or isinstance(obj, py3err) diff --git a/venv/lib/python3.6/site-packages/eventlet/tpool.py b/venv/lib/python3.6/site-packages/eventlet/tpool.py deleted file mode 100644 index 60b18e8..0000000 --- a/venv/lib/python3.6/site-packages/eventlet/tpool.py +++ /dev/null @@ -1,340 +0,0 @@ -# Copyright (c) 2007-2009, Linden Research, Inc. -# Copyright (c) 2007, IBM Corp. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
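# --- Editor's illustrative sketch; not part of the deleted tpool.py whose
# --- code follows. slow_read() and the file path are assumptions; only
# --- tpool.execute() comes from the module itself.
# tpool runs blocking callables in native OS threads while the calling
# greenthread waits cooperatively, which is useful for code that cannot be
# monkeypatched. A minimal, hypothetical example:

from eventlet import tpool

def slow_read(path):
    # ordinary blocking file I/O, safe to push into the native thread pool
    with open(path, "rb") as f:
        return f.read()

# Blocks only this greenthread; the hub keeps servicing the others.
data = tpool.execute(slow_read, "/etc/hostname")   # path is an assumption
print(len(data))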
- -import atexit -import imp -import os -import sys -import traceback - -import eventlet -from eventlet import event, greenio, greenthread, patcher, timeout -import six - -__all__ = ['execute', 'Proxy', 'killall', 'set_num_threads'] - - -EXC_CLASSES = (Exception, timeout.Timeout) -SYS_EXCS = (GeneratorExit, KeyboardInterrupt, SystemExit) - -QUIET = True - -socket = patcher.original('socket') -threading = patcher.original('threading') -if six.PY2: - Queue_module = patcher.original('Queue') -if six.PY3: - Queue_module = patcher.original('queue') - -Empty = Queue_module.Empty -Queue = Queue_module.Queue - -_bytetosend = b' ' -_coro = None -_nthreads = int(os.environ.get('EVENTLET_THREADPOOL_SIZE', 20)) -_reqq = _rspq = None -_rsock = _wsock = None -_setup_already = False -_threads = [] - - -def tpool_trampoline(): - global _rspq - while True: - try: - _c = _rsock.recv(1) - assert _c - # FIXME: this is probably redundant since using sockets instead of pipe now - except ValueError: - break # will be raised when pipe is closed - while not _rspq.empty(): - try: - (e, rv) = _rspq.get(block=False) - e.send(rv) - e = rv = None - except Empty: - pass - - -def tworker(): - global _rspq - while True: - try: - msg = _reqq.get() - except AttributeError: - return # can't get anything off of a dud queue - if msg is None: - return - (e, meth, args, kwargs) = msg - rv = None - try: - rv = meth(*args, **kwargs) - except SYS_EXCS: - raise - except EXC_CLASSES: - rv = sys.exc_info() - if sys.version_info >= (3, 4): - traceback.clear_frames(rv[1].__traceback__) - if six.PY2: - sys.exc_clear() - # test_leakage_from_tracebacks verifies that the use of - # exc_info does not lead to memory leaks - _rspq.put((e, rv)) - msg = meth = args = kwargs = e = rv = None - _wsock.sendall(_bytetosend) - - -def execute(meth, *args, **kwargs): - """ - Execute *meth* in a Python thread, blocking the current coroutine/ - greenthread until the method completes. - - The primary use case for this is to wrap an object or module that is not - amenable to monkeypatching or any of the other tricks that Eventlet uses - to achieve cooperative yielding. With tpool, you can force such objects to - cooperate with green threads by sticking them in native threads, at the cost - of some overhead. - """ - setup() - # if already in tpool, don't recurse into the tpool - # also, call functions directly if we're inside an import lock, because - # if meth does any importing (sadly common), it will hang - my_thread = threading.currentThread() - if my_thread in _threads or imp.lock_held() or _nthreads == 0: - return meth(*args, **kwargs) - - e = event.Event() - _reqq.put((e, meth, args, kwargs)) - - rv = e.wait() - if isinstance(rv, tuple) \ - and len(rv) == 3 \ - and isinstance(rv[1], EXC_CLASSES): - (c, e, tb) = rv - if not QUIET: - traceback.print_exception(c, e, tb) - traceback.print_stack() - six.reraise(c, e, tb) - return rv - - -def proxy_call(autowrap, f, *args, **kwargs): - """ - Call a function *f* and returns the value. If the type of the return value - is in the *autowrap* collection, then it is wrapped in a :class:`Proxy` - object before return. - - Normally *f* will be called in the threadpool with :func:`execute`; if the - keyword argument "nonblocking" is set to ``True``, it will simply be - executed directly. This is useful if you have an object which has methods - that don't need to be called in a separate thread, but which return objects - that should be Proxy wrapped. 
- """ - if kwargs.pop('nonblocking', False): - rv = f(*args, **kwargs) - else: - rv = execute(f, *args, **kwargs) - if isinstance(rv, autowrap): - return Proxy(rv, autowrap) - else: - return rv - - -class Proxy(object): - """ - a simple proxy-wrapper of any object that comes with a - methods-only interface, in order to forward every method - invocation onto a thread in the native-thread pool. A key - restriction is that the object's methods should not switch - greenlets or use Eventlet primitives, since they are in a - different thread from the main hub, and therefore might behave - unexpectedly. This is for running native-threaded code - only. - - It's common to want to have some of the attributes or return - values also wrapped in Proxy objects (for example, database - connection objects produce cursor objects which also should be - wrapped in Proxy objects to remain nonblocking). *autowrap*, if - supplied, is a collection of types; if an attribute or return - value matches one of those types (via isinstance), it will be - wrapped in a Proxy. *autowrap_names* is a collection - of strings, which represent the names of attributes that should be - wrapped in Proxy objects when accessed. - """ - - def __init__(self, obj, autowrap=(), autowrap_names=()): - self._obj = obj - self._autowrap = autowrap - self._autowrap_names = autowrap_names - - def __getattr__(self, attr_name): - f = getattr(self._obj, attr_name) - if not hasattr(f, '__call__'): - if isinstance(f, self._autowrap) or attr_name in self._autowrap_names: - return Proxy(f, self._autowrap) - return f - - def doit(*args, **kwargs): - result = proxy_call(self._autowrap, f, *args, **kwargs) - if attr_name in self._autowrap_names and not isinstance(result, Proxy): - return Proxy(result) - return result - return doit - - # the following are a buncha methods that the python interpeter - # doesn't use getattr to retrieve and therefore have to be defined - # explicitly - def __getitem__(self, key): - return proxy_call(self._autowrap, self._obj.__getitem__, key) - - def __setitem__(self, key, value): - return proxy_call(self._autowrap, self._obj.__setitem__, key, value) - - def __deepcopy__(self, memo=None): - return proxy_call(self._autowrap, self._obj.__deepcopy__, memo) - - def __copy__(self, memo=None): - return proxy_call(self._autowrap, self._obj.__copy__, memo) - - def __call__(self, *a, **kw): - if '__call__' in self._autowrap_names: - return Proxy(proxy_call(self._autowrap, self._obj, *a, **kw)) - else: - return proxy_call(self._autowrap, self._obj, *a, **kw) - - def __enter__(self): - return proxy_call(self._autowrap, self._obj.__enter__) - - def __exit__(self, *exc): - return proxy_call(self._autowrap, self._obj.__exit__, *exc) - - # these don't go through a proxy call, because they're likely to - # be called often, and are unlikely to be implemented on the - # wrapped object in such a way that they would block - def __eq__(self, rhs): - return self._obj == rhs - - def __hash__(self): - return self._obj.__hash__() - - def __repr__(self): - return self._obj.__repr__() - - def __str__(self): - return self._obj.__str__() - - def __len__(self): - return len(self._obj) - - def __nonzero__(self): - return bool(self._obj) - # Python3 - __bool__ = __nonzero__ - - def __iter__(self): - it = iter(self._obj) - if it == self._obj: - return self - else: - return Proxy(it) - - def next(self): - return proxy_call(self._autowrap, next, self._obj) - # Python3 - __next__ = next - - -def setup(): - global _rsock, _wsock, _coro, _setup_already, _rspq, 
_reqq - if _setup_already: - return - else: - _setup_already = True - - assert _nthreads >= 0, "Can't specify negative number of threads" - if _nthreads == 0: - import warnings - warnings.warn("Zero threads in tpool. All tpool.execute calls will\ - execute in main thread. Check the value of the environment \ - variable EVENTLET_THREADPOOL_SIZE.", RuntimeWarning) - _reqq = Queue(maxsize=-1) - _rspq = Queue(maxsize=-1) - - # connected socket pair - sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - sock.bind(('127.0.0.1', 0)) - sock.listen(1) - csock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - csock.connect(sock.getsockname()) - csock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, True) - _wsock, _addr = sock.accept() - _wsock.settimeout(None) - _wsock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, True) - sock.close() - _rsock = greenio.GreenSocket(csock) - _rsock.settimeout(None) - - for i in six.moves.range(_nthreads): - t = threading.Thread(target=tworker, - name="tpool_thread_%s" % i) - t.setDaemon(True) - t.start() - _threads.append(t) - - _coro = greenthread.spawn_n(tpool_trampoline) - # This yield fixes subtle error with GreenSocket.__del__ - eventlet.sleep(0) - - -# Avoid ResourceWarning unclosed socket on Python3.2+ -@atexit.register -def killall(): - global _setup_already, _rspq, _rsock, _wsock - if not _setup_already: - return - - # This yield fixes freeze in some scenarios - eventlet.sleep(0) - - for thr in _threads: - _reqq.put(None) - for thr in _threads: - thr.join() - del _threads[:] - - # return any remaining results - while (_rspq is not None) and not _rspq.empty(): - try: - (e, rv) = _rspq.get(block=False) - e.send(rv) - e = rv = None - except Empty: - pass - - if _coro is not None: - greenthread.kill(_coro) - if _rsock is not None: - _rsock.close() - _rsock = None - if _wsock is not None: - _wsock.close() - _wsock = None - _rspq = None - _setup_already = False - - -def set_num_threads(nthreads): - global _nthreads - _nthreads = nthreads diff --git a/venv/lib/python3.6/site-packages/eventlet/websocket.py b/venv/lib/python3.6/site-packages/eventlet/websocket.py deleted file mode 100644 index 17efed8..0000000 --- a/venv/lib/python3.6/site-packages/eventlet/websocket.py +++ /dev/null @@ -1,831 +0,0 @@ -import base64 -import codecs -import collections -import errno -from random import Random -from socket import error as SocketError -import string -import struct -import sys -import time - -import zlib - -try: - from hashlib import md5, sha1 -except ImportError: # pragma NO COVER - from md5 import md5 - from sha import sha as sha1 - -from eventlet import semaphore -from eventlet import wsgi -from eventlet.green import socket -from eventlet.support import get_errno -import six - -# Python 2's utf8 decoding is more lenient than we'd like -# In order to pass autobahn's testsuite we need stricter validation -# if available... -for _mod in ('wsaccel.utf8validator', 'autobahn.utf8validator'): - # autobahn has it's own python-based validator. in newest versions - # this prefers to use wsaccel, a cython based implementation, if available. - # wsaccel may also be installed w/out autobahn, or with a earlier version. 
- try: - utf8validator = __import__(_mod, {}, {}, ['']) - except ImportError: - utf8validator = None - else: - break - -ACCEPTABLE_CLIENT_ERRORS = set((errno.ECONNRESET, errno.EPIPE)) - -__all__ = ["WebSocketWSGI", "WebSocket"] -PROTOCOL_GUID = b'258EAFA5-E914-47DA-95CA-C5AB0DC85B11' -VALID_CLOSE_STATUS = set( - list(range(1000, 1004)) + - list(range(1007, 1012)) + - # 3000-3999: reserved for use by libraries, frameworks, - # and applications - list(range(3000, 4000)) + - # 4000-4999: reserved for private use and thus can't - # be registered - list(range(4000, 5000)) -) - - -class BadRequest(Exception): - def __init__(self, status='400 Bad Request', body=None, headers=None): - super(Exception, self).__init__() - self.status = status - self.body = body - self.headers = headers - - -class WebSocketWSGI(object): - """Wraps a websocket handler function in a WSGI application. - - Use it like this:: - - @websocket.WebSocketWSGI - def my_handler(ws): - from_browser = ws.wait() - ws.send("from server") - - The single argument to the function will be an instance of - :class:`WebSocket`. To close the socket, simply return from the - function. Note that the server will log the websocket request at - the time of closure. - """ - - def __init__(self, handler): - self.handler = handler - self.protocol_version = None - self.support_legacy_versions = True - self.supported_protocols = [] - self.origin_checker = None - - @classmethod - def configured(cls, - handler=None, - supported_protocols=None, - origin_checker=None, - support_legacy_versions=False): - def decorator(handler): - inst = cls(handler) - inst.support_legacy_versions = support_legacy_versions - inst.origin_checker = origin_checker - if supported_protocols: - inst.supported_protocols = supported_protocols - return inst - if handler is None: - return decorator - return decorator(handler) - - def __call__(self, environ, start_response): - http_connection_parts = [ - part.strip() - for part in environ.get('HTTP_CONNECTION', '').lower().split(',')] - if not ('upgrade' in http_connection_parts and - environ.get('HTTP_UPGRADE', '').lower() == 'websocket'): - # need to check a few more things here for true compliance - start_response('400 Bad Request', [('Connection', 'close')]) - return [] - - try: - if 'HTTP_SEC_WEBSOCKET_VERSION' in environ: - ws = self._handle_hybi_request(environ) - elif self.support_legacy_versions: - ws = self._handle_legacy_request(environ) - else: - raise BadRequest() - except BadRequest as e: - status = e.status - body = e.body or b'' - headers = e.headers or [] - start_response(status, - [('Connection', 'close'), ] + headers) - return [body] - - try: - self.handler(ws) - except socket.error as e: - if get_errno(e) not in ACCEPTABLE_CLIENT_ERRORS: - raise - # Make sure we send the closing frame - ws._send_closing_frame(True) - # use this undocumented feature of eventlet.wsgi to ensure that it - # doesn't barf on the fact that we didn't call start_response - return wsgi.ALREADY_HANDLED - - def _handle_legacy_request(self, environ): - if 'eventlet.input' in environ: - sock = environ['eventlet.input'].get_socket() - elif 'gunicorn.socket' in environ: - sock = environ['gunicorn.socket'] - else: - raise Exception('No eventlet.input or gunicorn.socket present in environ.') - - if 'HTTP_SEC_WEBSOCKET_KEY1' in environ: - self.protocol_version = 76 - if 'HTTP_SEC_WEBSOCKET_KEY2' not in environ: - raise BadRequest() - else: - self.protocol_version = 75 - - if self.protocol_version == 76: - key1 = 
self._extract_number(environ['HTTP_SEC_WEBSOCKET_KEY1']) - key2 = self._extract_number(environ['HTTP_SEC_WEBSOCKET_KEY2']) - # There's no content-length header in the request, but it has 8 - # bytes of data. - environ['wsgi.input'].content_length = 8 - key3 = environ['wsgi.input'].read(8) - key = struct.pack(">II", key1, key2) + key3 - response = md5(key).digest() - - # Start building the response - scheme = 'ws' - if environ.get('wsgi.url_scheme') == 'https': - scheme = 'wss' - location = '%s://%s%s%s' % ( - scheme, - environ.get('HTTP_HOST'), - environ.get('SCRIPT_NAME'), - environ.get('PATH_INFO') - ) - qs = environ.get('QUERY_STRING') - if qs is not None: - location += '?' + qs - if self.protocol_version == 75: - handshake_reply = ( - b"HTTP/1.1 101 Web Socket Protocol Handshake\r\n" - b"Upgrade: WebSocket\r\n" - b"Connection: Upgrade\r\n" - b"WebSocket-Origin: " + six.b(environ.get('HTTP_ORIGIN')) + b"\r\n" - b"WebSocket-Location: " + six.b(location) + b"\r\n\r\n" - ) - elif self.protocol_version == 76: - handshake_reply = ( - b"HTTP/1.1 101 WebSocket Protocol Handshake\r\n" - b"Upgrade: WebSocket\r\n" - b"Connection: Upgrade\r\n" - b"Sec-WebSocket-Origin: " + six.b(environ.get('HTTP_ORIGIN')) + b"\r\n" - b"Sec-WebSocket-Protocol: " + - six.b(environ.get('HTTP_SEC_WEBSOCKET_PROTOCOL', 'default')) + b"\r\n" - b"Sec-WebSocket-Location: " + six.b(location) + b"\r\n" - b"\r\n" + response - ) - else: # pragma NO COVER - raise ValueError("Unknown WebSocket protocol version.") - sock.sendall(handshake_reply) - return WebSocket(sock, environ, self.protocol_version) - - def _parse_extension_header(self, header): - if header is None: - return None - res = {} - for ext in header.split(","): - parts = ext.split(";") - config = {} - for part in parts[1:]: - key_val = part.split("=") - if len(key_val) == 1: - config[key_val[0].strip().lower()] = True - else: - config[key_val[0].strip().lower()] = key_val[1].strip().strip('"').lower() - res.setdefault(parts[0].strip().lower(), []).append(config) - return res - - def _negotiate_permessage_deflate(self, extensions): - if not extensions: - return None - deflate = extensions.get("permessage-deflate") - if deflate is None: - return None - for config in deflate: - # We'll evaluate each config in the client's preferred order and pick - # the first that we can support. - want_config = { - # These are bool options, we can support both - "server_no_context_takeover": config.get("server_no_context_takeover", False), - "client_no_context_takeover": config.get("client_no_context_takeover", False) - } - # These are either bool OR int options. True means the client can accept a value - # for the option, a number means the client wants that specific value. 
- max_wbits = min(zlib.MAX_WBITS, 15) - mwb = config.get("server_max_window_bits") - if mwb is not None: - if mwb is True: - want_config["server_max_window_bits"] = max_wbits - else: - want_config["server_max_window_bits"] = \ - int(config.get("server_max_window_bits", max_wbits)) - if not (8 <= want_config["server_max_window_bits"] <= 15): - continue - mwb = config.get("client_max_window_bits") - if mwb is not None: - if mwb is True: - want_config["client_max_window_bits"] = max_wbits - else: - want_config["client_max_window_bits"] = \ - int(config.get("client_max_window_bits", max_wbits)) - if not (8 <= want_config["client_max_window_bits"] <= 15): - continue - return want_config - return None - - def _format_extension_header(self, parsed_extensions): - if not parsed_extensions: - return None - parts = [] - for name, config in parsed_extensions.items(): - ext_parts = [six.b(name)] - for key, value in config.items(): - if value is False: - pass - elif value is True: - ext_parts.append(six.b(key)) - else: - ext_parts.append(six.b("%s=%s" % (key, str(value)))) - parts.append(b"; ".join(ext_parts)) - return b", ".join(parts) - - def _handle_hybi_request(self, environ): - if 'eventlet.input' in environ: - sock = environ['eventlet.input'].get_socket() - elif 'gunicorn.socket' in environ: - sock = environ['gunicorn.socket'] - else: - raise Exception('No eventlet.input or gunicorn.socket present in environ.') - - hybi_version = environ['HTTP_SEC_WEBSOCKET_VERSION'] - if hybi_version not in ('8', '13', ): - raise BadRequest(status='426 Upgrade Required', - headers=[('Sec-WebSocket-Version', '8, 13')]) - self.protocol_version = int(hybi_version) - if 'HTTP_SEC_WEBSOCKET_KEY' not in environ: - # That's bad. - raise BadRequest() - origin = environ.get( - 'HTTP_ORIGIN', - (environ.get('HTTP_SEC_WEBSOCKET_ORIGIN', '') - if self.protocol_version <= 8 else '')) - if self.origin_checker is not None: - if not self.origin_checker(environ.get('HTTP_HOST'), origin): - raise BadRequest(status='403 Forbidden') - protocols = environ.get('HTTP_SEC_WEBSOCKET_PROTOCOL', None) - negotiated_protocol = None - if protocols: - for p in (i.strip() for i in protocols.split(',')): - if p in self.supported_protocols: - negotiated_protocol = p - break - - key = environ['HTTP_SEC_WEBSOCKET_KEY'] - response = base64.b64encode(sha1(six.b(key) + PROTOCOL_GUID).digest()) - handshake_reply = [b"HTTP/1.1 101 Switching Protocols", - b"Upgrade: websocket", - b"Connection: Upgrade", - b"Sec-WebSocket-Accept: " + response] - if negotiated_protocol: - handshake_reply.append(b"Sec-WebSocket-Protocol: " + six.b(negotiated_protocol)) - - parsed_extensions = {} - extensions = self._parse_extension_header(environ.get("HTTP_SEC_WEBSOCKET_EXTENSIONS")) - - deflate = self._negotiate_permessage_deflate(extensions) - if deflate is not None: - parsed_extensions["permessage-deflate"] = deflate - - formatted_ext = self._format_extension_header(parsed_extensions) - if formatted_ext is not None: - handshake_reply.append(b"Sec-WebSocket-Extensions: " + formatted_ext) - - sock.sendall(b'\r\n'.join(handshake_reply) + b'\r\n\r\n') - return RFC6455WebSocket(sock, environ, self.protocol_version, - protocol=negotiated_protocol, - extensions=parsed_extensions) - - def _extract_number(self, value): - """ - Utility function which, given a string like 'g98sd 5[]221@1', will - return 9852211. Used to parse the Sec-WebSocket-Key headers. 
- """ - out = "" - spaces = 0 - for char in value: - if char in string.digits: - out += char - elif char == " ": - spaces += 1 - return int(out) // spaces - - -class WebSocket(object): - """A websocket object that handles the details of - serialization/deserialization to the socket. - - The primary way to interact with a :class:`WebSocket` object is to - call :meth:`send` and :meth:`wait` in order to pass messages back - and forth with the browser. Also available are the following - properties: - - path - The path value of the request. This is the same as the WSGI PATH_INFO variable, - but more convenient. - protocol - The value of the Websocket-Protocol header. - origin - The value of the 'Origin' header. - environ - The full WSGI environment for this request. - - """ - - def __init__(self, sock, environ, version=76): - """ - :param socket: The eventlet socket - :type socket: :class:`eventlet.greenio.GreenSocket` - :param environ: The wsgi environment - :param version: The WebSocket spec version to follow (default is 76) - """ - self.log = environ.get('wsgi.errors', sys.stderr) - self.log_context = 'server={shost}/{spath} client={caddr}:{cport}'.format( - shost=environ.get('HTTP_HOST'), - spath=environ.get('SCRIPT_NAME', '') + environ.get('PATH_INFO', ''), - caddr=environ.get('REMOTE_ADDR'), cport=environ.get('REMOTE_PORT'), - ) - self.socket = sock - self.origin = environ.get('HTTP_ORIGIN') - self.protocol = environ.get('HTTP_WEBSOCKET_PROTOCOL') - self.path = environ.get('PATH_INFO') - self.environ = environ - self.version = version - self.websocket_closed = False - self._buf = b"" - self._msgs = collections.deque() - self._sendlock = semaphore.Semaphore() - - def _pack_message(self, message): - """Pack the message inside ``00`` and ``FF`` - - As per the dataframing section (5.3) for the websocket spec - """ - if isinstance(message, six.text_type): - message = message.encode('utf-8') - elif not isinstance(message, six.binary_type): - message = six.b(str(message)) - packed = b"\x00" + message + b"\xFF" - return packed - - def _parse_messages(self): - """ Parses for messages in the buffer *buf*. It is assumed that - the buffer contains the start character for a message, but that it - may contain only part of the rest of the message. - - Returns an array of messages, and the buffer remainder that - didn't contain any full messages.""" - msgs = [] - end_idx = 0 - buf = self._buf - while buf: - frame_type = six.indexbytes(buf, 0) - if frame_type == 0: - # Normal message. - end_idx = buf.find(b"\xFF") - if end_idx == -1: # pragma NO COVER - break - msgs.append(buf[1:end_idx].decode('utf-8', 'replace')) - buf = buf[end_idx + 1:] - elif frame_type == 255: - # Closing handshake. - assert six.indexbytes(buf, 1) == 0, "Unexpected closing handshake: %r" % buf - self.websocket_closed = True - break - else: - raise ValueError("Don't understand how to parse this type of message: %r" % buf) - self._buf = buf - return msgs - - def send(self, message): - """Send a message to the browser. - - *message* should be convertable to a string; unicode objects should be - encodable as utf-8. 
Raises socket.error with errno of 32 - (broken pipe) if the socket has already been closed by the client.""" - packed = self._pack_message(message) - # if two greenthreads are trying to send at the same time - # on the same socket, sendlock prevents interleaving and corruption - self._sendlock.acquire() - try: - self.socket.sendall(packed) - finally: - self._sendlock.release() - - def wait(self): - """Waits for and deserializes messages. - - Returns a single message; the oldest not yet processed. If the client - has already closed the connection, returns None. This is different - from normal socket behavior because the empty string is a valid - websocket message.""" - while not self._msgs: - # Websocket might be closed already. - if self.websocket_closed: - return None - # no parsed messages, must mean buf needs more data - delta = self.socket.recv(8096) - if delta == b'': - return None - self._buf += delta - msgs = self._parse_messages() - self._msgs.extend(msgs) - return self._msgs.popleft() - - def _send_closing_frame(self, ignore_send_errors=False): - """Sends the closing frame to the client, if required.""" - if self.version == 76 and not self.websocket_closed: - try: - self.socket.sendall(b"\xff\x00") - except SocketError: - # Sometimes, like when the remote side cuts off the connection, - # we don't care about this. - if not ignore_send_errors: # pragma NO COVER - raise - self.websocket_closed = True - - def close(self): - """Forcibly close the websocket; generally it is preferable to - return from the handler method.""" - try: - self._send_closing_frame(True) - self.socket.shutdown(True) - except SocketError as e: - if e.errno != errno.ENOTCONN: - self.log.write('{ctx} socket shutdown error: {e}'.format(ctx=self.log_context, e=e)) - finally: - self.socket.close() - - -class ConnectionClosedError(Exception): - pass - - -class FailedConnectionError(Exception): - def __init__(self, status, message): - super(FailedConnectionError, self).__init__(status, message) - self.message = message - self.status = status - - -class ProtocolError(ValueError): - pass - - -class RFC6455WebSocket(WebSocket): - def __init__(self, sock, environ, version=13, protocol=None, client=False, extensions=None): - super(RFC6455WebSocket, self).__init__(sock, environ, version) - self.iterator = self._iter_frames() - self.client = client - self.protocol = protocol - self.extensions = extensions or {} - - self._deflate_enc = None - self._deflate_dec = None - - class UTF8Decoder(object): - def __init__(self): - if utf8validator: - self.validator = utf8validator.Utf8Validator() - else: - self.validator = None - decoderclass = codecs.getincrementaldecoder('utf8') - self.decoder = decoderclass() - - def reset(self): - if self.validator: - self.validator.reset() - self.decoder.reset() - - def decode(self, data, final=False): - if self.validator: - valid, eocp, c_i, t_i = self.validator.validate(data) - if not valid: - raise ValueError('Data is not valid unicode') - return self.decoder.decode(data, final) - - def _get_permessage_deflate_enc(self): - options = self.extensions.get("permessage-deflate") - if options is None: - return None - - def _make(): - return zlib.compressobj(zlib.Z_DEFAULT_COMPRESSION, zlib.DEFLATED, - -options.get("client_max_window_bits" if self.client - else "server_max_window_bits", - zlib.MAX_WBITS)) - - if options.get("client_no_context_takeover" if self.client - else "server_no_context_takeover"): - # This option means we have to make a new one every time - return _make() - else: - if 
self._deflate_enc is None: - self._deflate_enc = _make() - return self._deflate_enc - - def _get_permessage_deflate_dec(self, rsv1): - options = self.extensions.get("permessage-deflate") - if options is None or not rsv1: - return None - - def _make(): - return zlib.decompressobj(-options.get("server_max_window_bits" if self.client - else "client_max_window_bits", - zlib.MAX_WBITS)) - - if options.get("server_no_context_takeover" if self.client - else "client_no_context_takeover"): - # This option means we have to make a new one every time - return _make() - else: - if self._deflate_dec is None: - self._deflate_dec = _make() - return self._deflate_dec - - def _get_bytes(self, numbytes): - data = b'' - while len(data) < numbytes: - d = self.socket.recv(numbytes - len(data)) - if not d: - raise ConnectionClosedError() - data = data + d - return data - - class Message(object): - def __init__(self, opcode, decoder=None, decompressor=None): - self.decoder = decoder - self.data = [] - self.finished = False - self.opcode = opcode - self.decompressor = decompressor - - def push(self, data, final=False): - self.finished = final - self.data.append(data) - - def getvalue(self): - data = b"".join(self.data) - if not self.opcode & 8 and self.decompressor: - data = self.decompressor.decompress(data + b'\x00\x00\xff\xff') - if self.decoder: - data = self.decoder.decode(data, self.finished) - return data - - @staticmethod - def _apply_mask(data, mask, length=None, offset=0): - if length is None: - length = len(data) - cnt = range(length) - return b''.join(six.int2byte(six.indexbytes(data, i) ^ mask[(offset + i) % 4]) for i in cnt) - - def _handle_control_frame(self, opcode, data): - if opcode == 8: # connection close - if not data: - status = 1000 - elif len(data) > 1: - status = struct.unpack_from('!H', data)[0] - if not status or status not in VALID_CLOSE_STATUS: - raise FailedConnectionError( - 1002, - "Unexpected close status code.") - try: - data = self.UTF8Decoder().decode(data[2:], True) - except (UnicodeDecodeError, ValueError): - raise FailedConnectionError( - 1002, - "Close message data should be valid UTF-8.") - else: - status = 1002 - self.close(close_data=(status, '')) - raise ConnectionClosedError() - elif opcode == 9: # ping - self.send(data, control_code=0xA) - elif opcode == 0xA: # pong - pass - else: - raise FailedConnectionError( - 1002, "Unknown control frame received.") - - def _iter_frames(self): - fragmented_message = None - try: - while True: - message = self._recv_frame(message=fragmented_message) - if message.opcode & 8: - self._handle_control_frame( - message.opcode, message.getvalue()) - continue - if fragmented_message and message is not fragmented_message: - raise RuntimeError('Unexpected message change.') - fragmented_message = message - if message.finished: - data = fragmented_message.getvalue() - fragmented_message = None - yield data - except FailedConnectionError: - exc_typ, exc_val, exc_tb = sys.exc_info() - self.close(close_data=(exc_val.status, exc_val.message)) - except ConnectionClosedError: - return - except Exception: - self.close(close_data=(1011, 'Internal Server Error')) - raise - - def _recv_frame(self, message=None): - recv = self._get_bytes - - # Unpacking the frame described in Section 5.2 of RFC6455 - # (https://tools.ietf.org/html/rfc6455#section-5.2) - header = recv(2) - a, b = struct.unpack('!BB', header) - finished = a >> 7 == 1 - rsv123 = a >> 4 & 7 - rsv1 = rsv123 & 4 - if rsv123: - if rsv1 and "permessage-deflate" not in self.extensions: - # must be 
zero - unless it's compressed then rsv1 is true - raise FailedConnectionError( - 1002, - "RSV1, RSV2, RSV3: MUST be 0 unless an extension is" - " negotiated that defines meanings for non-zero values.") - opcode = a & 15 - if opcode not in (0, 1, 2, 8, 9, 0xA): - raise FailedConnectionError(1002, "Unknown opcode received.") - masked = b & 128 == 128 - if not masked and not self.client: - raise FailedConnectionError(1002, "A client MUST mask all frames" - " that it sends to the server") - length = b & 127 - if opcode & 8: - if not finished: - raise FailedConnectionError(1002, "Control frames must not" - " be fragmented.") - if length > 125: - raise FailedConnectionError( - 1002, - "All control frames MUST have a payload length of 125" - " bytes or less") - elif opcode and message: - raise FailedConnectionError( - 1002, - "Received a non-continuation opcode within" - " fragmented message.") - elif not opcode and not message: - raise FailedConnectionError( - 1002, - "Received continuation opcode with no previous" - " fragments received.") - if length == 126: - length = struct.unpack('!H', recv(2))[0] - elif length == 127: - length = struct.unpack('!Q', recv(8))[0] - if masked: - mask = struct.unpack('!BBBB', recv(4)) - received = 0 - if not message or opcode & 8: - decoder = self.UTF8Decoder() if opcode == 1 else None - decompressor = self._get_permessage_deflate_dec(rsv1) - message = self.Message(opcode, decoder=decoder, decompressor=decompressor) - if not length: - message.push(b'', final=finished) - else: - while received < length: - d = self.socket.recv(length - received) - if not d: - raise ConnectionClosedError() - dlen = len(d) - if masked: - d = self._apply_mask(d, mask, length=dlen, offset=received) - received = received + dlen - try: - message.push(d, final=finished) - except (UnicodeDecodeError, ValueError): - raise FailedConnectionError( - 1007, "Text data must be valid utf-8") - return message - - def _pack_message(self, message, masked=False, - continuation=False, final=True, control_code=None): - is_text = False - if isinstance(message, six.text_type): - message = message.encode('utf-8') - is_text = True - - compress_bit = 0 - compressor = self._get_permessage_deflate_enc() - if message and compressor: - message = compressor.compress(message) - message += compressor.flush(zlib.Z_SYNC_FLUSH) - assert message[-4:] == b"\x00\x00\xff\xff" - message = message[:-4] - compress_bit = 1 << 6 - - length = len(message) - if not length: - # no point masking empty data - masked = False - if control_code: - if control_code not in (8, 9, 0xA): - raise ProtocolError('Unknown control opcode.') - if continuation or not final: - raise ProtocolError('Control frame cannot be a fragment.') - if length > 125: - raise ProtocolError('Control frame data too large (>125).') - header = struct.pack('!B', control_code | 1 << 7) - else: - opcode = 0 if continuation else ((1 if is_text else 2) | compress_bit) - header = struct.pack('!B', opcode | (1 << 7 if final else 0)) - lengthdata = 1 << 7 if masked else 0 - if length > 65535: - lengthdata = struct.pack('!BQ', lengthdata | 127, length) - elif length > 125: - lengthdata = struct.pack('!BH', lengthdata | 126, length) - else: - lengthdata = struct.pack('!B', lengthdata | length) - if masked: - # NOTE: RFC6455 states: - # A server MUST NOT mask any frames that it sends to the client - rand = Random(time.time()) - mask = [rand.getrandbits(8) for _ in six.moves.xrange(4)] - message = RFC6455WebSocket._apply_mask(message, mask, length) - maskdata = 
struct.pack('!BBBB', *mask) - else: - maskdata = b'' - - return b''.join((header, lengthdata, maskdata, message)) - - def wait(self): - for i in self.iterator: - return i - - def _send(self, frame): - self._sendlock.acquire() - try: - self.socket.sendall(frame) - finally: - self._sendlock.release() - - def send(self, message, **kw): - kw['masked'] = self.client - payload = self._pack_message(message, **kw) - self._send(payload) - - def _send_closing_frame(self, ignore_send_errors=False, close_data=None): - if self.version in (8, 13) and not self.websocket_closed: - if close_data is not None: - status, msg = close_data - if isinstance(msg, six.text_type): - msg = msg.encode('utf-8') - data = struct.pack('!H', status) + msg - else: - data = '' - try: - self.send(data, control_code=8) - except SocketError: - # Sometimes, like when the remote side cuts off the connection, - # we don't care about this. - if not ignore_send_errors: # pragma NO COVER - raise - self.websocket_closed = True - - def close(self, close_data=None): - """Forcibly close the websocket; generally it is preferable to - return from the handler method.""" - try: - self._send_closing_frame(close_data=close_data, ignore_send_errors=True) - self.socket.shutdown(socket.SHUT_WR) - except SocketError as e: - if e.errno != errno.ENOTCONN: - self.log.write('{ctx} socket shutdown error: {e}'.format(ctx=self.log_context, e=e)) - finally: - self.socket.close() diff --git a/venv/lib/python3.6/site-packages/eventlet/wsgi.py b/venv/lib/python3.6/site-packages/eventlet/wsgi.py deleted file mode 100644 index ee8ad2d..0000000 --- a/venv/lib/python3.6/site-packages/eventlet/wsgi.py +++ /dev/null @@ -1,1012 +0,0 @@ -import errno -import os -import sys -import time -import traceback -import types -import warnings - -import eventlet -from eventlet import greenio -from eventlet import support -from eventlet.green import BaseHTTPServer -from eventlet.green import socket -import six -from six.moves import urllib - - -DEFAULT_MAX_SIMULTANEOUS_REQUESTS = 1024 -DEFAULT_MAX_HTTP_VERSION = 'HTTP/1.1' -MAX_REQUEST_LINE = 8192 -MAX_HEADER_LINE = 8192 -MAX_TOTAL_HEADER_SIZE = 65536 -MINIMUM_CHUNK_SIZE = 4096 -# %(client_port)s is also available -DEFAULT_LOG_FORMAT = ('%(client_ip)s - - [%(date_time)s] "%(request_line)s"' - ' %(status_code)s %(body_length)s %(wall_seconds).6f') -RESPONSE_414 = b'''HTTP/1.0 414 Request URI Too Long\r\n\ -Connection: close\r\n\ -Content-Length: 0\r\n\r\n''' -is_accepting = True - -STATE_IDLE = 'idle' -STATE_REQUEST = 'request' -STATE_CLOSE = 'close' - -__all__ = ['server', 'format_date_time'] - -# Weekday and month names for HTTP date/time formatting; always English! -_weekdayname = ["Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun"] -_monthname = [None, # Dummy so we can use 1-based month numbers - "Jan", "Feb", "Mar", "Apr", "May", "Jun", - "Jul", "Aug", "Sep", "Oct", "Nov", "Dec"] - - -def format_date_time(timestamp): - """Formats a unix timestamp into an HTTP standard string.""" - year, month, day, hh, mm, ss, wd, _y, _z = time.gmtime(timestamp) - return "%s, %02d %3s %4d %02d:%02d:%02d GMT" % ( - _weekdayname[wd], day, _monthname[month], year, hh, mm, ss - ) - - -def addr_to_host_port(addr): - host = 'unix' - port = '' - if isinstance(addr, tuple): - host = addr[0] - port = addr[1] - return (host, port) - - -# Collections of error codes to compare against. 
Not all attributes are set -# on errno module on all platforms, so some are literals :( -BAD_SOCK = set((errno.EBADF, 10053)) -BROKEN_SOCK = set((errno.EPIPE, errno.ECONNRESET)) - - -class ChunkReadError(ValueError): - pass - - -# special flag return value for apps -class _AlreadyHandled(object): - - def __iter__(self): - return self - - def next(self): - raise StopIteration - - __next__ = next - - -ALREADY_HANDLED = _AlreadyHandled() - - -class Input(object): - - def __init__(self, - rfile, - content_length, - sock, - wfile=None, - wfile_line=None, - chunked_input=False): - - self.rfile = rfile - self._sock = sock - if content_length is not None: - content_length = int(content_length) - self.content_length = content_length - - self.wfile = wfile - self.wfile_line = wfile_line - - self.position = 0 - self.chunked_input = chunked_input - self.chunk_length = -1 - - # (optional) headers to send with a "100 Continue" response. Set by - # calling set_hundred_continue_respose_headers() on env['wsgi.input'] - self.hundred_continue_headers = None - self.is_hundred_continue_response_sent = False - - # handle_one_response should give us a ref to the response state so we - # know whether we can still send the 100 Continue; until then, though, - # we're flying blind - self.headers_sent = None - - def send_hundred_continue_response(self): - if self.headers_sent: - # To late; application has already started sending data back - # to the client - # TODO: maybe log a warning if self.hundred_continue_headers - # is not None? - return - - towrite = [] - - # 100 Continue status line - towrite.append(self.wfile_line) - - # Optional headers - if self.hundred_continue_headers is not None: - # 100 Continue headers - for header in self.hundred_continue_headers: - towrite.append(six.b('%s: %s\r\n' % header)) - - # Blank line - towrite.append(b'\r\n') - - self.wfile.writelines(towrite) - self.wfile.flush() - - # Reinitialize chunk_length (expect more data) - self.chunk_length = -1 - - def _do_read(self, reader, length=None): - if self.wfile is not None and not self.is_hundred_continue_response_sent: - # 100 Continue response - self.send_hundred_continue_response() - self.is_hundred_continue_response_sent = True - if (self.content_length is not None) and ( - length is None or length > self.content_length - self.position): - length = self.content_length - self.position - if not length: - return b'' - try: - read = reader(length) - except greenio.SSL.ZeroReturnError: - read = b'' - self.position += len(read) - return read - - def _chunked_read(self, rfile, length=None, use_readline=False): - if self.wfile is not None and not self.is_hundred_continue_response_sent: - # 100 Continue response - self.send_hundred_continue_response() - self.is_hundred_continue_response_sent = True - try: - if length == 0: - return b"" - - if length and length < 0: - length = None - - if use_readline: - reader = self.rfile.readline - else: - reader = self.rfile.read - - response = [] - while self.chunk_length != 0: - maxreadlen = self.chunk_length - self.position - if length is not None and length < maxreadlen: - maxreadlen = length - - if maxreadlen > 0: - data = reader(maxreadlen) - if not data: - self.chunk_length = 0 - raise IOError("unexpected end of file while parsing chunked data") - - datalen = len(data) - response.append(data) - - self.position += datalen - if self.chunk_length == self.position: - rfile.readline() - - if length is not None: - length -= datalen - if length == 0: - break - if use_readline and data[-1:] == b"\n": - 
break - else: - try: - self.chunk_length = int(rfile.readline().split(b";", 1)[0], 16) - except ValueError as err: - raise ChunkReadError(err) - self.position = 0 - if self.chunk_length == 0: - rfile.readline() - except greenio.SSL.ZeroReturnError: - pass - return b''.join(response) - - def read(self, length=None): - if self.chunked_input: - return self._chunked_read(self.rfile, length) - return self._do_read(self.rfile.read, length) - - def readline(self, size=None): - if self.chunked_input: - return self._chunked_read(self.rfile, size, True) - else: - return self._do_read(self.rfile.readline, size) - - def readlines(self, hint=None): - if self.chunked_input: - lines = [] - for line in iter(self.readline, b''): - lines.append(line) - if hint and hint > 0: - hint -= len(line) - if hint <= 0: - break - return lines - else: - return self._do_read(self.rfile.readlines, hint) - - def __iter__(self): - return iter(self.read, b'') - - def get_socket(self): - return self._sock - - def set_hundred_continue_response_headers(self, headers, - capitalize_response_headers=True): - # Response headers capitalization (default) - # CONTent-TYpe: TExt/PlaiN -> Content-Type: TExt/PlaiN - # Per HTTP RFC standard, header name is case-insensitive. - # Please, fix your client to ignore header case if possible. - if capitalize_response_headers: - headers = [ - ('-'.join([x.capitalize() for x in key.split('-')]), value) - for key, value in headers] - self.hundred_continue_headers = headers - - def discard(self, buffer_size=16 << 10): - while self.read(buffer_size): - pass - - -class HeaderLineTooLong(Exception): - pass - - -class HeadersTooLarge(Exception): - pass - - -def get_logger(log, debug): - if callable(getattr(log, 'info', None)) \ - and callable(getattr(log, 'debug', None)): - return log - else: - return LoggerFileWrapper(log or sys.stderr, debug) - - -class LoggerNull(object): - def __init__(self): - pass - - def error(self, msg, *args, **kwargs): - pass - - def info(self, msg, *args, **kwargs): - pass - - def debug(self, msg, *args, **kwargs): - pass - - def write(self, msg, *args): - pass - - -class LoggerFileWrapper(LoggerNull): - def __init__(self, log, debug): - self.log = log - self._debug = debug - - def error(self, msg, *args, **kwargs): - self.write(msg, *args) - - def info(self, msg, *args, **kwargs): - self.write(msg, *args) - - def debug(self, msg, *args, **kwargs): - if self._debug: - self.write(msg, *args) - - def write(self, msg, *args): - msg = msg + '\n' - if args: - msg = msg % args - self.log.write(msg) - - -class FileObjectForHeaders(object): - - def __init__(self, fp): - self.fp = fp - self.total_header_size = 0 - - def readline(self, size=-1): - sz = size - if size < 0: - sz = MAX_HEADER_LINE - rv = self.fp.readline(sz) - if len(rv) >= MAX_HEADER_LINE: - raise HeaderLineTooLong() - self.total_header_size += len(rv) - if self.total_header_size > MAX_TOTAL_HEADER_SIZE: - raise HeadersTooLarge() - return rv - - -class HttpProtocol(BaseHTTPServer.BaseHTTPRequestHandler): - protocol_version = 'HTTP/1.1' - minimum_chunk_size = MINIMUM_CHUNK_SIZE - capitalize_response_headers = True - - # https://github.com/eventlet/eventlet/issues/295 - # Stdlib default is 0 (unbuffered), but then `wfile.writelines()` looses data - # so before going back to unbuffered, remove any usage of `writelines`. 
- wbufsize = 16 << 10 - - def __init__(self, conn_state, server): - self.request = conn_state[1] - self.client_address = conn_state[0] - self.conn_state = conn_state - self.server = server - self.setup() - try: - self.handle() - finally: - self.finish() - - def setup(self): - # overriding SocketServer.setup to correctly handle SSL.Connection objects - conn = self.connection = self.request - - # TCP_QUICKACK is a better alternative to disabling Nagle's algorithm - # https://news.ycombinator.com/item?id=10607422 - if getattr(socket, 'TCP_QUICKACK', None): - try: - conn.setsockopt(socket.IPPROTO_TCP, socket.TCP_QUICKACK, True) - except socket.error: - pass - - try: - self.rfile = conn.makefile('rb', self.rbufsize) - self.wfile = conn.makefile('wb', self.wbufsize) - except (AttributeError, NotImplementedError): - if hasattr(conn, 'send') and hasattr(conn, 'recv'): - # it's an SSL.Connection - self.rfile = socket._fileobject(conn, "rb", self.rbufsize) - self.wfile = socket._fileobject(conn, "wb", self.wbufsize) - else: - # it's a SSLObject, or a martian - raise NotImplementedError( - '''eventlet.wsgi doesn't support sockets of type {0}'''.format(type(conn))) - - def handle(self): - self.close_connection = True - - while True: - self.handle_one_request() - if self.conn_state[2] == STATE_CLOSE: - self.close_connection = 1 - if self.close_connection: - break - - def _read_request_line(self): - if self.rfile.closed: - self.close_connection = 1 - return '' - - try: - return self.rfile.readline(self.server.url_length_limit) - except greenio.SSL.ZeroReturnError: - pass - except socket.error as e: - last_errno = support.get_errno(e) - if last_errno in BROKEN_SOCK: - self.server.log.debug('({0}) connection reset by peer {1!r}'.format( - self.server.pid, - self.client_address)) - elif last_errno not in BAD_SOCK: - raise - return '' - - def handle_one_request(self): - if self.server.max_http_version: - self.protocol_version = self.server.max_http_version - - self.raw_requestline = self._read_request_line() - if not self.raw_requestline: - self.close_connection = 1 - return - if len(self.raw_requestline) >= self.server.url_length_limit: - self.wfile.write(RESPONSE_414) - self.close_connection = 1 - return - - orig_rfile = self.rfile - try: - self.rfile = FileObjectForHeaders(self.rfile) - if not self.parse_request(): - return - except HeaderLineTooLong: - self.wfile.write( - b"HTTP/1.0 400 Header Line Too Long\r\n" - b"Connection: close\r\nContent-length: 0\r\n\r\n") - self.close_connection = 1 - return - except HeadersTooLarge: - self.wfile.write( - b"HTTP/1.0 400 Headers Too Large\r\n" - b"Connection: close\r\nContent-length: 0\r\n\r\n") - self.close_connection = 1 - return - finally: - self.rfile = orig_rfile - - content_length = self.headers.get('content-length') - if content_length is not None: - try: - if int(content_length) < 0: - raise ValueError - except ValueError: - # Negative, or not an int at all - self.wfile.write( - b"HTTP/1.0 400 Bad Request\r\n" - b"Connection: close\r\nContent-length: 0\r\n\r\n") - self.close_connection = 1 - return - - self.environ = self.get_environ() - self.application = self.server.app - try: - self.server.outstanding_requests += 1 - try: - self.handle_one_response() - except socket.error as e: - # Broken pipe, connection reset by peer - if support.get_errno(e) not in BROKEN_SOCK: - raise - finally: - self.server.outstanding_requests -= 1 - - def handle_one_response(self): - start = time.time() - headers_set = [] - headers_sent = [] - # Push the headers-sent state 
into the Input so it won't send a - # 100 Continue response if we've already started a response. - self.environ['wsgi.input'].headers_sent = headers_sent - - wfile = self.wfile - result = None - use_chunked = [False] - length = [0] - status_code = [200] - - def write(data): - towrite = [] - if not headers_set: - raise AssertionError("write() before start_response()") - elif not headers_sent: - status, response_headers = headers_set - headers_sent.append(1) - header_list = [header[0].lower() for header in response_headers] - towrite.append(six.b('%s %s\r\n' % (self.protocol_version, status))) - for header in response_headers: - towrite.append(six.b('%s: %s\r\n' % header)) - - # send Date header? - if 'date' not in header_list: - towrite.append(six.b('Date: %s\r\n' % (format_date_time(time.time()),))) - - client_conn = self.headers.get('Connection', '').lower() - send_keep_alive = False - if self.close_connection == 0 and \ - self.server.keepalive and (client_conn == 'keep-alive' or - (self.request_version == 'HTTP/1.1' and - not client_conn == 'close')): - # only send keep-alives back to clients that sent them, - # it's redundant for 1.1 connections - send_keep_alive = (client_conn == 'keep-alive') - self.close_connection = 0 - else: - self.close_connection = 1 - - if 'content-length' not in header_list: - if self.request_version == 'HTTP/1.1': - use_chunked[0] = True - towrite.append(b'Transfer-Encoding: chunked\r\n') - elif 'content-length' not in header_list: - # client is 1.0 and therefore must read to EOF - self.close_connection = 1 - - if self.close_connection: - towrite.append(b'Connection: close\r\n') - elif send_keep_alive: - towrite.append(b'Connection: keep-alive\r\n') - towrite.append(b'\r\n') - # end of header writing - - if use_chunked[0]: - # Write the chunked encoding - towrite.append(six.b("%x" % (len(data),)) + b"\r\n" + data + b"\r\n") - else: - towrite.append(data) - wfile.writelines(towrite) - wfile.flush() - length[0] = length[0] + sum(map(len, towrite)) - - def start_response(status, response_headers, exc_info=None): - status_code[0] = status.split()[0] - if exc_info: - try: - if headers_sent: - # Re-raise original exception if headers sent - six.reraise(exc_info[0], exc_info[1], exc_info[2]) - finally: - # Avoid dangling circular ref - exc_info = None - - # Response headers capitalization - # CONTent-TYpe: TExt/PlaiN -> Content-Type: TExt/PlaiN - # Per HTTP RFC standard, header name is case-insensitive. - # Please, fix your client to ignore header case if possible. 
- if self.capitalize_response_headers: - response_headers = [ - ('-'.join([x.capitalize() for x in key.split('-')]), value) - for key, value in response_headers] - - headers_set[:] = [status, response_headers] - return write - - try: - try: - result = self.application(self.environ, start_response) - if (isinstance(result, _AlreadyHandled) - or isinstance(getattr(result, '_obj', None), _AlreadyHandled)): - self.close_connection = 1 - return - - # Set content-length if possible - if not headers_sent and hasattr(result, '__len__') and \ - 'Content-Length' not in [h for h, _v in headers_set[1]]: - headers_set[1].append(('Content-Length', str(sum(map(len, result))))) - - towrite = [] - towrite_size = 0 - just_written_size = 0 - minimum_write_chunk_size = int(self.environ.get( - 'eventlet.minimum_write_chunk_size', self.minimum_chunk_size)) - for data in result: - if len(data) == 0: - continue - if isinstance(data, six.text_type): - data = data.encode('ascii') - - towrite.append(data) - towrite_size += len(data) - if towrite_size >= minimum_write_chunk_size: - write(b''.join(towrite)) - towrite = [] - just_written_size = towrite_size - towrite_size = 0 - if towrite: - just_written_size = towrite_size - write(b''.join(towrite)) - if not headers_sent or (use_chunked[0] and just_written_size): - write(b'') - except Exception: - self.close_connection = 1 - tb = traceback.format_exc() - self.server.log.info(tb) - if not headers_sent: - err_body = six.b(tb) if self.server.debug else b'' - start_response("500 Internal Server Error", - [('Content-type', 'text/plain'), - ('Content-length', len(err_body))]) - write(err_body) - finally: - if hasattr(result, 'close'): - result.close() - request_input = self.environ['eventlet.input'] - if (request_input.chunked_input or - request_input.position < (request_input.content_length or 0)): - # Read and discard body if there was no pending 100-continue - if not request_input.wfile and self.close_connection == 0: - try: - request_input.discard() - except ChunkReadError as e: - self.close_connection = 1 - self.server.log.error(( - 'chunked encoding error while discarding request body.' - + ' client={0} request="{1}" error="{2}"').format( - self.get_client_address()[0], self.requestline, e, - )) - except IOError as e: - self.close_connection = 1 - self.server.log.error(( - 'I/O error while discarding request body.' 
- + ' client={0} request="{1}" error="{2}"').format( - self.get_client_address()[0], self.requestline, e, - )) - finish = time.time() - - for hook, args, kwargs in self.environ['eventlet.posthooks']: - hook(self.environ, *args, **kwargs) - - if self.server.log_output: - client_host, client_port = self.get_client_address() - - self.server.log.info(self.server.log_format % { - 'client_ip': client_host, - 'client_port': client_port, - 'date_time': self.log_date_time_string(), - 'request_line': self.requestline, - 'status_code': status_code[0], - 'body_length': length[0], - 'wall_seconds': finish - start, - }) - - def get_client_address(self): - host, port = addr_to_host_port(self.client_address) - - if self.server.log_x_forwarded_for: - forward = self.headers.get('X-Forwarded-For', '').replace(' ', '') - if forward: - host = forward + ',' + host - return (host, port) - - def get_environ(self): - env = self.server.get_environ() - env['REQUEST_METHOD'] = self.command - env['SCRIPT_NAME'] = '' - - pq = self.path.split('?', 1) - env['RAW_PATH_INFO'] = pq[0] - if six.PY2: - env['PATH_INFO'] = urllib.parse.unquote(pq[0]) - else: - env['PATH_INFO'] = urllib.parse.unquote(pq[0], encoding='latin1') - if len(pq) > 1: - env['QUERY_STRING'] = pq[1] - - ct = self.headers.get('content-type') - if ct is None: - try: - ct = self.headers.type - except AttributeError: - ct = self.headers.get_content_type() - env['CONTENT_TYPE'] = ct - - length = self.headers.get('content-length') - if length: - env['CONTENT_LENGTH'] = length - env['SERVER_PROTOCOL'] = 'HTTP/1.0' - - sockname = self.request.getsockname() - server_addr = addr_to_host_port(sockname) - env['SERVER_NAME'] = server_addr[0] - env['SERVER_PORT'] = str(server_addr[1]) - client_addr = addr_to_host_port(self.client_address) - env['REMOTE_ADDR'] = client_addr[0] - env['REMOTE_PORT'] = str(client_addr[1]) - env['GATEWAY_INTERFACE'] = 'CGI/1.1' - - try: - headers = self.headers.headers - except AttributeError: - headers = self.headers._headers - else: - headers = [h.split(':', 1) for h in headers] - - env['headers_raw'] = headers_raw = tuple((k, v.strip(' \t\n\r')) for k, v in headers) - for k, v in headers_raw: - k = k.replace('-', '_').upper() - if k in ('CONTENT_TYPE', 'CONTENT_LENGTH'): - # These do not get the HTTP_ prefix and were handled above - continue - envk = 'HTTP_' + k - if envk in env: - env[envk] += ',' + v - else: - env[envk] = v - - if env.get('HTTP_EXPECT', '').lower() == '100-continue': - wfile = self.wfile - wfile_line = b'HTTP/1.1 100 Continue\r\n' - else: - wfile = None - wfile_line = None - chunked = env.get('HTTP_TRANSFER_ENCODING', '').lower() == 'chunked' - env['wsgi.input'] = env['eventlet.input'] = Input( - self.rfile, length, self.connection, wfile=wfile, wfile_line=wfile_line, - chunked_input=chunked) - env['eventlet.posthooks'] = [] - - return env - - def finish(self): - try: - BaseHTTPServer.BaseHTTPRequestHandler.finish(self) - except socket.error as e: - # Broken pipe, connection reset by peer - if support.get_errno(e) not in BROKEN_SOCK: - raise - greenio.shutdown_safe(self.connection) - self.connection.close() - - def handle_expect_100(self): - return True - - -class Server(BaseHTTPServer.HTTPServer): - - def __init__(self, - socket, - address, - app, - log=None, - environ=None, - max_http_version=None, - protocol=HttpProtocol, - minimum_chunk_size=None, - log_x_forwarded_for=True, - keepalive=True, - log_output=True, - log_format=DEFAULT_LOG_FORMAT, - url_length_limit=MAX_REQUEST_LINE, - debug=True, - 
socket_timeout=None, - capitalize_response_headers=True): - - self.outstanding_requests = 0 - self.socket = socket - self.address = address - self.log = LoggerNull() - if log_output: - self.log = get_logger(log, debug) - self.app = app - self.keepalive = keepalive - self.environ = environ - self.max_http_version = max_http_version - self.protocol = protocol - self.pid = os.getpid() - self.minimum_chunk_size = minimum_chunk_size - self.log_x_forwarded_for = log_x_forwarded_for - self.log_output = log_output - self.log_format = log_format - self.url_length_limit = url_length_limit - self.debug = debug - self.socket_timeout = socket_timeout - self.capitalize_response_headers = capitalize_response_headers - - if not self.capitalize_response_headers: - warnings.warn("""capitalize_response_headers is disabled. - Please, make sure you know what you are doing. - HTTP headers names are case-insensitive per RFC standard. - Most likely, you need to fix HTTP parsing in your client software.""", - DeprecationWarning, stacklevel=3) - - def get_environ(self): - d = { - 'wsgi.errors': sys.stderr, - 'wsgi.version': (1, 0), - 'wsgi.multithread': True, - 'wsgi.multiprocess': False, - 'wsgi.run_once': False, - 'wsgi.url_scheme': 'http', - } - # detect secure socket - if hasattr(self.socket, 'do_handshake'): - d['wsgi.url_scheme'] = 'https' - d['HTTPS'] = 'on' - if self.environ is not None: - d.update(self.environ) - return d - - def process_request(self, conn_state): - # The actual request handling takes place in __init__, so we need to - # set minimum_chunk_size before __init__ executes and we don't want to modify - # class variable - proto = new(self.protocol) - if self.minimum_chunk_size is not None: - proto.minimum_chunk_size = self.minimum_chunk_size - proto.capitalize_response_headers = self.capitalize_response_headers - try: - proto.__init__(conn_state, self) - except socket.timeout: - # Expected exceptions are not exceptional - conn_state[1].close() - # similar to logging "accepted" in server() - self.log.debug('({0}) timed out {1!r}'.format(self.pid, conn_state[0])) - - def log_message(self, message): - raise AttributeError('''\ -eventlet.wsgi.server.log_message was deprecated and deleted. 
-Please use server.log.info instead.''') - - -try: - new = types.InstanceType -except AttributeError: - new = lambda cls: cls.__new__(cls) - - -try: - import ssl - ACCEPT_EXCEPTIONS = (socket.error, ssl.SSLError) - ACCEPT_ERRNO = set((errno.EPIPE, errno.EBADF, errno.ECONNRESET, - ssl.SSL_ERROR_EOF, ssl.SSL_ERROR_SSL)) -except ImportError: - ACCEPT_EXCEPTIONS = (socket.error,) - ACCEPT_ERRNO = set((errno.EPIPE, errno.EBADF, errno.ECONNRESET)) - - -def socket_repr(sock): - scheme = 'http' - if hasattr(sock, 'do_handshake'): - scheme = 'https' - - name = sock.getsockname() - if sock.family == socket.AF_INET: - hier_part = '//{0}:{1}'.format(*name) - elif sock.family == socket.AF_INET6: - hier_part = '//[{0}]:{1}'.format(*name[:2]) - elif sock.family == socket.AF_UNIX: - hier_part = name - else: - hier_part = repr(name) - - return scheme + ':' + hier_part - - -def server(sock, site, - log=None, - environ=None, - max_size=None, - max_http_version=DEFAULT_MAX_HTTP_VERSION, - protocol=HttpProtocol, - server_event=None, - minimum_chunk_size=None, - log_x_forwarded_for=True, - custom_pool=None, - keepalive=True, - log_output=True, - log_format=DEFAULT_LOG_FORMAT, - url_length_limit=MAX_REQUEST_LINE, - debug=True, - socket_timeout=None, - capitalize_response_headers=True): - """Start up a WSGI server handling requests from the supplied server - socket. This function loops forever. The *sock* object will be - closed after server exits, but the underlying file descriptor will - remain open, so if you have a dup() of *sock*, it will remain usable. - - .. warning:: - - At the moment :func:`server` will always wait for active connections to finish before - exiting, even if there's an exception raised inside it - (*all* exceptions are handled the same way, including :class:`greenlet.GreenletExit` - and those inheriting from `BaseException`). - - While this may not be an issue normally, when it comes to long running HTTP connections - (like :mod:`eventlet.websocket`) it will become problematic and calling - :meth:`~eventlet.greenthread.GreenThread.wait` on a thread that runs the server may hang, - even after using :meth:`~eventlet.greenthread.GreenThread.kill`, as long - as there are active connections. - - :param sock: Server socket, must be already bound to a port and listening. - :param site: WSGI application function. - :param log: logging.Logger instance or file-like object that logs should be written to. - If a Logger instance is supplied, messages are sent to the INFO log level. - If not specified, sys.stderr is used. - :param environ: Additional parameters that go into the environ dictionary of every request. - :param max_size: Maximum number of client connections opened at any time by this server. - Default is 1024. - :param max_http_version: Set to "HTTP/1.0" to make the server pretend it only supports HTTP 1.0. - This can help with applications or clients that don't behave properly using HTTP 1.1. - :param protocol: Protocol class. Deprecated. - :param server_event: Used to collect the Server object. Deprecated. - :param minimum_chunk_size: Minimum size in bytes for http chunks. This can be used to improve - performance of applications which yield many small strings, though - using it technically violates the WSGI spec. This can be overridden - on a per request basis by setting environ['eventlet.minimum_write_chunk_size']. 
- :param log_x_forwarded_for: If True (the default), logs the contents of the x-forwarded-for - header in addition to the actual client ip address in the 'client_ip' field of the - log line. - :param custom_pool: A custom GreenPool instance which is used to spawn client green threads. - If this is supplied, max_size is ignored. - :param keepalive: If set to False, disables keepalives on the server; all connections will be - closed after serving one request. - :param log_output: A Boolean indicating if the server will log data or not. - :param log_format: A python format string that is used as the template to generate log lines. - The following values can be formatted into it: client_ip, date_time, request_line, - status_code, body_length, wall_seconds. The default is a good example of how to - use it. - :param url_length_limit: A maximum allowed length of the request url. If exceeded, 414 error - is returned. - :param debug: True if the server should send exception tracebacks to the clients on 500 errors. - If False, the server will respond with empty bodies. - :param socket_timeout: Timeout for client connections' socket operations. Default None means - wait forever. - :param capitalize_response_headers: Normalize response headers' names to Foo-Bar. - Default is True. - """ - serv = Server( - sock, sock.getsockname(), - site, log, - environ=environ, - max_http_version=max_http_version, - protocol=protocol, - minimum_chunk_size=minimum_chunk_size, - log_x_forwarded_for=log_x_forwarded_for, - keepalive=keepalive, - log_output=log_output, - log_format=log_format, - url_length_limit=url_length_limit, - debug=debug, - socket_timeout=socket_timeout, - capitalize_response_headers=capitalize_response_headers, - ) - if server_event is not None: - warnings.warn( - 'eventlet.wsgi.Server() server_event kwarg is deprecated and will be removed soon', - DeprecationWarning, stacklevel=2) - server_event.send(serv) - if max_size is None: - max_size = DEFAULT_MAX_SIMULTANEOUS_REQUESTS - if custom_pool is not None: - pool = custom_pool - else: - pool = eventlet.GreenPool(max_size) - - if not (hasattr(pool, 'spawn') and hasattr(pool, 'waitall')): - raise AttributeError('''\ -eventlet.wsgi.Server pool must provide methods: `spawn`, `waitall`. -If unsure, use eventlet.GreenPool.''') - - # [addr, socket, state] - connections = {} - - def _clean_connection(_, conn): - connections.pop(conn[0], None) - conn[2] = STATE_CLOSE - greenio.shutdown_safe(conn[1]) - conn[1].close() - - try: - serv.log.info('({0}) wsgi starting up on {1}'.format(serv.pid, socket_repr(sock))) - while is_accepting: - try: - client_socket, client_addr = sock.accept() - client_socket.settimeout(serv.socket_timeout) - serv.log.debug('({0}) accepted {1!r}'.format(serv.pid, client_addr)) - connections[client_addr] = connection = [client_addr, client_socket, STATE_IDLE] - (pool.spawn(serv.process_request, connection) - .link(_clean_connection, connection)) - except ACCEPT_EXCEPTIONS as e: - if support.get_errno(e) not in ACCEPT_ERRNO: - raise - except (KeyboardInterrupt, SystemExit): - serv.log.info('wsgi exiting') - break - finally: - for cs in six.itervalues(connections): - prev_state = cs[2] - cs[2] = STATE_CLOSE - if prev_state == STATE_IDLE: - greenio.shutdown_safe(cs[1]) - pool.waitall() - serv.log.info('({0}) wsgi exited, is_accepting={1}'.format(serv.pid, is_accepting)) - try: - # NOTE: It's not clear whether we want this to leave the - # socket open or close it. 
Use cases like Spawning want - # the underlying fd to remain open, but if we're going - # that far we might as well not bother closing sock at - # all. - sock.close() - except socket.error as e: - if support.get_errno(e) not in BROKEN_SOCK: - traceback.print_exc() diff --git a/venv/lib/python3.6/site-packages/eventlet/zipkin/__init__.py b/venv/lib/python3.6/site-packages/eventlet/zipkin/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/venv/lib/python3.6/site-packages/eventlet/zipkin/_thrift/__init__.py b/venv/lib/python3.6/site-packages/eventlet/zipkin/_thrift/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/venv/lib/python3.6/site-packages/eventlet/zipkin/_thrift/zipkinCore/__init__.py b/venv/lib/python3.6/site-packages/eventlet/zipkin/_thrift/zipkinCore/__init__.py deleted file mode 100644 index adefd8e..0000000 --- a/venv/lib/python3.6/site-packages/eventlet/zipkin/_thrift/zipkinCore/__init__.py +++ /dev/null @@ -1 +0,0 @@ -__all__ = ['ttypes', 'constants'] diff --git a/venv/lib/python3.6/site-packages/eventlet/zipkin/_thrift/zipkinCore/constants.py b/venv/lib/python3.6/site-packages/eventlet/zipkin/_thrift/zipkinCore/constants.py deleted file mode 100644 index 3e04f77..0000000 --- a/venv/lib/python3.6/site-packages/eventlet/zipkin/_thrift/zipkinCore/constants.py +++ /dev/null @@ -1,14 +0,0 @@ -# -# Autogenerated by Thrift Compiler (0.8.0) -# -# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING -# -# - -from thrift.Thrift import TType, TMessageType, TException -from ttypes import * - -CLIENT_SEND = "cs" -CLIENT_RECV = "cr" -SERVER_SEND = "ss" -SERVER_RECV = "sr" diff --git a/venv/lib/python3.6/site-packages/eventlet/zipkin/_thrift/zipkinCore/ttypes.py b/venv/lib/python3.6/site-packages/eventlet/zipkin/_thrift/zipkinCore/ttypes.py deleted file mode 100644 index 418911f..0000000 --- a/venv/lib/python3.6/site-packages/eventlet/zipkin/_thrift/zipkinCore/ttypes.py +++ /dev/null @@ -1,452 +0,0 @@ -# -# Autogenerated by Thrift Compiler (0.8.0) -# -# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING -# -# - -from thrift.Thrift import TType, TMessageType, TException - -from thrift.transport import TTransport -from thrift.protocol import TBinaryProtocol, TProtocol -try: - from thrift.protocol import fastbinary -except: - fastbinary = None - - -class AnnotationType: - BOOL = 0 - BYTES = 1 - I16 = 2 - I32 = 3 - I64 = 4 - DOUBLE = 5 - STRING = 6 - - _VALUES_TO_NAMES = { - 0: "BOOL", - 1: "BYTES", - 2: "I16", - 3: "I32", - 4: "I64", - 5: "DOUBLE", - 6: "STRING", - } - - _NAMES_TO_VALUES = { - "BOOL": 0, - "BYTES": 1, - "I16": 2, - "I32": 3, - "I64": 4, - "DOUBLE": 5, - "STRING": 6, - } - - -class Endpoint: - """ - Attributes: - - ipv4 - - port - - service_name - """ - - thrift_spec = ( - None, # 0 - (1, TType.I32, 'ipv4', None, None, ), # 1 - (2, TType.I16, 'port', None, None, ), # 2 - (3, TType.STRING, 'service_name', None, None, ), # 3 - ) - - def __init__(self, ipv4=None, port=None, service_name=None,): - self.ipv4 = ipv4 - self.port = port - self.service_name = service_name - - def read(self, iprot): - if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None: - fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec)) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == 
TType.I32: - self.ipv4 = iprot.readI32(); - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.I16: - self.port = iprot.readI16(); - else: - iprot.skip(ftype) - elif fid == 3: - if ftype == TType.STRING: - self.service_name = iprot.readString(); - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None: - oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))) - return - oprot.writeStructBegin('Endpoint') - if self.ipv4 is not None: - oprot.writeFieldBegin('ipv4', TType.I32, 1) - oprot.writeI32(self.ipv4) - oprot.writeFieldEnd() - if self.port is not None: - oprot.writeFieldBegin('port', TType.I16, 2) - oprot.writeI16(self.port) - oprot.writeFieldEnd() - if self.service_name is not None: - oprot.writeFieldBegin('service_name', TType.STRING, 3) - oprot.writeString(self.service_name) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.iteritems()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) - -class Annotation: - """ - Attributes: - - timestamp - - value - - host - """ - - thrift_spec = ( - None, # 0 - (1, TType.I64, 'timestamp', None, None, ), # 1 - (2, TType.STRING, 'value', None, None, ), # 2 - (3, TType.STRUCT, 'host', (Endpoint, Endpoint.thrift_spec), None, ), # 3 - ) - - def __init__(self, timestamp=None, value=None, host=None,): - self.timestamp = timestamp - self.value = value - self.host = host - - def read(self, iprot): - if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None: - fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec)) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == TType.I64: - self.timestamp = iprot.readI64(); - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRING: - self.value = iprot.readString(); - else: - iprot.skip(ftype) - elif fid == 3: - if ftype == TType.STRUCT: - self.host = Endpoint() - self.host.read(iprot) - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None: - oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))) - return - oprot.writeStructBegin('Annotation') - if self.timestamp is not None: - oprot.writeFieldBegin('timestamp', TType.I64, 1) - oprot.writeI64(self.timestamp) - oprot.writeFieldEnd() - if self.value is not None: - oprot.writeFieldBegin('value', TType.STRING, 2) - oprot.writeString(self.value) - oprot.writeFieldEnd() - if self.host is not None: - oprot.writeFieldBegin('host', TType.STRUCT, 3) - self.host.write(oprot) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - - def __repr__(self): - L = ['%s=%r' % (key, 
value) - for key, value in self.__dict__.iteritems()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) - -class BinaryAnnotation: - """ - Attributes: - - key - - value - - annotation_type - - host - """ - - thrift_spec = ( - None, # 0 - (1, TType.STRING, 'key', None, None, ), # 1 - (2, TType.STRING, 'value', None, None, ), # 2 - (3, TType.I32, 'annotation_type', None, None, ), # 3 - (4, TType.STRUCT, 'host', (Endpoint, Endpoint.thrift_spec), None, ), # 4 - ) - - def __init__(self, key=None, value=None, annotation_type=None, host=None,): - self.key = key - self.value = value - self.annotation_type = annotation_type - self.host = host - - def read(self, iprot): - if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None: - fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec)) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == TType.STRING: - self.key = iprot.readString(); - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.STRING: - self.value = iprot.readString(); - else: - iprot.skip(ftype) - elif fid == 3: - if ftype == TType.I32: - self.annotation_type = iprot.readI32(); - else: - iprot.skip(ftype) - elif fid == 4: - if ftype == TType.STRUCT: - self.host = Endpoint() - self.host.read(iprot) - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None: - oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))) - return - oprot.writeStructBegin('BinaryAnnotation') - if self.key is not None: - oprot.writeFieldBegin('key', TType.STRING, 1) - oprot.writeString(self.key) - oprot.writeFieldEnd() - if self.value is not None: - oprot.writeFieldBegin('value', TType.STRING, 2) - oprot.writeString(self.value) - oprot.writeFieldEnd() - if self.annotation_type is not None: - oprot.writeFieldBegin('annotation_type', TType.I32, 3) - oprot.writeI32(self.annotation_type) - oprot.writeFieldEnd() - if self.host is not None: - oprot.writeFieldBegin('host', TType.STRUCT, 4) - self.host.write(oprot) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.iteritems()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - - def __ne__(self, other): - return not (self == other) - -class Span: - """ - Attributes: - - trace_id - - name - - id - - parent_id - - annotations - - binary_annotations - """ - - thrift_spec = ( - None, # 0 - (1, TType.I64, 'trace_id', None, None, ), # 1 - None, # 2 - (3, TType.STRING, 'name', None, None, ), # 3 - (4, TType.I64, 'id', None, None, ), # 4 - (5, TType.I64, 'parent_id', None, None, ), # 5 - (6, TType.LIST, 'annotations', (TType.STRUCT,(Annotation, Annotation.thrift_spec)), None, ), # 6 - None, # 7 - (8, TType.LIST, 'binary_annotations', 
(TType.STRUCT,(BinaryAnnotation, BinaryAnnotation.thrift_spec)), None, ), # 8 - ) - - def __init__(self, trace_id=None, name=None, id=None, parent_id=None, annotations=None, binary_annotations=None,): - self.trace_id = trace_id - self.name = name - self.id = id - self.parent_id = parent_id - self.annotations = annotations - self.binary_annotations = binary_annotations - - def read(self, iprot): - if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None: - fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec)) - return - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == TType.I64: - self.trace_id = iprot.readI64(); - else: - iprot.skip(ftype) - elif fid == 3: - if ftype == TType.STRING: - self.name = iprot.readString(); - else: - iprot.skip(ftype) - elif fid == 4: - if ftype == TType.I64: - self.id = iprot.readI64(); - else: - iprot.skip(ftype) - elif fid == 5: - if ftype == TType.I64: - self.parent_id = iprot.readI64(); - else: - iprot.skip(ftype) - elif fid == 6: - if ftype == TType.LIST: - self.annotations = [] - (_etype3, _size0) = iprot.readListBegin() - for _i4 in xrange(_size0): - _elem5 = Annotation() - _elem5.read(iprot) - self.annotations.append(_elem5) - iprot.readListEnd() - else: - iprot.skip(ftype) - elif fid == 8: - if ftype == TType.LIST: - self.binary_annotations = [] - (_etype9, _size6) = iprot.readListBegin() - for _i10 in xrange(_size6): - _elem11 = BinaryAnnotation() - _elem11.read(iprot) - self.binary_annotations.append(_elem11) - iprot.readListEnd() - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None: - oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))) - return - oprot.writeStructBegin('Span') - if self.trace_id is not None: - oprot.writeFieldBegin('trace_id', TType.I64, 1) - oprot.writeI64(self.trace_id) - oprot.writeFieldEnd() - if self.name is not None: - oprot.writeFieldBegin('name', TType.STRING, 3) - oprot.writeString(self.name) - oprot.writeFieldEnd() - if self.id is not None: - oprot.writeFieldBegin('id', TType.I64, 4) - oprot.writeI64(self.id) - oprot.writeFieldEnd() - if self.parent_id is not None: - oprot.writeFieldBegin('parent_id', TType.I64, 5) - oprot.writeI64(self.parent_id) - oprot.writeFieldEnd() - if self.annotations is not None: - oprot.writeFieldBegin('annotations', TType.LIST, 6) - oprot.writeListBegin(TType.STRUCT, len(self.annotations)) - for iter12 in self.annotations: - iter12.write(oprot) - oprot.writeListEnd() - oprot.writeFieldEnd() - if self.binary_annotations is not None: - oprot.writeFieldBegin('binary_annotations', TType.LIST, 8) - oprot.writeListBegin(TType.STRUCT, len(self.binary_annotations)) - for iter13 in self.binary_annotations: - iter13.write(oprot) - oprot.writeListEnd() - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - def validate(self): - return - - - def __repr__(self): - L = ['%s=%r' % (key, value) - for key, value in self.__dict__.iteritems()] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ - 
- def __ne__(self, other): - return not (self == other) diff --git a/venv/lib/python3.6/site-packages/eventlet/zipkin/api.py b/venv/lib/python3.6/site-packages/eventlet/zipkin/api.py deleted file mode 100644 index cd03ec0..0000000 --- a/venv/lib/python3.6/site-packages/eventlet/zipkin/api.py +++ /dev/null @@ -1,186 +0,0 @@ -import os -import sys -import time -import struct -import socket -import random - -from eventlet.green import threading -from eventlet.zipkin._thrift.zipkinCore import ttypes -from eventlet.zipkin._thrift.zipkinCore.constants import SERVER_SEND - - -client = None -_tls = threading.local() # thread local storage - - -def put_annotation(msg, endpoint=None): - """ This is annotation API. - You can add your own annotation from in your code. - Annotation is recorded with timestamp automatically. - e.g.) put_annotation('cache hit for %s' % request) - - :param msg: String message - :param endpoint: host info - """ - if is_sample(): - a = ZipkinDataBuilder.build_annotation(msg, endpoint) - trace_data = get_trace_data() - trace_data.add_annotation(a) - - -def put_key_value(key, value, endpoint=None): - """ This is binary annotation API. - You can add your own key-value extra information from in your code. - Key-value doesn't have a time component. - e.g.) put_key_value('http.uri', '/hoge/index.html') - - :param key: String - :param value: String - :param endpoint: host info - """ - if is_sample(): - b = ZipkinDataBuilder.build_binary_annotation(key, value, endpoint) - trace_data = get_trace_data() - trace_data.add_binary_annotation(b) - - -def is_tracing(): - """ Return whether the current thread is tracking or not """ - return hasattr(_tls, 'trace_data') - - -def is_sample(): - """ Return whether it should record trace information - for the request or not - """ - return is_tracing() and _tls.trace_data.sampled - - -def get_trace_data(): - if is_tracing(): - return _tls.trace_data - - -def set_trace_data(trace_data): - _tls.trace_data = trace_data - - -def init_trace_data(): - if is_tracing(): - del _tls.trace_data - - -def _uniq_id(): - """ - Create a random 64-bit signed integer appropriate - for use as trace and span IDs. 
- XXX: By experimentation zipkin has trouble recording traces with ids - larger than (2 ** 56) - 1 - """ - return random.randint(0, (2 ** 56) - 1) - - -def generate_trace_id(): - return _uniq_id() - - -def generate_span_id(): - return _uniq_id() - - -class TraceData(object): - - END_ANNOTATION = SERVER_SEND - - def __init__(self, name, trace_id, span_id, parent_id, sampled, endpoint): - """ - :param name: RPC name (String) - :param trace_id: int - :param span_id: int - :param parent_id: int or None - :param sampled: lets the downstream servers know - if I should record trace data for the request (bool) - :param endpoint: zipkin._thrift.zipkinCore.ttypes.EndPoint - """ - self.name = name - self.trace_id = trace_id - self.span_id = span_id - self.parent_id = parent_id - self.sampled = sampled - self.endpoint = endpoint - self.annotations = [] - self.bannotations = [] - self._done = False - - def add_annotation(self, annotation): - if annotation.host is None: - annotation.host = self.endpoint - if not self._done: - self.annotations.append(annotation) - if annotation.value == self.END_ANNOTATION: - self.flush() - - def add_binary_annotation(self, bannotation): - if bannotation.host is None: - bannotation.host = self.endpoint - if not self._done: - self.bannotations.append(bannotation) - - def flush(self): - span = ZipkinDataBuilder.build_span(name=self.name, - trace_id=self.trace_id, - span_id=self.span_id, - parent_id=self.parent_id, - annotations=self.annotations, - bannotations=self.bannotations) - client.send_to_collector(span) - self.annotations = [] - self.bannotations = [] - self._done = True - - -class ZipkinDataBuilder: - @staticmethod - def build_span(name, trace_id, span_id, parent_id, - annotations, bannotations): - return ttypes.Span( - name=name, - trace_id=trace_id, - id=span_id, - parent_id=parent_id, - annotations=annotations, - binary_annotations=bannotations - ) - - @staticmethod - def build_annotation(value, endpoint=None): - if isinstance(value, unicode): - value = value.encode('utf-8') - return ttypes.Annotation(time.time() * 1000 * 1000, - str(value), endpoint) - - @staticmethod - def build_binary_annotation(key, value, endpoint=None): - annotation_type = ttypes.AnnotationType.STRING - return ttypes.BinaryAnnotation(key, value, annotation_type, endpoint) - - @staticmethod - def build_endpoint(ipv4=None, port=None, service_name=None): - if ipv4 is not None: - ipv4 = ZipkinDataBuilder._ipv4_to_int(ipv4) - if service_name is None: - service_name = ZipkinDataBuilder._get_script_name() - return ttypes.Endpoint( - ipv4=ipv4, - port=port, - service_name=service_name - ) - - @staticmethod - def _ipv4_to_int(ipv4): - return struct.unpack('!i', socket.inet_aton(ipv4))[0] - - @staticmethod - def _get_script_name(): - return os.path.basename(sys.argv[0]) diff --git a/venv/lib/python3.6/site-packages/eventlet/zipkin/client.py b/venv/lib/python3.6/site-packages/eventlet/zipkin/client.py deleted file mode 100644 index c94070a..0000000 --- a/venv/lib/python3.6/site-packages/eventlet/zipkin/client.py +++ /dev/null @@ -1,56 +0,0 @@ -import base64 -import warnings - -from scribe import scribe -from thrift.transport import TTransport, TSocket -from thrift.protocol import TBinaryProtocol - -from eventlet import GreenPile - - -CATEGORY = 'zipkin' - - -class ZipkinClient(object): - - def __init__(self, host='127.0.0.1', port=9410): - """ - :param host: zipkin collector IP addoress (default '127.0.0.1') - :param port: zipkin collector port (default 9410) - """ - self.host = host - self.port = 
port - self.pile = GreenPile(1) - self._connect() - - def _connect(self): - socket = TSocket.TSocket(self.host, self.port) - self.transport = TTransport.TFramedTransport(socket) - protocol = TBinaryProtocol.TBinaryProtocol(self.transport, - False, False) - self.scribe_client = scribe.Client(protocol) - try: - self.transport.open() - except TTransport.TTransportException as e: - warnings.warn(e.message) - - def _build_message(self, thrift_obj): - trans = TTransport.TMemoryBuffer() - protocol = TBinaryProtocol.TBinaryProtocolAccelerated(trans=trans) - thrift_obj.write(protocol) - return base64.b64encode(trans.getvalue()) - - def send_to_collector(self, span): - self.pile.spawn(self._send, span) - - def _send(self, span): - log_entry = scribe.LogEntry(CATEGORY, self._build_message(span)) - try: - self.scribe_client.Log([log_entry]) - except Exception as e: - msg = 'ZipkinClient send error %s' % str(e) - warnings.warn(msg) - self._connect() - - def close(self): - self.transport.close() diff --git a/venv/lib/python3.6/site-packages/eventlet/zipkin/greenthread.py b/venv/lib/python3.6/site-packages/eventlet/zipkin/greenthread.py deleted file mode 100644 index 37e12d6..0000000 --- a/venv/lib/python3.6/site-packages/eventlet/zipkin/greenthread.py +++ /dev/null @@ -1,33 +0,0 @@ -from eventlet import greenthread - -from eventlet.zipkin import api - - -__original_init__ = greenthread.GreenThread.__init__ -__original_main__ = greenthread.GreenThread.main - - -def _patched__init(self, parent): - # parent thread saves current TraceData from tls to self - if api.is_tracing(): - self.trace_data = api.get_trace_data() - - __original_init__(self, parent) - - -def _patched_main(self, function, args, kwargs): - # child thread inherits TraceData - if hasattr(self, 'trace_data'): - api.set_trace_data(self.trace_data) - - __original_main__(self, function, args, kwargs) - - -def patch(): - greenthread.GreenThread.__init__ = _patched__init - greenthread.GreenThread.main = _patched_main - - -def unpatch(): - greenthread.GreenThread.__init__ = __original_init__ - greenthread.GreenThread.main = __original_main__ diff --git a/venv/lib/python3.6/site-packages/eventlet/zipkin/http.py b/venv/lib/python3.6/site-packages/eventlet/zipkin/http.py deleted file mode 100644 index 3ab5925..0000000 --- a/venv/lib/python3.6/site-packages/eventlet/zipkin/http.py +++ /dev/null @@ -1,61 +0,0 @@ -import warnings - -import six -from eventlet.green import httplib -from eventlet.zipkin import api - - -# see https://twitter.github.io/zipkin/Instrumenting.html -HDR_TRACE_ID = 'X-B3-TraceId' -HDR_SPAN_ID = 'X-B3-SpanId' -HDR_PARENT_SPAN_ID = 'X-B3-ParentSpanId' -HDR_SAMPLED = 'X-B3-Sampled' - - -if six.PY2: - __org_endheaders__ = httplib.HTTPConnection.endheaders - __org_begin__ = httplib.HTTPResponse.begin - - def _patched_endheaders(self): - if api.is_tracing(): - trace_data = api.get_trace_data() - new_span_id = api.generate_span_id() - self.putheader(HDR_TRACE_ID, hex_str(trace_data.trace_id)) - self.putheader(HDR_SPAN_ID, hex_str(new_span_id)) - self.putheader(HDR_PARENT_SPAN_ID, hex_str(trace_data.span_id)) - self.putheader(HDR_SAMPLED, int(trace_data.sampled)) - api.put_annotation('Client Send') - - __org_endheaders__(self) - - def _patched_begin(self): - __org_begin__(self) - - if api.is_tracing(): - api.put_annotation('Client Recv (%s)' % self.status) - - -def patch(): - if six.PY2: - httplib.HTTPConnection.endheaders = _patched_endheaders - httplib.HTTPResponse.begin = _patched_begin - if six.PY3: - warnings.warn("Since current 
Python thrift release \ - doesn't support Python 3, eventlet.zipkin.http \ - doesn't also support Python 3 (http.client)") - - -def unpatch(): - if six.PY2: - httplib.HTTPConnection.endheaders = __org_endheaders__ - httplib.HTTPResponse.begin = __org_begin__ - if six.PY3: - pass - - -def hex_str(n): - """ - Thrift uses a binary representation of trace and span ids - HTTP headers use a hexadecimal representation of the same - """ - return '%0.16x' % (n,) diff --git a/venv/lib/python3.6/site-packages/eventlet/zipkin/log.py b/venv/lib/python3.6/site-packages/eventlet/zipkin/log.py deleted file mode 100644 index b7f9d32..0000000 --- a/venv/lib/python3.6/site-packages/eventlet/zipkin/log.py +++ /dev/null @@ -1,19 +0,0 @@ -import logging - -from eventlet.zipkin import api - - -__original_handle__ = logging.Logger.handle - - -def _patched_handle(self, record): - __original_handle__(self, record) - api.put_annotation(record.getMessage()) - - -def patch(): - logging.Logger.handle = _patched_handle - - -def unpatch(): - logging.Logger.handle = __original_handle__ diff --git a/venv/lib/python3.6/site-packages/eventlet/zipkin/patcher.py b/venv/lib/python3.6/site-packages/eventlet/zipkin/patcher.py deleted file mode 100644 index 8e7d8ad..0000000 --- a/venv/lib/python3.6/site-packages/eventlet/zipkin/patcher.py +++ /dev/null @@ -1,41 +0,0 @@ -from eventlet.zipkin import http -from eventlet.zipkin import wsgi -from eventlet.zipkin import greenthread -from eventlet.zipkin import log -from eventlet.zipkin import api -from eventlet.zipkin.client import ZipkinClient - - -def enable_trace_patch(host='127.0.0.1', port=9410, - trace_app_log=False, sampling_rate=1.0): - """ Apply monkey patch to trace your WSGI application. - - :param host: Scribe daemon IP address (default: '127.0.0.1') - :param port: Scribe daemon port (default: 9410) - :param trace_app_log: A Boolean indicating if the tracer will trace - application log together or not. This facility assume that - your application uses python standard logging library. - (default: False) - :param sampling_rate: A Float value (0.0~1.0) that indicates - the tracing frequency. If you specify 1.0, all request - are traced (and sent to Zipkin collecotr). - If you specify 0.1, only 1/10 requests are traced. 
(default: 1.0) - """ - api.client = ZipkinClient(host, port) - - # monkey patch for adding tracing facility - wsgi.patch(sampling_rate) - http.patch() - greenthread.patch() - - # monkey patch for capturing application log - if trace_app_log: - log.patch() - - -def disable_trace_patch(): - http.unpatch() - wsgi.unpatch() - greenthread.unpatch() - log.unpatch() - api.client.close() diff --git a/venv/lib/python3.6/site-packages/eventlet/zipkin/wsgi.py b/venv/lib/python3.6/site-packages/eventlet/zipkin/wsgi.py deleted file mode 100644 index 3d52911..0000000 --- a/venv/lib/python3.6/site-packages/eventlet/zipkin/wsgi.py +++ /dev/null @@ -1,78 +0,0 @@ -import random - -from eventlet import wsgi -from eventlet.zipkin import api -from eventlet.zipkin._thrift.zipkinCore.constants import \ - SERVER_RECV, SERVER_SEND -from eventlet.zipkin.http import \ - HDR_TRACE_ID, HDR_SPAN_ID, HDR_PARENT_SPAN_ID, HDR_SAMPLED - - -_sampler = None -__original_handle_one_response__ = wsgi.HttpProtocol.handle_one_response - - -def _patched_handle_one_response(self): - api.init_trace_data() - trace_id = int_or_none(self.headers.getheader(HDR_TRACE_ID)) - span_id = int_or_none(self.headers.getheader(HDR_SPAN_ID)) - parent_id = int_or_none(self.headers.getheader(HDR_PARENT_SPAN_ID)) - sampled = bool_or_none(self.headers.getheader(HDR_SAMPLED)) - if trace_id is None: # front-end server - trace_id = span_id = api.generate_trace_id() - parent_id = None - sampled = _sampler.sampling() - ip, port = self.request.getsockname()[:2] - ep = api.ZipkinDataBuilder.build_endpoint(ip, port) - trace_data = api.TraceData(name=self.command, - trace_id=trace_id, - span_id=span_id, - parent_id=parent_id, - sampled=sampled, - endpoint=ep) - api.set_trace_data(trace_data) - api.put_annotation(SERVER_RECV) - api.put_key_value('http.uri', self.path) - - __original_handle_one_response__(self) - - if api.is_sample(): - api.put_annotation(SERVER_SEND) - - -class Sampler(object): - def __init__(self, sampling_rate): - self.sampling_rate = sampling_rate - - def sampling(self): - # avoid generating unneeded random numbers - if self.sampling_rate == 1.0: - return True - r = random.random() - if r < self.sampling_rate: - return True - return False - - -def int_or_none(val): - if val is None: - return None - return int(val, 16) - - -def bool_or_none(val): - if val == '1': - return True - if val == '0': - return False - return None - - -def patch(sampling_rate): - global _sampler - _sampler = Sampler(sampling_rate) - wsgi.HttpProtocol.handle_one_response = _patched_handle_one_response - - -def unpatch(): - wsgi.HttpProtocol.handle_one_response = __original_handle_one_response__ diff --git a/venv/lib/python3.6/site-packages/flask/__init__.py b/venv/lib/python3.6/site-packages/flask/__init__.py deleted file mode 100644 index 687475b..0000000 --- a/venv/lib/python3.6/site-packages/flask/__init__.py +++ /dev/null @@ -1,60 +0,0 @@ -# -*- coding: utf-8 -*- -""" - flask - ~~~~~ - - A microframework based on Werkzeug. It's extensively documented - and follows best practice patterns. - - :copyright: 2010 Pallets - :license: BSD-3-Clause -""" -# utilities we import from Werkzeug and Jinja2 that are unused -# in the module but are exported as public interface. -from jinja2 import escape -from jinja2 import Markup -from werkzeug.exceptions import abort -from werkzeug.utils import redirect - -from . 
import json -from ._compat import json_available -from .app import Flask -from .app import Request -from .app import Response -from .blueprints import Blueprint -from .config import Config -from .ctx import after_this_request -from .ctx import copy_current_request_context -from .ctx import has_app_context -from .ctx import has_request_context -from .globals import _app_ctx_stack -from .globals import _request_ctx_stack -from .globals import current_app -from .globals import g -from .globals import request -from .globals import session -from .helpers import flash -from .helpers import get_flashed_messages -from .helpers import get_template_attribute -from .helpers import make_response -from .helpers import safe_join -from .helpers import send_file -from .helpers import send_from_directory -from .helpers import stream_with_context -from .helpers import url_for -from .json import jsonify -from .signals import appcontext_popped -from .signals import appcontext_pushed -from .signals import appcontext_tearing_down -from .signals import before_render_template -from .signals import got_request_exception -from .signals import message_flashed -from .signals import request_finished -from .signals import request_started -from .signals import request_tearing_down -from .signals import signals_available -from .signals import template_rendered -from .templating import render_template -from .templating import render_template_string - -__version__ = "1.1.1" diff --git a/venv/lib/python3.6/site-packages/flask/__main__.py b/venv/lib/python3.6/site-packages/flask/__main__.py deleted file mode 100644 index f61dbc0..0000000 --- a/venv/lib/python3.6/site-packages/flask/__main__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -""" - flask.__main__ - ~~~~~~~~~~~~~~ - - Alias for flask.run for the command line. - - :copyright: 2010 Pallets - :license: BSD-3-Clause -""" - -if __name__ == "__main__": - from .cli import main - - main(as_module=True) diff --git a/venv/lib/python3.6/site-packages/flask/_compat.py b/venv/lib/python3.6/site-packages/flask/_compat.py deleted file mode 100644 index 76c442c..0000000 --- a/venv/lib/python3.6/site-packages/flask/_compat.py +++ /dev/null @@ -1,145 +0,0 @@ -# -*- coding: utf-8 -*- -""" - flask._compat - ~~~~~~~~~~~~~ - - Some py2/py3 compatibility support based on a stripped down - version of six so we don't have to depend on a specific version - of it. 
- - :copyright: 2010 Pallets - :license: BSD-3-Clause -""" -import sys - -PY2 = sys.version_info[0] == 2 -_identity = lambda x: x - -try: # Python 2 - text_type = unicode - string_types = (str, unicode) - integer_types = (int, long) -except NameError: # Python 3 - text_type = str - string_types = (str,) - integer_types = (int,) - -if not PY2: - iterkeys = lambda d: iter(d.keys()) - itervalues = lambda d: iter(d.values()) - iteritems = lambda d: iter(d.items()) - - from inspect import getfullargspec as getargspec - from io import StringIO - import collections.abc as collections_abc - - def reraise(tp, value, tb=None): - if value.__traceback__ is not tb: - raise value.with_traceback(tb) - raise value - - implements_to_string = _identity - -else: - iterkeys = lambda d: d.iterkeys() - itervalues = lambda d: d.itervalues() - iteritems = lambda d: d.iteritems() - - from inspect import getargspec - from cStringIO import StringIO - import collections as collections_abc - - exec("def reraise(tp, value, tb=None):\n raise tp, value, tb") - - def implements_to_string(cls): - cls.__unicode__ = cls.__str__ - cls.__str__ = lambda x: x.__unicode__().encode("utf-8") - return cls - - -def with_metaclass(meta, *bases): - """Create a base class with a metaclass.""" - # This requires a bit of explanation: the basic idea is to make a - # dummy metaclass for one level of class instantiation that replaces - # itself with the actual metaclass. - class metaclass(type): - def __new__(metacls, name, this_bases, d): - return meta(name, bases, d) - - return type.__new__(metaclass, "temporary_class", (), {}) - - -# Certain versions of pypy have a bug where clearing the exception stack -# breaks the __exit__ function in a very peculiar way. The second level of -# exception blocks is necessary because pypy seems to forget to check if an -# exception happened until the next bytecode instruction? -# -# Relevant PyPy bugfix commit: -# https://bitbucket.org/pypy/pypy/commits/77ecf91c635a287e88e60d8ddb0f4e9df4003301 -# According to ronan on #pypy IRC, it is released in PyPy2 2.3 and later -# versions. -# -# Ubuntu 14.04 has PyPy 2.2.1, which does exhibit this bug. -BROKEN_PYPY_CTXMGR_EXIT = False -if hasattr(sys, "pypy_version_info"): - - class _Mgr(object): - def __enter__(self): - return self - - def __exit__(self, *args): - if hasattr(sys, "exc_clear"): - # Python 3 (PyPy3) doesn't have exc_clear - sys.exc_clear() - - try: - try: - with _Mgr(): - raise AssertionError() - except: # noqa: B001 - # We intentionally use a bare except here. See the comment above - # regarding a pypy bug as to why. 
- raise - except TypeError: - BROKEN_PYPY_CTXMGR_EXIT = True - except AssertionError: - pass - - -try: - from os import fspath -except ImportError: - # Backwards compatibility as proposed in PEP 0519: - # https://www.python.org/dev/peps/pep-0519/#backwards-compatibility - def fspath(path): - return path.__fspath__() if hasattr(path, "__fspath__") else path - - -class _DeprecatedBool(object): - def __init__(self, name, version, value): - self.message = "'{}' is deprecated and will be removed in version {}.".format( - name, version - ) - self.value = value - - def _warn(self): - import warnings - - warnings.warn(self.message, DeprecationWarning, stacklevel=2) - - def __eq__(self, other): - self._warn() - return other == self.value - - def __ne__(self, other): - self._warn() - return other != self.value - - def __bool__(self): - self._warn() - return self.value - - __nonzero__ = __bool__ - - -json_available = _DeprecatedBool("flask.json_available", "2.0.0", True) diff --git a/venv/lib/python3.6/site-packages/flask/app.py b/venv/lib/python3.6/site-packages/flask/app.py deleted file mode 100644 index e596fe5..0000000 --- a/venv/lib/python3.6/site-packages/flask/app.py +++ /dev/null @@ -1,2466 +0,0 @@ -# -*- coding: utf-8 -*- -""" - flask.app - ~~~~~~~~~ - - This module implements the central WSGI application object. - - :copyright: 2010 Pallets - :license: BSD-3-Clause -""" -import os -import sys -import warnings -from datetime import timedelta -from functools import update_wrapper -from itertools import chain -from threading import Lock - -from werkzeug.datastructures import Headers -from werkzeug.datastructures import ImmutableDict -from werkzeug.exceptions import BadRequest -from werkzeug.exceptions import BadRequestKeyError -from werkzeug.exceptions import default_exceptions -from werkzeug.exceptions import HTTPException -from werkzeug.exceptions import InternalServerError -from werkzeug.exceptions import MethodNotAllowed -from werkzeug.routing import BuildError -from werkzeug.routing import Map -from werkzeug.routing import RequestRedirect -from werkzeug.routing import RoutingException -from werkzeug.routing import Rule -from werkzeug.wrappers import BaseResponse - -from . import cli -from . 
import json -from ._compat import integer_types -from ._compat import reraise -from ._compat import string_types -from ._compat import text_type -from .config import Config -from .config import ConfigAttribute -from .ctx import _AppCtxGlobals -from .ctx import AppContext -from .ctx import RequestContext -from .globals import _request_ctx_stack -from .globals import g -from .globals import request -from .globals import session -from .helpers import _endpoint_from_view_func -from .helpers import _PackageBoundObject -from .helpers import find_package -from .helpers import get_debug_flag -from .helpers import get_env -from .helpers import get_flashed_messages -from .helpers import get_load_dotenv -from .helpers import locked_cached_property -from .helpers import url_for -from .json import jsonify -from .logging import create_logger -from .sessions import SecureCookieSessionInterface -from .signals import appcontext_tearing_down -from .signals import got_request_exception -from .signals import request_finished -from .signals import request_started -from .signals import request_tearing_down -from .templating import _default_template_ctx_processor -from .templating import DispatchingJinjaLoader -from .templating import Environment -from .wrappers import Request -from .wrappers import Response - -# a singleton sentinel value for parameter defaults -_sentinel = object() - - -def _make_timedelta(value): - if not isinstance(value, timedelta): - return timedelta(seconds=value) - return value - - -def setupmethod(f): - """Wraps a method so that it performs a check in debug mode if the - first request was already handled. - """ - - def wrapper_func(self, *args, **kwargs): - if self.debug and self._got_first_request: - raise AssertionError( - "A setup function was called after the " - "first request was handled. This usually indicates a bug " - "in the application where a module was not imported " - "and decorators or other functionality was called too late.\n" - "To fix this make sure to import all your view modules, " - "database models and everything related at a central place " - "before the application starts serving requests." - ) - return f(self, *args, **kwargs) - - return update_wrapper(wrapper_func, f) - - -class Flask(_PackageBoundObject): - """The flask object implements a WSGI application and acts as the central - object. It is passed the name of the module or package of the - application. Once it is created it will act as a central registry for - the view functions, the URL rules, template configuration and much more. - - The name of the package is used to resolve resources from inside the - package or the folder the module is contained in depending on if the - package parameter resolves to an actual python package (a folder with - an :file:`__init__.py` file inside) or a standard module (just a ``.py`` file). - - For more information about resource loading, see :func:`open_resource`. - - Usually you create a :class:`Flask` instance in your main module or - in the :file:`__init__.py` file of your package like this:: - - from flask import Flask - app = Flask(__name__) - - .. admonition:: About the First Parameter - - The idea of the first parameter is to give Flask an idea of what - belongs to your application. This name is used to find resources - on the filesystem, can be used by extensions to improve debugging - information and a lot more. - - So it's important what you provide there. If you are using a single - module, `__name__` is always the correct value. 
If you however are - using a package, it's usually recommended to hardcode the name of - your package there. - - For example if your application is defined in :file:`yourapplication/app.py` - you should create it with one of the two versions below:: - - app = Flask('yourapplication') - app = Flask(__name__.split('.')[0]) - - Why is that? The application will work even with `__name__`, thanks - to how resources are looked up. However it will make debugging more - painful. Certain extensions can make assumptions based on the - import name of your application. For example the Flask-SQLAlchemy - extension will look for the code in your application that triggered - an SQL query in debug mode. If the import name is not properly set - up, that debugging information is lost. (For example it would only - pick up SQL queries in `yourapplication.app` and not - `yourapplication.views.frontend`) - - .. versionadded:: 0.7 - The `static_url_path`, `static_folder`, and `template_folder` - parameters were added. - - .. versionadded:: 0.8 - The `instance_path` and `instance_relative_config` parameters were - added. - - .. versionadded:: 0.11 - The `root_path` parameter was added. - - .. versionadded:: 1.0 - The ``host_matching`` and ``static_host`` parameters were added. - - .. versionadded:: 1.0 - The ``subdomain_matching`` parameter was added. Subdomain - matching needs to be enabled manually now. Setting - :data:`SERVER_NAME` does not implicitly enable it. - - :param import_name: the name of the application package - :param static_url_path: can be used to specify a different path for the - static files on the web. Defaults to the name - of the `static_folder` folder. - :param static_folder: the folder with static files that should be served - at `static_url_path`. Defaults to the ``'static'`` - folder in the root path of the application. - :param static_host: the host to use when adding the static route. - Defaults to None. Required when using ``host_matching=True`` - with a ``static_folder`` configured. - :param host_matching: set ``url_map.host_matching`` attribute. - Defaults to False. - :param subdomain_matching: consider the subdomain relative to - :data:`SERVER_NAME` when matching routes. Defaults to False. - :param template_folder: the folder that contains the templates that should - be used by the application. Defaults to - ``'templates'`` folder in the root path of the - application. - :param instance_path: An alternative instance path for the application. - By default the folder ``'instance'`` next to the - package or module is assumed to be the instance - path. - :param instance_relative_config: if set to ``True`` relative filenames - for loading the config are assumed to - be relative to the instance path instead - of the application root. - :param root_path: Flask by default will automatically calculate the path - to the root of the application. In certain situations - this cannot be achieved (for instance if the package - is a Python 3 namespace package) and needs to be - manually defined. - """ - - #: The class that is used for request objects. See :class:`~flask.Request` - #: for more information. - request_class = Request - - #: The class that is used for response objects. See - #: :class:`~flask.Response` for more information. - response_class = Response - - #: The class that is used for the Jinja environment. - #: - #: .. versionadded:: 0.11 - jinja_environment = Environment - - #: The class that is used for the :data:`~flask.g` instance. 
- #: - #: Example use cases for a custom class: - #: - #: 1. Store arbitrary attributes on flask.g. - #: 2. Add a property for lazy per-request database connectors. - #: 3. Return None instead of AttributeError on unexpected attributes. - #: 4. Raise exception if an unexpected attr is set, a "controlled" flask.g. - #: - #: In Flask 0.9 this property was called `request_globals_class` but it - #: was changed in 0.10 to :attr:`app_ctx_globals_class` because the - #: flask.g object is now application context scoped. - #: - #: .. versionadded:: 0.10 - app_ctx_globals_class = _AppCtxGlobals - - #: The class that is used for the ``config`` attribute of this app. - #: Defaults to :class:`~flask.Config`. - #: - #: Example use cases for a custom class: - #: - #: 1. Default values for certain config options. - #: 2. Access to config values through attributes in addition to keys. - #: - #: .. versionadded:: 0.11 - config_class = Config - - #: The testing flag. Set this to ``True`` to enable the test mode of - #: Flask extensions (and in the future probably also Flask itself). - #: For example this might activate test helpers that have an - #: additional runtime cost which should not be enabled by default. - #: - #: If this is enabled and PROPAGATE_EXCEPTIONS is not changed from the - #: default it's implicitly enabled. - #: - #: This attribute can also be configured from the config with the - #: ``TESTING`` configuration key. Defaults to ``False``. - testing = ConfigAttribute("TESTING") - - #: If a secret key is set, cryptographic components can use this to - #: sign cookies and other things. Set this to a complex random value - #: when you want to use the secure cookie for instance. - #: - #: This attribute can also be configured from the config with the - #: :data:`SECRET_KEY` configuration key. Defaults to ``None``. - secret_key = ConfigAttribute("SECRET_KEY") - - #: The secure cookie uses this for the name of the session cookie. - #: - #: This attribute can also be configured from the config with the - #: ``SESSION_COOKIE_NAME`` configuration key. Defaults to ``'session'`` - session_cookie_name = ConfigAttribute("SESSION_COOKIE_NAME") - - #: A :class:`~datetime.timedelta` which is used to set the expiration - #: date of a permanent session. The default is 31 days which makes a - #: permanent session survive for roughly one month. - #: - #: This attribute can also be configured from the config with the - #: ``PERMANENT_SESSION_LIFETIME`` configuration key. Defaults to - #: ``timedelta(days=31)`` - permanent_session_lifetime = ConfigAttribute( - "PERMANENT_SESSION_LIFETIME", get_converter=_make_timedelta - ) - - #: A :class:`~datetime.timedelta` which is used as default cache_timeout - #: for the :func:`send_file` functions. The default is 12 hours. - #: - #: This attribute can also be configured from the config with the - #: ``SEND_FILE_MAX_AGE_DEFAULT`` configuration key. This configuration - #: variable can also be set with an integer value used as seconds. - #: Defaults to ``timedelta(hours=12)`` - send_file_max_age_default = ConfigAttribute( - "SEND_FILE_MAX_AGE_DEFAULT", get_converter=_make_timedelta - ) - - #: Enable this if you want to use the X-Sendfile feature. Keep in - #: mind that the server has to support this. This only affects files - #: sent with the :func:`send_file` method. - #: - #: .. versionadded:: 0.2 - #: - #: This attribute can also be configured from the config with the - #: ``USE_X_SENDFILE`` configuration key. Defaults to ``False``. 
- use_x_sendfile = ConfigAttribute("USE_X_SENDFILE") - - #: The JSON encoder class to use. Defaults to :class:`~flask.json.JSONEncoder`. - #: - #: .. versionadded:: 0.10 - json_encoder = json.JSONEncoder - - #: The JSON decoder class to use. Defaults to :class:`~flask.json.JSONDecoder`. - #: - #: .. versionadded:: 0.10 - json_decoder = json.JSONDecoder - - #: Options that are passed to the Jinja environment in - #: :meth:`create_jinja_environment`. Changing these options after - #: the environment is created (accessing :attr:`jinja_env`) will - #: have no effect. - #: - #: .. versionchanged:: 1.1.0 - #: This is a ``dict`` instead of an ``ImmutableDict`` to allow - #: easier configuration. - #: - jinja_options = {"extensions": ["jinja2.ext.autoescape", "jinja2.ext.with_"]} - - #: Default configuration parameters. - default_config = ImmutableDict( - { - "ENV": None, - "DEBUG": None, - "TESTING": False, - "PROPAGATE_EXCEPTIONS": None, - "PRESERVE_CONTEXT_ON_EXCEPTION": None, - "SECRET_KEY": None, - "PERMANENT_SESSION_LIFETIME": timedelta(days=31), - "USE_X_SENDFILE": False, - "SERVER_NAME": None, - "APPLICATION_ROOT": "/", - "SESSION_COOKIE_NAME": "session", - "SESSION_COOKIE_DOMAIN": None, - "SESSION_COOKIE_PATH": None, - "SESSION_COOKIE_HTTPONLY": True, - "SESSION_COOKIE_SECURE": False, - "SESSION_COOKIE_SAMESITE": None, - "SESSION_REFRESH_EACH_REQUEST": True, - "MAX_CONTENT_LENGTH": None, - "SEND_FILE_MAX_AGE_DEFAULT": timedelta(hours=12), - "TRAP_BAD_REQUEST_ERRORS": None, - "TRAP_HTTP_EXCEPTIONS": False, - "EXPLAIN_TEMPLATE_LOADING": False, - "PREFERRED_URL_SCHEME": "http", - "JSON_AS_ASCII": True, - "JSON_SORT_KEYS": True, - "JSONIFY_PRETTYPRINT_REGULAR": False, - "JSONIFY_MIMETYPE": "application/json", - "TEMPLATES_AUTO_RELOAD": None, - "MAX_COOKIE_SIZE": 4093, - } - ) - - #: The rule object to use for URL rules created. This is used by - #: :meth:`add_url_rule`. Defaults to :class:`werkzeug.routing.Rule`. - #: - #: .. versionadded:: 0.7 - url_rule_class = Rule - - #: The map object to use for storing the URL rules and routing - #: configuration parameters. Defaults to :class:`werkzeug.routing.Map`. - #: - #: .. versionadded:: 1.1.0 - url_map_class = Map - - #: the test client that is used with when `test_client` is used. - #: - #: .. versionadded:: 0.7 - test_client_class = None - - #: The :class:`~click.testing.CliRunner` subclass, by default - #: :class:`~flask.testing.FlaskCliRunner` that is used by - #: :meth:`test_cli_runner`. Its ``__init__`` method should take a - #: Flask app object as the first argument. - #: - #: .. versionadded:: 1.0 - test_cli_runner_class = None - - #: the session interface to use. By default an instance of - #: :class:`~flask.sessions.SecureCookieSessionInterface` is used here. - #: - #: .. versionadded:: 0.8 - session_interface = SecureCookieSessionInterface() - - # TODO remove the next three attrs when Sphinx :inherited-members: works - # https://github.com/sphinx-doc/sphinx/issues/741 - - #: The name of the package or module that this app belongs to. Do not - #: change this once it is set by the constructor. - import_name = None - - #: Location of the template files to be added to the template lookup. - #: ``None`` if templates should not be added. - template_folder = None - - #: Absolute path to the package on the filesystem. Used to look up - #: resources contained in the package. 
- root_path = None - - def __init__( - self, - import_name, - static_url_path=None, - static_folder="static", - static_host=None, - host_matching=False, - subdomain_matching=False, - template_folder="templates", - instance_path=None, - instance_relative_config=False, - root_path=None, - ): - _PackageBoundObject.__init__( - self, import_name, template_folder=template_folder, root_path=root_path - ) - - self.static_url_path = static_url_path - self.static_folder = static_folder - - if instance_path is None: - instance_path = self.auto_find_instance_path() - elif not os.path.isabs(instance_path): - raise ValueError( - "If an instance path is provided it must be absolute." - " A relative path was given instead." - ) - - #: Holds the path to the instance folder. - #: - #: .. versionadded:: 0.8 - self.instance_path = instance_path - - #: The configuration dictionary as :class:`Config`. This behaves - #: exactly like a regular dictionary but supports additional methods - #: to load a config from files. - self.config = self.make_config(instance_relative_config) - - #: A dictionary of all view functions registered. The keys will - #: be function names which are also used to generate URLs and - #: the values are the function objects themselves. - #: To register a view function, use the :meth:`route` decorator. - self.view_functions = {} - - #: A dictionary of all registered error handlers. The key is ``None`` - #: for error handlers active on the application, otherwise the key is - #: the name of the blueprint. Each key points to another dictionary - #: where the key is the status code of the http exception. The - #: special key ``None`` points to a list of tuples where the first item - #: is the class for the instance check and the second the error handler - #: function. - #: - #: To register an error handler, use the :meth:`errorhandler` - #: decorator. - self.error_handler_spec = {} - - #: A list of functions that are called when :meth:`url_for` raises a - #: :exc:`~werkzeug.routing.BuildError`. Each function registered here - #: is called with `error`, `endpoint` and `values`. If a function - #: returns ``None`` or raises a :exc:`BuildError` the next function is - #: tried. - #: - #: .. versionadded:: 0.9 - self.url_build_error_handlers = [] - - #: A dictionary with lists of functions that will be called at the - #: beginning of each request. The key of the dictionary is the name of - #: the blueprint this function is active for, or ``None`` for all - #: requests. To register a function, use the :meth:`before_request` - #: decorator. - self.before_request_funcs = {} - - #: A list of functions that will be called at the beginning of the - #: first request to this instance. To register a function, use the - #: :meth:`before_first_request` decorator. - #: - #: .. versionadded:: 0.8 - self.before_first_request_funcs = [] - - #: A dictionary with lists of functions that should be called after - #: each request. The key of the dictionary is the name of the blueprint - #: this function is active for, ``None`` for all requests. This can for - #: example be used to close database connections. To register a function - #: here, use the :meth:`after_request` decorator. - self.after_request_funcs = {} - - #: A dictionary with lists of functions that are called after - #: each request, even if an exception has occurred. The key of the - #: dictionary is the name of the blueprint this function is active for, - #: ``None`` for all requests. 
These functions are not allowed to modify - #: the request, and their return values are ignored. If an exception - #: occurred while processing the request, it gets passed to each - #: teardown_request function. To register a function here, use the - #: :meth:`teardown_request` decorator. - #: - #: .. versionadded:: 0.7 - self.teardown_request_funcs = {} - - #: A list of functions that are called when the application context - #: is destroyed. Since the application context is also torn down - #: if the request ends this is the place to store code that disconnects - #: from databases. - #: - #: .. versionadded:: 0.9 - self.teardown_appcontext_funcs = [] - - #: A dictionary with lists of functions that are called before the - #: :attr:`before_request_funcs` functions. The key of the dictionary is - #: the name of the blueprint this function is active for, or ``None`` - #: for all requests. To register a function, use - #: :meth:`url_value_preprocessor`. - #: - #: .. versionadded:: 0.7 - self.url_value_preprocessors = {} - - #: A dictionary with lists of functions that can be used as URL value - #: preprocessors. The key ``None`` here is used for application wide - #: callbacks, otherwise the key is the name of the blueprint. - #: Each of these functions has the chance to modify the dictionary - #: of URL values before they are used as the keyword arguments of the - #: view function. For each function registered this one should also - #: provide a :meth:`url_defaults` function that adds the parameters - #: automatically again that were removed that way. - #: - #: .. versionadded:: 0.7 - self.url_default_functions = {} - - #: A dictionary with list of functions that are called without argument - #: to populate the template context. The key of the dictionary is the - #: name of the blueprint this function is active for, ``None`` for all - #: requests. Each returns a dictionary that the template context is - #: updated with. To register a function here, use the - #: :meth:`context_processor` decorator. - self.template_context_processors = {None: [_default_template_ctx_processor]} - - #: A list of shell context processor functions that should be run - #: when a shell context is created. - #: - #: .. versionadded:: 0.11 - self.shell_context_processors = [] - - #: all the attached blueprints in a dictionary by name. Blueprints - #: can be attached multiple times so this dictionary does not tell - #: you how often they got attached. - #: - #: .. versionadded:: 0.7 - self.blueprints = {} - self._blueprint_order = [] - - #: a place where extensions can store application specific state. For - #: example this is where an extension could store database engines and - #: similar things. For backwards compatibility extensions should register - #: themselves like this:: - #: - #: if not hasattr(app, 'extensions'): - #: app.extensions = {} - #: app.extensions['extensionname'] = SomeObject() - #: - #: The key must match the name of the extension module. For example in - #: case of a "Flask-Foo" extension in `flask_foo`, the key would be - #: ``'foo'``. - #: - #: .. versionadded:: 0.7 - self.extensions = {} - - #: The :class:`~werkzeug.routing.Map` for this instance. You can use - #: this to change the routing converters after the class was created - #: but before any routes are connected. 
Example:: - #: - #: from werkzeug.routing import BaseConverter - #: - #: class ListConverter(BaseConverter): - #: def to_python(self, value): - #: return value.split(',') - #: def to_url(self, values): - #: return ','.join(super(ListConverter, self).to_url(value) - #: for value in values) - #: - #: app = Flask(__name__) - #: app.url_map.converters['list'] = ListConverter - self.url_map = self.url_map_class() - - self.url_map.host_matching = host_matching - self.subdomain_matching = subdomain_matching - - # tracks internally if the application already handled at least one - # request. - self._got_first_request = False - self._before_request_lock = Lock() - - # Add a static route using the provided static_url_path, static_host, - # and static_folder if there is a configured static_folder. - # Note we do this without checking if static_folder exists. - # For one, it might be created while the server is running (e.g. during - # development). Also, Google App Engine stores static files somewhere - if self.has_static_folder: - assert ( - bool(static_host) == host_matching - ), "Invalid static_host/host_matching combination" - self.add_url_rule( - self.static_url_path + "/", - endpoint="static", - host=static_host, - view_func=self.send_static_file, - ) - - # Set the name of the Click group in case someone wants to add - # the app's commands to another CLI tool. - self.cli.name = self.name - - @locked_cached_property - def name(self): - """The name of the application. This is usually the import name - with the difference that it's guessed from the run file if the - import name is main. This name is used as a display name when - Flask needs the name of the application. It can be set and overridden - to change the value. - - .. versionadded:: 0.8 - """ - if self.import_name == "__main__": - fn = getattr(sys.modules["__main__"], "__file__", None) - if fn is None: - return "__main__" - return os.path.splitext(os.path.basename(fn))[0] - return self.import_name - - @property - def propagate_exceptions(self): - """Returns the value of the ``PROPAGATE_EXCEPTIONS`` configuration - value in case it's set, otherwise a sensible default is returned. - - .. versionadded:: 0.7 - """ - rv = self.config["PROPAGATE_EXCEPTIONS"] - if rv is not None: - return rv - return self.testing or self.debug - - @property - def preserve_context_on_exception(self): - """Returns the value of the ``PRESERVE_CONTEXT_ON_EXCEPTION`` - configuration value in case it's set, otherwise a sensible default - is returned. - - .. versionadded:: 0.7 - """ - rv = self.config["PRESERVE_CONTEXT_ON_EXCEPTION"] - if rv is not None: - return rv - return self.debug - - @locked_cached_property - def logger(self): - """A standard Python :class:`~logging.Logger` for the app, with - the same name as :attr:`name`. - - In debug mode, the logger's :attr:`~logging.Logger.level` will - be set to :data:`~logging.DEBUG`. - - If there are no handlers configured, a default handler will be - added. See :doc:`/logging` for more information. - - .. versionchanged:: 1.1.0 - The logger takes the same name as :attr:`name` rather than - hard-coding ``"flask.app"``. - - .. versionchanged:: 1.0.0 - Behavior was simplified. The logger is always named - ``"flask.app"``. The level is only set during configuration, - it doesn't check ``app.debug`` each time. Only one format is - used, not different ones depending on ``app.debug``. No - handlers are removed, and a handler is only added if no - handlers are already configured. - - .. 
versionadded:: 0.3 - """ - return create_logger(self) - - @locked_cached_property - def jinja_env(self): - """The Jinja environment used to load templates. - - The environment is created the first time this property is - accessed. Changing :attr:`jinja_options` after that will have no - effect. - """ - return self.create_jinja_environment() - - @property - def got_first_request(self): - """This attribute is set to ``True`` if the application started - handling the first request. - - .. versionadded:: 0.8 - """ - return self._got_first_request - - def make_config(self, instance_relative=False): - """Used to create the config attribute by the Flask constructor. - The `instance_relative` parameter is passed in from the constructor - of Flask (there named `instance_relative_config`) and indicates if - the config should be relative to the instance path or the root path - of the application. - - .. versionadded:: 0.8 - """ - root_path = self.root_path - if instance_relative: - root_path = self.instance_path - defaults = dict(self.default_config) - defaults["ENV"] = get_env() - defaults["DEBUG"] = get_debug_flag() - return self.config_class(root_path, defaults) - - def auto_find_instance_path(self): - """Tries to locate the instance path if it was not provided to the - constructor of the application class. It will basically calculate - the path to a folder named ``instance`` next to your main file or - the package. - - .. versionadded:: 0.8 - """ - prefix, package_path = find_package(self.import_name) - if prefix is None: - return os.path.join(package_path, "instance") - return os.path.join(prefix, "var", self.name + "-instance") - - def open_instance_resource(self, resource, mode="rb"): - """Opens a resource from the application's instance folder - (:attr:`instance_path`). Otherwise works like - :meth:`open_resource`. Instance resources can also be opened for - writing. - - :param resource: the name of the resource. To access resources within - subfolders use forward slashes as separator. - :param mode: resource file opening mode, default is 'rb'. - """ - return open(os.path.join(self.instance_path, resource), mode) - - @property - def templates_auto_reload(self): - """Reload templates when they are changed. Used by - :meth:`create_jinja_environment`. - - This attribute can be configured with :data:`TEMPLATES_AUTO_RELOAD`. If - not set, it will be enabled in debug mode. - - .. versionadded:: 1.0 - This property was added but the underlying config and behavior - already existed. - """ - rv = self.config["TEMPLATES_AUTO_RELOAD"] - return rv if rv is not None else self.debug - - @templates_auto_reload.setter - def templates_auto_reload(self, value): - self.config["TEMPLATES_AUTO_RELOAD"] = value - - def create_jinja_environment(self): - """Create the Jinja environment based on :attr:`jinja_options` - and the various Jinja-related methods of the app. Changing - :attr:`jinja_options` after this will have no effect. Also adds - Flask-related globals and filters to the environment. - - .. versionchanged:: 0.11 - ``Environment.auto_reload`` set in accordance with - ``TEMPLATES_AUTO_RELOAD`` configuration option. - - .. 
versionadded:: 0.5 - """ - options = dict(self.jinja_options) - - if "autoescape" not in options: - options["autoescape"] = self.select_jinja_autoescape - - if "auto_reload" not in options: - options["auto_reload"] = self.templates_auto_reload - - rv = self.jinja_environment(self, **options) - rv.globals.update( - url_for=url_for, - get_flashed_messages=get_flashed_messages, - config=self.config, - # request, session and g are normally added with the - # context processor for efficiency reasons but for imported - # templates we also want the proxies in there. - request=request, - session=session, - g=g, - ) - rv.filters["tojson"] = json.tojson_filter - return rv - - def create_global_jinja_loader(self): - """Creates the loader for the Jinja2 environment. Can be used to - override just the loader and keeping the rest unchanged. It's - discouraged to override this function. Instead one should override - the :meth:`jinja_loader` function instead. - - The global loader dispatches between the loaders of the application - and the individual blueprints. - - .. versionadded:: 0.7 - """ - return DispatchingJinjaLoader(self) - - def select_jinja_autoescape(self, filename): - """Returns ``True`` if autoescaping should be active for the given - template name. If no template name is given, returns `True`. - - .. versionadded:: 0.5 - """ - if filename is None: - return True - return filename.endswith((".html", ".htm", ".xml", ".xhtml")) - - def update_template_context(self, context): - """Update the template context with some commonly used variables. - This injects request, session, config and g into the template - context as well as everything template context processors want - to inject. Note that the as of Flask 0.6, the original values - in the context will not be overridden if a context processor - decides to return a value with the same key. - - :param context: the context as a dictionary that is updated in place - to add extra variables. - """ - funcs = self.template_context_processors[None] - reqctx = _request_ctx_stack.top - if reqctx is not None: - bp = reqctx.request.blueprint - if bp is not None and bp in self.template_context_processors: - funcs = chain(funcs, self.template_context_processors[bp]) - orig_ctx = context.copy() - for func in funcs: - context.update(func()) - # make sure the original values win. This makes it possible to - # easier add new variables in context processors without breaking - # existing views. - context.update(orig_ctx) - - def make_shell_context(self): - """Returns the shell context for an interactive shell for this - application. This runs all the registered shell context - processors. - - .. versionadded:: 0.11 - """ - rv = {"app": self, "g": g} - for processor in self.shell_context_processors: - rv.update(processor()) - return rv - - #: What environment the app is running in. Flask and extensions may - #: enable behaviors based on the environment, such as enabling debug - #: mode. This maps to the :data:`ENV` config key. This is set by the - #: :envvar:`FLASK_ENV` environment variable and may not behave as - #: expected if set in code. - #: - #: **Do not enable development when deploying in production.** - #: - #: Default: ``'production'`` - env = ConfigAttribute("ENV") - - @property - def debug(self): - """Whether debug mode is enabled. When using ``flask run`` to start - the development server, an interactive debugger will be shown for - unhandled exceptions, and the server will be reloaded when code - changes. This maps to the :data:`DEBUG` config key. 
This is - enabled when :attr:`env` is ``'development'`` and is overridden - by the ``FLASK_DEBUG`` environment variable. It may not behave as - expected if set in code. - - **Do not enable debug mode when deploying in production.** - - Default: ``True`` if :attr:`env` is ``'development'``, or - ``False`` otherwise. - """ - return self.config["DEBUG"] - - @debug.setter - def debug(self, value): - self.config["DEBUG"] = value - self.jinja_env.auto_reload = self.templates_auto_reload - - def run(self, host=None, port=None, debug=None, load_dotenv=True, **options): - """Runs the application on a local development server. - - Do not use ``run()`` in a production setting. It is not intended to - meet security and performance requirements for a production server. - Instead, see :ref:`deployment` for WSGI server recommendations. - - If the :attr:`debug` flag is set the server will automatically reload - for code changes and show a debugger in case an exception happened. - - If you want to run the application in debug mode, but disable the - code execution on the interactive debugger, you can pass - ``use_evalex=False`` as parameter. This will keep the debugger's - traceback screen active, but disable code execution. - - It is not recommended to use this function for development with - automatic reloading as this is badly supported. Instead you should - be using the :command:`flask` command line script's ``run`` support. - - .. admonition:: Keep in Mind - - Flask will suppress any server error with a generic error page - unless it is in debug mode. As such to enable just the - interactive debugger without the code reloading, you have to - invoke :meth:`run` with ``debug=True`` and ``use_reloader=False``. - Setting ``use_debugger`` to ``True`` without being in debug mode - won't catch any exceptions because there won't be any to - catch. - - :param host: the hostname to listen on. Set this to ``'0.0.0.0'`` to - have the server available externally as well. Defaults to - ``'127.0.0.1'`` or the host in the ``SERVER_NAME`` config variable - if present. - :param port: the port of the webserver. Defaults to ``5000`` or the - port defined in the ``SERVER_NAME`` config variable if present. - :param debug: if given, enable or disable debug mode. See - :attr:`debug`. - :param load_dotenv: Load the nearest :file:`.env` and :file:`.flaskenv` - files to set environment variables. Will also change the working - directory to the directory containing the first file found. - :param options: the options to be forwarded to the underlying Werkzeug - server. See :func:`werkzeug.serving.run_simple` for more - information. - - .. versionchanged:: 1.0 - If installed, python-dotenv will be used to load environment - variables from :file:`.env` and :file:`.flaskenv` files. - - If set, the :envvar:`FLASK_ENV` and :envvar:`FLASK_DEBUG` - environment variables will override :attr:`env` and - :attr:`debug`. - - Threaded mode is enabled by default. - - .. versionchanged:: 0.10 - The default port is now picked from the ``SERVER_NAME`` - variable. - """ - # Change this into a no-op if the server is invoked from the - # command line. Have a look at cli.py for more information. 
- if os.environ.get("FLASK_RUN_FROM_CLI") == "true": - from .debughelpers import explain_ignored_app_run - - explain_ignored_app_run() - return - - if get_load_dotenv(load_dotenv): - cli.load_dotenv() - - # if set, let env vars override previous values - if "FLASK_ENV" in os.environ: - self.env = get_env() - self.debug = get_debug_flag() - elif "FLASK_DEBUG" in os.environ: - self.debug = get_debug_flag() - - # debug passed to method overrides all other sources - if debug is not None: - self.debug = bool(debug) - - _host = "127.0.0.1" - _port = 5000 - server_name = self.config.get("SERVER_NAME") - sn_host, sn_port = None, None - - if server_name: - sn_host, _, sn_port = server_name.partition(":") - - host = host or sn_host or _host - # pick the first value that's not None (0 is allowed) - port = int(next((p for p in (port, sn_port) if p is not None), _port)) - - options.setdefault("use_reloader", self.debug) - options.setdefault("use_debugger", self.debug) - options.setdefault("threaded", True) - - cli.show_server_banner(self.env, self.debug, self.name, False) - - from werkzeug.serving import run_simple - - try: - run_simple(host, port, self, **options) - finally: - # reset the first request information if the development server - # reset normally. This makes it possible to restart the server - # without reloader and that stuff from an interactive shell. - self._got_first_request = False - - def test_client(self, use_cookies=True, **kwargs): - """Creates a test client for this application. For information - about unit testing head over to :ref:`testing`. - - Note that if you are testing for assertions or exceptions in your - application code, you must set ``app.testing = True`` in order for the - exceptions to propagate to the test client. Otherwise, the exception - will be handled by the application (not visible to the test client) and - the only indication of an AssertionError or other exception will be a - 500 status code response to the test client. See the :attr:`testing` - attribute. For example:: - - app.testing = True - client = app.test_client() - - The test client can be used in a ``with`` block to defer the closing down - of the context until the end of the ``with`` block. This is useful if - you want to access the context locals for testing:: - - with app.test_client() as c: - rv = c.get('/?vodka=42') - assert request.args['vodka'] == '42' - - Additionally, you may pass optional keyword arguments that will then - be passed to the application's :attr:`test_client_class` constructor. - For example:: - - from flask.testing import FlaskClient - - class CustomClient(FlaskClient): - def __init__(self, *args, **kwargs): - self._authentication = kwargs.pop("authentication") - super(CustomClient,self).__init__( *args, **kwargs) - - app.test_client_class = CustomClient - client = app.test_client(authentication='Basic ....') - - See :class:`~flask.testing.FlaskClient` for more information. - - .. versionchanged:: 0.4 - added support for ``with`` block usage for the client. - - .. versionadded:: 0.7 - The `use_cookies` parameter was added as well as the ability - to override the client to be used by setting the - :attr:`test_client_class` attribute. - - .. versionchanged:: 0.11 - Added `**kwargs` to support passing additional keyword arguments to - the constructor of :attr:`test_client_class`. 
- """ - cls = self.test_client_class - if cls is None: - from .testing import FlaskClient as cls - return cls(self, self.response_class, use_cookies=use_cookies, **kwargs) - - def test_cli_runner(self, **kwargs): - """Create a CLI runner for testing CLI commands. - See :ref:`testing-cli`. - - Returns an instance of :attr:`test_cli_runner_class`, by default - :class:`~flask.testing.FlaskCliRunner`. The Flask app object is - passed as the first argument. - - .. versionadded:: 1.0 - """ - cls = self.test_cli_runner_class - - if cls is None: - from .testing import FlaskCliRunner as cls - - return cls(self, **kwargs) - - def open_session(self, request): - """Creates or opens a new session. Default implementation stores all - session data in a signed cookie. This requires that the - :attr:`secret_key` is set. Instead of overriding this method - we recommend replacing the :class:`session_interface`. - - .. deprecated: 1.0 - Will be removed in 1.1. Use ``session_interface.open_session`` - instead. - - :param request: an instance of :attr:`request_class`. - """ - - warnings.warn( - DeprecationWarning( - '"open_session" is deprecated and will be removed in 1.1. Use' - ' "session_interface.open_session" instead.' - ) - ) - return self.session_interface.open_session(self, request) - - def save_session(self, session, response): - """Saves the session if it needs updates. For the default - implementation, check :meth:`open_session`. Instead of overriding this - method we recommend replacing the :class:`session_interface`. - - .. deprecated: 1.0 - Will be removed in 1.1. Use ``session_interface.save_session`` - instead. - - :param session: the session to be saved (a - :class:`~werkzeug.contrib.securecookie.SecureCookie` - object) - :param response: an instance of :attr:`response_class` - """ - - warnings.warn( - DeprecationWarning( - '"save_session" is deprecated and will be removed in 1.1. Use' - ' "session_interface.save_session" instead.' - ) - ) - return self.session_interface.save_session(self, session, response) - - def make_null_session(self): - """Creates a new instance of a missing session. Instead of overriding - this method we recommend replacing the :class:`session_interface`. - - .. deprecated: 1.0 - Will be removed in 1.1. Use ``session_interface.make_null_session`` - instead. - - .. versionadded:: 0.7 - """ - - warnings.warn( - DeprecationWarning( - '"make_null_session" is deprecated and will be removed in 1.1. Use' - ' "session_interface.make_null_session" instead.' - ) - ) - return self.session_interface.make_null_session(self) - - @setupmethod - def register_blueprint(self, blueprint, **options): - """Register a :class:`~flask.Blueprint` on the application. Keyword - arguments passed to this method will override the defaults set on the - blueprint. - - Calls the blueprint's :meth:`~flask.Blueprint.register` method after - recording the blueprint in the application's :attr:`blueprints`. - - :param blueprint: The blueprint to register. - :param url_prefix: Blueprint routes will be prefixed with this. - :param subdomain: Blueprint routes will match on this subdomain. - :param url_defaults: Blueprint routes will use these default values for - view arguments. - :param options: Additional keyword arguments are passed to - :class:`~flask.blueprints.BlueprintSetupState`. They can be - accessed in :meth:`~flask.Blueprint.record` callbacks. - - .. 
versionadded:: 0.7 - """ - first_registration = False - - if blueprint.name in self.blueprints: - assert self.blueprints[blueprint.name] is blueprint, ( - "A name collision occurred between blueprints %r and %r. Both" - ' share the same name "%s". Blueprints that are created on the' - " fly need unique names." - % (blueprint, self.blueprints[blueprint.name], blueprint.name) - ) - else: - self.blueprints[blueprint.name] = blueprint - self._blueprint_order.append(blueprint) - first_registration = True - - blueprint.register(self, options, first_registration) - - def iter_blueprints(self): - """Iterates over all blueprints by the order they were registered. - - .. versionadded:: 0.11 - """ - return iter(self._blueprint_order) - - @setupmethod - def add_url_rule( - self, - rule, - endpoint=None, - view_func=None, - provide_automatic_options=None, - **options - ): - """Connects a URL rule. Works exactly like the :meth:`route` - decorator. If a view_func is provided it will be registered with the - endpoint. - - Basically this example:: - - @app.route('/') - def index(): - pass - - Is equivalent to the following:: - - def index(): - pass - app.add_url_rule('/', 'index', index) - - If the view_func is not provided you will need to connect the endpoint - to a view function like so:: - - app.view_functions['index'] = index - - Internally :meth:`route` invokes :meth:`add_url_rule` so if you want - to customize the behavior via subclassing you only need to change - this method. - - For more information refer to :ref:`url-route-registrations`. - - .. versionchanged:: 0.2 - `view_func` parameter added. - - .. versionchanged:: 0.6 - ``OPTIONS`` is added automatically as method. - - :param rule: the URL rule as string - :param endpoint: the endpoint for the registered URL rule. Flask - itself assumes the name of the view function as - endpoint - :param view_func: the function to call when serving a request to the - provided endpoint - :param provide_automatic_options: controls whether the ``OPTIONS`` - method should be added automatically. This can also be controlled - by setting the ``view_func.provide_automatic_options = False`` - before adding the rule. - :param options: the options to be forwarded to the underlying - :class:`~werkzeug.routing.Rule` object. A change - to Werkzeug is handling of method options. methods - is a list of methods this rule should be limited - to (``GET``, ``POST`` etc.). By default a rule - just listens for ``GET`` (and implicitly ``HEAD``). - Starting with Flask 0.6, ``OPTIONS`` is implicitly - added and handled by the standard request handling. - """ - if endpoint is None: - endpoint = _endpoint_from_view_func(view_func) - options["endpoint"] = endpoint - methods = options.pop("methods", None) - - # if the methods are not given and the view_func object knows its - # methods we can use that instead. If neither exists, we go with - # a tuple of only ``GET`` as default. - if methods is None: - methods = getattr(view_func, "methods", None) or ("GET",) - if isinstance(methods, string_types): - raise TypeError( - "Allowed methods have to be iterables of strings, " - 'for example: @app.route(..., methods=["POST"])' - ) - methods = set(item.upper() for item in methods) - - # Methods that should always be added - required_methods = set(getattr(view_func, "required_methods", ())) - - # starting with Flask 0.8 the view_func object can disable and - # force-enable the automatic options handling. 
- if provide_automatic_options is None: - provide_automatic_options = getattr( - view_func, "provide_automatic_options", None - ) - - if provide_automatic_options is None: - if "OPTIONS" not in methods: - provide_automatic_options = True - required_methods.add("OPTIONS") - else: - provide_automatic_options = False - - # Add the required methods now. - methods |= required_methods - - rule = self.url_rule_class(rule, methods=methods, **options) - rule.provide_automatic_options = provide_automatic_options - - self.url_map.add(rule) - if view_func is not None: - old_func = self.view_functions.get(endpoint) - if old_func is not None and old_func != view_func: - raise AssertionError( - "View function mapping is overwriting an " - "existing endpoint function: %s" % endpoint - ) - self.view_functions[endpoint] = view_func - - def route(self, rule, **options): - """A decorator that is used to register a view function for a - given URL rule. This does the same thing as :meth:`add_url_rule` - but is intended for decorator usage:: - - @app.route('/') - def index(): - return 'Hello World' - - For more information refer to :ref:`url-route-registrations`. - - :param rule: the URL rule as string - :param endpoint: the endpoint for the registered URL rule. Flask - itself assumes the name of the view function as - endpoint - :param options: the options to be forwarded to the underlying - :class:`~werkzeug.routing.Rule` object. A change - to Werkzeug is handling of method options. methods - is a list of methods this rule should be limited - to (``GET``, ``POST`` etc.). By default a rule - just listens for ``GET`` (and implicitly ``HEAD``). - Starting with Flask 0.6, ``OPTIONS`` is implicitly - added and handled by the standard request handling. - """ - - def decorator(f): - endpoint = options.pop("endpoint", None) - self.add_url_rule(rule, endpoint, f, **options) - return f - - return decorator - - @setupmethod - def endpoint(self, endpoint): - """A decorator to register a function as an endpoint. - Example:: - - @app.endpoint('example.endpoint') - def example(): - return "example" - - :param endpoint: the name of the endpoint - """ - - def decorator(f): - self.view_functions[endpoint] = f - return f - - return decorator - - @staticmethod - def _get_exc_class_and_code(exc_class_or_code): - """Get the exception class being handled. For HTTP status codes - or ``HTTPException`` subclasses, return both the exception and - status code. - - :param exc_class_or_code: Any exception class, or an HTTP status - code as an integer. - """ - if isinstance(exc_class_or_code, integer_types): - exc_class = default_exceptions[exc_class_or_code] - else: - exc_class = exc_class_or_code - - assert issubclass(exc_class, Exception) - - if issubclass(exc_class, HTTPException): - return exc_class, exc_class.code - else: - return exc_class, None - - @setupmethod - def errorhandler(self, code_or_exception): - """Register a function to handle errors by code or exception class. - - A decorator that is used to register a function given an - error code. Example:: - - @app.errorhandler(404) - def page_not_found(error): - return 'This page does not exist', 404 - - You can also register handlers for arbitrary exceptions:: - - @app.errorhandler(DatabaseError) - def special_exception_handler(error): - return 'Database connection failed', 500 - - .. versionadded:: 0.7 - Use :meth:`register_error_handler` instead of modifying - :attr:`error_handler_spec` directly, for application wide error - handlers. - - .. 
versionadded:: 0.7 - One can now additionally also register custom exception types - that do not necessarily have to be a subclass of the - :class:`~werkzeug.exceptions.HTTPException` class. - - :param code_or_exception: the code as integer for the handler, or - an arbitrary exception - """ - - def decorator(f): - self._register_error_handler(None, code_or_exception, f) - return f - - return decorator - - @setupmethod - def register_error_handler(self, code_or_exception, f): - """Alternative error attach function to the :meth:`errorhandler` - decorator that is more straightforward to use for non decorator - usage. - - .. versionadded:: 0.7 - """ - self._register_error_handler(None, code_or_exception, f) - - @setupmethod - def _register_error_handler(self, key, code_or_exception, f): - """ - :type key: None|str - :type code_or_exception: int|T<=Exception - :type f: callable - """ - if isinstance(code_or_exception, HTTPException): # old broken behavior - raise ValueError( - "Tried to register a handler for an exception instance {0!r}." - " Handlers can only be registered for exception classes or" - " HTTP error codes.".format(code_or_exception) - ) - - try: - exc_class, code = self._get_exc_class_and_code(code_or_exception) - except KeyError: - raise KeyError( - "'{0}' is not a recognized HTTP error code. Use a subclass of" - " HTTPException with that code instead.".format(code_or_exception) - ) - - handlers = self.error_handler_spec.setdefault(key, {}).setdefault(code, {}) - handlers[exc_class] = f - - @setupmethod - def template_filter(self, name=None): - """A decorator that is used to register custom template filter. - You can specify a name for the filter, otherwise the function - name will be used. Example:: - - @app.template_filter() - def reverse(s): - return s[::-1] - - :param name: the optional name of the filter, otherwise the - function name will be used. - """ - - def decorator(f): - self.add_template_filter(f, name=name) - return f - - return decorator - - @setupmethod - def add_template_filter(self, f, name=None): - """Register a custom template filter. Works exactly like the - :meth:`template_filter` decorator. - - :param name: the optional name of the filter, otherwise the - function name will be used. - """ - self.jinja_env.filters[name or f.__name__] = f - - @setupmethod - def template_test(self, name=None): - """A decorator that is used to register custom template test. - You can specify a name for the test, otherwise the function - name will be used. Example:: - - @app.template_test() - def is_prime(n): - if n == 2: - return True - for i in range(2, int(math.ceil(math.sqrt(n))) + 1): - if n % i == 0: - return False - return True - - .. versionadded:: 0.10 - - :param name: the optional name of the test, otherwise the - function name will be used. - """ - - def decorator(f): - self.add_template_test(f, name=name) - return f - - return decorator - - @setupmethod - def add_template_test(self, f, name=None): - """Register a custom template test. Works exactly like the - :meth:`template_test` decorator. - - .. versionadded:: 0.10 - - :param name: the optional name of the test, otherwise the - function name will be used. - """ - self.jinja_env.tests[name or f.__name__] = f - - @setupmethod - def template_global(self, name=None): - """A decorator that is used to register a custom template global function. - You can specify a name for the global function, otherwise the function - name will be used. Example:: - - @app.template_global() - def double(n): - return 2 * n - - .. 
versionadded:: 0.10 - - :param name: the optional name of the global function, otherwise the - function name will be used. - """ - - def decorator(f): - self.add_template_global(f, name=name) - return f - - return decorator - - @setupmethod - def add_template_global(self, f, name=None): - """Register a custom template global function. Works exactly like the - :meth:`template_global` decorator. - - .. versionadded:: 0.10 - - :param name: the optional name of the global function, otherwise the - function name will be used. - """ - self.jinja_env.globals[name or f.__name__] = f - - @setupmethod - def before_request(self, f): - """Registers a function to run before each request. - - For example, this can be used to open a database connection, or to load - the logged in user from the session. - - The function will be called without any arguments. If it returns a - non-None value, the value is handled as if it was the return value from - the view, and further request handling is stopped. - """ - self.before_request_funcs.setdefault(None, []).append(f) - return f - - @setupmethod - def before_first_request(self, f): - """Registers a function to be run before the first request to this - instance of the application. - - The function will be called without any arguments and its return - value is ignored. - - .. versionadded:: 0.8 - """ - self.before_first_request_funcs.append(f) - return f - - @setupmethod - def after_request(self, f): - """Register a function to be run after each request. - - Your function must take one parameter, an instance of - :attr:`response_class` and return a new response object or the - same (see :meth:`process_response`). - - As of Flask 0.7 this function might not be executed at the end of the - request in case an unhandled exception occurred. - """ - self.after_request_funcs.setdefault(None, []).append(f) - return f - - @setupmethod - def teardown_request(self, f): - """Register a function to be run at the end of each request, - regardless of whether there was an exception or not. These functions - are executed when the request context is popped, even if not an - actual request was performed. - - Example:: - - ctx = app.test_request_context() - ctx.push() - ... - ctx.pop() - - When ``ctx.pop()`` is executed in the above example, the teardown - functions are called just before the request context moves from the - stack of active contexts. This becomes relevant if you are using - such constructs in tests. - - Generally teardown functions must take every necessary step to avoid - that they will fail. If they do execute code that might fail they - will have to surround the execution of these code by try/except - statements and log occurring errors. - - When a teardown function was called because of an exception it will - be passed an error object. - - The return values of teardown functions are ignored. - - .. admonition:: Debug Note - - In debug mode Flask will not tear down a request on an exception - immediately. Instead it will keep it alive so that the interactive - debugger can still access it. This behavior can be controlled - by the ``PRESERVE_CONTEXT_ON_EXCEPTION`` configuration variable. - """ - self.teardown_request_funcs.setdefault(None, []).append(f) - return f - - @setupmethod - def teardown_appcontext(self, f): - """Registers a function to be called when the application context - ends. These functions are typically also called when the request - context is popped. - - Example:: - - ctx = app.app_context() - ctx.push() - ... 
- ctx.pop() - - When ``ctx.pop()`` is executed in the above example, the teardown - functions are called just before the app context moves from the - stack of active contexts. This becomes relevant if you are using - such constructs in tests. - - Since a request context typically also manages an application - context it would also be called when you pop a request context. - - When a teardown function was called because of an unhandled exception - it will be passed an error object. If an :meth:`errorhandler` is - registered, it will handle the exception and the teardown will not - receive it. - - The return values of teardown functions are ignored. - - .. versionadded:: 0.9 - """ - self.teardown_appcontext_funcs.append(f) - return f - - @setupmethod - def context_processor(self, f): - """Registers a template context processor function.""" - self.template_context_processors[None].append(f) - return f - - @setupmethod - def shell_context_processor(self, f): - """Registers a shell context processor function. - - .. versionadded:: 0.11 - """ - self.shell_context_processors.append(f) - return f - - @setupmethod - def url_value_preprocessor(self, f): - """Register a URL value preprocessor function for all view - functions in the application. These functions will be called before the - :meth:`before_request` functions. - - The function can modify the values captured from the matched url before - they are passed to the view. For example, this can be used to pop a - common language code value and place it in ``g`` rather than pass it to - every view. - - The function is passed the endpoint name and values dict. The return - value is ignored. - """ - self.url_value_preprocessors.setdefault(None, []).append(f) - return f - - @setupmethod - def url_defaults(self, f): - """Callback function for URL defaults for all view functions of the - application. It's called with the endpoint and values and should - update the values passed in place. - """ - self.url_default_functions.setdefault(None, []).append(f) - return f - - def _find_error_handler(self, e): - """Return a registered error handler for an exception in this order: - blueprint handler for a specific code, app handler for a specific code, - blueprint handler for an exception class, app handler for an exception - class, or ``None`` if a suitable handler is not found. - """ - exc_class, code = self._get_exc_class_and_code(type(e)) - - for name, c in ( - (request.blueprint, code), - (None, code), - (request.blueprint, None), - (None, None), - ): - handler_map = self.error_handler_spec.setdefault(name, {}).get(c) - - if not handler_map: - continue - - for cls in exc_class.__mro__: - handler = handler_map.get(cls) - - if handler is not None: - return handler - - def handle_http_exception(self, e): - """Handles an HTTP exception. By default this will invoke the - registered error handlers and fall back to returning the - exception as response. - - .. versionchanged:: 1.0.3 - ``RoutingException``, used internally for actions such as - slash redirects during routing, is not passed to error - handlers. - - .. versionchanged:: 1.0 - Exceptions are looked up by code *and* by MRO, so - ``HTTPExcpetion`` subclasses can be handled with a catch-all - handler for the base ``HTTPException``. - - .. versionadded:: 0.3 - """ - # Proxy exceptions don't have error codes. 
We want to always return - # those unchanged as errors - if e.code is None: - return e - - # RoutingExceptions are used internally to trigger routing - # actions, such as slash redirects raising RequestRedirect. They - # are not raised or handled in user code. - if isinstance(e, RoutingException): - return e - - handler = self._find_error_handler(e) - if handler is None: - return e - return handler(e) - - def trap_http_exception(self, e): - """Checks if an HTTP exception should be trapped or not. By default - this will return ``False`` for all exceptions except for a bad request - key error if ``TRAP_BAD_REQUEST_ERRORS`` is set to ``True``. It - also returns ``True`` if ``TRAP_HTTP_EXCEPTIONS`` is set to ``True``. - - This is called for all HTTP exceptions raised by a view function. - If it returns ``True`` for any exception the error handler for this - exception is not called and it shows up as regular exception in the - traceback. This is helpful for debugging implicitly raised HTTP - exceptions. - - .. versionchanged:: 1.0 - Bad request errors are not trapped by default in debug mode. - - .. versionadded:: 0.8 - """ - if self.config["TRAP_HTTP_EXCEPTIONS"]: - return True - - trap_bad_request = self.config["TRAP_BAD_REQUEST_ERRORS"] - - # if unset, trap key errors in debug mode - if ( - trap_bad_request is None - and self.debug - and isinstance(e, BadRequestKeyError) - ): - return True - - if trap_bad_request: - return isinstance(e, BadRequest) - - return False - - def handle_user_exception(self, e): - """This method is called whenever an exception occurs that - should be handled. A special case is :class:`~werkzeug - .exceptions.HTTPException` which is forwarded to the - :meth:`handle_http_exception` method. This function will either - return a response value or reraise the exception with the same - traceback. - - .. versionchanged:: 1.0 - Key errors raised from request data like ``form`` show the - bad key in debug mode rather than a generic bad request - message. - - .. versionadded:: 0.7 - """ - exc_type, exc_value, tb = sys.exc_info() - assert exc_value is e - # ensure not to trash sys.exc_info() at that point in case someone - # wants the traceback preserved in handle_http_exception. Of course - # we cannot prevent users from trashing it themselves in a custom - # trap_http_exception method so that's their fault then. - - if isinstance(e, BadRequestKeyError): - if self.debug or self.config["TRAP_BAD_REQUEST_ERRORS"]: - e.show_exception = True - - # Werkzeug < 0.15 doesn't add the KeyError to the 400 - # message, add it in manually. - # TODO: clean up once Werkzeug >= 0.15.5 is required - if e.args[0] not in e.get_description(): - e.description = "KeyError: '{}'".format(*e.args) - elif not hasattr(BadRequestKeyError, "show_exception"): - e.args = () - - if isinstance(e, HTTPException) and not self.trap_http_exception(e): - return self.handle_http_exception(e) - - handler = self._find_error_handler(e) - - if handler is None: - reraise(exc_type, exc_value, tb) - return handler(e) - - def handle_exception(self, e): - """Handle an exception that did not have an error handler - associated with it, or that was raised from an error handler. - This always causes a 500 ``InternalServerError``. - - Always sends the :data:`got_request_exception` signal. - - If :attr:`propagate_exceptions` is ``True``, such as in debug - mode, the error will be re-raised so that the debugger can - display it. 
Otherwise, the original exception is logged, and - an :exc:`~werkzeug.exceptions.InternalServerError` is returned. - - If an error handler is registered for ``InternalServerError`` or - ``500``, it will be used. For consistency, the handler will - always receive the ``InternalServerError``. The original - unhandled exception is available as ``e.original_exception``. - - .. note:: - Prior to Werkzeug 1.0.0, ``InternalServerError`` will not - always have an ``original_exception`` attribute. Use - ``getattr(e, "original_exception", None)`` to simulate the - behavior for compatibility. - - .. versionchanged:: 1.1.0 - Always passes the ``InternalServerError`` instance to the - handler, setting ``original_exception`` to the unhandled - error. - - .. versionchanged:: 1.1.0 - ``after_request`` functions and other finalization is done - even for the default 500 response when there is no handler. - - .. versionadded:: 0.3 - """ - exc_type, exc_value, tb = sys.exc_info() - got_request_exception.send(self, exception=e) - - if self.propagate_exceptions: - # if we want to repropagate the exception, we can attempt to - # raise it with the whole traceback in case we can do that - # (the function was actually called from the except part) - # otherwise, we just raise the error again - if exc_value is e: - reraise(exc_type, exc_value, tb) - else: - raise e - - self.log_exception((exc_type, exc_value, tb)) - server_error = InternalServerError() - # TODO: pass as param when Werkzeug>=1.0.0 is required - # TODO: also remove note about this from docstring and docs - server_error.original_exception = e - handler = self._find_error_handler(server_error) - - if handler is not None: - server_error = handler(server_error) - - return self.finalize_request(server_error, from_error_handler=True) - - def log_exception(self, exc_info): - """Logs an exception. This is called by :meth:`handle_exception` - if debugging is disabled and right before the handler is called. - The default implementation logs the exception as error on the - :attr:`logger`. - - .. versionadded:: 0.8 - """ - self.logger.error( - "Exception on %s [%s]" % (request.path, request.method), exc_info=exc_info - ) - - def raise_routing_exception(self, request): - """Exceptions that are recording during routing are reraised with - this method. During debug we are not reraising redirect requests - for non ``GET``, ``HEAD``, or ``OPTIONS`` requests and we're raising - a different error instead to help debug situations. - - :internal: - """ - if ( - not self.debug - or not isinstance(request.routing_exception, RequestRedirect) - or request.method in ("GET", "HEAD", "OPTIONS") - ): - raise request.routing_exception - - from .debughelpers import FormDataRoutingRedirect - - raise FormDataRoutingRedirect(request) - - def dispatch_request(self): - """Does the request dispatching. Matches the URL and returns the - return value of the view or error handler. This does not have to - be a response object. In order to convert the return value to a - proper response object, call :func:`make_response`. - - .. versionchanged:: 0.7 - This no longer does the exception handling, this code was - moved to the new :meth:`full_dispatch_request`. 
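A short sketch of the ``InternalServerError`` handler pattern described above; ``original_exception`` is read with ``getattr`` because, before Werkzeug 1.0.0, the attribute may be absent, and the handler body is only an illustration::

    from flask import Flask
    from werkzeug.exceptions import InternalServerError

    app = Flask(__name__)

    @app.errorhandler(InternalServerError)
    def handle_500(e):
        # the unhandled exception, when it has been attached by Flask
        original = getattr(e, "original_exception", None)
        return "500: {}".format(original or e), 500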
- """ - req = _request_ctx_stack.top.request - if req.routing_exception is not None: - self.raise_routing_exception(req) - rule = req.url_rule - # if we provide automatic options for this URL and the - # request came with the OPTIONS method, reply automatically - if ( - getattr(rule, "provide_automatic_options", False) - and req.method == "OPTIONS" - ): - return self.make_default_options_response() - # otherwise dispatch to the handler for that endpoint - return self.view_functions[rule.endpoint](**req.view_args) - - def full_dispatch_request(self): - """Dispatches the request and on top of that performs request - pre and postprocessing as well as HTTP exception catching and - error handling. - - .. versionadded:: 0.7 - """ - self.try_trigger_before_first_request_functions() - try: - request_started.send(self) - rv = self.preprocess_request() - if rv is None: - rv = self.dispatch_request() - except Exception as e: - rv = self.handle_user_exception(e) - return self.finalize_request(rv) - - def finalize_request(self, rv, from_error_handler=False): - """Given the return value from a view function this finalizes - the request by converting it into a response and invoking the - postprocessing functions. This is invoked for both normal - request dispatching as well as error handlers. - - Because this means that it might be called as a result of a - failure a special safe mode is available which can be enabled - with the `from_error_handler` flag. If enabled, failures in - response processing will be logged and otherwise ignored. - - :internal: - """ - response = self.make_response(rv) - try: - response = self.process_response(response) - request_finished.send(self, response=response) - except Exception: - if not from_error_handler: - raise - self.logger.exception( - "Request finalizing failed with an error while handling an error" - ) - return response - - def try_trigger_before_first_request_functions(self): - """Called before each request and will ensure that it triggers - the :attr:`before_first_request_funcs` and only exactly once per - application instance (which means process usually). - - :internal: - """ - if self._got_first_request: - return - with self._before_request_lock: - if self._got_first_request: - return - for func in self.before_first_request_funcs: - func() - self._got_first_request = True - - def make_default_options_response(self): - """This method is called to create the default ``OPTIONS`` response. - This can be changed through subclassing to change the default - behavior of ``OPTIONS`` responses. - - .. versionadded:: 0.7 - """ - adapter = _request_ctx_stack.top.url_adapter - if hasattr(adapter, "allowed_methods"): - methods = adapter.allowed_methods() - else: - # fallback for Werkzeug < 0.7 - methods = [] - try: - adapter.match(method="--") - except MethodNotAllowed as e: - methods = e.valid_methods - except HTTPException: - pass - rv = self.response_class() - rv.allow.update(methods) - return rv - - def should_ignore_error(self, error): - """This is called to figure out if an error should be ignored - or not as far as the teardown system is concerned. If this - function returns ``True`` then the teardown handlers will not be - passed the error. - - .. versionadded:: 0.10 - """ - return False - - def make_response(self, rv): - """Convert the return value from a view function to an instance of - :attr:`response_class`. - - :param rv: the return value from the view function. The view function - must return a response. 
Returning ``None``, or the view ending - without returning, is not allowed. The following types are allowed - for ``view_rv``: - - ``str`` (``unicode`` in Python 2) - A response object is created with the string encoded to UTF-8 - as the body. - - ``bytes`` (``str`` in Python 2) - A response object is created with the bytes as the body. - - ``dict`` - A dictionary that will be jsonify'd before being returned. - - ``tuple`` - Either ``(body, status, headers)``, ``(body, status)``, or - ``(body, headers)``, where ``body`` is any of the other types - allowed here, ``status`` is a string or an integer, and - ``headers`` is a dictionary or a list of ``(key, value)`` - tuples. If ``body`` is a :attr:`response_class` instance, - ``status`` overwrites the exiting value and ``headers`` are - extended. - - :attr:`response_class` - The object is returned unchanged. - - other :class:`~werkzeug.wrappers.Response` class - The object is coerced to :attr:`response_class`. - - :func:`callable` - The function is called as a WSGI application. The result is - used to create a response object. - - .. versionchanged:: 0.9 - Previously a tuple was interpreted as the arguments for the - response object. - """ - - status = headers = None - - # unpack tuple returns - if isinstance(rv, tuple): - len_rv = len(rv) - - # a 3-tuple is unpacked directly - if len_rv == 3: - rv, status, headers = rv - # decide if a 2-tuple has status or headers - elif len_rv == 2: - if isinstance(rv[1], (Headers, dict, tuple, list)): - rv, headers = rv - else: - rv, status = rv - # other sized tuples are not allowed - else: - raise TypeError( - "The view function did not return a valid response tuple." - " The tuple must have the form (body, status, headers)," - " (body, status), or (body, headers)." - ) - - # the body must not be None - if rv is None: - raise TypeError( - "The view function did not return a valid response. The" - " function either returned None or ended without a return" - " statement." - ) - - # make sure the body is an instance of the response class - if not isinstance(rv, self.response_class): - if isinstance(rv, (text_type, bytes, bytearray)): - # let the response class set the status and headers instead of - # waiting to do it manually, so that the class can handle any - # special logic - rv = self.response_class(rv, status=status, headers=headers) - status = headers = None - elif isinstance(rv, dict): - rv = jsonify(rv) - elif isinstance(rv, BaseResponse) or callable(rv): - # evaluate a WSGI callable, or coerce a different response - # class to the correct type - try: - rv = self.response_class.force_type(rv, request.environ) - except TypeError as e: - new_error = TypeError( - "{e}\nThe view function did not return a valid" - " response. The return type must be a string, dict, tuple," - " Response instance, or WSGI callable, but it was a" - " {rv.__class__.__name__}.".format(e=e, rv=rv) - ) - reraise(TypeError, new_error, sys.exc_info()[2]) - else: - raise TypeError( - "The view function did not return a valid" - " response. 
The return type must be a string, dict, tuple," - " Response instance, or WSGI callable, but it was a" - " {rv.__class__.__name__}.".format(rv=rv) - ) - - # prefer the status if it was provided - if status is not None: - if isinstance(status, (text_type, bytes, bytearray)): - rv.status = status - else: - rv.status_code = status - - # extend existing headers with provided headers - if headers: - rv.headers.extend(headers) - - return rv - - def create_url_adapter(self, request): - """Creates a URL adapter for the given request. The URL adapter - is created at a point where the request context is not yet set - up so the request is passed explicitly. - - .. versionadded:: 0.6 - - .. versionchanged:: 0.9 - This can now also be called without a request object when the - URL adapter is created for the application context. - - .. versionchanged:: 1.0 - :data:`SERVER_NAME` no longer implicitly enables subdomain - matching. Use :attr:`subdomain_matching` instead. - """ - if request is not None: - # If subdomain matching is disabled (the default), use the - # default subdomain in all cases. This should be the default - # in Werkzeug but it currently does not have that feature. - subdomain = ( - (self.url_map.default_subdomain or None) - if not self.subdomain_matching - else None - ) - return self.url_map.bind_to_environ( - request.environ, - server_name=self.config["SERVER_NAME"], - subdomain=subdomain, - ) - # We need at the very least the server name to be set for this - # to work. - if self.config["SERVER_NAME"] is not None: - return self.url_map.bind( - self.config["SERVER_NAME"], - script_name=self.config["APPLICATION_ROOT"], - url_scheme=self.config["PREFERRED_URL_SCHEME"], - ) - - def inject_url_defaults(self, endpoint, values): - """Injects the URL defaults for the given endpoint directly into - the values dictionary passed. This is used internally and - automatically called on URL building. - - .. versionadded:: 0.7 - """ - funcs = self.url_default_functions.get(None, ()) - if "." in endpoint: - bp = endpoint.rsplit(".", 1)[0] - funcs = chain(funcs, self.url_default_functions.get(bp, ())) - for func in funcs: - func(endpoint, values) - - def handle_url_build_error(self, error, endpoint, values): - """Handle :class:`~werkzeug.routing.BuildError` on :meth:`url_for`. - """ - exc_type, exc_value, tb = sys.exc_info() - for handler in self.url_build_error_handlers: - try: - rv = handler(error, endpoint, values) - if rv is not None: - return rv - except BuildError as e: - # make error available outside except block (py3) - error = e - - # At this point we want to reraise the exception. If the error is - # still the same one we can reraise it with the original traceback, - # otherwise we raise it from here. - if error is exc_value: - reraise(exc_type, exc_value, tb) - raise error - - def preprocess_request(self): - """Called before the request is dispatched. Calls - :attr:`url_value_preprocessors` registered with the app and the - current blueprint (if any). Then calls :attr:`before_request_funcs` - registered with the app and the blueprint. - - If any :meth:`before_request` handler returns a non-None value, the - value is handled as if it was the return value from the view, and - further request handling is stopped. 
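The return-value conversion performed by ``make_response`` can be illustrated with a few toy views; the routes and names are assumptions made for the example::

    from flask import Flask

    app = Flask(__name__)

    @app.route("/text")
    def text():
        return "plain body"              # str: body of a 200 response

    @app.route("/created")
    def created():
        return "made it", 201            # (body, status)

    @app.route("/headers")
    def with_headers():
        return "ok", {"X-Example": "1"}  # (body, headers)

    @app.route("/json")
    def as_json():
        return {"answer": 42}            # dict: passed through jsonify()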
- """ - - bp = _request_ctx_stack.top.request.blueprint - - funcs = self.url_value_preprocessors.get(None, ()) - if bp is not None and bp in self.url_value_preprocessors: - funcs = chain(funcs, self.url_value_preprocessors[bp]) - for func in funcs: - func(request.endpoint, request.view_args) - - funcs = self.before_request_funcs.get(None, ()) - if bp is not None and bp in self.before_request_funcs: - funcs = chain(funcs, self.before_request_funcs[bp]) - for func in funcs: - rv = func() - if rv is not None: - return rv - - def process_response(self, response): - """Can be overridden in order to modify the response object - before it's sent to the WSGI server. By default this will - call all the :meth:`after_request` decorated functions. - - .. versionchanged:: 0.5 - As of Flask 0.5 the functions registered for after request - execution are called in reverse order of registration. - - :param response: a :attr:`response_class` object. - :return: a new response object or the same, has to be an - instance of :attr:`response_class`. - """ - ctx = _request_ctx_stack.top - bp = ctx.request.blueprint - funcs = ctx._after_request_functions - if bp is not None and bp in self.after_request_funcs: - funcs = chain(funcs, reversed(self.after_request_funcs[bp])) - if None in self.after_request_funcs: - funcs = chain(funcs, reversed(self.after_request_funcs[None])) - for handler in funcs: - response = handler(response) - if not self.session_interface.is_null_session(ctx.session): - self.session_interface.save_session(self, ctx.session, response) - return response - - def do_teardown_request(self, exc=_sentinel): - """Called after the request is dispatched and the response is - returned, right before the request context is popped. - - This calls all functions decorated with - :meth:`teardown_request`, and :meth:`Blueprint.teardown_request` - if a blueprint handled the request. Finally, the - :data:`request_tearing_down` signal is sent. - - This is called by - :meth:`RequestContext.pop() `, - which may be delayed during testing to maintain access to - resources. - - :param exc: An unhandled exception raised while dispatching the - request. Detected from the current exception information if - not passed. Passed to each teardown function. - - .. versionchanged:: 0.9 - Added the ``exc`` argument. - """ - if exc is _sentinel: - exc = sys.exc_info()[1] - funcs = reversed(self.teardown_request_funcs.get(None, ())) - bp = _request_ctx_stack.top.request.blueprint - if bp is not None and bp in self.teardown_request_funcs: - funcs = chain(funcs, reversed(self.teardown_request_funcs[bp])) - for func in funcs: - func(exc) - request_tearing_down.send(self, exc=exc) - - def do_teardown_appcontext(self, exc=_sentinel): - """Called right before the application context is popped. - - When handling a request, the application context is popped - after the request context. See :meth:`do_teardown_request`. - - This calls all functions decorated with - :meth:`teardown_appcontext`. Then the - :data:`appcontext_tearing_down` signal is sent. - - This is called by - :meth:`AppContext.pop() `. - - .. versionadded:: 0.9 - """ - if exc is _sentinel: - exc = sys.exc_info()[1] - for func in reversed(self.teardown_appcontext_funcs): - func(exc) - appcontext_tearing_down.send(self, exc=exc) - - def app_context(self): - """Create an :class:`~flask.ctx.AppContext`. Use as a ``with`` - block to push the context, which will make :data:`current_app` - point at this application. 
- - An application context is automatically pushed by - :meth:`RequestContext.push() ` - when handling a request, and when running a CLI command. Use - this to manually create a context outside of these situations. - - :: - - with app.app_context(): - init_db() - - See :doc:`/appcontext`. - - .. versionadded:: 0.9 - """ - return AppContext(self) - - def request_context(self, environ): - """Create a :class:`~flask.ctx.RequestContext` representing a - WSGI environment. Use a ``with`` block to push the context, - which will make :data:`request` point at this request. - - See :doc:`/reqcontext`. - - Typically you should not call this from your own code. A request - context is automatically pushed by the :meth:`wsgi_app` when - handling a request. Use :meth:`test_request_context` to create - an environment and context instead of this method. - - :param environ: a WSGI environment - """ - return RequestContext(self, environ) - - def test_request_context(self, *args, **kwargs): - """Create a :class:`~flask.ctx.RequestContext` for a WSGI - environment created from the given values. This is mostly useful - during testing, where you may want to run a function that uses - request data without dispatching a full request. - - See :doc:`/reqcontext`. - - Use a ``with`` block to push the context, which will make - :data:`request` point at the request for the created - environment. :: - - with test_request_context(...): - generate_report() - - When using the shell, it may be easier to push and pop the - context manually to avoid indentation. :: - - ctx = app.test_request_context(...) - ctx.push() - ... - ctx.pop() - - Takes the same arguments as Werkzeug's - :class:`~werkzeug.test.EnvironBuilder`, with some defaults from - the application. See the linked Werkzeug docs for most of the - available arguments. Flask-specific behavior is listed here. - - :param path: URL path being requested. - :param base_url: Base URL where the app is being served, which - ``path`` is relative to. If not given, built from - :data:`PREFERRED_URL_SCHEME`, ``subdomain``, - :data:`SERVER_NAME`, and :data:`APPLICATION_ROOT`. - :param subdomain: Subdomain name to append to - :data:`SERVER_NAME`. - :param url_scheme: Scheme to use instead of - :data:`PREFERRED_URL_SCHEME`. - :param data: The request body, either as a string or a dict of - form keys and values. - :param json: If given, this is serialized as JSON and passed as - ``data``. Also defaults ``content_type`` to - ``application/json``. - :param args: other positional arguments passed to - :class:`~werkzeug.test.EnvironBuilder`. - :param kwargs: other keyword arguments passed to - :class:`~werkzeug.test.EnvironBuilder`. - """ - from .testing import EnvironBuilder - - builder = EnvironBuilder(self, *args, **kwargs) - - try: - return self.request_context(builder.get_environ()) - finally: - builder.close() - - def wsgi_app(self, environ, start_response): - """The actual WSGI application. This is not implemented in - :meth:`__call__` so that middlewares can be applied without - losing a reference to the app object. Instead of doing this:: - - app = MyMiddleware(app) - - It's a better idea to do this instead:: - - app.wsgi_app = MyMiddleware(app.wsgi_app) - - Then you still have the original application object around and - can continue to call methods on it. - - .. versionchanged:: 0.7 - Teardown events for the request and app contexts are called - even if an unhandled error occurs. Other events may not be - called depending on when an error occurs during dispatch. 
- See :ref:`callbacks-and-errors`. - - :param environ: A WSGI environment. - :param start_response: A callable accepting a status code, - a list of headers, and an optional exception context to - start the response. - """ - ctx = self.request_context(environ) - error = None - try: - try: - ctx.push() - response = self.full_dispatch_request() - except Exception as e: - error = e - response = self.handle_exception(e) - except: # noqa: B001 - error = sys.exc_info()[1] - raise - return response(environ, start_response) - finally: - if self.should_ignore_error(error): - error = None - ctx.auto_pop(error) - - def __call__(self, environ, start_response): - """The WSGI server calls the Flask application object as the - WSGI application. This calls :meth:`wsgi_app` which can be - wrapped to applying middleware.""" - return self.wsgi_app(environ, start_response) - - def __repr__(self): - return "<%s %r>" % (self.__class__.__name__, self.name) diff --git a/venv/lib/python3.6/site-packages/flask/blueprints.py b/venv/lib/python3.6/site-packages/flask/blueprints.py deleted file mode 100644 index 8978104..0000000 --- a/venv/lib/python3.6/site-packages/flask/blueprints.py +++ /dev/null @@ -1,569 +0,0 @@ -# -*- coding: utf-8 -*- -""" - flask.blueprints - ~~~~~~~~~~~~~~~~ - - Blueprints are the recommended way to implement larger or more - pluggable applications in Flask 0.7 and later. - - :copyright: 2010 Pallets - :license: BSD-3-Clause -""" -from functools import update_wrapper - -from .helpers import _endpoint_from_view_func -from .helpers import _PackageBoundObject - -# a singleton sentinel value for parameter defaults -_sentinel = object() - - -class BlueprintSetupState(object): - """Temporary holder object for registering a blueprint with the - application. An instance of this class is created by the - :meth:`~flask.Blueprint.make_setup_state` method and later passed - to all register callback functions. - """ - - def __init__(self, blueprint, app, options, first_registration): - #: a reference to the current application - self.app = app - - #: a reference to the blueprint that created this setup state. - self.blueprint = blueprint - - #: a dictionary with all options that were passed to the - #: :meth:`~flask.Flask.register_blueprint` method. - self.options = options - - #: as blueprints can be registered multiple times with the - #: application and not everything wants to be registered - #: multiple times on it, this attribute can be used to figure - #: out if the blueprint was registered in the past already. - self.first_registration = first_registration - - subdomain = self.options.get("subdomain") - if subdomain is None: - subdomain = self.blueprint.subdomain - - #: The subdomain that the blueprint should be active for, ``None`` - #: otherwise. - self.subdomain = subdomain - - url_prefix = self.options.get("url_prefix") - if url_prefix is None: - url_prefix = self.blueprint.url_prefix - #: The prefix that should be used for all URLs defined on the - #: blueprint. - self.url_prefix = url_prefix - - #: A dictionary with URL defaults that is added to each and every - #: URL that was defined with the blueprint. - self.url_defaults = dict(self.blueprint.url_values_defaults) - self.url_defaults.update(self.options.get("url_defaults", ())) - - def add_url_rule(self, rule, endpoint=None, view_func=None, **options): - """A helper method to register a rule (and optionally a view function) - to the application. The endpoint is automatically prefixed with the - blueprint's name. 
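From the application side, the endpoint prefixing and URL joining described here look roughly like this; the ``admin`` blueprint is invented for the example::

    from flask import Blueprint, Flask

    bp = Blueprint("admin", __name__)

    @bp.route("/users")
    def users():
        return "user list"

    app = Flask(__name__)
    app.register_blueprint(bp, url_prefix="/admin")
    # the rule becomes '/admin/users' under the endpoint 'admin.users'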
- """ - if self.url_prefix is not None: - if rule: - rule = "/".join((self.url_prefix.rstrip("/"), rule.lstrip("/"))) - else: - rule = self.url_prefix - options.setdefault("subdomain", self.subdomain) - if endpoint is None: - endpoint = _endpoint_from_view_func(view_func) - defaults = self.url_defaults - if "defaults" in options: - defaults = dict(defaults, **options.pop("defaults")) - self.app.add_url_rule( - rule, - "%s.%s" % (self.blueprint.name, endpoint), - view_func, - defaults=defaults, - **options - ) - - -class Blueprint(_PackageBoundObject): - """Represents a blueprint, a collection of routes and other - app-related functions that can be registered on a real application - later. - - A blueprint is an object that allows defining application functions - without requiring an application object ahead of time. It uses the - same decorators as :class:`~flask.Flask`, but defers the need for an - application by recording them for later registration. - - Decorating a function with a blueprint creates a deferred function - that is called with :class:`~flask.blueprints.BlueprintSetupState` - when the blueprint is registered on an application. - - See :ref:`blueprints` for more information. - - .. versionchanged:: 1.1.0 - Blueprints have a ``cli`` group to register nested CLI commands. - The ``cli_group`` parameter controls the name of the group under - the ``flask`` command. - - .. versionadded:: 0.7 - - :param name: The name of the blueprint. Will be prepended to each - endpoint name. - :param import_name: The name of the blueprint package, usually - ``__name__``. This helps locate the ``root_path`` for the - blueprint. - :param static_folder: A folder with static files that should be - served by the blueprint's static route. The path is relative to - the blueprint's root path. Blueprint static files are disabled - by default. - :param static_url_path: The url to serve static files from. - Defaults to ``static_folder``. If the blueprint does not have - a ``url_prefix``, the app's static route will take precedence, - and the blueprint's static files won't be accessible. - :param template_folder: A folder with templates that should be added - to the app's template search path. The path is relative to the - blueprint's root path. Blueprint templates are disabled by - default. Blueprint templates have a lower precedence than those - in the app's templates folder. - :param url_prefix: A path to prepend to all of the blueprint's URLs, - to make them distinct from the rest of the app's routes. - :param subdomain: A subdomain that blueprint routes will match on by - default. - :param url_defaults: A dict of default values that blueprint routes - will receive by default. - :param root_path: By default, the blueprint will automatically this - based on ``import_name``. In certain situations this automatic - detection can fail, so the path can be specified manually - instead. - """ - - warn_on_modifications = False - _got_registered_once = False - - #: Blueprint local JSON decoder class to use. - #: Set to ``None`` to use the app's :class:`~flask.app.Flask.json_encoder`. - json_encoder = None - #: Blueprint local JSON decoder class to use. - #: Set to ``None`` to use the app's :class:`~flask.app.Flask.json_decoder`. - json_decoder = None - - # TODO remove the next three attrs when Sphinx :inherited-members: works - # https://github.com/sphinx-doc/sphinx/issues/741 - - #: The name of the package or module that this app belongs to. Do not - #: change this once it is set by the constructor. 
- import_name = None - - #: Location of the template files to be added to the template lookup. - #: ``None`` if templates should not be added. - template_folder = None - - #: Absolute path to the package on the filesystem. Used to look up - #: resources contained in the package. - root_path = None - - def __init__( - self, - name, - import_name, - static_folder=None, - static_url_path=None, - template_folder=None, - url_prefix=None, - subdomain=None, - url_defaults=None, - root_path=None, - cli_group=_sentinel, - ): - _PackageBoundObject.__init__( - self, import_name, template_folder, root_path=root_path - ) - self.name = name - self.url_prefix = url_prefix - self.subdomain = subdomain - self.static_folder = static_folder - self.static_url_path = static_url_path - self.deferred_functions = [] - if url_defaults is None: - url_defaults = {} - self.url_values_defaults = url_defaults - self.cli_group = cli_group - - def record(self, func): - """Registers a function that is called when the blueprint is - registered on the application. This function is called with the - state as argument as returned by the :meth:`make_setup_state` - method. - """ - if self._got_registered_once and self.warn_on_modifications: - from warnings import warn - - warn( - Warning( - "The blueprint was already registered once " - "but is getting modified now. These changes " - "will not show up." - ) - ) - self.deferred_functions.append(func) - - def record_once(self, func): - """Works like :meth:`record` but wraps the function in another - function that will ensure the function is only called once. If the - blueprint is registered a second time on the application, the - function passed is not called. - """ - - def wrapper(state): - if state.first_registration: - func(state) - - return self.record(update_wrapper(wrapper, func)) - - def make_setup_state(self, app, options, first_registration=False): - """Creates an instance of :meth:`~flask.blueprints.BlueprintSetupState` - object that is later passed to the register callback functions. - Subclasses can override this to return a subclass of the setup state. - """ - return BlueprintSetupState(self, app, options, first_registration) - - def register(self, app, options, first_registration=False): - """Called by :meth:`Flask.register_blueprint` to register all views - and callbacks registered on the blueprint with the application. Creates - a :class:`.BlueprintSetupState` and calls each :meth:`record` callback - with it. - - :param app: The application this blueprint is being registered with. - :param options: Keyword arguments forwarded from - :meth:`~Flask.register_blueprint`. - :param first_registration: Whether this is the first time this - blueprint has been registered on the application. - """ - self._got_registered_once = True - state = self.make_setup_state(app, options, first_registration) - - if self.has_static_folder: - state.add_url_rule( - self.static_url_path + "/", - view_func=self.send_static_file, - endpoint="static", - ) - - for deferred in self.deferred_functions: - deferred(state) - - cli_resolved_group = options.get("cli_group", self.cli_group) - - if not self.cli.commands: - return - - if cli_resolved_group is None: - app.cli.commands.update(self.cli.commands) - elif cli_resolved_group is _sentinel: - self.cli.name = self.name - app.cli.add_command(self.cli) - else: - self.cli.name = cli_resolved_group - app.cli.add_command(self.cli) - - def route(self, rule, **options): - """Like :meth:`Flask.route` but for a blueprint. 
The endpoint for the - :func:`url_for` function is prefixed with the name of the blueprint. - """ - - def decorator(f): - endpoint = options.pop("endpoint", f.__name__) - self.add_url_rule(rule, endpoint, f, **options) - return f - - return decorator - - def add_url_rule(self, rule, endpoint=None, view_func=None, **options): - """Like :meth:`Flask.add_url_rule` but for a blueprint. The endpoint for - the :func:`url_for` function is prefixed with the name of the blueprint. - """ - if endpoint: - assert "." not in endpoint, "Blueprint endpoints should not contain dots" - if view_func and hasattr(view_func, "__name__"): - assert ( - "." not in view_func.__name__ - ), "Blueprint view function name should not contain dots" - self.record(lambda s: s.add_url_rule(rule, endpoint, view_func, **options)) - - def endpoint(self, endpoint): - """Like :meth:`Flask.endpoint` but for a blueprint. This does not - prefix the endpoint with the blueprint name, this has to be done - explicitly by the user of this method. If the endpoint is prefixed - with a `.` it will be registered to the current blueprint, otherwise - it's an application independent endpoint. - """ - - def decorator(f): - def register_endpoint(state): - state.app.view_functions[endpoint] = f - - self.record_once(register_endpoint) - return f - - return decorator - - def app_template_filter(self, name=None): - """Register a custom template filter, available application wide. Like - :meth:`Flask.template_filter` but for a blueprint. - - :param name: the optional name of the filter, otherwise the - function name will be used. - """ - - def decorator(f): - self.add_app_template_filter(f, name=name) - return f - - return decorator - - def add_app_template_filter(self, f, name=None): - """Register a custom template filter, available application wide. Like - :meth:`Flask.add_template_filter` but for a blueprint. Works exactly - like the :meth:`app_template_filter` decorator. - - :param name: the optional name of the filter, otherwise the - function name will be used. - """ - - def register_template(state): - state.app.jinja_env.filters[name or f.__name__] = f - - self.record_once(register_template) - - def app_template_test(self, name=None): - """Register a custom template test, available application wide. Like - :meth:`Flask.template_test` but for a blueprint. - - .. versionadded:: 0.10 - - :param name: the optional name of the test, otherwise the - function name will be used. - """ - - def decorator(f): - self.add_app_template_test(f, name=name) - return f - - return decorator - - def add_app_template_test(self, f, name=None): - """Register a custom template test, available application wide. Like - :meth:`Flask.add_template_test` but for a blueprint. Works exactly - like the :meth:`app_template_test` decorator. - - .. versionadded:: 0.10 - - :param name: the optional name of the test, otherwise the - function name will be used. - """ - - def register_template(state): - state.app.jinja_env.tests[name or f.__name__] = f - - self.record_once(register_template) - - def app_template_global(self, name=None): - """Register a custom template global, available application wide. Like - :meth:`Flask.template_global` but for a blueprint. - - .. versionadded:: 0.10 - - :param name: the optional name of the global, otherwise the - function name will be used. 
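A compact sketch of the blueprint-level template helpers described above; the ``reverse`` filter and ``double`` global are placeholder names::

    from flask import Blueprint

    bp = Blueprint("frontend", __name__)

    @bp.app_template_filter()
    def reverse(s):
        # usable in every template once the blueprint is registered
        return s[::-1]

    @bp.app_template_global()
    def double(n):
        return 2 * n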
- """ - - def decorator(f): - self.add_app_template_global(f, name=name) - return f - - return decorator - - def add_app_template_global(self, f, name=None): - """Register a custom template global, available application wide. Like - :meth:`Flask.add_template_global` but for a blueprint. Works exactly - like the :meth:`app_template_global` decorator. - - .. versionadded:: 0.10 - - :param name: the optional name of the global, otherwise the - function name will be used. - """ - - def register_template(state): - state.app.jinja_env.globals[name or f.__name__] = f - - self.record_once(register_template) - - def before_request(self, f): - """Like :meth:`Flask.before_request` but for a blueprint. This function - is only executed before each request that is handled by a function of - that blueprint. - """ - self.record_once( - lambda s: s.app.before_request_funcs.setdefault(self.name, []).append(f) - ) - return f - - def before_app_request(self, f): - """Like :meth:`Flask.before_request`. Such a function is executed - before each request, even if outside of a blueprint. - """ - self.record_once( - lambda s: s.app.before_request_funcs.setdefault(None, []).append(f) - ) - return f - - def before_app_first_request(self, f): - """Like :meth:`Flask.before_first_request`. Such a function is - executed before the first request to the application. - """ - self.record_once(lambda s: s.app.before_first_request_funcs.append(f)) - return f - - def after_request(self, f): - """Like :meth:`Flask.after_request` but for a blueprint. This function - is only executed after each request that is handled by a function of - that blueprint. - """ - self.record_once( - lambda s: s.app.after_request_funcs.setdefault(self.name, []).append(f) - ) - return f - - def after_app_request(self, f): - """Like :meth:`Flask.after_request` but for a blueprint. Such a function - is executed after each request, even if outside of the blueprint. - """ - self.record_once( - lambda s: s.app.after_request_funcs.setdefault(None, []).append(f) - ) - return f - - def teardown_request(self, f): - """Like :meth:`Flask.teardown_request` but for a blueprint. This - function is only executed when tearing down requests handled by a - function of that blueprint. Teardown request functions are executed - when the request context is popped, even when no actual request was - performed. - """ - self.record_once( - lambda s: s.app.teardown_request_funcs.setdefault(self.name, []).append(f) - ) - return f - - def teardown_app_request(self, f): - """Like :meth:`Flask.teardown_request` but for a blueprint. Such a - function is executed when tearing down each request, even if outside of - the blueprint. - """ - self.record_once( - lambda s: s.app.teardown_request_funcs.setdefault(None, []).append(f) - ) - return f - - def context_processor(self, f): - """Like :meth:`Flask.context_processor` but for a blueprint. This - function is only executed for requests handled by a blueprint. - """ - self.record_once( - lambda s: s.app.template_context_processors.setdefault( - self.name, [] - ).append(f) - ) - return f - - def app_context_processor(self, f): - """Like :meth:`Flask.context_processor` but for a blueprint. Such a - function is executed each request, even if outside of the blueprint. - """ - self.record_once( - lambda s: s.app.template_context_processors.setdefault(None, []).append(f) - ) - return f - - def app_errorhandler(self, code): - """Like :meth:`Flask.errorhandler` but for a blueprint. 
This - handler is used for all requests, even if outside of the blueprint. - """ - - def decorator(f): - self.record_once(lambda s: s.app.errorhandler(code)(f)) - return f - - return decorator - - def url_value_preprocessor(self, f): - """Registers a function as URL value preprocessor for this - blueprint. It's called before the view functions are called and - can modify the url values provided. - """ - self.record_once( - lambda s: s.app.url_value_preprocessors.setdefault(self.name, []).append(f) - ) - return f - - def url_defaults(self, f): - """Callback function for URL defaults for this blueprint. It's called - with the endpoint and values and should update the values passed - in place. - """ - self.record_once( - lambda s: s.app.url_default_functions.setdefault(self.name, []).append(f) - ) - return f - - def app_url_value_preprocessor(self, f): - """Same as :meth:`url_value_preprocessor` but application wide. - """ - self.record_once( - lambda s: s.app.url_value_preprocessors.setdefault(None, []).append(f) - ) - return f - - def app_url_defaults(self, f): - """Same as :meth:`url_defaults` but application wide. - """ - self.record_once( - lambda s: s.app.url_default_functions.setdefault(None, []).append(f) - ) - return f - - def errorhandler(self, code_or_exception): - """Registers an error handler that becomes active for this blueprint - only. Please be aware that routing does not happen local to a - blueprint so an error handler for 404 usually is not handled by - a blueprint unless it is caused inside a view function. Another - special case is the 500 internal server error which is always looked - up from the application. - - Otherwise works as the :meth:`~flask.Flask.errorhandler` decorator - of the :class:`~flask.Flask` object. - """ - - def decorator(f): - self.record_once( - lambda s: s.app._register_error_handler(self.name, code_or_exception, f) - ) - return f - - return decorator - - def register_error_handler(self, code_or_exception, f): - """Non-decorator version of the :meth:`errorhandler` error attach - function, akin to the :meth:`~flask.Flask.register_error_handler` - application-wide function of the :class:`~flask.Flask` object but - for error handlers limited to this blueprint. - - .. versionadded:: 0.11 - """ - self.record_once( - lambda s: s.app._register_error_handler(self.name, code_or_exception, f) - ) diff --git a/venv/lib/python3.6/site-packages/flask/cli.py b/venv/lib/python3.6/site-packages/flask/cli.py deleted file mode 100644 index 1158545..0000000 --- a/venv/lib/python3.6/site-packages/flask/cli.py +++ /dev/null @@ -1,970 +0,0 @@ -# -*- coding: utf-8 -*- -""" - flask.cli - ~~~~~~~~~ - - A simple command line application to run flask apps. 
- - :copyright: 2010 Pallets - :license: BSD-3-Clause -""" -from __future__ import print_function - -import ast -import inspect -import os -import platform -import re -import sys -import traceback -from functools import update_wrapper -from operator import attrgetter -from threading import Lock -from threading import Thread - -import click -from werkzeug.utils import import_string - -from ._compat import getargspec -from ._compat import itervalues -from ._compat import reraise -from ._compat import text_type -from .globals import current_app -from .helpers import get_debug_flag -from .helpers import get_env -from .helpers import get_load_dotenv - -try: - import dotenv -except ImportError: - dotenv = None - -try: - import ssl -except ImportError: - ssl = None - - -class NoAppException(click.UsageError): - """Raised if an application cannot be found or loaded.""" - - -def find_best_app(script_info, module): - """Given a module instance this tries to find the best possible - application in the module or raises an exception. - """ - from . import Flask - - # Search for the most common names first. - for attr_name in ("app", "application"): - app = getattr(module, attr_name, None) - - if isinstance(app, Flask): - return app - - # Otherwise find the only object that is a Flask instance. - matches = [v for v in itervalues(module.__dict__) if isinstance(v, Flask)] - - if len(matches) == 1: - return matches[0] - elif len(matches) > 1: - raise NoAppException( - 'Detected multiple Flask applications in module "{module}". Use ' - '"FLASK_APP={module}:name" to specify the correct ' - "one.".format(module=module.__name__) - ) - - # Search for app factory functions. - for attr_name in ("create_app", "make_app"): - app_factory = getattr(module, attr_name, None) - - if inspect.isfunction(app_factory): - try: - app = call_factory(script_info, app_factory) - - if isinstance(app, Flask): - return app - except TypeError: - if not _called_with_wrong_args(app_factory): - raise - raise NoAppException( - 'Detected factory "{factory}" in module "{module}", but ' - "could not call it without arguments. Use " - "\"FLASK_APP='{module}:{factory}(args)'\" to specify " - "arguments.".format(factory=attr_name, module=module.__name__) - ) - - raise NoAppException( - 'Failed to find Flask application or factory in module "{module}". ' - 'Use "FLASK_APP={module}:name to specify one.'.format(module=module.__name__) - ) - - -def call_factory(script_info, app_factory, arguments=()): - """Takes an app factory, a ``script_info` object and optionally a tuple - of arguments. Checks for the existence of a script_info argument and calls - the app_factory depending on that and the arguments provided. - """ - args_spec = getargspec(app_factory) - arg_names = args_spec.args - arg_defaults = args_spec.defaults - - if "script_info" in arg_names: - return app_factory(*arguments, script_info=script_info) - elif arguments: - return app_factory(*arguments) - elif not arguments and len(arg_names) == 1 and arg_defaults is None: - return app_factory(script_info) - - return app_factory() - - -def _called_with_wrong_args(factory): - """Check whether calling a function raised a ``TypeError`` because - the call failed or because something in the factory raised the - error. 
- - :param factory: the factory function that was called - :return: true if the call failed - """ - tb = sys.exc_info()[2] - - try: - while tb is not None: - if tb.tb_frame.f_code is factory.__code__: - # in the factory, it was called successfully - return False - - tb = tb.tb_next - - # didn't reach the factory - return True - finally: - # explicitly delete tb as it is circular referenced - # https://docs.python.org/2/library/sys.html#sys.exc_info - del tb - - -def find_app_by_string(script_info, module, app_name): - """Checks if the given string is a variable name or a function. If it is a - function, it checks for specified arguments and whether it takes a - ``script_info`` argument and calls the function with the appropriate - arguments. - """ - from . import Flask - - match = re.match(r"^ *([^ ()]+) *(?:\((.*?) *,? *\))? *$", app_name) - - if not match: - raise NoAppException( - '"{name}" is not a valid variable name or function ' - "expression.".format(name=app_name) - ) - - name, args = match.groups() - - try: - attr = getattr(module, name) - except AttributeError as e: - raise NoAppException(e.args[0]) - - if inspect.isfunction(attr): - if args: - try: - args = ast.literal_eval("({args},)".format(args=args)) - except (ValueError, SyntaxError) as e: - raise NoAppException( - "Could not parse the arguments in " - '"{app_name}".'.format(e=e, app_name=app_name) - ) - else: - args = () - - try: - app = call_factory(script_info, attr, args) - except TypeError as e: - if not _called_with_wrong_args(attr): - raise - - raise NoAppException( - '{e}\nThe factory "{app_name}" in module "{module}" could not ' - "be called with the specified arguments.".format( - e=e, app_name=app_name, module=module.__name__ - ) - ) - else: - app = attr - - if isinstance(app, Flask): - return app - - raise NoAppException( - "A valid Flask application was not obtained from " - '"{module}:{app_name}".'.format(module=module.__name__, app_name=app_name) - ) - - -def prepare_import(path): - """Given a filename this will try to calculate the python path, add it - to the search path and return the actual module name that is expected. - """ - path = os.path.realpath(path) - - fname, ext = os.path.splitext(path) - if ext == ".py": - path = fname - - if os.path.basename(path) == "__init__": - path = os.path.dirname(path) - - module_name = [] - - # move up until outside package structure (no __init__.py) - while True: - path, name = os.path.split(path) - module_name.append(name) - - if not os.path.exists(os.path.join(path, "__init__.py")): - break - - if sys.path[0] != path: - sys.path.insert(0, path) - - return ".".join(module_name[::-1]) - - -def locate_app(script_info, module_name, app_name, raise_if_not_found=True): - __traceback_hide__ = True # noqa: F841 - - try: - __import__(module_name) - except ImportError: - # Reraise the ImportError if it occurred within the imported module. - # Determine this by checking whether the trace has a depth > 1. 
- if sys.exc_info()[-1].tb_next: - raise NoAppException( - 'While importing "{name}", an ImportError was raised:' - "\n\n{tb}".format(name=module_name, tb=traceback.format_exc()) - ) - elif raise_if_not_found: - raise NoAppException('Could not import "{name}".'.format(name=module_name)) - else: - return - - module = sys.modules[module_name] - - if app_name is None: - return find_best_app(script_info, module) - else: - return find_app_by_string(script_info, module, app_name) - - -def get_version(ctx, param, value): - if not value or ctx.resilient_parsing: - return - - import werkzeug - from . import __version__ - - message = "Python %(python)s\nFlask %(flask)s\nWerkzeug %(werkzeug)s" - click.echo( - message - % { - "python": platform.python_version(), - "flask": __version__, - "werkzeug": werkzeug.__version__, - }, - color=ctx.color, - ) - ctx.exit() - - -version_option = click.Option( - ["--version"], - help="Show the flask version", - expose_value=False, - callback=get_version, - is_flag=True, - is_eager=True, -) - - -class DispatchingApp(object): - """Special application that dispatches to a Flask application which - is imported by name in a background thread. If an error happens - it is recorded and shown as part of the WSGI handling which in case - of the Werkzeug debugger means that it shows up in the browser. - """ - - def __init__(self, loader, use_eager_loading=False): - self.loader = loader - self._app = None - self._lock = Lock() - self._bg_loading_exc_info = None - if use_eager_loading: - self._load_unlocked() - else: - self._load_in_background() - - def _load_in_background(self): - def _load_app(): - __traceback_hide__ = True # noqa: F841 - with self._lock: - try: - self._load_unlocked() - except Exception: - self._bg_loading_exc_info = sys.exc_info() - - t = Thread(target=_load_app, args=()) - t.start() - - def _flush_bg_loading_exception(self): - __traceback_hide__ = True # noqa: F841 - exc_info = self._bg_loading_exc_info - if exc_info is not None: - self._bg_loading_exc_info = None - reraise(*exc_info) - - def _load_unlocked(self): - __traceback_hide__ = True # noqa: F841 - self._app = rv = self.loader() - self._bg_loading_exc_info = None - return rv - - def __call__(self, environ, start_response): - __traceback_hide__ = True # noqa: F841 - if self._app is not None: - return self._app(environ, start_response) - self._flush_bg_loading_exception() - with self._lock: - if self._app is not None: - rv = self._app - else: - rv = self._load_unlocked() - return rv(environ, start_response) - - -class ScriptInfo(object): - """Helper object to deal with Flask applications. This is usually not - necessary to interface with as it's used internally in the dispatching - to click. In future versions of Flask this object will most likely play - a bigger role. Typically it's created automatically by the - :class:`FlaskGroup` but you can also manually create it and pass it - onwards as click object. - """ - - def __init__(self, app_import_path=None, create_app=None, set_debug_flag=True): - #: Optionally the import path for the Flask application. - self.app_import_path = app_import_path or os.environ.get("FLASK_APP") - #: Optionally a function that is passed the script info to create - #: the instance of the application. - self.create_app = create_app - #: A dictionary with arbitrary data that can be associated with - #: this script info. 
- self.data = {} - self.set_debug_flag = set_debug_flag - self._loaded_app = None - - def load_app(self): - """Loads the Flask app (if not yet loaded) and returns it. Calling - this multiple times will just result in the already loaded app to - be returned. - """ - __traceback_hide__ = True # noqa: F841 - - if self._loaded_app is not None: - return self._loaded_app - - app = None - - if self.create_app is not None: - app = call_factory(self, self.create_app) - else: - if self.app_import_path: - path, name = ( - re.split(r":(?![\\/])", self.app_import_path, 1) + [None] - )[:2] - import_name = prepare_import(path) - app = locate_app(self, import_name, name) - else: - for path in ("wsgi.py", "app.py"): - import_name = prepare_import(path) - app = locate_app(self, import_name, None, raise_if_not_found=False) - - if app: - break - - if not app: - raise NoAppException( - "Could not locate a Flask application. You did not provide " - 'the "FLASK_APP" environment variable, and a "wsgi.py" or ' - '"app.py" module was not found in the current directory.' - ) - - if self.set_debug_flag: - # Update the app's debug flag through the descriptor so that - # other values repopulate as well. - app.debug = get_debug_flag() - - self._loaded_app = app - return app - - -pass_script_info = click.make_pass_decorator(ScriptInfo, ensure=True) - - -def with_appcontext(f): - """Wraps a callback so that it's guaranteed to be executed with the - script's application context. If callbacks are registered directly - to the ``app.cli`` object then they are wrapped with this function - by default unless it's disabled. - """ - - @click.pass_context - def decorator(__ctx, *args, **kwargs): - with __ctx.ensure_object(ScriptInfo).load_app().app_context(): - return __ctx.invoke(f, *args, **kwargs) - - return update_wrapper(decorator, f) - - -class AppGroup(click.Group): - """This works similar to a regular click :class:`~click.Group` but it - changes the behavior of the :meth:`command` decorator so that it - automatically wraps the functions in :func:`with_appcontext`. - - Not to be confused with :class:`FlaskGroup`. - """ - - def command(self, *args, **kwargs): - """This works exactly like the method of the same name on a regular - :class:`click.Group` but it wraps callbacks in :func:`with_appcontext` - unless it's disabled by passing ``with_appcontext=False``. - """ - wrap_for_ctx = kwargs.pop("with_appcontext", True) - - def decorator(f): - if wrap_for_ctx: - f = with_appcontext(f) - return click.Group.command(self, *args, **kwargs)(f) - - return decorator - - def group(self, *args, **kwargs): - """This works exactly like the method of the same name on a regular - :class:`click.Group` but it defaults the group class to - :class:`AppGroup`. - """ - kwargs.setdefault("cls", AppGroup) - return click.Group.group(self, *args, **kwargs) - - -class FlaskGroup(AppGroup): - """Special subclass of the :class:`AppGroup` group that supports - loading more commands from the configured Flask app. Normally a - developer does not have to interface with this class but there are - some very advanced use cases for which it makes sense to create an - instance of this. - - For information as of why this is useful see :ref:`custom-scripts`. - - :param add_default_commands: if this is True then the default run and - shell commands will be added. - :param add_version_option: adds the ``--version`` option. - :param create_app: an optional callback that is passed the script info and - returns the loaded app. 
- :param load_dotenv: Load the nearest :file:`.env` and :file:`.flaskenv` - files to set environment variables. Will also change the working - directory to the directory containing the first file found. - :param set_debug_flag: Set the app's debug flag based on the active - environment - - .. versionchanged:: 1.0 - If installed, python-dotenv will be used to load environment variables - from :file:`.env` and :file:`.flaskenv` files. - """ - - def __init__( - self, - add_default_commands=True, - create_app=None, - add_version_option=True, - load_dotenv=True, - set_debug_flag=True, - **extra - ): - params = list(extra.pop("params", None) or ()) - - if add_version_option: - params.append(version_option) - - AppGroup.__init__(self, params=params, **extra) - self.create_app = create_app - self.load_dotenv = load_dotenv - self.set_debug_flag = set_debug_flag - - if add_default_commands: - self.add_command(run_command) - self.add_command(shell_command) - self.add_command(routes_command) - - self._loaded_plugin_commands = False - - def _load_plugin_commands(self): - if self._loaded_plugin_commands: - return - try: - import pkg_resources - except ImportError: - self._loaded_plugin_commands = True - return - - for ep in pkg_resources.iter_entry_points("flask.commands"): - self.add_command(ep.load(), ep.name) - self._loaded_plugin_commands = True - - def get_command(self, ctx, name): - self._load_plugin_commands() - - # We load built-in commands first as these should always be the - # same no matter what the app does. If the app does want to - # override this it needs to make a custom instance of this group - # and not attach the default commands. - # - # This also means that the script stays functional in case the - # application completely fails. - rv = AppGroup.get_command(self, ctx, name) - if rv is not None: - return rv - - info = ctx.ensure_object(ScriptInfo) - try: - rv = info.load_app().cli.get_command(ctx, name) - if rv is not None: - return rv - except NoAppException: - pass - - def list_commands(self, ctx): - self._load_plugin_commands() - - # The commands available is the list of both the application (if - # available) plus the builtin commands. - rv = set(click.Group.list_commands(self, ctx)) - info = ctx.ensure_object(ScriptInfo) - try: - rv.update(info.load_app().cli.list_commands(ctx)) - except Exception: - # Here we intentionally swallow all exceptions as we don't - # want the help page to break if the app does not exist. - # If someone attempts to use the command we try to create - # the app again and this will give us the error. - # However, we will not do so silently because that would confuse - # users. - traceback.print_exc() - return sorted(rv) - - def main(self, *args, **kwargs): - # Set a global flag that indicates that we were invoked from the - # command line interface. This is detected by Flask.run to make the - # call into a no-op. This is necessary to avoid ugly errors when the - # script that is loaded here also attempts to start a server. - os.environ["FLASK_RUN_FROM_CLI"] = "true" - - if get_load_dotenv(self.load_dotenv): - load_dotenv() - - obj = kwargs.get("obj") - - if obj is None: - obj = ScriptInfo( - create_app=self.create_app, set_debug_flag=self.set_debug_flag - ) - - kwargs["obj"] = obj - kwargs.setdefault("auto_envvar_prefix", "FLASK") - return super(FlaskGroup, self).main(*args, **kwargs) - - -def _path_is_ancestor(path, other): - """Take ``other`` and remove the length of ``path`` from it. Then join it - to ``path``. 
If it is the original value, ``path`` is an ancestor of - ``other``.""" - return os.path.join(path, other[len(path) :].lstrip(os.sep)) == other - - -def load_dotenv(path=None): - """Load "dotenv" files in order of precedence to set environment variables. - - If an env var is already set it is not overwritten, so earlier files in the - list are preferred over later files. - - Changes the current working directory to the location of the first file - found, with the assumption that it is in the top level project directory - and will be where the Python path should import local packages from. - - This is a no-op if `python-dotenv`_ is not installed. - - .. _python-dotenv: https://github.com/theskumar/python-dotenv#readme - - :param path: Load the file at this location instead of searching. - :return: ``True`` if a file was loaded. - - .. versionchanged:: 1.1.0 - Returns ``False`` when python-dotenv is not installed, or when - the given path isn't a file. - - .. versionadded:: 1.0 - """ - if dotenv is None: - if path or os.path.isfile(".env") or os.path.isfile(".flaskenv"): - click.secho( - " * Tip: There are .env or .flaskenv files present." - ' Do "pip install python-dotenv" to use them.', - fg="yellow", - err=True, - ) - - return False - - # if the given path specifies the actual file then return True, - # else False - if path is not None: - if os.path.isfile(path): - return dotenv.load_dotenv(path) - - return False - - new_dir = None - - for name in (".env", ".flaskenv"): - path = dotenv.find_dotenv(name, usecwd=True) - - if not path: - continue - - if new_dir is None: - new_dir = os.path.dirname(path) - - dotenv.load_dotenv(path) - - if new_dir and os.getcwd() != new_dir: - os.chdir(new_dir) - - return new_dir is not None # at least one file was located and loaded - - -def show_server_banner(env, debug, app_import_path, eager_loading): - """Show extra startup messages the first time the server is run, - ignoring the reloader. - """ - if os.environ.get("WERKZEUG_RUN_MAIN") == "true": - return - - if app_import_path is not None: - message = ' * Serving Flask app "{0}"'.format(app_import_path) - - if not eager_loading: - message += " (lazy loading)" - - click.echo(message) - - click.echo(" * Environment: {0}".format(env)) - - if env == "production": - click.secho( - " WARNING: This is a development server. " - "Do not use it in a production deployment.", - fg="red", - ) - click.secho(" Use a production WSGI server instead.", dim=True) - - if debug is not None: - click.echo(" * Debug mode: {0}".format("on" if debug else "off")) - - -class CertParamType(click.ParamType): - """Click option type for the ``--cert`` option. Allows either an - existing file, the string ``'adhoc'``, or an import for a - :class:`~ssl.SSLContext` object. 
- """ - - name = "path" - - def __init__(self): - self.path_type = click.Path(exists=True, dir_okay=False, resolve_path=True) - - def convert(self, value, param, ctx): - if ssl is None: - raise click.BadParameter( - 'Using "--cert" requires Python to be compiled with SSL support.', - ctx, - param, - ) - - try: - return self.path_type(value, param, ctx) - except click.BadParameter: - value = click.STRING(value, param, ctx).lower() - - if value == "adhoc": - try: - import OpenSSL # noqa: F401 - except ImportError: - raise click.BadParameter( - "Using ad-hoc certificates requires pyOpenSSL.", ctx, param - ) - - return value - - obj = import_string(value, silent=True) - - if sys.version_info < (2, 7, 9): - if obj: - return obj - else: - if isinstance(obj, ssl.SSLContext): - return obj - - raise - - -def _validate_key(ctx, param, value): - """The ``--key`` option must be specified when ``--cert`` is a file. - Modifies the ``cert`` param to be a ``(cert, key)`` pair if needed. - """ - cert = ctx.params.get("cert") - is_adhoc = cert == "adhoc" - - if sys.version_info < (2, 7, 9): - is_context = cert and not isinstance(cert, (text_type, bytes)) - else: - is_context = isinstance(cert, ssl.SSLContext) - - if value is not None: - if is_adhoc: - raise click.BadParameter( - 'When "--cert" is "adhoc", "--key" is not used.', ctx, param - ) - - if is_context: - raise click.BadParameter( - 'When "--cert" is an SSLContext object, "--key is not used.', ctx, param - ) - - if not cert: - raise click.BadParameter('"--cert" must also be specified.', ctx, param) - - ctx.params["cert"] = cert, value - - else: - if cert and not (is_adhoc or is_context): - raise click.BadParameter('Required when using "--cert".', ctx, param) - - return value - - -class SeparatedPathType(click.Path): - """Click option type that accepts a list of values separated by the - OS's path separator (``:``, ``;`` on Windows). Each value is - validated as a :class:`click.Path` type. - """ - - def convert(self, value, param, ctx): - items = self.split_envvar_value(value) - super_convert = super(SeparatedPathType, self).convert - return [super_convert(item, param, ctx) for item in items] - - -@click.command("run", short_help="Run a development server.") -@click.option("--host", "-h", default="127.0.0.1", help="The interface to bind to.") -@click.option("--port", "-p", default=5000, help="The port to bind to.") -@click.option( - "--cert", type=CertParamType(), help="Specify a certificate file to use HTTPS." -) -@click.option( - "--key", - type=click.Path(exists=True, dir_okay=False, resolve_path=True), - callback=_validate_key, - expose_value=False, - help="The key file to use when specifying a certificate.", -) -@click.option( - "--reload/--no-reload", - default=None, - help="Enable or disable the reloader. By default the reloader " - "is active if debug is enabled.", -) -@click.option( - "--debugger/--no-debugger", - default=None, - help="Enable or disable the debugger. By default the debugger " - "is active if debug is enabled.", -) -@click.option( - "--eager-loading/--lazy-loader", - default=None, - help="Enable or disable eager loading. By default eager " - "loading is enabled if the reloader is disabled.", -) -@click.option( - "--with-threads/--without-threads", - default=True, - help="Enable or disable multithreading.", -) -@click.option( - "--extra-files", - default=None, - type=SeparatedPathType(), - help=( - "Extra files that trigger a reload on change. 
Multiple paths" - " are separated by '{}'.".format(os.path.pathsep) - ), -) -@pass_script_info -def run_command( - info, host, port, reload, debugger, eager_loading, with_threads, cert, extra_files -): - """Run a local development server. - - This server is for development purposes only. It does not provide - the stability, security, or performance of production WSGI servers. - - The reloader and debugger are enabled by default if - FLASK_ENV=development or FLASK_DEBUG=1. - """ - debug = get_debug_flag() - - if reload is None: - reload = debug - - if debugger is None: - debugger = debug - - if eager_loading is None: - eager_loading = not reload - - show_server_banner(get_env(), debug, info.app_import_path, eager_loading) - app = DispatchingApp(info.load_app, use_eager_loading=eager_loading) - - from werkzeug.serving import run_simple - - run_simple( - host, - port, - app, - use_reloader=reload, - use_debugger=debugger, - threaded=with_threads, - ssl_context=cert, - extra_files=extra_files, - ) - - -@click.command("shell", short_help="Run a shell in the app context.") -@with_appcontext -def shell_command(): - """Run an interactive Python shell in the context of a given - Flask application. The application will populate the default - namespace of this shell according to it's configuration. - - This is useful for executing small snippets of management code - without having to manually configure the application. - """ - import code - from .globals import _app_ctx_stack - - app = _app_ctx_stack.top.app - banner = "Python %s on %s\nApp: %s [%s]\nInstance: %s" % ( - sys.version, - sys.platform, - app.import_name, - app.env, - app.instance_path, - ) - ctx = {} - - # Support the regular Python interpreter startup script if someone - # is using it. - startup = os.environ.get("PYTHONSTARTUP") - if startup and os.path.isfile(startup): - with open(startup, "r") as f: - eval(compile(f.read(), startup, "exec"), ctx) - - ctx.update(app.make_shell_context()) - - code.interact(banner=banner, local=ctx) - - -@click.command("routes", short_help="Show the routes for the app.") -@click.option( - "--sort", - "-s", - type=click.Choice(("endpoint", "methods", "rule", "match")), - default="endpoint", - help=( - 'Method to sort routes by. "match" is the order that Flask will match ' - "routes when dispatching a request." 
- ), -) -@click.option("--all-methods", is_flag=True, help="Show HEAD and OPTIONS methods.") -@with_appcontext -def routes_command(sort, all_methods): - """Show all registered routes with endpoints and methods.""" - - rules = list(current_app.url_map.iter_rules()) - if not rules: - click.echo("No routes were registered.") - return - - ignored_methods = set(() if all_methods else ("HEAD", "OPTIONS")) - - if sort in ("endpoint", "rule"): - rules = sorted(rules, key=attrgetter(sort)) - elif sort == "methods": - rules = sorted(rules, key=lambda rule: sorted(rule.methods)) - - rule_methods = [", ".join(sorted(rule.methods - ignored_methods)) for rule in rules] - - headers = ("Endpoint", "Methods", "Rule") - widths = ( - max(len(rule.endpoint) for rule in rules), - max(len(methods) for methods in rule_methods), - max(len(rule.rule) for rule in rules), - ) - widths = [max(len(h), w) for h, w in zip(headers, widths)] - row = "{{0:<{0}}} {{1:<{1}}} {{2:<{2}}}".format(*widths) - - click.echo(row.format(*headers).strip()) - click.echo(row.format(*("-" * width for width in widths))) - - for rule, methods in zip(rules, rule_methods): - click.echo(row.format(rule.endpoint, methods, rule.rule).rstrip()) - - -cli = FlaskGroup( - help="""\ -A general utility script for Flask applications. - -Provides commands from Flask, extensions, and the application. Loads the -application defined in the FLASK_APP environment variable, or from a wsgi.py -file. Setting the FLASK_ENV environment variable to 'development' will enable -debug mode. - -\b - {prefix}{cmd} FLASK_APP=hello.py - {prefix}{cmd} FLASK_ENV=development - {prefix}flask run -""".format( - cmd="export" if os.name == "posix" else "set", - prefix="$ " if os.name == "posix" else "> ", - ) -) - - -def main(as_module=False): - cli.main(prog_name="python -m flask" if as_module else None) - - -if __name__ == "__main__": - main(as_module=True) diff --git a/venv/lib/python3.6/site-packages/flask/config.py b/venv/lib/python3.6/site-packages/flask/config.py deleted file mode 100644 index 809de33..0000000 --- a/venv/lib/python3.6/site-packages/flask/config.py +++ /dev/null @@ -1,269 +0,0 @@ -# -*- coding: utf-8 -*- -""" - flask.config - ~~~~~~~~~~~~ - - Implements the configuration related objects. - - :copyright: 2010 Pallets - :license: BSD-3-Clause -""" -import errno -import os -import types - -from werkzeug.utils import import_string - -from . import json -from ._compat import iteritems -from ._compat import string_types - - -class ConfigAttribute(object): - """Makes an attribute forward to the config""" - - def __init__(self, name, get_converter=None): - self.__name__ = name - self.get_converter = get_converter - - def __get__(self, obj, type=None): - if obj is None: - return self - rv = obj.config[self.__name__] - if self.get_converter is not None: - rv = self.get_converter(rv) - return rv - - def __set__(self, obj, value): - obj.config[self.__name__] = value - - -class Config(dict): - """Works exactly like a dict but provides ways to fill it from files - or special dictionaries. There are two common patterns to populate the - config. - - Either you can fill the config from a config file:: - - app.config.from_pyfile('yourconfig.cfg') - - Or alternatively you can define the configuration options in the - module that calls :meth:`from_object` or provide an import path to - a module that should be loaded. 
It is also possible to tell it to - use the same module and with that provide the configuration values - just before the call:: - - DEBUG = True - SECRET_KEY = 'development key' - app.config.from_object(__name__) - - In both cases (loading from any Python file or loading from modules), - only uppercase keys are added to the config. This makes it possible to use - lowercase values in the config file for temporary values that are not added - to the config or to define the config keys in the same file that implements - the application. - - Probably the most interesting way to load configurations is from an - environment variable pointing to a file:: - - app.config.from_envvar('YOURAPPLICATION_SETTINGS') - - In this case before launching the application you have to set this - environment variable to the file you want to use. On Linux and OS X - use the export statement:: - - export YOURAPPLICATION_SETTINGS='/path/to/config/file' - - On windows use `set` instead. - - :param root_path: path to which files are read relative from. When the - config object is created by the application, this is - the application's :attr:`~flask.Flask.root_path`. - :param defaults: an optional dictionary of default values - """ - - def __init__(self, root_path, defaults=None): - dict.__init__(self, defaults or {}) - self.root_path = root_path - - def from_envvar(self, variable_name, silent=False): - """Loads a configuration from an environment variable pointing to - a configuration file. This is basically just a shortcut with nicer - error messages for this line of code:: - - app.config.from_pyfile(os.environ['YOURAPPLICATION_SETTINGS']) - - :param variable_name: name of the environment variable - :param silent: set to ``True`` if you want silent failure for missing - files. - :return: bool. ``True`` if able to load config, ``False`` otherwise. - """ - rv = os.environ.get(variable_name) - if not rv: - if silent: - return False - raise RuntimeError( - "The environment variable %r is not set " - "and as such configuration could not be " - "loaded. Set this variable and make it " - "point to a configuration file" % variable_name - ) - return self.from_pyfile(rv, silent=silent) - - def from_pyfile(self, filename, silent=False): - """Updates the values in the config from a Python file. This function - behaves as if the file was imported as module with the - :meth:`from_object` function. - - :param filename: the filename of the config. This can either be an - absolute filename or a filename relative to the - root path. - :param silent: set to ``True`` if you want silent failure for missing - files. - - .. versionadded:: 0.7 - `silent` parameter. - """ - filename = os.path.join(self.root_path, filename) - d = types.ModuleType("config") - d.__file__ = filename - try: - with open(filename, mode="rb") as config_file: - exec(compile(config_file.read(), filename, "exec"), d.__dict__) - except IOError as e: - if silent and e.errno in (errno.ENOENT, errno.EISDIR, errno.ENOTDIR): - return False - e.strerror = "Unable to load configuration file (%s)" % e.strerror - raise - self.from_object(d) - return True - - def from_object(self, obj): - """Updates the values from the given object. An object can be of one - of the following two types: - - - a string: in this case the object with that name will be imported - - an actual object reference: that object is used directly - - Objects are usually either modules or classes. :meth:`from_object` - loads only the uppercase attributes of the module/class. 
A ``dict`` - object will not work with :meth:`from_object` because the keys of a - ``dict`` are not attributes of the ``dict`` class. - - Example of module-based configuration:: - - app.config.from_object('yourapplication.default_config') - from yourapplication import default_config - app.config.from_object(default_config) - - Nothing is done to the object before loading. If the object is a - class and has ``@property`` attributes, it needs to be - instantiated before being passed to this method. - - You should not use this function to load the actual configuration but - rather configuration defaults. The actual config should be loaded - with :meth:`from_pyfile` and ideally from a location not within the - package because the package might be installed system wide. - - See :ref:`config-dev-prod` for an example of class-based configuration - using :meth:`from_object`. - - :param obj: an import name or object - """ - if isinstance(obj, string_types): - obj = import_string(obj) - for key in dir(obj): - if key.isupper(): - self[key] = getattr(obj, key) - - def from_json(self, filename, silent=False): - """Updates the values in the config from a JSON file. This function - behaves as if the JSON object was a dictionary and passed to the - :meth:`from_mapping` function. - - :param filename: the filename of the JSON file. This can either be an - absolute filename or a filename relative to the - root path. - :param silent: set to ``True`` if you want silent failure for missing - files. - - .. versionadded:: 0.11 - """ - filename = os.path.join(self.root_path, filename) - - try: - with open(filename) as json_file: - obj = json.loads(json_file.read()) - except IOError as e: - if silent and e.errno in (errno.ENOENT, errno.EISDIR): - return False - e.strerror = "Unable to load configuration file (%s)" % e.strerror - raise - return self.from_mapping(obj) - - def from_mapping(self, *mapping, **kwargs): - """Updates the config like :meth:`update` ignoring items with non-upper - keys. - - .. versionadded:: 0.11 - """ - mappings = [] - if len(mapping) == 1: - if hasattr(mapping[0], "items"): - mappings.append(mapping[0].items()) - else: - mappings.append(mapping[0]) - elif len(mapping) > 1: - raise TypeError( - "expected at most 1 positional argument, got %d" % len(mapping) - ) - mappings.append(kwargs.items()) - for mapping in mappings: - for (key, value) in mapping: - if key.isupper(): - self[key] = value - return True - - def get_namespace(self, namespace, lowercase=True, trim_namespace=True): - """Returns a dictionary containing a subset of configuration options - that match the specified namespace/prefix. Example usage:: - - app.config['IMAGE_STORE_TYPE'] = 'fs' - app.config['IMAGE_STORE_PATH'] = '/var/app/images' - app.config['IMAGE_STORE_BASE_URL'] = 'http://img.website.com' - image_store_config = app.config.get_namespace('IMAGE_STORE_') - - The resulting dictionary `image_store_config` would look like:: - - { - 'type': 'fs', - 'path': '/var/app/images', - 'base_url': 'http://img.website.com' - } - - This is often useful when configuration options map directly to - keyword arguments in functions or class constructors. - - :param namespace: a configuration namespace - :param lowercase: a flag indicating if the keys of the resulting - dictionary should be lowercase - :param trim_namespace: a flag indicating if the keys of the resulting - dictionary should not include the namespace - - .. 
versionadded:: 0.11 - """ - rv = {} - for k, v in iteritems(self): - if not k.startswith(namespace): - continue - if trim_namespace: - key = k[len(namespace) :] - else: - key = k - if lowercase: - key = key.lower() - rv[key] = v - return rv - - def __repr__(self): - return "<%s %s>" % (self.__class__.__name__, dict.__repr__(self)) diff --git a/venv/lib/python3.6/site-packages/flask/ctx.py b/venv/lib/python3.6/site-packages/flask/ctx.py deleted file mode 100644 index 172f6a0..0000000 --- a/venv/lib/python3.6/site-packages/flask/ctx.py +++ /dev/null @@ -1,475 +0,0 @@ -# -*- coding: utf-8 -*- -""" - flask.ctx - ~~~~~~~~~ - - Implements the objects required to keep the context. - - :copyright: 2010 Pallets - :license: BSD-3-Clause -""" -import sys -from functools import update_wrapper - -from werkzeug.exceptions import HTTPException - -from ._compat import BROKEN_PYPY_CTXMGR_EXIT -from ._compat import reraise -from .globals import _app_ctx_stack -from .globals import _request_ctx_stack -from .signals import appcontext_popped -from .signals import appcontext_pushed - - -# a singleton sentinel value for parameter defaults -_sentinel = object() - - -class _AppCtxGlobals(object): - """A plain object. Used as a namespace for storing data during an - application context. - - Creating an app context automatically creates this object, which is - made available as the :data:`g` proxy. - - .. describe:: 'key' in g - - Check whether an attribute is present. - - .. versionadded:: 0.10 - - .. describe:: iter(g) - - Return an iterator over the attribute names. - - .. versionadded:: 0.10 - """ - - def get(self, name, default=None): - """Get an attribute by name, or a default value. Like - :meth:`dict.get`. - - :param name: Name of attribute to get. - :param default: Value to return if the attribute is not present. - - .. versionadded:: 0.10 - """ - return self.__dict__.get(name, default) - - def pop(self, name, default=_sentinel): - """Get and remove an attribute by name. Like :meth:`dict.pop`. - - :param name: Name of attribute to pop. - :param default: Value to return if the attribute is not present, - instead of raise a ``KeyError``. - - .. versionadded:: 0.11 - """ - if default is _sentinel: - return self.__dict__.pop(name) - else: - return self.__dict__.pop(name, default) - - def setdefault(self, name, default=None): - """Get the value of an attribute if it is present, otherwise - set and return a default value. Like :meth:`dict.setdefault`. - - :param name: Name of attribute to get. - :param: default: Value to set and return if the attribute is not - present. - - .. versionadded:: 0.11 - """ - return self.__dict__.setdefault(name, default) - - def __contains__(self, item): - return item in self.__dict__ - - def __iter__(self): - return iter(self.__dict__) - - def __repr__(self): - top = _app_ctx_stack.top - if top is not None: - return "" % top.app.name - return object.__repr__(self) - - -def after_this_request(f): - """Executes a function after this request. This is useful to modify - response objects. The function is passed the response object and has - to return the same or a new one. - - Example:: - - @app.route('/') - def index(): - @after_this_request - def add_header(response): - response.headers['X-Foo'] = 'Parachute' - return response - return 'Hello World!' - - This is more useful if a function other than the view function wants to - modify a response. For instance think of a decorator that wants to add - some headers without converting the return value into a response object. - - .. 
versionadded:: 0.9 - """ - _request_ctx_stack.top._after_request_functions.append(f) - return f - - -def copy_current_request_context(f): - """A helper function that decorates a function to retain the current - request context. This is useful when working with greenlets. The moment - the function is decorated a copy of the request context is created and - then pushed when the function is called. The current session is also - included in the copied request context. - - Example:: - - import gevent - from flask import copy_current_request_context - - @app.route('/') - def index(): - @copy_current_request_context - def do_some_work(): - # do some work here, it can access flask.request or - # flask.session like you would otherwise in the view function. - ... - gevent.spawn(do_some_work) - return 'Regular response' - - .. versionadded:: 0.10 - """ - top = _request_ctx_stack.top - if top is None: - raise RuntimeError( - "This decorator can only be used at local scopes " - "when a request context is on the stack. For instance within " - "view functions." - ) - reqctx = top.copy() - - def wrapper(*args, **kwargs): - with reqctx: - return f(*args, **kwargs) - - return update_wrapper(wrapper, f) - - -def has_request_context(): - """If you have code that wants to test if a request context is there or - not this function can be used. For instance, you may want to take advantage - of request information if the request object is available, but fail - silently if it is unavailable. - - :: - - class User(db.Model): - - def __init__(self, username, remote_addr=None): - self.username = username - if remote_addr is None and has_request_context(): - remote_addr = request.remote_addr - self.remote_addr = remote_addr - - Alternatively you can also just test any of the context bound objects - (such as :class:`request` or :class:`g`) for truthness:: - - class User(db.Model): - - def __init__(self, username, remote_addr=None): - self.username = username - if remote_addr is None and request: - remote_addr = request.remote_addr - self.remote_addr = remote_addr - - .. versionadded:: 0.7 - """ - return _request_ctx_stack.top is not None - - -def has_app_context(): - """Works like :func:`has_request_context` but for the application - context. You can also just do a boolean check on the - :data:`current_app` object instead. - - .. versionadded:: 0.9 - """ - return _app_ctx_stack.top is not None - - -class AppContext(object): - """The application context binds an application object implicitly - to the current thread or greenlet, similar to how the - :class:`RequestContext` binds request information. The application - context is also implicitly created if a request context is created - but the application is not on top of the individual application - context. - """ - - def __init__(self, app): - self.app = app - self.url_adapter = app.create_url_adapter(None) - self.g = app.app_ctx_globals_class() - - # Like request context, app contexts can be pushed multiple times - # but there a basic "refcount" is enough to track them. 
- self._refcnt = 0 - - def push(self): - """Binds the app context to the current context.""" - self._refcnt += 1 - if hasattr(sys, "exc_clear"): - sys.exc_clear() - _app_ctx_stack.push(self) - appcontext_pushed.send(self.app) - - def pop(self, exc=_sentinel): - """Pops the app context.""" - try: - self._refcnt -= 1 - if self._refcnt <= 0: - if exc is _sentinel: - exc = sys.exc_info()[1] - self.app.do_teardown_appcontext(exc) - finally: - rv = _app_ctx_stack.pop() - assert rv is self, "Popped wrong app context. (%r instead of %r)" % (rv, self) - appcontext_popped.send(self.app) - - def __enter__(self): - self.push() - return self - - def __exit__(self, exc_type, exc_value, tb): - self.pop(exc_value) - - if BROKEN_PYPY_CTXMGR_EXIT and exc_type is not None: - reraise(exc_type, exc_value, tb) - - -class RequestContext(object): - """The request context contains all request relevant information. It is - created at the beginning of the request and pushed to the - `_request_ctx_stack` and removed at the end of it. It will create the - URL adapter and request object for the WSGI environment provided. - - Do not attempt to use this class directly, instead use - :meth:`~flask.Flask.test_request_context` and - :meth:`~flask.Flask.request_context` to create this object. - - When the request context is popped, it will evaluate all the - functions registered on the application for teardown execution - (:meth:`~flask.Flask.teardown_request`). - - The request context is automatically popped at the end of the request - for you. In debug mode the request context is kept around if - exceptions happen so that interactive debuggers have a chance to - introspect the data. With 0.4 this can also be forced for requests - that did not fail and outside of ``DEBUG`` mode. By setting - ``'flask._preserve_context'`` to ``True`` on the WSGI environment the - context will not pop itself at the end of the request. This is used by - the :meth:`~flask.Flask.test_client` for example to implement the - deferred cleanup functionality. - - You might find this helpful for unittests where you need the - information from the context local around for a little longer. Make - sure to properly :meth:`~werkzeug.LocalStack.pop` the stack yourself in - that situation, otherwise your unittests will leak memory. - """ - - def __init__(self, app, environ, request=None, session=None): - self.app = app - if request is None: - request = app.request_class(environ) - self.request = request - self.url_adapter = None - try: - self.url_adapter = app.create_url_adapter(self.request) - except HTTPException as e: - self.request.routing_exception = e - self.flashes = None - self.session = session - - # Request contexts can be pushed multiple times and interleaved with - # other request contexts. Now only if the last level is popped we - # get rid of them. Additionally if an application context is missing - # one is created implicitly so for each level we add this information - self._implicit_app_ctx_stack = [] - - # indicator if the context was preserved. Next time another context - # is pushed the preserved context is popped. - self.preserved = False - - # remembers the exception for pop if there is one in case the context - # preservation kicks in. - self._preserved_exc = None - - # Functions that should be executed after the request on the response - # object. These will be called before the regular "after_request" - # functions. 
- self._after_request_functions = [] - - @property - def g(self): - return _app_ctx_stack.top.g - - @g.setter - def g(self, value): - _app_ctx_stack.top.g = value - - def copy(self): - """Creates a copy of this request context with the same request object. - This can be used to move a request context to a different greenlet. - Because the actual request object is the same this cannot be used to - move a request context to a different thread unless access to the - request object is locked. - - .. versionadded:: 0.10 - - .. versionchanged:: 1.1 - The current session object is used instead of reloading the original - data. This prevents `flask.session` pointing to an out-of-date object. - """ - return self.__class__( - self.app, - environ=self.request.environ, - request=self.request, - session=self.session, - ) - - def match_request(self): - """Can be overridden by a subclass to hook into the matching - of the request. - """ - try: - result = self.url_adapter.match(return_rule=True) - self.request.url_rule, self.request.view_args = result - except HTTPException as e: - self.request.routing_exception = e - - def push(self): - """Binds the request context to the current context.""" - # If an exception occurs in debug mode or if context preservation is - # activated under exception situations exactly one context stays - # on the stack. The rationale is that you want to access that - # information under debug situations. However if someone forgets to - # pop that context again we want to make sure that on the next push - # it's invalidated, otherwise we run at risk that something leaks - # memory. This is usually only a problem in test suite since this - # functionality is not active in production environments. - top = _request_ctx_stack.top - if top is not None and top.preserved: - top.pop(top._preserved_exc) - - # Before we push the request context we have to ensure that there - # is an application context. - app_ctx = _app_ctx_stack.top - if app_ctx is None or app_ctx.app != self.app: - app_ctx = self.app.app_context() - app_ctx.push() - self._implicit_app_ctx_stack.append(app_ctx) - else: - self._implicit_app_ctx_stack.append(None) - - if hasattr(sys, "exc_clear"): - sys.exc_clear() - - _request_ctx_stack.push(self) - - # Open the session at the moment that the request context is available. - # This allows a custom open_session method to use the request context. - # Only open a new session if this is the first time the request was - # pushed, otherwise stream_with_context loses the session. - if self.session is None: - session_interface = self.app.session_interface - self.session = session_interface.open_session(self.app, self.request) - - if self.session is None: - self.session = session_interface.make_null_session(self.app) - - if self.url_adapter is not None: - self.match_request() - - def pop(self, exc=_sentinel): - """Pops the request context and unbinds it by doing that. This will - also trigger the execution of functions registered by the - :meth:`~flask.Flask.teardown_request` decorator. - - .. versionchanged:: 0.9 - Added the `exc` argument. - """ - app_ctx = self._implicit_app_ctx_stack.pop() - - try: - clear_request = False - if not self._implicit_app_ctx_stack: - self.preserved = False - self._preserved_exc = None - if exc is _sentinel: - exc = sys.exc_info()[1] - self.app.do_teardown_request(exc) - - # If this interpreter supports clearing the exception information - # we do that now. 
This will only go into effect on Python 2.x, - # on 3.x it disappears automatically at the end of the exception - # stack. - if hasattr(sys, "exc_clear"): - sys.exc_clear() - - request_close = getattr(self.request, "close", None) - if request_close is not None: - request_close() - clear_request = True - finally: - rv = _request_ctx_stack.pop() - - # get rid of circular dependencies at the end of the request - # so that we don't require the GC to be active. - if clear_request: - rv.request.environ["werkzeug.request"] = None - - # Get rid of the app as well if necessary. - if app_ctx is not None: - app_ctx.pop(exc) - - assert rv is self, "Popped wrong request context. (%r instead of %r)" % ( - rv, - self, - ) - - def auto_pop(self, exc): - if self.request.environ.get("flask._preserve_context") or ( - exc is not None and self.app.preserve_context_on_exception - ): - self.preserved = True - self._preserved_exc = exc - else: - self.pop(exc) - - def __enter__(self): - self.push() - return self - - def __exit__(self, exc_type, exc_value, tb): - # do not pop the request stack if we are in debug mode and an - # exception happened. This will allow the debugger to still - # access the request object in the interactive shell. Furthermore - # the context can be force kept alive for the test client. - # See flask.testing for how this works. - self.auto_pop(exc_value) - - if BROKEN_PYPY_CTXMGR_EXIT and exc_type is not None: - reraise(exc_type, exc_value, tb) - - def __repr__(self): - return "<%s '%s' [%s] of %s>" % ( - self.__class__.__name__, - self.request.url, - self.request.method, - self.app.name, - ) diff --git a/venv/lib/python3.6/site-packages/flask/debughelpers.py b/venv/lib/python3.6/site-packages/flask/debughelpers.py deleted file mode 100644 index e475bd1..0000000 --- a/venv/lib/python3.6/site-packages/flask/debughelpers.py +++ /dev/null @@ -1,183 +0,0 @@ -# -*- coding: utf-8 -*- -""" - flask.debughelpers - ~~~~~~~~~~~~~~~~~~ - - Various helpers to make the development experience better. - - :copyright: 2010 Pallets - :license: BSD-3-Clause -""" -import os -from warnings import warn - -from ._compat import implements_to_string -from ._compat import text_type -from .app import Flask -from .blueprints import Blueprint -from .globals import _request_ctx_stack - - -class UnexpectedUnicodeError(AssertionError, UnicodeError): - """Raised in places where we want some better error reporting for - unexpected unicode or binary data. - """ - - -@implements_to_string -class DebugFilesKeyError(KeyError, AssertionError): - """Raised from request.files during debugging. The idea is that it can - provide a better error message than just a generic KeyError/BadRequest. - """ - - def __init__(self, request, key): - form_matches = request.form.getlist(key) - buf = [ - 'You tried to access the file "%s" in the request.files ' - "dictionary but it does not exist. The mimetype for the request " - 'is "%s" instead of "multipart/form-data" which means that no ' - "file contents were transmitted. To fix this error you should " - 'provide enctype="multipart/form-data" in your form.' - % (key, request.mimetype) - ] - if form_matches: - buf.append( - "\n\nThe browser instead transmitted some file names. 
" - "This was submitted: %s" % ", ".join('"%s"' % x for x in form_matches) - ) - self.msg = "".join(buf) - - def __str__(self): - return self.msg - - -class FormDataRoutingRedirect(AssertionError): - """This exception is raised by Flask in debug mode if it detects a - redirect caused by the routing system when the request method is not - GET, HEAD or OPTIONS. Reasoning: form data will be dropped. - """ - - def __init__(self, request): - exc = request.routing_exception - buf = [ - "A request was sent to this URL (%s) but a redirect was " - 'issued automatically by the routing system to "%s".' - % (request.url, exc.new_url) - ] - - # In case just a slash was appended we can be extra helpful - if request.base_url + "/" == exc.new_url.split("?")[0]: - buf.append( - " The URL was defined with a trailing slash so " - "Flask will automatically redirect to the URL " - "with the trailing slash if it was accessed " - "without one." - ) - - buf.append( - " Make sure to directly send your %s-request to this URL " - "since we can't make browsers or HTTP clients redirect " - "with form data reliably or without user interaction." % request.method - ) - buf.append("\n\nNote: this exception is only raised in debug mode") - AssertionError.__init__(self, "".join(buf).encode("utf-8")) - - -def attach_enctype_error_multidict(request): - """Since Flask 0.8 we're monkeypatching the files object in case a - request is detected that does not use multipart form data but the files - object is accessed. - """ - oldcls = request.files.__class__ - - class newcls(oldcls): - def __getitem__(self, key): - try: - return oldcls.__getitem__(self, key) - except KeyError: - if key not in request.form: - raise - raise DebugFilesKeyError(request, key) - - newcls.__name__ = oldcls.__name__ - newcls.__module__ = oldcls.__module__ - request.files.__class__ = newcls - - -def _dump_loader_info(loader): - yield "class: %s.%s" % (type(loader).__module__, type(loader).__name__) - for key, value in sorted(loader.__dict__.items()): - if key.startswith("_"): - continue - if isinstance(value, (tuple, list)): - if not all(isinstance(x, (str, text_type)) for x in value): - continue - yield "%s:" % key - for item in value: - yield " - %s" % item - continue - elif not isinstance(value, (str, text_type, int, float, bool)): - continue - yield "%s: %r" % (key, value) - - -def explain_template_loading_attempts(app, template, attempts): - """This should help developers understand what failed""" - info = ['Locating template "%s":' % template] - total_found = 0 - blueprint = None - reqctx = _request_ctx_stack.top - if reqctx is not None and reqctx.request.blueprint is not None: - blueprint = reqctx.request.blueprint - - for idx, (loader, srcobj, triple) in enumerate(attempts): - if isinstance(srcobj, Flask): - src_info = 'application "%s"' % srcobj.import_name - elif isinstance(srcobj, Blueprint): - src_info = 'blueprint "%s" (%s)' % (srcobj.name, srcobj.import_name) - else: - src_info = repr(srcobj) - - info.append("% 5d: trying loader of %s" % (idx + 1, src_info)) - - for line in _dump_loader_info(loader): - info.append(" %s" % line) - - if triple is None: - detail = "no match" - else: - detail = "found (%r)" % (triple[1] or "") - total_found += 1 - info.append(" -> %s" % detail) - - seems_fishy = False - if total_found == 0: - info.append("Error: the template could not be found.") - seems_fishy = True - elif total_found > 1: - info.append("Warning: multiple loaders returned a match for the template.") - seems_fishy = True - - if blueprint is not 
None and seems_fishy: - info.append( - " The template was looked up from an endpoint that " - 'belongs to the blueprint "%s".' % blueprint - ) - info.append(" Maybe you did not place a template in the right folder?") - info.append(" See http://flask.pocoo.org/docs/blueprints/#templates") - - app.logger.info("\n".join(info)) - - -def explain_ignored_app_run(): - if os.environ.get("WERKZEUG_RUN_MAIN") != "true": - warn( - Warning( - "Silently ignoring app.run() because the " - "application is run from the flask command line " - "executable. Consider putting app.run() behind an " - 'if __name__ == "__main__" guard to silence this ' - "warning." - ), - stacklevel=3, - ) diff --git a/venv/lib/python3.6/site-packages/flask/globals.py b/venv/lib/python3.6/site-packages/flask/globals.py deleted file mode 100644 index 6d32dcf..0000000 --- a/venv/lib/python3.6/site-packages/flask/globals.py +++ /dev/null @@ -1,62 +0,0 @@ -# -*- coding: utf-8 -*- -""" - flask.globals - ~~~~~~~~~~~~~ - - Defines all the global objects that are proxies to the current - active context. - - :copyright: 2010 Pallets - :license: BSD-3-Clause -""" -from functools import partial - -from werkzeug.local import LocalProxy -from werkzeug.local import LocalStack - - -_request_ctx_err_msg = """\ -Working outside of request context. - -This typically means that you attempted to use functionality that needed -an active HTTP request. Consult the documentation on testing for -information about how to avoid this problem.\ -""" -_app_ctx_err_msg = """\ -Working outside of application context. - -This typically means that you attempted to use functionality that needed -to interface with the current application object in some way. To solve -this, set up an application context with app.app_context(). See the -documentation for more information.\ -""" - - -def _lookup_req_object(name): - top = _request_ctx_stack.top - if top is None: - raise RuntimeError(_request_ctx_err_msg) - return getattr(top, name) - - -def _lookup_app_object(name): - top = _app_ctx_stack.top - if top is None: - raise RuntimeError(_app_ctx_err_msg) - return getattr(top, name) - - -def _find_app(): - top = _app_ctx_stack.top - if top is None: - raise RuntimeError(_app_ctx_err_msg) - return top.app - - -# context locals -_request_ctx_stack = LocalStack() -_app_ctx_stack = LocalStack() -current_app = LocalProxy(_find_app) -request = LocalProxy(partial(_lookup_req_object, "request")) -session = LocalProxy(partial(_lookup_req_object, "session")) -g = LocalProxy(partial(_lookup_app_object, "g")) diff --git a/venv/lib/python3.6/site-packages/flask/helpers.py b/venv/lib/python3.6/site-packages/flask/helpers.py deleted file mode 100644 index 3f401a5..0000000 --- a/venv/lib/python3.6/site-packages/flask/helpers.py +++ /dev/null @@ -1,1153 +0,0 @@ -# -*- coding: utf-8 -*- -""" - flask.helpers - ~~~~~~~~~~~~~ - - Implements various helpers. 
- - :copyright: 2010 Pallets - :license: BSD-3-Clause -""" -import io -import mimetypes -import os -import pkgutil -import posixpath -import socket -import sys -import unicodedata -from functools import update_wrapper -from threading import RLock -from time import time -from zlib import adler32 - -from jinja2 import FileSystemLoader -from werkzeug.datastructures import Headers -from werkzeug.exceptions import BadRequest -from werkzeug.exceptions import NotFound -from werkzeug.exceptions import RequestedRangeNotSatisfiable -from werkzeug.routing import BuildError -from werkzeug.urls import url_quote -from werkzeug.wsgi import wrap_file - -from ._compat import fspath -from ._compat import PY2 -from ._compat import string_types -from ._compat import text_type -from .globals import _app_ctx_stack -from .globals import _request_ctx_stack -from .globals import current_app -from .globals import request -from .globals import session -from .signals import message_flashed - -# sentinel -_missing = object() - - -# what separators does this operating system provide that are not a slash? -# this is used by the send_from_directory function to ensure that nobody is -# able to access files from outside the filesystem. -_os_alt_seps = list( - sep for sep in [os.path.sep, os.path.altsep] if sep not in (None, "/") -) - - -def get_env(): - """Get the environment the app is running in, indicated by the - :envvar:`FLASK_ENV` environment variable. The default is - ``'production'``. - """ - return os.environ.get("FLASK_ENV") or "production" - - -def get_debug_flag(): - """Get whether debug mode should be enabled for the app, indicated - by the :envvar:`FLASK_DEBUG` environment variable. The default is - ``True`` if :func:`.get_env` returns ``'development'``, or ``False`` - otherwise. - """ - val = os.environ.get("FLASK_DEBUG") - - if not val: - return get_env() == "development" - - return val.lower() not in ("0", "false", "no") - - -def get_load_dotenv(default=True): - """Get whether the user has disabled loading dotenv files by setting - :envvar:`FLASK_SKIP_DOTENV`. The default is ``True``, load the - files. - - :param default: What to return if the env var isn't set. - """ - val = os.environ.get("FLASK_SKIP_DOTENV") - - if not val: - return default - - return val.lower() in ("0", "false", "no") - - -def _endpoint_from_view_func(view_func): - """Internal helper that returns the default endpoint for a given - function. This always is the function name. - """ - assert view_func is not None, "expected view func if endpoint is not provided." - return view_func.__name__ - - -def stream_with_context(generator_or_function): - """Request contexts disappear when the response is started on the server. - This is done for efficiency reasons and to make it less likely to encounter - memory leaks with badly written WSGI middlewares. The downside is that if - you are using streamed responses, the generator cannot access request bound - information any more. - - This function however can help you keep the context around for longer:: - - from flask import stream_with_context, request, Response - - @app.route('/stream') - def streamed_response(): - @stream_with_context - def generate(): - yield 'Hello ' - yield request.args['name'] - yield '!' - return Response(generate()) - - Alternatively it can also be used around a specific generator:: - - from flask import stream_with_context, request, Response - - @app.route('/stream') - def streamed_response(): - def generate(): - yield 'Hello ' - yield request.args['name'] - yield '!' 
- return Response(stream_with_context(generate())) - - .. versionadded:: 0.9 - """ - try: - gen = iter(generator_or_function) - except TypeError: - - def decorator(*args, **kwargs): - gen = generator_or_function(*args, **kwargs) - return stream_with_context(gen) - - return update_wrapper(decorator, generator_or_function) - - def generator(): - ctx = _request_ctx_stack.top - if ctx is None: - raise RuntimeError( - "Attempted to stream with context but " - "there was no context in the first place to keep around." - ) - with ctx: - # Dummy sentinel. Has to be inside the context block or we're - # not actually keeping the context around. - yield None - - # The try/finally is here so that if someone passes a WSGI level - # iterator in we're still running the cleanup logic. Generators - # don't need that because they are closed on their destruction - # automatically. - try: - for item in gen: - yield item - finally: - if hasattr(gen, "close"): - gen.close() - - # The trick is to start the generator. Then the code execution runs until - # the first dummy None is yielded at which point the context was already - # pushed. This item is discarded. Then when the iteration continues the - # real generator is executed. - wrapped_g = generator() - next(wrapped_g) - return wrapped_g - - -def make_response(*args): - """Sometimes it is necessary to set additional headers in a view. Because - views do not have to return response objects but can return a value that - is converted into a response object by Flask itself, it becomes tricky to - add headers to it. This function can be called instead of using a return - and you will get a response object which you can use to attach headers. - - If view looked like this and you want to add a new header:: - - def index(): - return render_template('index.html', foo=42) - - You can now do something like this:: - - def index(): - response = make_response(render_template('index.html', foo=42)) - response.headers['X-Parachutes'] = 'parachutes are cool' - return response - - This function accepts the very same arguments you can return from a - view function. This for example creates a response with a 404 error - code:: - - response = make_response(render_template('not_found.html'), 404) - - The other use case of this function is to force the return value of a - view function into a response which is helpful with view - decorators:: - - response = make_response(view_function()) - response.headers['X-Parachutes'] = 'parachutes are cool' - - Internally this function does the following things: - - - if no arguments are passed, it creates a new response argument - - if one argument is passed, :meth:`flask.Flask.make_response` - is invoked with it. - - if more than one argument is passed, the arguments are passed - to the :meth:`flask.Flask.make_response` function as tuple. - - .. versionadded:: 0.6 - """ - if not args: - return current_app.response_class() - if len(args) == 1: - args = args[0] - return current_app.make_response(args) - - -def url_for(endpoint, **values): - """Generates a URL to the given endpoint with the method provided. - - Variable arguments that are unknown to the target endpoint are appended - to the generated URL as query arguments. If the value of a query argument - is ``None``, the whole pair is skipped. In case blueprints are active - you can shortcut references to the same blueprint by prefixing the - local endpoint with a dot (``.``). 
- - This will reference the index function local to the current blueprint:: - - url_for('.index') - - For more information, head over to the :ref:`Quickstart `. - - Configuration values ``APPLICATION_ROOT`` and ``SERVER_NAME`` are only used when - generating URLs outside of a request context. - - To integrate applications, :class:`Flask` has a hook to intercept URL build - errors through :attr:`Flask.url_build_error_handlers`. The `url_for` - function results in a :exc:`~werkzeug.routing.BuildError` when the current - app does not have a URL for the given endpoint and values. When it does, the - :data:`~flask.current_app` calls its :attr:`~Flask.url_build_error_handlers` if - it is not ``None``, which can return a string to use as the result of - `url_for` (instead of `url_for`'s default to raise the - :exc:`~werkzeug.routing.BuildError` exception) or re-raise the exception. - An example:: - - def external_url_handler(error, endpoint, values): - "Looks up an external URL when `url_for` cannot build a URL." - # This is an example of hooking the build_error_handler. - # Here, lookup_url is some utility function you've built - # which looks up the endpoint in some external URL registry. - url = lookup_url(endpoint, **values) - if url is None: - # External lookup did not have a URL. - # Re-raise the BuildError, in context of original traceback. - exc_type, exc_value, tb = sys.exc_info() - if exc_value is error: - raise exc_type, exc_value, tb - else: - raise error - # url_for will use this result, instead of raising BuildError. - return url - - app.url_build_error_handlers.append(external_url_handler) - - Here, `error` is the instance of :exc:`~werkzeug.routing.BuildError`, and - `endpoint` and `values` are the arguments passed into `url_for`. Note - that this is for building URLs outside the current application, and not for - handling 404 NotFound errors. - - .. versionadded:: 0.10 - The `_scheme` parameter was added. - - .. versionadded:: 0.9 - The `_anchor` and `_method` parameters were added. - - .. versionadded:: 0.9 - Calls :meth:`Flask.handle_build_error` on - :exc:`~werkzeug.routing.BuildError`. - - :param endpoint: the endpoint of the URL (name of the function) - :param values: the variable arguments of the URL rule - :param _external: if set to ``True``, an absolute URL is generated. Server - address can be changed via ``SERVER_NAME`` configuration variable which - falls back to the `Host` header, then to the IP and port of the request. - :param _scheme: a string specifying the desired URL scheme. The `_external` - parameter must be set to ``True`` or a :exc:`ValueError` is raised. The default - behavior uses the same scheme as the current request, or - ``PREFERRED_URL_SCHEME`` from the :ref:`app configuration ` if no - request context is available. As of Werkzeug 0.10, this also can be set - to an empty string to build protocol-relative URLs. - :param _anchor: if provided this is added as anchor to the URL. - :param _method: if provided this explicitly specifies an HTTP method. - """ - appctx = _app_ctx_stack.top - reqctx = _request_ctx_stack.top - - if appctx is None: - raise RuntimeError( - "Attempted to generate a URL without the application context being" - " pushed. This has to be executed when application context is" - " available." - ) - - # If request specific information is available we have some extra - # features that support "relative" URLs. 
- if reqctx is not None: - url_adapter = reqctx.url_adapter - blueprint_name = request.blueprint - - if endpoint[:1] == ".": - if blueprint_name is not None: - endpoint = blueprint_name + endpoint - else: - endpoint = endpoint[1:] - - external = values.pop("_external", False) - - # Otherwise go with the url adapter from the appctx and make - # the URLs external by default. - else: - url_adapter = appctx.url_adapter - - if url_adapter is None: - raise RuntimeError( - "Application was not able to create a URL adapter for request" - " independent URL generation. You might be able to fix this by" - " setting the SERVER_NAME config variable." - ) - - external = values.pop("_external", True) - - anchor = values.pop("_anchor", None) - method = values.pop("_method", None) - scheme = values.pop("_scheme", None) - appctx.app.inject_url_defaults(endpoint, values) - - # This is not the best way to deal with this but currently the - # underlying Werkzeug router does not support overriding the scheme on - # a per build call basis. - old_scheme = None - if scheme is not None: - if not external: - raise ValueError("When specifying _scheme, _external must be True") - old_scheme = url_adapter.url_scheme - url_adapter.url_scheme = scheme - - try: - try: - rv = url_adapter.build( - endpoint, values, method=method, force_external=external - ) - finally: - if old_scheme is not None: - url_adapter.url_scheme = old_scheme - except BuildError as error: - # We need to inject the values again so that the app callback can - # deal with that sort of stuff. - values["_external"] = external - values["_anchor"] = anchor - values["_method"] = method - values["_scheme"] = scheme - return appctx.app.handle_url_build_error(error, endpoint, values) - - if anchor is not None: - rv += "#" + url_quote(anchor) - return rv - - -def get_template_attribute(template_name, attribute): - """Loads a macro (or variable) a template exports. This can be used to - invoke a macro from within Python code. If you for example have a - template named :file:`_cider.html` with the following contents: - - .. sourcecode:: html+jinja - - {% macro hello(name) %}Hello {{ name }}!{% endmacro %} - - You can access this from Python code like this:: - - hello = get_template_attribute('_cider.html', 'hello') - return hello('World') - - .. versionadded:: 0.2 - - :param template_name: the name of the template - :param attribute: the name of the variable of macro to access - """ - return getattr(current_app.jinja_env.get_template(template_name).module, attribute) - - -def flash(message, category="message"): - """Flashes a message to the next request. In order to remove the - flashed message from the session and to display it to the user, - the template has to call :func:`get_flashed_messages`. - - .. versionchanged:: 0.3 - `category` parameter added. - - :param message: the message to be flashed. - :param category: the category for the message. The following values - are recommended: ``'message'`` for any kind of message, - ``'error'`` for errors, ``'info'`` for information - messages and ``'warning'`` for warnings. However any - kind of string can be used as category. - """ - # Original implementation: - # - # session.setdefault('_flashes', []).append((category, message)) - # - # This assumed that changes made to mutable structures in the session are - # always in sync with the session object, which is not true for session - # implementations that use external storage for keeping their keys/values. 
- flashes = session.get("_flashes", []) - flashes.append((category, message)) - session["_flashes"] = flashes - message_flashed.send( - current_app._get_current_object(), message=message, category=category - ) - - -def get_flashed_messages(with_categories=False, category_filter=()): - """Pulls all flashed messages from the session and returns them. - Further calls in the same request to the function will return - the same messages. By default just the messages are returned, - but when `with_categories` is set to ``True``, the return value will - be a list of tuples in the form ``(category, message)`` instead. - - Filter the flashed messages to one or more categories by providing those - categories in `category_filter`. This allows rendering categories in - separate html blocks. The `with_categories` and `category_filter` - arguments are distinct: - - * `with_categories` controls whether categories are returned with message - text (``True`` gives a tuple, where ``False`` gives just the message text). - * `category_filter` filters the messages down to only those matching the - provided categories. - - See :ref:`message-flashing-pattern` for examples. - - .. versionchanged:: 0.3 - `with_categories` parameter added. - - .. versionchanged:: 0.9 - `category_filter` parameter added. - - :param with_categories: set to ``True`` to also receive categories. - :param category_filter: whitelist of categories to limit return values - """ - flashes = _request_ctx_stack.top.flashes - if flashes is None: - _request_ctx_stack.top.flashes = flashes = ( - session.pop("_flashes") if "_flashes" in session else [] - ) - if category_filter: - flashes = list(filter(lambda f: f[0] in category_filter, flashes)) - if not with_categories: - return [x[1] for x in flashes] - return flashes - - -def send_file( - filename_or_fp, - mimetype=None, - as_attachment=False, - attachment_filename=None, - add_etags=True, - cache_timeout=None, - conditional=False, - last_modified=None, -): - """Sends the contents of a file to the client. This will use the - most efficient method available and configured. By default it will - try to use the WSGI server's file_wrapper support. Alternatively - you can set the application's :attr:`~Flask.use_x_sendfile` attribute - to ``True`` to directly emit an ``X-Sendfile`` header. This however - requires support of the underlying webserver for ``X-Sendfile``. - - By default it will try to guess the mimetype for you, but you can - also explicitly provide one. For extra security you probably want - to send certain files as attachment (HTML for instance). The mimetype - guessing requires a `filename` or an `attachment_filename` to be - provided. - - ETags will also be attached automatically if a `filename` is provided. You - can turn this off by setting `add_etags=False`. - - If `conditional=True` and `filename` is provided, this method will try to - upgrade the response stream to support range requests. This will allow - the request to be answered with partial content response. - - Please never pass filenames to this function from user sources; - you should use :func:`send_from_directory` instead. - - .. versionadded:: 0.2 - - .. versionadded:: 0.5 - The `add_etags`, `cache_timeout` and `conditional` parameters were - added. The default behavior is now to attach etags. - - .. versionchanged:: 0.7 - mimetype guessing and etag support for file objects was - deprecated because it was unreliable. Pass a filename if you are - able to, otherwise attach an etag yourself. 
This functionality - will be removed in Flask 1.0 - - .. versionchanged:: 0.9 - cache_timeout pulls its default from application config, when None. - - .. versionchanged:: 0.12 - The filename is no longer automatically inferred from file objects. If - you want to use automatic mimetype and etag support, pass a filepath via - `filename_or_fp` or `attachment_filename`. - - .. versionchanged:: 0.12 - The `attachment_filename` is preferred over `filename` for MIME-type - detection. - - .. versionchanged:: 1.0 - UTF-8 filenames, as specified in `RFC 2231`_, are supported. - - .. _RFC 2231: https://tools.ietf.org/html/rfc2231#section-4 - - .. versionchanged:: 1.0.3 - Filenames are encoded with ASCII instead of Latin-1 for broader - compatibility with WSGI servers. - - .. versionchanged:: 1.1 - Filename may be a :class:`~os.PathLike` object. - - .. versionadded:: 1.1 - Partial content supports :class:`~io.BytesIO`. - - :param filename_or_fp: the filename of the file to send. - This is relative to the :attr:`~Flask.root_path` - if a relative path is specified. - Alternatively a file object might be provided in - which case ``X-Sendfile`` might not work and fall - back to the traditional method. Make sure that the - file pointer is positioned at the start of data to - send before calling :func:`send_file`. - :param mimetype: the mimetype of the file if provided. If a file path is - given, auto detection happens as fallback, otherwise an - error will be raised. - :param as_attachment: set to ``True`` if you want to send this file with - a ``Content-Disposition: attachment`` header. - :param attachment_filename: the filename for the attachment if it - differs from the file's filename. - :param add_etags: set to ``False`` to disable attaching of etags. - :param conditional: set to ``True`` to enable conditional responses. - - :param cache_timeout: the timeout in seconds for the headers. When ``None`` - (default), this value is set by - :meth:`~Flask.get_send_file_max_age` of - :data:`~flask.current_app`. - :param last_modified: set the ``Last-Modified`` header to this value, - a :class:`~datetime.datetime` or timestamp. - If a file was passed, this overrides its mtime. - """ - mtime = None - fsize = None - - if hasattr(filename_or_fp, "__fspath__"): - filename_or_fp = fspath(filename_or_fp) - - if isinstance(filename_or_fp, string_types): - filename = filename_or_fp - if not os.path.isabs(filename): - filename = os.path.join(current_app.root_path, filename) - file = None - if attachment_filename is None: - attachment_filename = os.path.basename(filename) - else: - file = filename_or_fp - filename = None - - if mimetype is None: - if attachment_filename is not None: - mimetype = ( - mimetypes.guess_type(attachment_filename)[0] - or "application/octet-stream" - ) - - if mimetype is None: - raise ValueError( - "Unable to infer MIME-type because no filename is available. " - "Please set either `attachment_filename`, pass a filepath to " - "`filename_or_fp` or set your own MIME-type via `mimetype`." 
- ) - - headers = Headers() - if as_attachment: - if attachment_filename is None: - raise TypeError("filename unavailable, required for sending as attachment") - - if not isinstance(attachment_filename, text_type): - attachment_filename = attachment_filename.decode("utf-8") - - try: - attachment_filename = attachment_filename.encode("ascii") - except UnicodeEncodeError: - filenames = { - "filename": unicodedata.normalize("NFKD", attachment_filename).encode( - "ascii", "ignore" - ), - "filename*": "UTF-8''%s" % url_quote(attachment_filename, safe=b""), - } - else: - filenames = {"filename": attachment_filename} - - headers.add("Content-Disposition", "attachment", **filenames) - - if current_app.use_x_sendfile and filename: - if file is not None: - file.close() - headers["X-Sendfile"] = filename - fsize = os.path.getsize(filename) - headers["Content-Length"] = fsize - data = None - else: - if file is None: - file = open(filename, "rb") - mtime = os.path.getmtime(filename) - fsize = os.path.getsize(filename) - headers["Content-Length"] = fsize - elif isinstance(file, io.BytesIO): - try: - fsize = file.getbuffer().nbytes - except AttributeError: - # Python 2 doesn't have getbuffer - fsize = len(file.getvalue()) - headers["Content-Length"] = fsize - data = wrap_file(request.environ, file) - - rv = current_app.response_class( - data, mimetype=mimetype, headers=headers, direct_passthrough=True - ) - - if last_modified is not None: - rv.last_modified = last_modified - elif mtime is not None: - rv.last_modified = mtime - - rv.cache_control.public = True - if cache_timeout is None: - cache_timeout = current_app.get_send_file_max_age(filename) - if cache_timeout is not None: - rv.cache_control.max_age = cache_timeout - rv.expires = int(time() + cache_timeout) - - if add_etags and filename is not None: - from warnings import warn - - try: - rv.set_etag( - "%s-%s-%s" - % ( - os.path.getmtime(filename), - os.path.getsize(filename), - adler32( - filename.encode("utf-8") - if isinstance(filename, text_type) - else filename - ) - & 0xFFFFFFFF, - ) - ) - except OSError: - warn( - "Access %s failed, maybe it does not exist, so ignore etags in " - "headers" % filename, - stacklevel=2, - ) - - if conditional: - try: - rv = rv.make_conditional(request, accept_ranges=True, complete_length=fsize) - except RequestedRangeNotSatisfiable: - if file is not None: - file.close() - raise - # make sure we don't send x-sendfile for servers that - # ignore the 304 status code for x-sendfile. - if rv.status_code == 304: - rv.headers.pop("x-sendfile", None) - return rv - - -def safe_join(directory, *pathnames): - """Safely join `directory` and zero or more untrusted `pathnames` - components. - - Example usage:: - - @app.route('/wiki/') - def wiki_page(filename): - filename = safe_join(app.config['WIKI_FOLDER'], filename) - with open(filename, 'rb') as fd: - content = fd.read() # Read and process the file content... - - :param directory: the trusted base directory. - :param pathnames: the untrusted pathnames relative to that directory. - :raises: :class:`~werkzeug.exceptions.NotFound` if one or more passed - paths fall out of its boundaries. - """ - - parts = [directory] - - for filename in pathnames: - if filename != "": - filename = posixpath.normpath(filename) - - if ( - any(sep in filename for sep in _os_alt_seps) - or os.path.isabs(filename) - or filename == ".." 
- or filename.startswith("../") - ): - raise NotFound() - - parts.append(filename) - - return posixpath.join(*parts) - - -def send_from_directory(directory, filename, **options): - """Send a file from a given directory with :func:`send_file`. This - is a secure way to quickly expose static files from an upload folder - or something similar. - - Example usage:: - - @app.route('/uploads/') - def download_file(filename): - return send_from_directory(app.config['UPLOAD_FOLDER'], - filename, as_attachment=True) - - .. admonition:: Sending files and Performance - - It is strongly recommended to activate either ``X-Sendfile`` support in - your webserver or (if no authentication happens) to tell the webserver - to serve files for the given path on its own without calling into the - web application for improved performance. - - .. versionadded:: 0.5 - - :param directory: the directory where all the files are stored. - :param filename: the filename relative to that directory to - download. - :param options: optional keyword arguments that are directly - forwarded to :func:`send_file`. - """ - filename = fspath(filename) - directory = fspath(directory) - filename = safe_join(directory, filename) - if not os.path.isabs(filename): - filename = os.path.join(current_app.root_path, filename) - try: - if not os.path.isfile(filename): - raise NotFound() - except (TypeError, ValueError): - raise BadRequest() - options.setdefault("conditional", True) - return send_file(filename, **options) - - -def get_root_path(import_name): - """Returns the path to a package or cwd if that cannot be found. This - returns the path of a package or the folder that contains a module. - - Not to be confused with the package path returned by :func:`find_package`. - """ - # Module already imported and has a file attribute. Use that first. - mod = sys.modules.get(import_name) - if mod is not None and hasattr(mod, "__file__"): - return os.path.dirname(os.path.abspath(mod.__file__)) - - # Next attempt: check the loader. - loader = pkgutil.get_loader(import_name) - - # Loader does not exist or we're referring to an unloaded main module - # or a main module without path (interactive sessions), go with the - # current working directory. - if loader is None or import_name == "__main__": - return os.getcwd() - - # For .egg, zipimporter does not have get_filename until Python 2.7. - # Some other loaders might exhibit the same behavior. - if hasattr(loader, "get_filename"): - filepath = loader.get_filename(import_name) - else: - # Fall back to imports. - __import__(import_name) - mod = sys.modules[import_name] - filepath = getattr(mod, "__file__", None) - - # If we don't have a filepath it might be because we are a - # namespace package. In this case we pick the root path from the - # first module that is contained in our package. - if filepath is None: - raise RuntimeError( - "No root path can be found for the provided " - 'module "%s". This can happen because the ' - "module came from an import hook that does " - "not provide file name information or because " - "it's a namespace package. In this case " - "the root path needs to be explicitly " - "provided." % import_name - ) - - # filepath is import_name.py for a module, or __init__.py for a package. - return os.path.dirname(os.path.abspath(filepath)) - - -def _matching_loader_thinks_module_is_package(loader, mod_name): - """Given the loader that loaded a module and the module this function - attempts to figure out if the given module is actually a package. 
- """ - # If the loader can tell us if something is a package, we can - # directly ask the loader. - if hasattr(loader, "is_package"): - return loader.is_package(mod_name) - # importlib's namespace loaders do not have this functionality but - # all the modules it loads are packages, so we can take advantage of - # this information. - elif ( - loader.__class__.__module__ == "_frozen_importlib" - and loader.__class__.__name__ == "NamespaceLoader" - ): - return True - # Otherwise we need to fail with an error that explains what went - # wrong. - raise AttributeError( - ( - "%s.is_package() method is missing but is required by Flask of " - "PEP 302 import hooks. If you do not use import hooks and " - "you encounter this error please file a bug against Flask." - ) - % loader.__class__.__name__ - ) - - -def _find_package_path(root_mod_name): - """Find the path where the module's root exists in""" - if sys.version_info >= (3, 4): - import importlib.util - - try: - spec = importlib.util.find_spec(root_mod_name) - if spec is None: - raise ValueError("not found") - # ImportError: the machinery told us it does not exist - # ValueError: - # - the module name was invalid - # - the module name is __main__ - # - *we* raised `ValueError` due to `spec` being `None` - except (ImportError, ValueError): - pass # handled below - else: - # namespace package - if spec.origin in {"namespace", None}: - return os.path.dirname(next(iter(spec.submodule_search_locations))) - # a package (with __init__.py) - elif spec.submodule_search_locations: - return os.path.dirname(os.path.dirname(spec.origin)) - # just a normal module - else: - return os.path.dirname(spec.origin) - - # we were unable to find the `package_path` using PEP 451 loaders - loader = pkgutil.get_loader(root_mod_name) - if loader is None or root_mod_name == "__main__": - # import name is not found, or interactive/main module - return os.getcwd() - else: - # For .egg, zipimporter does not have get_filename until Python 2.7. - if hasattr(loader, "get_filename"): - filename = loader.get_filename(root_mod_name) - elif hasattr(loader, "archive"): - # zipimporter's loader.archive points to the .egg or .zip - # archive filename is dropped in call to dirname below. - filename = loader.archive - else: - # At least one loader is missing both get_filename and archive: - # Google App Engine's HardenedModulesHook - # - # Fall back to imports. - __import__(root_mod_name) - filename = sys.modules[root_mod_name].__file__ - package_path = os.path.abspath(os.path.dirname(filename)) - - # In case the root module is a package we need to chop of the - # rightmost part. This needs to go through a helper function - # because of python 3.3 namespace packages. - if _matching_loader_thinks_module_is_package(loader, root_mod_name): - package_path = os.path.dirname(package_path) - - return package_path - - -def find_package(import_name): - """Finds a package and returns the prefix (or None if the package is - not installed) as well as the folder that contains the package or - module as a tuple. The package path returned is the module that would - have to be added to the pythonpath in order to make it possible to - import the module. The prefix is the path below which a UNIX like - folder structure exists (lib, share etc.). 
- """ - root_mod_name, _, _ = import_name.partition(".") - package_path = _find_package_path(root_mod_name) - site_parent, site_folder = os.path.split(package_path) - py_prefix = os.path.abspath(sys.prefix) - if package_path.startswith(py_prefix): - return py_prefix, package_path - elif site_folder.lower() == "site-packages": - parent, folder = os.path.split(site_parent) - # Windows like installations - if folder.lower() == "lib": - base_dir = parent - # UNIX like installations - elif os.path.basename(parent).lower() == "lib": - base_dir = os.path.dirname(parent) - else: - base_dir = site_parent - return base_dir, package_path - return None, package_path - - -class locked_cached_property(object): - """A decorator that converts a function into a lazy property. The - function wrapped is called the first time to retrieve the result - and then that calculated result is used the next time you access - the value. Works like the one in Werkzeug but has a lock for - thread safety. - """ - - def __init__(self, func, name=None, doc=None): - self.__name__ = name or func.__name__ - self.__module__ = func.__module__ - self.__doc__ = doc or func.__doc__ - self.func = func - self.lock = RLock() - - def __get__(self, obj, type=None): - if obj is None: - return self - with self.lock: - value = obj.__dict__.get(self.__name__, _missing) - if value is _missing: - value = self.func(obj) - obj.__dict__[self.__name__] = value - return value - - -class _PackageBoundObject(object): - #: The name of the package or module that this app belongs to. Do not - #: change this once it is set by the constructor. - import_name = None - - #: Location of the template files to be added to the template lookup. - #: ``None`` if templates should not be added. - template_folder = None - - #: Absolute path to the package on the filesystem. Used to look up - #: resources contained in the package. - root_path = None - - def __init__(self, import_name, template_folder=None, root_path=None): - self.import_name = import_name - self.template_folder = template_folder - - if root_path is None: - root_path = get_root_path(self.import_name) - - self.root_path = root_path - self._static_folder = None - self._static_url_path = None - - # circular import - from .cli import AppGroup - - #: The Click command group for registration of CLI commands - #: on the application and associated blueprints. These commands - #: are accessible via the :command:`flask` command once the - #: application has been discovered and blueprints registered. - self.cli = AppGroup() - - @property - def static_folder(self): - """The absolute path to the configured static folder.""" - if self._static_folder is not None: - return os.path.join(self.root_path, self._static_folder) - - @static_folder.setter - def static_folder(self, value): - self._static_folder = value - - @property - def static_url_path(self): - """The URL prefix that the static route will be accessible from. - - If it was not configured during init, it is derived from - :attr:`static_folder`. - """ - if self._static_url_path is not None: - return self._static_url_path - - if self.static_folder is not None: - basename = os.path.basename(self.static_folder) - return ("/" + basename).rstrip("/") - - @static_url_path.setter - def static_url_path(self, value): - if value is not None: - value = value.rstrip("/") - - self._static_url_path = value - - @property - def has_static_folder(self): - """This is ``True`` if the package bound object's container has a - folder for static files. - - .. 
versionadded:: 0.5 - """ - return self.static_folder is not None - - @locked_cached_property - def jinja_loader(self): - """The Jinja loader for this package bound object. - - .. versionadded:: 0.5 - """ - if self.template_folder is not None: - return FileSystemLoader(os.path.join(self.root_path, self.template_folder)) - - def get_send_file_max_age(self, filename): - """Provides default cache_timeout for the :func:`send_file` functions. - - By default, this function returns ``SEND_FILE_MAX_AGE_DEFAULT`` from - the configuration of :data:`~flask.current_app`. - - Static file functions such as :func:`send_from_directory` use this - function, and :func:`send_file` calls this function on - :data:`~flask.current_app` when the given cache_timeout is ``None``. If a - cache_timeout is given in :func:`send_file`, that timeout is used; - otherwise, this method is called. - - This allows subclasses to change the behavior when sending files based - on the filename. For example, to set the cache timeout for .js files - to 60 seconds:: - - class MyFlask(flask.Flask): - def get_send_file_max_age(self, name): - if name.lower().endswith('.js'): - return 60 - return flask.Flask.get_send_file_max_age(self, name) - - .. versionadded:: 0.9 - """ - return total_seconds(current_app.send_file_max_age_default) - - def send_static_file(self, filename): - """Function used internally to send static files from the static - folder to the browser. - - .. versionadded:: 0.5 - """ - if not self.has_static_folder: - raise RuntimeError("No static folder for this object") - # Ensure get_send_file_max_age is called in all cases. - # Here, we ensure get_send_file_max_age is called for Blueprints. - cache_timeout = self.get_send_file_max_age(filename) - return send_from_directory( - self.static_folder, filename, cache_timeout=cache_timeout - ) - - def open_resource(self, resource, mode="rb"): - """Opens a resource from the application's resource folder. To see - how this works, consider the following folder structure:: - - /myapplication.py - /schema.sql - /static - /style.css - /templates - /layout.html - /index.html - - If you want to open the :file:`schema.sql` file you would do the - following:: - - with app.open_resource('schema.sql') as f: - contents = f.read() - do_something_with(contents) - - :param resource: the name of the resource. To access resources within - subfolders use forward slashes as separator. - :param mode: Open file in this mode. Only reading is supported, - valid values are "r" (or "rt") and "rb". - """ - if mode not in {"r", "rt", "rb"}: - raise ValueError("Resources can only be opened for reading") - - return open(os.path.join(self.root_path, resource), mode) - - -def total_seconds(td): - """Returns the total seconds from a timedelta object. - - :param timedelta td: the timedelta to be converted in seconds - - :returns: number of seconds - :rtype: int - """ - return td.days * 60 * 60 * 24 + td.seconds - - -def is_ip(value): - """Determine if the given string is an IP address. - - Python 2 on Windows doesn't provide ``inet_pton``, so this only - checks IPv4 addresses in that environment. 
- - :param value: value to check - :type value: str - - :return: True if string is an IP address - :rtype: bool - """ - if PY2 and os.name == "nt": - try: - socket.inet_aton(value) - return True - except socket.error: - return False - - for family in (socket.AF_INET, socket.AF_INET6): - try: - socket.inet_pton(family, value) - except socket.error: - pass - else: - return True - - return False diff --git a/venv/lib/python3.6/site-packages/flask/json/__init__.py b/venv/lib/python3.6/site-packages/flask/json/__init__.py deleted file mode 100644 index a141068..0000000 --- a/venv/lib/python3.6/site-packages/flask/json/__init__.py +++ /dev/null @@ -1,376 +0,0 @@ -# -*- coding: utf-8 -*- -""" -flask.json -~~~~~~~~~~ - -:copyright: 2010 Pallets -:license: BSD-3-Clause -""" -import codecs -import io -import uuid -from datetime import date -from datetime import datetime - -from itsdangerous import json as _json -from jinja2 import Markup -from werkzeug.http import http_date - -from .._compat import PY2 -from .._compat import text_type -from ..globals import current_app -from ..globals import request - -try: - import dataclasses -except ImportError: - dataclasses = None - -# Figure out if simplejson escapes slashes. This behavior was changed -# from one version to another without reason. -_slash_escape = "\\/" not in _json.dumps("/") - - -__all__ = [ - "dump", - "dumps", - "load", - "loads", - "htmlsafe_dump", - "htmlsafe_dumps", - "JSONDecoder", - "JSONEncoder", - "jsonify", -] - - -def _wrap_reader_for_text(fp, encoding): - if isinstance(fp.read(0), bytes): - fp = io.TextIOWrapper(io.BufferedReader(fp), encoding) - return fp - - -def _wrap_writer_for_text(fp, encoding): - try: - fp.write("") - except TypeError: - fp = io.TextIOWrapper(fp, encoding) - return fp - - -class JSONEncoder(_json.JSONEncoder): - """The default Flask JSON encoder. This one extends the default - encoder by also supporting ``datetime``, ``UUID``, ``dataclasses``, - and ``Markup`` objects. - - ``datetime`` objects are serialized as RFC 822 datetime strings. - This is the same as the HTTP date format. - - In order to support more data types, override the :meth:`default` - method. - """ - - def default(self, o): - """Implement this method in a subclass such that it returns a - serializable object for ``o``, or calls the base implementation (to - raise a :exc:`TypeError`). - - For example, to support arbitrary iterators, you could implement - default like this:: - - def default(self, o): - try: - iterable = iter(o) - except TypeError: - pass - else: - return list(iterable) - return JSONEncoder.default(self, o) - """ - if isinstance(o, datetime): - return http_date(o.utctimetuple()) - if isinstance(o, date): - return http_date(o.timetuple()) - if isinstance(o, uuid.UUID): - return str(o) - if dataclasses and dataclasses.is_dataclass(o): - return dataclasses.asdict(o) - if hasattr(o, "__html__"): - return text_type(o.__html__()) - return _json.JSONEncoder.default(self, o) - - -class JSONDecoder(_json.JSONDecoder): - """The default JSON decoder. This one does not change the behavior from - the default simplejson decoder. Consult the :mod:`json` documentation - for more information. This decoder is not only used for the load - functions of this module but also :attr:`~flask.Request`. 
- """ - - -def _dump_arg_defaults(kwargs, app=None): - """Inject default arguments for dump functions.""" - if app is None: - app = current_app - - if app: - bp = app.blueprints.get(request.blueprint) if request else None - kwargs.setdefault( - "cls", bp.json_encoder if bp and bp.json_encoder else app.json_encoder - ) - - if not app.config["JSON_AS_ASCII"]: - kwargs.setdefault("ensure_ascii", False) - - kwargs.setdefault("sort_keys", app.config["JSON_SORT_KEYS"]) - else: - kwargs.setdefault("sort_keys", True) - kwargs.setdefault("cls", JSONEncoder) - - -def _load_arg_defaults(kwargs, app=None): - """Inject default arguments for load functions.""" - if app is None: - app = current_app - - if app: - bp = app.blueprints.get(request.blueprint) if request else None - kwargs.setdefault( - "cls", bp.json_decoder if bp and bp.json_decoder else app.json_decoder - ) - else: - kwargs.setdefault("cls", JSONDecoder) - - -def detect_encoding(data): - """Detect which UTF codec was used to encode the given bytes. - - The latest JSON standard (:rfc:`8259`) suggests that only UTF-8 is - accepted. Older documents allowed 8, 16, or 32. 16 and 32 can be big - or little endian. Some editors or libraries may prepend a BOM. - - :param data: Bytes in unknown UTF encoding. - :return: UTF encoding name - """ - head = data[:4] - - if head[:3] == codecs.BOM_UTF8: - return "utf-8-sig" - - if b"\x00" not in head: - return "utf-8" - - if head in (codecs.BOM_UTF32_BE, codecs.BOM_UTF32_LE): - return "utf-32" - - if head[:2] in (codecs.BOM_UTF16_BE, codecs.BOM_UTF16_LE): - return "utf-16" - - if len(head) == 4: - if head[:3] == b"\x00\x00\x00": - return "utf-32-be" - - if head[::2] == b"\x00\x00": - return "utf-16-be" - - if head[1:] == b"\x00\x00\x00": - return "utf-32-le" - - if head[1::2] == b"\x00\x00": - return "utf-16-le" - - if len(head) == 2: - return "utf-16-be" if head.startswith(b"\x00") else "utf-16-le" - - return "utf-8" - - -def dumps(obj, app=None, **kwargs): - """Serialize ``obj`` to a JSON-formatted string. If there is an - app context pushed, use the current app's configured encoder - (:attr:`~flask.Flask.json_encoder`), or fall back to the default - :class:`JSONEncoder`. - - Takes the same arguments as the built-in :func:`json.dumps`, and - does some extra configuration based on the application. If the - simplejson package is installed, it is preferred. - - :param obj: Object to serialize to JSON. - :param app: App instance to use to configure the JSON encoder. - Uses ``current_app`` if not given, and falls back to the default - encoder when not in an app context. - :param kwargs: Extra arguments passed to :func:`json.dumps`. - - .. versionchanged:: 1.0.3 - - ``app`` can be passed directly, rather than requiring an app - context for configuration. - """ - _dump_arg_defaults(kwargs, app=app) - encoding = kwargs.pop("encoding", None) - rv = _json.dumps(obj, **kwargs) - if encoding is not None and isinstance(rv, text_type): - rv = rv.encode(encoding) - return rv - - -def dump(obj, fp, app=None, **kwargs): - """Like :func:`dumps` but writes into a file object.""" - _dump_arg_defaults(kwargs, app=app) - encoding = kwargs.pop("encoding", None) - if encoding is not None: - fp = _wrap_writer_for_text(fp, encoding) - _json.dump(obj, fp, **kwargs) - - -def loads(s, app=None, **kwargs): - """Deserialize an object from a JSON-formatted string ``s``. If - there is an app context pushed, use the current app's configured - decoder (:attr:`~flask.Flask.json_decoder`), or fall back to the - default :class:`JSONDecoder`. 
- - Takes the same arguments as the built-in :func:`json.loads`, and - does some extra configuration based on the application. If the - simplejson package is installed, it is preferred. - - :param s: JSON string to deserialize. - :param app: App instance to use to configure the JSON decoder. - Uses ``current_app`` if not given, and falls back to the default - encoder when not in an app context. - :param kwargs: Extra arguments passed to :func:`json.dumps`. - - .. versionchanged:: 1.0.3 - - ``app`` can be passed directly, rather than requiring an app - context for configuration. - """ - _load_arg_defaults(kwargs, app=app) - if isinstance(s, bytes): - encoding = kwargs.pop("encoding", None) - if encoding is None: - encoding = detect_encoding(s) - s = s.decode(encoding) - return _json.loads(s, **kwargs) - - -def load(fp, app=None, **kwargs): - """Like :func:`loads` but reads from a file object.""" - _load_arg_defaults(kwargs, app=app) - if not PY2: - fp = _wrap_reader_for_text(fp, kwargs.pop("encoding", None) or "utf-8") - return _json.load(fp, **kwargs) - - -def htmlsafe_dumps(obj, **kwargs): - """Works exactly like :func:`dumps` but is safe for use in ``') - # => <script> do_nasty_stuff() </script> - # sanitize_html('Click here for $100') - # => Click here for $100 - def sanitize_token(self, token): - - # accommodate filters which use token_type differently - token_type = token["type"] - if token_type in ("StartTag", "EndTag", "EmptyTag"): - name = token["name"] - namespace = token["namespace"] - if ((namespace, name) in self.allowed_elements or - (namespace is None and - (namespaces["html"], name) in self.allowed_elements)): - return self.allowed_token(token) - else: - return self.disallowed_token(token) - elif token_type == "Comment": - pass - else: - return token - - def allowed_token(self, token): - if "data" in token: - attrs = token["data"] - attr_names = set(attrs.keys()) - - # Remove forbidden attributes - for to_remove in (attr_names - self.allowed_attributes): - del token["data"][to_remove] - attr_names.remove(to_remove) - - # Remove attributes with disallowed URL values - for attr in (attr_names & self.attr_val_is_uri): - assert attr in attrs - # I don't have a clue where this regexp comes from or why it matches those - # characters, nor why we call unescape. I just know it's always been here. - # Should you be worried by this comment in a sanitizer? Yes. On the other hand, all - # this will do is remove *more* than it otherwise would. 
- val_unescaped = re.sub("[`\x00-\x20\x7f-\xa0\\s]+", '', - unescape(attrs[attr])).lower() - # remove replacement characters from unescaped characters - val_unescaped = val_unescaped.replace("\ufffd", "") - try: - uri = urlparse.urlparse(val_unescaped) - except ValueError: - uri = None - del attrs[attr] - if uri and uri.scheme: - if uri.scheme not in self.allowed_protocols: - del attrs[attr] - if uri.scheme == 'data': - m = data_content_type.match(uri.path) - if not m: - del attrs[attr] - elif m.group('content_type') not in self.allowed_content_types: - del attrs[attr] - - for attr in self.svg_attr_val_allows_ref: - if attr in attrs: - attrs[attr] = re.sub(r'url\s*\(\s*[^#\s][^)]+?\)', - ' ', - unescape(attrs[attr])) - if (token["name"] in self.svg_allow_local_href and - (namespaces['xlink'], 'href') in attrs and re.search(r'^\s*[^#\s].*', - attrs[(namespaces['xlink'], 'href')])): - del attrs[(namespaces['xlink'], 'href')] - if (None, 'style') in attrs: - attrs[(None, 'style')] = self.sanitize_css(attrs[(None, 'style')]) - token["data"] = attrs - return token - - def disallowed_token(self, token): - token_type = token["type"] - if token_type == "EndTag": - token["data"] = "" % token["name"] - elif token["data"]: - assert token_type in ("StartTag", "EmptyTag") - attrs = [] - for (ns, name), v in token["data"].items(): - attrs.append(' %s="%s"' % (name if ns is None else "%s:%s" % (prefixes[ns], name), escape(v))) - token["data"] = "<%s%s>" % (token["name"], ''.join(attrs)) - else: - token["data"] = "<%s>" % token["name"] - if token.get("selfClosing"): - token["data"] = token["data"][:-1] + "/>" - - token["type"] = "Characters" - - del token["name"] - return token - - def sanitize_css(self, style): - # disallow urls - style = re.compile(r'url\s*\(\s*[^\s)]+?\s*\)\s*').sub(' ', style) - - # gauntlet - if not re.match(r"""^([:,;#%.\sa-zA-Z0-9!]|\w-\w|'[\s\w]+'|"[\s\w]+"|\([\d,\s]+\))*$""", style): - return '' - if not re.match(r"^\s*([-\w]+\s*:[^:;]*(;\s*|$))*$", style): - return '' - - clean = [] - for prop, value in re.findall(r"([-\w]+)\s*:\s*([^:;]*)", style): - if not value: - continue - if prop.lower() in self.allowed_css_properties: - clean.append(prop + ': ' + value + ';') - elif prop.split('-')[0].lower() in ['background', 'border', 'margin', - 'padding']: - for keyword in value.split(): - if keyword not in self.allowed_css_keywords and \ - not re.match(r"^(#[0-9a-fA-F]+|rgb\(\d+%?,\d*%?,?\d*%?\)?|\d{0,2}\.?\d{0,2}(cm|em|ex|in|mm|pc|pt|px|%|,|\))?)$", keyword): # noqa - break - else: - clean.append(prop + ': ' + value + ';') - elif prop.lower() in self.allowed_svg_properties: - clean.append(prop + ': ' + value + ';') - - return ' '.join(clean) diff --git a/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/html5lib/filters/whitespace.py b/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/html5lib/filters/whitespace.py deleted file mode 100644 index 0d12584..0000000 --- a/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/html5lib/filters/whitespace.py +++ /dev/null @@ -1,38 +0,0 @@ -from __future__ import absolute_import, division, unicode_literals - -import re - -from . 
import base -from ..constants import rcdataElements, spaceCharacters -spaceCharacters = "".join(spaceCharacters) - -SPACES_REGEX = re.compile("[%s]+" % spaceCharacters) - - -class Filter(base.Filter): - """Collapses whitespace except in pre, textarea, and script elements""" - spacePreserveElements = frozenset(["pre", "textarea"] + list(rcdataElements)) - - def __iter__(self): - preserve = 0 - for token in base.Filter.__iter__(self): - type = token["type"] - if type == "StartTag" \ - and (preserve or token["name"] in self.spacePreserveElements): - preserve += 1 - - elif type == "EndTag" and preserve: - preserve -= 1 - - elif not preserve and type == "SpaceCharacters" and token["data"]: - # Test on token["data"] above to not introduce spaces where there were not - token["data"] = " " - - elif not preserve and type == "Characters": - token["data"] = collapse_spaces(token["data"]) - - yield token - - -def collapse_spaces(text): - return SPACES_REGEX.sub(' ', text) diff --git a/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/html5lib/html5parser.py b/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/html5lib/html5parser.py deleted file mode 100644 index ae41a13..0000000 --- a/venv/lib/python3.6/site-packages/pip-10.0.1-py3.6.egg/pip/_vendor/html5lib/html5parser.py +++ /dev/null @@ -1,2791 +0,0 @@ -from __future__ import absolute_import, division, unicode_literals -from pip._vendor.six import with_metaclass, viewkeys - -import types -from collections import OrderedDict - -from . import _inputstream -from . import _tokenizer - -from . import treebuilders -from .treebuilders.base import Marker - -from . import _utils -from .constants import ( - spaceCharacters, asciiUpper2Lower, - specialElements, headingElements, cdataElements, rcdataElements, - tokenTypes, tagTokenTypes, - namespaces, - htmlIntegrationPointElements, mathmlTextIntegrationPointElements, - adjustForeignAttributes as adjustForeignAttributesMap, - adjustMathMLAttributes, adjustSVGAttributes, - E, - _ReparseException -) - - -def parse(doc, treebuilder="etree", namespaceHTMLElements=True, **kwargs): - """Parse an HTML document as a string or file-like object into a tree - - :arg doc: the document to parse as a string or file-like object - - :arg treebuilder: the treebuilder to use when parsing - - :arg namespaceHTMLElements: whether or not to namespace HTML elements - - :returns: parsed tree - - Example: - - >>> from html5lib.html5parser import parse - >>> parse('

<html><body><p>This is a doc</p></body></html>
') - - - """ - tb = treebuilders.getTreeBuilder(treebuilder) - p = HTMLParser(tb, namespaceHTMLElements=namespaceHTMLElements) - return p.parse(doc, **kwargs) - - -def parseFragment(doc, container="div", treebuilder="etree", namespaceHTMLElements=True, **kwargs): - """Parse an HTML fragment as a string or file-like object into a tree - - :arg doc: the fragment to parse as a string or file-like object - - :arg container: the container context to parse the fragment in - - :arg treebuilder: the treebuilder to use when parsing - - :arg namespaceHTMLElements: whether or not to namespace HTML elements - - :returns: parsed tree - - Example: - - >>> from html5lib.html5libparser import parseFragment - >>> parseFragment('this is a fragment') - - - """ - tb = treebuilders.getTreeBuilder(treebuilder) - p = HTMLParser(tb, namespaceHTMLElements=namespaceHTMLElements) - return p.parseFragment(doc, container=container, **kwargs) - - -def method_decorator_metaclass(function): - class Decorated(type): - def __new__(meta, classname, bases, classDict): - for attributeName, attribute in classDict.items(): - if isinstance(attribute, types.FunctionType): - attribute = function(attribute) - - classDict[attributeName] = attribute - return type.__new__(meta, classname, bases, classDict) - return Decorated - - -class HTMLParser(object): - """HTML parser - - Generates a tree structure from a stream of (possibly malformed) HTML. - - """ - - def __init__(self, tree=None, strict=False, namespaceHTMLElements=True, debug=False): - """ - :arg tree: a treebuilder class controlling the type of tree that will be - returned. Built in treebuilders can be accessed through - html5lib.treebuilders.getTreeBuilder(treeType) - - :arg strict: raise an exception when a parse error is encountered - - :arg namespaceHTMLElements: whether or not to namespace HTML elements - - :arg debug: whether or not to enable debug mode which logs things - - Example: - - >>> from html5lib.html5parser import HTMLParser - >>> parser = HTMLParser() # generates parser with etree builder - >>> parser = HTMLParser('lxml', strict=True) # generates parser with lxml builder which is strict - - """ - - # Raise an exception on the first error encountered - self.strict = strict - - if tree is None: - tree = treebuilders.getTreeBuilder("etree") - self.tree = tree(namespaceHTMLElements) - self.errors = [] - - self.phases = dict([(name, cls(self, self.tree)) for name, cls in - getPhases(debug).items()]) - - def _parse(self, stream, innerHTML=False, container="div", scripting=False, **kwargs): - - self.innerHTMLMode = innerHTML - self.container = container - self.scripting = scripting - self.tokenizer = _tokenizer.HTMLTokenizer(stream, parser=self, **kwargs) - self.reset() - - try: - self.mainLoop() - except _ReparseException: - self.reset() - self.mainLoop() - - def reset(self): - self.tree.reset() - self.firstStartTag = False - self.errors = [] - self.log = [] # only used with debug mode - # "quirks" / "limited quirks" / "no quirks" - self.compatMode = "no quirks" - - if self.innerHTMLMode: - self.innerHTML = self.container.lower() - - if self.innerHTML in cdataElements: - self.tokenizer.state = self.tokenizer.rcdataState - elif self.innerHTML in rcdataElements: - self.tokenizer.state = self.tokenizer.rawtextState - elif self.innerHTML == 'plaintext': - self.tokenizer.state = self.tokenizer.plaintextState - else: - # state already is data state - # self.tokenizer.state = self.tokenizer.dataState - pass - self.phase = self.phases["beforeHtml"] - 
self.phase.insertHtmlElement() - self.resetInsertionMode() - else: - self.innerHTML = False # pylint:disable=redefined-variable-type - self.phase = self.phases["initial"] - - self.lastPhase = None - - self.beforeRCDataPhase = None - - self.framesetOK = True - - @property - def documentEncoding(self): - """Name of the character encoding that was used to decode the input stream, or - :obj:`None` if that is not determined yet - - """ - if not hasattr(self, 'tokenizer'): - return None - return self.tokenizer.stream.charEncoding[0].name - - def isHTMLIntegrationPoint(self, element): - if (element.name == "annotation-xml" and - element.namespace == namespaces["mathml"]): - return ("encoding" in element.attributes and - element.attributes["encoding"].translate( - asciiUpper2Lower) in - ("text/html", "application/xhtml+xml")) - else: - return (element.namespace, element.name) in htmlIntegrationPointElements - - def isMathMLTextIntegrationPoint(self, element): - return (element.namespace, element.name) in mathmlTextIntegrationPointElements - - def mainLoop(self): - CharactersToken = tokenTypes["Characters"] - SpaceCharactersToken = tokenTypes["SpaceCharacters"] - StartTagToken = tokenTypes["StartTag"] - EndTagToken = tokenTypes["EndTag"] - CommentToken = tokenTypes["Comment"] - DoctypeToken = tokenTypes["Doctype"] - ParseErrorToken = tokenTypes["ParseError"] - - for token in self.normalizedTokens(): - prev_token = None - new_token = token - while new_token is not None: - prev_token = new_token - currentNode = self.tree.openElements[-1] if self.tree.openElements else None - currentNodeNamespace = currentNode.namespace if currentNode else None - currentNodeName = currentNode.name if currentNode else None - - type = new_token["type"] - - if type == ParseErrorToken: - self.parseError(new_token["data"], new_token.get("datavars", {})) - new_token = None - else: - if (len(self.tree.openElements) == 0 or - currentNodeNamespace == self.tree.defaultNamespace or - (self.isMathMLTextIntegrationPoint(currentNode) and - ((type == StartTagToken and - token["name"] not in frozenset(["mglyph", "malignmark"])) or - type in (CharactersToken, SpaceCharactersToken))) or - (currentNodeNamespace == namespaces["mathml"] and - currentNodeName == "annotation-xml" and - type == StartTagToken and - token["name"] == "svg") or - (self.isHTMLIntegrationPoint(currentNode) and - type in (StartTagToken, CharactersToken, SpaceCharactersToken))): - phase = self.phase - else: - phase = self.phases["inForeignContent"] - - if type == CharactersToken: - new_token = phase.processCharacters(new_token) - elif type == SpaceCharactersToken: - new_token = phase.processSpaceCharacters(new_token) - elif type == StartTagToken: - new_token = phase.processStartTag(new_token) - elif type == EndTagToken: - new_token = phase.processEndTag(new_token) - elif type == CommentToken: - new_token = phase.processComment(new_token) - elif type == DoctypeToken: - new_token = phase.processDoctype(new_token) - - if (type == StartTagToken and prev_token["selfClosing"] and - not prev_token["selfClosingAcknowledged"]): - self.parseError("non-void-element-with-trailing-solidus", - {"name": prev_token["name"]}) - - # When the loop finishes it's EOF - reprocess = True - phases = [] - while reprocess: - phases.append(self.phase) - reprocess = self.phase.processEOF() - if reprocess: - assert self.phase not in phases - - def normalizedTokens(self): - for token in self.tokenizer: - yield self.normalizeToken(token) - - def parse(self, stream, *args, **kwargs): - """Parse a 
HTML document into a well-formed tree - - :arg stream: a file-like object or string containing the HTML to be parsed - - The optional encoding parameter must be a string that indicates - the encoding. If specified, that encoding will be used, - regardless of any BOM or later declaration (such as in a meta - element). - - :arg scripting: treat noscript elements as if JavaScript was turned on - - :returns: parsed tree - - Example: - - >>> from html5lib.html5parser import HTMLParser - >>> parser = HTMLParser() - >>> parser.parse('

<html><body><p>This is a doc</p></body></html>
') - - - """ - self._parse(stream, False, None, *args, **kwargs) - return self.tree.getDocument() - - def parseFragment(self, stream, *args, **kwargs): - """Parse a HTML fragment into a well-formed tree fragment - - :arg container: name of the element we're setting the innerHTML - property if set to None, default to 'div' - - :arg stream: a file-like object or string containing the HTML to be parsed - - The optional encoding parameter must be a string that indicates - the encoding. If specified, that encoding will be used, - regardless of any BOM or later declaration (such as in a meta - element) - - :arg scripting: treat noscript elements as if JavaScript was turned on - - :returns: parsed tree - - Example: - - >>> from html5lib.html5libparser import HTMLParser - >>> parser = HTMLParser() - >>> parser.parseFragment('this is a fragment') - - - """ - self._parse(stream, True, *args, **kwargs) - return self.tree.getFragment() - - def parseError(self, errorcode="XXX-undefined-error", datavars=None): - # XXX The idea is to make errorcode mandatory. - if datavars is None: - datavars = {} - self.errors.append((self.tokenizer.stream.position(), errorcode, datavars)) - if self.strict: - raise ParseError(E[errorcode] % datavars) - - def normalizeToken(self, token): - # HTML5 specific normalizations to the token stream - if token["type"] == tokenTypes["StartTag"]: - raw = token["data"] - token["data"] = OrderedDict(raw) - if len(raw) > len(token["data"]): - # we had some duplicated attribute, fix so first wins - token["data"].update(raw[::-1]) - - return token - - def adjustMathMLAttributes(self, token): - adjust_attributes(token, adjustMathMLAttributes) - - def adjustSVGAttributes(self, token): - adjust_attributes(token, adjustSVGAttributes) - - def adjustForeignAttributes(self, token): - adjust_attributes(token, adjustForeignAttributesMap) - - def reparseTokenNormal(self, token): - # pylint:disable=unused-argument - self.parser.phase() - - def resetInsertionMode(self): - # The name of this method is mostly historical. (It's also used in the - # specification.) 
- last = False - newModes = { - "select": "inSelect", - "td": "inCell", - "th": "inCell", - "tr": "inRow", - "tbody": "inTableBody", - "thead": "inTableBody", - "tfoot": "inTableBody", - "caption": "inCaption", - "colgroup": "inColumnGroup", - "table": "inTable", - "head": "inBody", - "body": "inBody", - "frameset": "inFrameset", - "html": "beforeHead" - } - for node in self.tree.openElements[::-1]: - nodeName = node.name - new_phase = None - if node == self.tree.openElements[0]: - assert self.innerHTML - last = True - nodeName = self.innerHTML - # Check for conditions that should only happen in the innerHTML - # case - if nodeName in ("select", "colgroup", "head", "html"): - assert self.innerHTML - - if not last and node.namespace != self.tree.defaultNamespace: - continue - - if nodeName in newModes: - new_phase = self.phases[newModes[nodeName]] - break - elif last: - new_phase = self.phases["inBody"] - break - - self.phase = new_phase - - def parseRCDataRawtext(self, token, contentType): - # Generic RCDATA/RAWTEXT Parsing algorithm - assert contentType in ("RAWTEXT", "RCDATA") - - self.tree.insertElement(token) - - if contentType == "RAWTEXT": - self.tokenizer.state = self.tokenizer.rawtextState - else: - self.tokenizer.state = self.tokenizer.rcdataState - - self.originalPhase = self.phase - - self.phase = self.phases["text"] - - -@_utils.memoize -def getPhases(debug): - def log(function): - """Logger that records which phase processes each token""" - type_names = dict((value, key) for key, value in - tokenTypes.items()) - - def wrapped(self, *args, **kwargs): - if function.__name__.startswith("process") and len(args) > 0: - token = args[0] - try: - info = {"type": type_names[token['type']]} - except: - raise - if token['type'] in tagTokenTypes: - info["name"] = token['name'] - - self.parser.log.append((self.parser.tokenizer.state.__name__, - self.parser.phase.__class__.__name__, - self.__class__.__name__, - function.__name__, - info)) - return function(self, *args, **kwargs) - else: - return function(self, *args, **kwargs) - return wrapped - - def getMetaclass(use_metaclass, metaclass_func): - if use_metaclass: - return method_decorator_metaclass(metaclass_func) - else: - return type - - # pylint:disable=unused-argument - class Phase(with_metaclass(getMetaclass(debug, log))): - """Base class for helper object that implements each phase of processing - """ - - def __init__(self, parser, tree): - self.parser = parser - self.tree = tree - - def processEOF(self): - raise NotImplementedError - - def processComment(self, token): - # For most phases the following is correct. Where it's not it will be - # overridden. - self.tree.insertComment(token, self.tree.openElements[-1]) - - def processDoctype(self, token): - self.parser.parseError("unexpected-doctype") - - def processCharacters(self, token): - self.tree.insertText(token["data"]) - - def processSpaceCharacters(self, token): - self.tree.insertText(token["data"]) - - def processStartTag(self, token): - return self.startTagHandler[token["name"]](token) - - def startTagHtml(self, token): - if not self.parser.firstStartTag and token["name"] == "html": - self.parser.parseError("non-html-root") - # XXX Need a check here to see if the first start tag token emitted is - # this token... If it's not, invoke self.parser.parseError(). 
- for attr, value in token["data"].items(): - if attr not in self.tree.openElements[0].attributes: - self.tree.openElements[0].attributes[attr] = value - self.parser.firstStartTag = False - - def processEndTag(self, token): - return self.endTagHandler[token["name"]](token) - - class InitialPhase(Phase): - def processSpaceCharacters(self, token): - pass - - def processComment(self, token): - self.tree.insertComment(token, self.tree.document) - - def processDoctype(self, token): - name = token["name"] - publicId = token["publicId"] - systemId = token["systemId"] - correct = token["correct"] - - if (name != "html" or publicId is not None or - systemId is not None and systemId != "about:legacy-compat"): - self.parser.parseError("unknown-doctype") - - if publicId is None: - publicId = "" - - self.tree.insertDoctype(token) - - if publicId != "": - publicId = publicId.translate(asciiUpper2Lower) - - if (not correct or token["name"] != "html" or - publicId.startswith( - ("+//silmaril//dtd html pro v0r11 19970101//", - "-//advasoft ltd//dtd html 3.0 aswedit + extensions//", - "-//as//dtd html 3.0 aswedit + extensions//", - "-//ietf//dtd html 2.0 level 1//", - "-//ietf//dtd html 2.0 level 2//", - "-//ietf//dtd html 2.0 strict level 1//", - "-//ietf//dtd html 2.0 strict level 2//", - "-//ietf//dtd html 2.0 strict//", - "-//ietf//dtd html 2.0//", - "-//ietf//dtd html 2.1e//", - "-//ietf//dtd html 3.0//", - "-//ietf//dtd html 3.2 final//", - "-//ietf//dtd html 3.2//", - "-//ietf//dtd html 3//", - "-//ietf//dtd html level 0//", - "-//ietf//dtd html level 1//", - "-//ietf//dtd html level 2//", - "-//ietf//dtd html level 3//", - "-//ietf//dtd html strict level 0//", - "-//ietf//dtd html strict level 1//", - "-//ietf//dtd html strict level 2//", - "-//ietf//dtd html strict level 3//", - "-//ietf//dtd html strict//", - "-//ietf//dtd html//", - "-//metrius//dtd metrius presentational//", - "-//microsoft//dtd internet explorer 2.0 html strict//", - "-//microsoft//dtd internet explorer 2.0 html//", - "-//microsoft//dtd internet explorer 2.0 tables//", - "-//microsoft//dtd internet explorer 3.0 html strict//", - "-//microsoft//dtd internet explorer 3.0 html//", - "-//microsoft//dtd internet explorer 3.0 tables//", - "-//netscape comm. corp.//dtd html//", - "-//netscape comm. 
corp.//dtd strict html//", - "-//o'reilly and associates//dtd html 2.0//", - "-//o'reilly and associates//dtd html extended 1.0//", - "-//o'reilly and associates//dtd html extended relaxed 1.0//", - "-//softquad software//dtd hotmetal pro 6.0::19990601::extensions to html 4.0//", - "-//softquad//dtd hotmetal pro 4.0::19971010::extensions to html 4.0//", - "-//spyglass//dtd html 2.0 extended//", - "-//sq//dtd html 2.0 hotmetal + extensions//", - "-//sun microsystems corp.//dtd hotjava html//", - "-//sun microsystems corp.//dtd hotjava strict html//", - "-//w3c//dtd html 3 1995-03-24//", - "-//w3c//dtd html 3.2 draft//", - "-//w3c//dtd html 3.2 final//", - "-//w3c//dtd html 3.2//", - "-//w3c//dtd html 3.2s draft//", - "-//w3c//dtd html 4.0 frameset//", - "-//w3c//dtd html 4.0 transitional//", - "-//w3c//dtd html experimental 19960712//", - "-//w3c//dtd html experimental 970421//", - "-//w3c//dtd w3 html//", - "-//w3o//dtd w3 html 3.0//", - "-//webtechs//dtd mozilla html 2.0//", - "-//webtechs//dtd mozilla html//")) or - publicId in ("-//w3o//dtd w3 html strict 3.0//en//", - "-/w3c/dtd html 4.0 transitional/en", - "html") or - publicId.startswith( - ("-//w3c//dtd html 4.01 frameset//", - "-//w3c//dtd html 4.01 transitional//")) and - systemId is None or - systemId and systemId.lower() == "http://www.ibm.com/data/dtd/v11/ibmxhtml1-transitional.dtd"): - self.parser.compatMode = "quirks" - elif (publicId.startswith( - ("-//w3c//dtd xhtml 1.0 frameset//", - "-//w3c//dtd xhtml 1.0 transitional//")) or - publicId.startswith( - ("-//w3c//dtd html 4.01 frameset//", - "-//w3c//dtd html 4.01 transitional//")) and - systemId is not None): - self.parser.compatMode = "limited quirks" - - self.parser.phase = self.parser.phases["beforeHtml"] - - def anythingElse(self): - self.parser.compatMode = "quirks" - self.parser.phase = self.parser.phases["beforeHtml"] - - def processCharacters(self, token): - self.parser.parseError("expected-doctype-but-got-chars") - self.anythingElse() - return token - - def processStartTag(self, token): - self.parser.parseError("expected-doctype-but-got-start-tag", - {"name": token["name"]}) - self.anythingElse() - return token - - def processEndTag(self, token): - self.parser.parseError("expected-doctype-but-got-end-tag", - {"name": token["name"]}) - self.anythingElse() - return token - - def processEOF(self): - self.parser.parseError("expected-doctype-but-got-eof") - self.anythingElse() - return True - - class BeforeHtmlPhase(Phase): - # helper methods - def insertHtmlElement(self): - self.tree.insertRoot(impliedTagToken("html", "StartTag")) - self.parser.phase = self.parser.phases["beforeHead"] - - # other - def processEOF(self): - self.insertHtmlElement() - return True - - def processComment(self, token): - self.tree.insertComment(token, self.tree.document) - - def processSpaceCharacters(self, token): - pass - - def processCharacters(self, token): - self.insertHtmlElement() - return token - - def processStartTag(self, token): - if token["name"] == "html": - self.parser.firstStartTag = True - self.insertHtmlElement() - return token - - def processEndTag(self, token): - if token["name"] not in ("head", "body", "html", "br"): - self.parser.parseError("unexpected-end-tag-before-html", - {"name": token["name"]}) - else: - self.insertHtmlElement() - return token - - class BeforeHeadPhase(Phase): - def __init__(self, parser, tree): - Phase.__init__(self, parser, tree) - - self.startTagHandler = _utils.MethodDispatcher([ - ("html", self.startTagHtml), - ("head", self.startTagHead) - ]) 
- self.startTagHandler.default = self.startTagOther - - self.endTagHandler = _utils.MethodDispatcher([ - (("head", "body", "html", "br"), self.endTagImplyHead) - ]) - self.endTagHandler.default = self.endTagOther - - def processEOF(self): - self.startTagHead(impliedTagToken("head", "StartTag")) - return True - - def processSpaceCharacters(self, token): - pass - - def processCharacters(self, token): - self.startTagHead(impliedTagToken("head", "StartTag")) - return token - - def startTagHtml(self, token): - return self.parser.phases["inBody"].processStartTag(token) - - def startTagHead(self, token): - self.tree.insertElement(token) - self.tree.headPointer = self.tree.openElements[-1] - self.parser.phase = self.parser.phases["inHead"] - - def startTagOther(self, token): - self.startTagHead(impliedTagToken("head", "StartTag")) - return token - - def endTagImplyHead(self, token): - self.startTagHead(impliedTagToken("head", "StartTag")) - return token - - def endTagOther(self, token): - self.parser.parseError("end-tag-after-implied-root", - {"name": token["name"]}) - - class InHeadPhase(Phase): - def __init__(self, parser, tree): - Phase.__init__(self, parser, tree) - - self.startTagHandler = _utils.MethodDispatcher([ - ("html", self.startTagHtml), - ("title", self.startTagTitle), - (("noframes", "style"), self.startTagNoFramesStyle), - ("noscript", self.startTagNoscript), - ("script", self.startTagScript), - (("base", "basefont", "bgsound", "command", "link"), - self.startTagBaseLinkCommand), - ("meta", self.startTagMeta), - ("head", self.startTagHead) - ]) - self.startTagHandler.default = self.startTagOther - - self.endTagHandler = _utils.MethodDispatcher([ - ("head", self.endTagHead), - (("br", "html", "body"), self.endTagHtmlBodyBr) - ]) - self.endTagHandler.default = self.endTagOther - - # the real thing - def processEOF(self): - self.anythingElse() - return True - - def processCharacters(self, token): - self.anythingElse() - return token - - def startTagHtml(self, token): - return self.parser.phases["inBody"].processStartTag(token) - - def startTagHead(self, token): - self.parser.parseError("two-heads-are-not-better-than-one") - - def startTagBaseLinkCommand(self, token): - self.tree.insertElement(token) - self.tree.openElements.pop() - token["selfClosingAcknowledged"] = True - - def startTagMeta(self, token): - self.tree.insertElement(token) - self.tree.openElements.pop() - token["selfClosingAcknowledged"] = True - - attributes = token["data"] - if self.parser.tokenizer.stream.charEncoding[1] == "tentative": - if "charset" in attributes: - self.parser.tokenizer.stream.changeEncoding(attributes["charset"]) - elif ("content" in attributes and - "http-equiv" in attributes and - attributes["http-equiv"].lower() == "content-type"): - # Encoding it as UTF-8 here is a hack, as really we should pass - # the abstract Unicode string, and just use the - # ContentAttrParser on that, but using UTF-8 allows all chars - # to be encoded and as a ASCII-superset works. 
- data = _inputstream.EncodingBytes(attributes["content"].encode("utf-8")) - parser = _inputstream.ContentAttrParser(data) - codec = parser.parse() - self.parser.tokenizer.stream.changeEncoding(codec) - - def startTagTitle(self, token): - self.parser.parseRCDataRawtext(token, "RCDATA") - - def startTagNoFramesStyle(self, token): - # Need to decide whether to implement the scripting-disabled case - self.parser.parseRCDataRawtext(token, "RAWTEXT") - - def startTagNoscript(self, token): - if self.parser.scripting: - self.parser.parseRCDataRawtext(token, "RAWTEXT") - else: - self.tree.insertElement(token) - self.parser.phase = self.parser.phases["inHeadNoscript"] - - def startTagScript(self, token): - self.tree.insertElement(token) - self.parser.tokenizer.state = self.parser.tokenizer.scriptDataState - self.parser.originalPhase = self.parser.phase - self.parser.phase = self.parser.phases["text"] - - def startTagOther(self, token): - self.anythingElse() - return token - - def endTagHead(self, token): - node = self.parser.tree.openElements.pop() - assert node.name == "head", "Expected head got %s" % node.name - self.parser.phase = self.parser.phases["afterHead"] - - def endTagHtmlBodyBr(self, token): - self.anythingElse() - return token - - def endTagOther(self, token): - self.parser.parseError("unexpected-end-tag", {"name": token["name"]}) - - def anythingElse(self): - self.endTagHead(impliedTagToken("head")) - - class InHeadNoscriptPhase(Phase): - def __init__(self, parser, tree): - Phase.__init__(self, parser, tree) - - self.startTagHandler = _utils.MethodDispatcher([ - ("html", self.startTagHtml), - (("basefont", "bgsound", "link", "meta", "noframes", "style"), self.startTagBaseLinkCommand), - (("head", "noscript"), self.startTagHeadNoscript), - ]) - self.startTagHandler.default = self.startTagOther - - self.endTagHandler = _utils.MethodDispatcher([ - ("noscript", self.endTagNoscript), - ("br", self.endTagBr), - ]) - self.endTagHandler.default = self.endTagOther - - def processEOF(self): - self.parser.parseError("eof-in-head-noscript") - self.anythingElse() - return True - - def processComment(self, token): - return self.parser.phases["inHead"].processComment(token) - - def processCharacters(self, token): - self.parser.parseError("char-in-head-noscript") - self.anythingElse() - return token - - def processSpaceCharacters(self, token): - return self.parser.phases["inHead"].processSpaceCharacters(token) - - def startTagHtml(self, token): - return self.parser.phases["inBody"].processStartTag(token) - - def startTagBaseLinkCommand(self, token): - return self.parser.phases["inHead"].processStartTag(token) - - def startTagHeadNoscript(self, token): - self.parser.parseError("unexpected-start-tag", {"name": token["name"]}) - - def startTagOther(self, token): - self.parser.parseError("unexpected-inhead-noscript-tag", {"name": token["name"]}) - self.anythingElse() - return token - - def endTagNoscript(self, token): - node = self.parser.tree.openElements.pop() - assert node.name == "noscript", "Expected noscript got %s" % node.name - self.parser.phase = self.parser.phases["inHead"] - - def endTagBr(self, token): - self.parser.parseError("unexpected-inhead-noscript-tag", {"name": token["name"]}) - self.anythingElse() - return token - - def endTagOther(self, token): - self.parser.parseError("unexpected-end-tag", {"name": token["name"]}) - - def anythingElse(self): - # Caller must raise parse error first! 
- self.endTagNoscript(impliedTagToken("noscript")) - - class AfterHeadPhase(Phase): - def __init__(self, parser, tree): - Phase.__init__(self, parser, tree) - - self.startTagHandler = _utils.MethodDispatcher([ - ("html", self.startTagHtml), - ("body", self.startTagBody), - ("frameset", self.startTagFrameset), - (("base", "basefont", "bgsound", "link", "meta", "noframes", "script", - "style", "title"), - self.startTagFromHead), - ("head", self.startTagHead) - ]) - self.startTagHandler.default = self.startTagOther - self.endTagHandler = _utils.MethodDispatcher([(("body", "html", "br"), - self.endTagHtmlBodyBr)]) - self.endTagHandler.default = self.endTagOther - - def processEOF(self): - self.anythingElse() - return True - - def processCharacters(self, token): - self.anythingElse() - return token - - def startTagHtml(self, token): - return self.parser.phases["inBody"].processStartTag(token) - - def startTagBody(self, token): - self.parser.framesetOK = False - self.tree.insertElement(token) - self.parser.phase = self.parser.phases["inBody"] - - def startTagFrameset(self, token): - self.tree.insertElement(token) - self.parser.phase = self.parser.phases["inFrameset"] - - def startTagFromHead(self, token): - self.parser.parseError("unexpected-start-tag-out-of-my-head", - {"name": token["name"]}) - self.tree.openElements.append(self.tree.headPointer) - self.parser.phases["inHead"].processStartTag(token) - for node in self.tree.openElements[::-1]: - if node.name == "head": - self.tree.openElements.remove(node) - break - - def startTagHead(self, token): - self.parser.parseError("unexpected-start-tag", {"name": token["name"]}) - - def startTagOther(self, token): - self.anythingElse() - return token - - def endTagHtmlBodyBr(self, token): - self.anythingElse() - return token - - def endTagOther(self, token): - self.parser.parseError("unexpected-end-tag", {"name": token["name"]}) - - def anythingElse(self): - self.tree.insertElement(impliedTagToken("body", "StartTag")) - self.parser.phase = self.parser.phases["inBody"] - self.parser.framesetOK = True - - class InBodyPhase(Phase): - # http://www.whatwg.org/specs/web-apps/current-work/#parsing-main-inbody - # the really-really-really-very crazy mode - def __init__(self, parser, tree): - Phase.__init__(self, parser, tree) - - # Set this to the default handler - self.processSpaceCharacters = self.processSpaceCharactersNonPre - - self.startTagHandler = _utils.MethodDispatcher([ - ("html", self.startTagHtml), - (("base", "basefont", "bgsound", "command", "link", "meta", - "script", "style", "title"), - self.startTagProcessInHead), - ("body", self.startTagBody), - ("frameset", self.startTagFrameset), - (("address", "article", "aside", "blockquote", "center", "details", - "dir", "div", "dl", "fieldset", "figcaption", "figure", - "footer", "header", "hgroup", "main", "menu", "nav", "ol", "p", - "section", "summary", "ul"), - self.startTagCloseP), - (headingElements, self.startTagHeading), - (("pre", "listing"), self.startTagPreListing), - ("form", self.startTagForm), - (("li", "dd", "dt"), self.startTagListItem), - ("plaintext", self.startTagPlaintext), - ("a", self.startTagA), - (("b", "big", "code", "em", "font", "i", "s", "small", "strike", - "strong", "tt", "u"), self.startTagFormatting), - ("nobr", self.startTagNobr), - ("button", self.startTagButton), - (("applet", "marquee", "object"), self.startTagAppletMarqueeObject), - ("xmp", self.startTagXmp), - ("table", self.startTagTable), - (("area", "br", "embed", "img", "keygen", "wbr"), - 
self.startTagVoidFormatting), - (("param", "source", "track"), self.startTagParamSource), - ("input", self.startTagInput), - ("hr", self.startTagHr), - ("image", self.startTagImage), - ("isindex", self.startTagIsIndex), - ("textarea", self.startTagTextarea), - ("iframe", self.startTagIFrame), - ("noscript", self.startTagNoscript), - (("noembed", "noframes"), self.startTagRawtext), - ("select", self.startTagSelect), - (("rp", "rt"), self.startTagRpRt), - (("option", "optgroup"), self.startTagOpt), - (("math"), self.startTagMath), - (("svg"), self.startTagSvg), - (("caption", "col", "colgroup", "frame", "head", - "tbody", "td", "tfoot", "th", "thead", - "tr"), self.startTagMisplaced) - ]) - self.startTagHandler.default = self.startTagOther - - self.endTagHandler = _utils.MethodDispatcher([ - ("body", self.endTagBody), - ("html", self.endTagHtml), - (("address", "article", "aside", "blockquote", "button", "center", - "details", "dialog", "dir", "div", "dl", "fieldset", "figcaption", "figure", - "footer", "header", "hgroup", "listing", "main", "menu", "nav", "ol", "pre", - "section", "summary", "ul"), self.endTagBlock), - ("form", self.endTagForm), - ("p", self.endTagP), - (("dd", "dt", "li"), self.endTagListItem), - (headingElements, self.endTagHeading), - (("a", "b", "big", "code", "em", "font", "i", "nobr", "s", "small", - "strike", "strong", "tt", "u"), self.endTagFormatting), - (("applet", "marquee", "object"), self.endTagAppletMarqueeObject), - ("br", self.endTagBr), - ]) - self.endTagHandler.default = self.endTagOther - - def isMatchingFormattingElement(self, node1, node2): - return (node1.name == node2.name and - node1.namespace == node2.namespace and - node1.attributes == node2.attributes) - - # helper - def addFormattingElement(self, token): - self.tree.insertElement(token) - element = self.tree.openElements[-1] - - matchingElements = [] - for node in self.tree.activeFormattingElements[::-1]: - if node is Marker: - break - elif self.isMatchingFormattingElement(node, element): - matchingElements.append(node) - - assert len(matchingElements) <= 3 - if len(matchingElements) == 3: - self.tree.activeFormattingElements.remove(matchingElements[-1]) - self.tree.activeFormattingElements.append(element) - - # the real deal - def processEOF(self): - allowed_elements = frozenset(("dd", "dt", "li", "p", "tbody", "td", - "tfoot", "th", "thead", "tr", "body", - "html")) - for node in self.tree.openElements[::-1]: - if node.name not in allowed_elements: - self.parser.parseError("expected-closing-tag-but-got-eof") - break - # Stop parsing - - def processSpaceCharactersDropNewline(self, token): - # Sometimes (start of
<pre>, <listing>, and <textarea>
-  
-
-
- The debugger caught an exception in your WSGI application. You can now - look at the traceback which led to the error. - If you enable JavaScript you can also use additional features such as code - execution (if the evalex feature is enabled), automatic pasting of the - exceptions and much more. -
-""" - + FOOTER - + """ - -""" -) - -CONSOLE_HTML = ( - HEADER - + u"""\ -

-<h1>Interactive Console</h1>
-<div class="explanation">
-In this console you can execute Python expressions in the context of the
-application.  The initial namespace was created by the debugger automatically.
-</div>
-<div class="console"><div class="inner">The Console requires JavaScript.</div></div>
-""" - + FOOTER -) - -SUMMARY_HTML = u"""\ -
- %(title)s -
-  <ul>%(frames)s</ul>
- %(description)s -
-""" - -FRAME_HTML = u"""\ -
-

File "%(filename)s", - line %(lineno)s, - in %(function_name)s

-
%(lines)s
-
-""" - -SOURCE_LINE_HTML = u"""\ - - %(lineno)s - %(code)s - -""" - - -def render_console_html(secret, evalex_trusted=True): - return CONSOLE_HTML % { - "evalex": "true", - "evalex_trusted": "true" if evalex_trusted else "false", - "console": "true", - "title": "Console", - "secret": secret, - "traceback_id": -1, - } - - -def get_current_traceback( - ignore_system_exceptions=False, show_hidden_frames=False, skip=0 -): - """Get the current exception info as `Traceback` object. Per default - calling this method will reraise system exceptions such as generator exit, - system exit or others. This behavior can be disabled by passing `False` - to the function as first parameter. - """ - exc_type, exc_value, tb = sys.exc_info() - if ignore_system_exceptions and exc_type in system_exceptions: - reraise(exc_type, exc_value, tb) - for _ in range_type(skip): - if tb.tb_next is None: - break - tb = tb.tb_next - tb = Traceback(exc_type, exc_value, tb) - if not show_hidden_frames: - tb.filter_hidden_frames() - return tb - - -class Line(object): - """Helper for the source renderer.""" - - __slots__ = ("lineno", "code", "in_frame", "current") - - def __init__(self, lineno, code): - self.lineno = lineno - self.code = code - self.in_frame = False - self.current = False - - @property - def classes(self): - rv = ["line"] - if self.in_frame: - rv.append("in-frame") - if self.current: - rv.append("current") - return rv - - def render(self): - return SOURCE_LINE_HTML % { - "classes": u" ".join(self.classes), - "lineno": self.lineno, - "code": escape(self.code), - } - - -class Traceback(object): - """Wraps a traceback.""" - - def __init__(self, exc_type, exc_value, tb): - self.exc_type = exc_type - self.exc_value = exc_value - self.tb = tb - - exception_type = exc_type.__name__ - if exc_type.__module__ not in {"builtins", "__builtin__", "exceptions"}: - exception_type = exc_type.__module__ + "." 
+ exception_type - self.exception_type = exception_type - - self.groups = [] - memo = set() - while True: - self.groups.append(Group(exc_type, exc_value, tb)) - memo.add(id(exc_value)) - if PY2: - break - exc_value = exc_value.__cause__ or exc_value.__context__ - if exc_value is None or id(exc_value) in memo: - break - exc_type = type(exc_value) - tb = exc_value.__traceback__ - self.groups.reverse() - self.frames = [frame for group in self.groups for frame in group.frames] - - def filter_hidden_frames(self): - """Remove the frames according to the paste spec.""" - for group in self.groups: - group.filter_hidden_frames() - - self.frames[:] = [frame for group in self.groups for frame in group.frames] - - @property - def is_syntax_error(self): - """Is it a syntax error?""" - return isinstance(self.exc_value, SyntaxError) - - @property - def exception(self): - """String representation of the final exception.""" - return self.groups[-1].exception - - def log(self, logfile=None): - """Log the ASCII traceback into a file object.""" - if logfile is None: - logfile = sys.stderr - tb = self.plaintext.rstrip() + u"\n" - logfile.write(to_native(tb, "utf-8", "replace")) - - def paste(self): - """Create a paste and return the paste id.""" - data = json.dumps( - { - "description": "Werkzeug Internal Server Error", - "public": False, - "files": {"traceback.txt": {"content": self.plaintext}}, - } - ).encode("utf-8") - try: - from urllib2 import urlopen - except ImportError: - from urllib.request import urlopen - rv = urlopen("https://api.github.com/gists", data=data) - resp = json.loads(rv.read().decode("utf-8")) - rv.close() - return {"url": resp["html_url"], "id": resp["id"]} - - def render_summary(self, include_title=True): - """Render the traceback for the interactive console.""" - title = "" - classes = ["traceback"] - if not self.frames: - classes.append("noframe-traceback") - frames = [] - else: - library_frames = sum(frame.is_library for frame in self.frames) - mark_lib = 0 < library_frames < len(self.frames) - frames = [group.render(mark_lib=mark_lib) for group in self.groups] - - if include_title: - if self.is_syntax_error: - title = u"Syntax Error" - else: - title = u"Traceback (most recent call last):" - - if self.is_syntax_error: - description_wrapper = u"
<pre class=syntaxerror>%s</pre>
" - else: - description_wrapper = u"
<blockquote>%s</blockquote>
" - - return SUMMARY_HTML % { - "classes": u" ".join(classes), - "title": u"

<h3>%s</h3>
" % title if title else u"", - "frames": u"\n".join(frames), - "description": description_wrapper % escape(self.exception), - } - - def render_full(self, evalex=False, secret=None, evalex_trusted=True): - """Render the Full HTML page with the traceback info.""" - exc = escape(self.exception) - return PAGE_HTML % { - "evalex": "true" if evalex else "false", - "evalex_trusted": "true" if evalex_trusted else "false", - "console": "false", - "title": exc, - "exception": exc, - "exception_type": escape(self.exception_type), - "summary": self.render_summary(include_title=False), - "plaintext": escape(self.plaintext), - "plaintext_cs": re.sub("-{2,}", "-", self.plaintext), - "traceback_id": self.id, - "secret": secret, - } - - @cached_property - def plaintext(self): - return u"\n".join([group.render_text() for group in self.groups]) - - @property - def id(self): - return id(self) - - -class Group(object): - """A group of frames for an exception in a traceback. On Python 3, - if the exception has a ``__cause__`` or ``__context__``, there are - multiple exception groups. - """ - - def __init__(self, exc_type, exc_value, tb): - self.exc_type = exc_type - self.exc_value = exc_value - self.info = None - if not PY2: - if exc_value.__cause__ is not None: - self.info = ( - u"The above exception was the direct cause of the" - u" following exception" - ) - elif exc_value.__context__ is not None: - self.info = ( - u"During handling of the above exception, another" - u" exception occurred" - ) - - self.frames = [] - while tb is not None: - self.frames.append(Frame(exc_type, exc_value, tb)) - tb = tb.tb_next - - def filter_hidden_frames(self): - new_frames = [] - hidden = False - - for frame in self.frames: - hide = frame.hide - if hide in ("before", "before_and_this"): - new_frames = [] - hidden = False - if hide == "before_and_this": - continue - elif hide in ("reset", "reset_and_this"): - hidden = False - if hide == "reset_and_this": - continue - elif hide in ("after", "after_and_this"): - hidden = True - if hide == "after_and_this": - continue - elif hide or hidden: - continue - new_frames.append(frame) - - # if we only have one frame and that frame is from the codeop - # module, remove it. - if len(new_frames) == 1 and self.frames[0].module == "codeop": - del self.frames[:] - - # if the last frame is missing something went terrible wrong :( - elif self.frames[-1] in new_frames: - self.frames[:] = new_frames - - @property - def exception(self): - """String representation of the exception.""" - buf = traceback.format_exception_only(self.exc_type, self.exc_value) - rv = "".join(buf).strip() - return to_unicode(rv, "utf-8", "replace") - - def render(self, mark_lib=True): - out = [] - if self.info is not None: - out.append(u'
  • %s:
    ' % self.info) - for frame in self.frames: - out.append( - u"%s" - % ( - u' title="%s"' % escape(frame.info) if frame.info else u"", - frame.render(mark_lib=mark_lib), - ) - ) - return u"\n".join(out) - - def render_text(self): - out = [] - if self.info is not None: - out.append(u"\n%s:\n" % self.info) - out.append(u"Traceback (most recent call last):") - for frame in self.frames: - out.append(frame.render_text()) - out.append(self.exception) - return u"\n".join(out) - - -class Frame(object): - """A single frame in a traceback.""" - - def __init__(self, exc_type, exc_value, tb): - self.lineno = tb.tb_lineno - self.function_name = tb.tb_frame.f_code.co_name - self.locals = tb.tb_frame.f_locals - self.globals = tb.tb_frame.f_globals - - fn = inspect.getsourcefile(tb) or inspect.getfile(tb) - if fn[-4:] in (".pyo", ".pyc"): - fn = fn[:-1] - # if it's a file on the file system resolve the real filename. - if os.path.isfile(fn): - fn = os.path.realpath(fn) - self.filename = to_unicode(fn, get_filesystem_encoding()) - self.module = self.globals.get("__name__") - self.loader = self.globals.get("__loader__") - self.code = tb.tb_frame.f_code - - # support for paste's traceback extensions - self.hide = self.locals.get("__traceback_hide__", False) - info = self.locals.get("__traceback_info__") - if info is not None: - info = to_unicode(info, "utf-8", "replace") - self.info = info - - def render(self, mark_lib=True): - """Render a single frame in a traceback.""" - return FRAME_HTML % { - "id": self.id, - "filename": escape(self.filename), - "lineno": self.lineno, - "function_name": escape(self.function_name), - "lines": self.render_line_context(), - "library": "library" if mark_lib and self.is_library else "", - } - - @cached_property - def is_library(self): - return any( - self.filename.startswith(path) for path in sysconfig.get_paths().values() - ) - - def render_text(self): - return u' File "%s", line %s, in %s\n %s' % ( - self.filename, - self.lineno, - self.function_name, - self.current_line.strip(), - ) - - def render_line_context(self): - before, current, after = self.get_context_lines() - rv = [] - - def render_line(line, cls): - line = line.expandtabs().rstrip() - stripped_line = line.strip() - prefix = len(line) - len(stripped_line) - rv.append( - '
    %s%s
    ' - % (cls, " " * prefix, escape(stripped_line) or " ") - ) - - for line in before: - render_line(line, "before") - render_line(current, "current") - for line in after: - render_line(line, "after") - - return "\n".join(rv) - - def get_annotated_lines(self): - """Helper function that returns lines with extra information.""" - lines = [Line(idx + 1, x) for idx, x in enumerate(self.sourcelines)] - - # find function definition and mark lines - if hasattr(self.code, "co_firstlineno"): - lineno = self.code.co_firstlineno - 1 - while lineno > 0: - if _funcdef_re.match(lines[lineno].code): - break - lineno -= 1 - try: - offset = len(inspect.getblock([x.code + "\n" for x in lines[lineno:]])) - except TokenError: - offset = 0 - for line in lines[lineno : lineno + offset]: - line.in_frame = True - - # mark current line - try: - lines[self.lineno - 1].current = True - except IndexError: - pass - - return lines - - def eval(self, code, mode="single"): - """Evaluate code in the context of the frame.""" - if isinstance(code, string_types): - if PY2 and isinstance(code, text_type): # noqa - code = UTF8_COOKIE + code.encode("utf-8") - code = compile(code, "", mode) - return eval(code, self.globals, self.locals) - - @cached_property - def sourcelines(self): - """The sourcecode of the file as list of unicode strings.""" - # get sourcecode from loader or file - source = None - if self.loader is not None: - try: - if hasattr(self.loader, "get_source"): - source = self.loader.get_source(self.module) - elif hasattr(self.loader, "get_source_by_code"): - source = self.loader.get_source_by_code(self.code) - except Exception: - # we munch the exception so that we don't cause troubles - # if the loader is broken. - pass - - if source is None: - try: - f = open(to_native(self.filename, get_filesystem_encoding()), mode="rb") - except IOError: - return [] - try: - source = f.read() - finally: - f.close() - - # already unicode? return right away - if isinstance(source, text_type): - return source.splitlines() - - # yes. 
it should be ascii, but we don't want to reject too many - # characters in the debugger if something breaks - charset = "utf-8" - if source.startswith(UTF8_COOKIE): - source = source[3:] - else: - for idx, match in enumerate(_line_re.finditer(source)): - match = _coding_re.search(match.group()) - if match is not None: - charset = match.group(1) - break - if idx > 1: - break - - # on broken cookies we fall back to utf-8 too - charset = to_native(charset) - try: - codecs.lookup(charset) - except LookupError: - charset = "utf-8" - - return source.decode(charset, "replace").splitlines() - - def get_context_lines(self, context=5): - before = self.sourcelines[self.lineno - context - 1 : self.lineno - 1] - past = self.sourcelines[self.lineno : self.lineno + context] - return (before, self.current_line, past) - - @property - def current_line(self): - try: - return self.sourcelines[self.lineno - 1] - except IndexError: - return u"" - - @cached_property - def console(self): - return Console(self.globals, self.locals) - - @property - def id(self): - return id(self) diff --git a/venv/lib/python3.6/site-packages/werkzeug/exceptions.py b/venv/lib/python3.6/site-packages/werkzeug/exceptions.py deleted file mode 100644 index bfd20dc..0000000 --- a/venv/lib/python3.6/site-packages/werkzeug/exceptions.py +++ /dev/null @@ -1,786 +0,0 @@ -# -*- coding: utf-8 -*- -""" - werkzeug.exceptions - ~~~~~~~~~~~~~~~~~~~ - - This module implements a number of Python exceptions you can raise from - within your views to trigger a standard non-200 response. - - - Usage Example - ------------- - - :: - - from werkzeug.wrappers import BaseRequest - from werkzeug.wsgi import responder - from werkzeug.exceptions import HTTPException, NotFound - - def view(request): - raise NotFound() - - @responder - def application(environ, start_response): - request = BaseRequest(environ) - try: - return view(request) - except HTTPException as e: - return e - - - As you can see from this example those exceptions are callable WSGI - applications. Because of Python 2.4 compatibility those do not extend - from the response objects but only from the python exception class. - - As a matter of fact they are not Werkzeug response objects. However you - can get a response object by calling ``get_response()`` on a HTTP - exception. - - Keep in mind that you have to pass an environment to ``get_response()`` - because some errors fetch additional information from the WSGI - environment. - - If you want to hook in a different exception page to say, a 404 status - code, you can add a second except for a specific subclass of an error:: - - @responder - def application(environ, start_response): - request = BaseRequest(environ) - try: - return view(request) - except NotFound, e: - return not_found(request) - except HTTPException, e: - return e - - - :copyright: 2007 Pallets - :license: BSD-3-Clause -""" -import sys - -import werkzeug - -# Because of bootstrapping reasons we need to manually patch ourselves -# onto our parent module. -werkzeug.exceptions = sys.modules[__name__] - -from ._compat import implements_to_string -from ._compat import integer_types -from ._compat import iteritems -from ._compat import text_type -from ._internal import _get_environ -from .wrappers import Response - - -@implements_to_string -class HTTPException(Exception): - """Baseclass for all HTTP exceptions. This exception can be called as WSGI - application to render a default error page or you can catch the subclasses - of it independently and render nicer error messages. 
- """ - - code = None - description = None - - def __init__(self, description=None, response=None): - super(HTTPException, self).__init__() - if description is not None: - self.description = description - self.response = response - - @classmethod - def wrap(cls, exception, name=None): - """Create an exception that is a subclass of the calling HTTP - exception and the ``exception`` argument. - - The first argument to the class will be passed to the - wrapped ``exception``, the rest to the HTTP exception. If - ``e.args`` is not empty and ``e.show_exception`` is ``True``, - the wrapped exception message is added to the HTTP error - description. - - .. versionchanged:: 0.15.5 - The ``show_exception`` attribute controls whether the - description includes the wrapped exception message. - - .. versionchanged:: 0.15.0 - The description includes the wrapped exception message. - """ - - class newcls(cls, exception): - _description = cls.description - show_exception = False - - def __init__(self, arg=None, *args, **kwargs): - super(cls, self).__init__(*args, **kwargs) - - if arg is None: - exception.__init__(self) - else: - exception.__init__(self, arg) - - @property - def description(self): - if self.show_exception: - return "{}\n{}: {}".format( - self._description, exception.__name__, exception.__str__(self) - ) - - return self._description - - @description.setter - def description(self, value): - self._description = value - - newcls.__module__ = sys._getframe(1).f_globals.get("__name__") - name = name or cls.__name__ + exception.__name__ - newcls.__name__ = newcls.__qualname__ = name - return newcls - - @property - def name(self): - """The status name.""" - return HTTP_STATUS_CODES.get(self.code, "Unknown Error") - - def get_description(self, environ=None): - """Get the description.""" - return u"

<p>%s</p>" % escape(self.description).replace("\n", "<br>
    ") - - def get_body(self, environ=None): - """Get the HTML body.""" - return text_type( - ( - u'\n' - u"%(code)s %(name)s\n" - u"

<h1>%(name)s</h1>
    \n" - u"%(description)s\n" - ) - % { - "code": self.code, - "name": escape(self.name), - "description": self.get_description(environ), - } - ) - - def get_headers(self, environ=None): - """Get a list of headers.""" - return [("Content-Type", "text/html")] - - def get_response(self, environ=None): - """Get a response object. If one was passed to the exception - it's returned directly. - - :param environ: the optional environ for the request. This - can be used to modify the response depending - on how the request looked like. - :return: a :class:`Response` object or a subclass thereof. - """ - if self.response is not None: - return self.response - if environ is not None: - environ = _get_environ(environ) - headers = self.get_headers(environ) - return Response(self.get_body(environ), self.code, headers) - - def __call__(self, environ, start_response): - """Call the exception as WSGI application. - - :param environ: the WSGI environment. - :param start_response: the response callable provided by the WSGI - server. - """ - response = self.get_response(environ) - return response(environ, start_response) - - def __str__(self): - code = self.code if self.code is not None else "???" - return "%s %s: %s" % (code, self.name, self.description) - - def __repr__(self): - code = self.code if self.code is not None else "???" - return "<%s '%s: %s'>" % (self.__class__.__name__, code, self.name) - - -class BadRequest(HTTPException): - """*400* `Bad Request` - - Raise if the browser sends something to the application the application - or server cannot handle. - """ - - code = 400 - description = ( - "The browser (or proxy) sent a request that this server could " - "not understand." - ) - - -class ClientDisconnected(BadRequest): - """Internal exception that is raised if Werkzeug detects a disconnected - client. Since the client is already gone at that point attempting to - send the error message to the client might not work and might ultimately - result in another exception in the server. Mainly this is here so that - it is silenced by default as far as Werkzeug is concerned. - - Since disconnections cannot be reliably detected and are unspecified - by WSGI to a large extent this might or might not be raised if a client - is gone. - - .. versionadded:: 0.8 - """ - - -class SecurityError(BadRequest): - """Raised if something triggers a security error. This is otherwise - exactly like a bad request error. - - .. versionadded:: 0.9 - """ - - -class BadHost(BadRequest): - """Raised if the submitted host is badly formatted. - - .. versionadded:: 0.11.2 - """ - - -class Unauthorized(HTTPException): - """*401* ``Unauthorized`` - - Raise if the user is not authorized to access a resource. - - The ``www_authenticate`` argument should be used to set the - ``WWW-Authenticate`` header. This is used for HTTP basic auth and - other schemes. Use :class:`~werkzeug.datastructures.WWWAuthenticate` - to create correctly formatted values. Strictly speaking a 401 - response is invalid if it doesn't provide at least one value for - this header, although real clients typically don't care. - - :param description: Override the default message used for the body - of the response. - :param www-authenticate: A single value, or list of values, for the - WWW-Authenticate header. - - .. versionchanged:: 0.15.3 - If the ``www_authenticate`` argument is not set, the - ``WWW-Authenticate`` header is not set. - - .. versionchanged:: 0.15.3 - The ``response`` argument was restored. - - .. 
versionchanged:: 0.15.1 - ``description`` was moved back as the first argument, restoring - its previous position. - - .. versionchanged:: 0.15.0 - ``www_authenticate`` was added as the first argument, ahead of - ``description``. - """ - - code = 401 - description = ( - "The server could not verify that you are authorized to access" - " the URL requested. You either supplied the wrong credentials" - " (e.g. a bad password), or your browser doesn't understand" - " how to supply the credentials required." - ) - - def __init__(self, description=None, response=None, www_authenticate=None): - HTTPException.__init__(self, description, response) - - if www_authenticate is not None: - if not isinstance(www_authenticate, (tuple, list)): - www_authenticate = (www_authenticate,) - - self.www_authenticate = www_authenticate - - def get_headers(self, environ=None): - headers = HTTPException.get_headers(self, environ) - if self.www_authenticate: - headers.append( - ("WWW-Authenticate", ", ".join([str(x) for x in self.www_authenticate])) - ) - return headers - - -class Forbidden(HTTPException): - """*403* `Forbidden` - - Raise if the user doesn't have the permission for the requested resource - but was authenticated. - """ - - code = 403 - description = ( - "You don't have the permission to access the requested" - " resource. It is either read-protected or not readable by the" - " server." - ) - - -class NotFound(HTTPException): - """*404* `Not Found` - - Raise if a resource does not exist and never existed. - """ - - code = 404 - description = ( - "The requested URL was not found on the server. If you entered" - " the URL manually please check your spelling and try again." - ) - - -class MethodNotAllowed(HTTPException): - """*405* `Method Not Allowed` - - Raise if the server used a method the resource does not handle. For - example `POST` if the resource is view only. Especially useful for REST. - - The first argument for this exception should be a list of allowed methods. - Strictly speaking the response would be invalid if you don't provide valid - methods in the header which you can do with that list. - """ - - code = 405 - description = "The method is not allowed for the requested URL." - - def __init__(self, valid_methods=None, description=None): - """Takes an optional list of valid http methods - starting with werkzeug 0.3 the list will be mandatory.""" - HTTPException.__init__(self, description) - self.valid_methods = valid_methods - - def get_headers(self, environ=None): - headers = HTTPException.get_headers(self, environ) - if self.valid_methods: - headers.append(("Allow", ", ".join(self.valid_methods))) - return headers - - -class NotAcceptable(HTTPException): - """*406* `Not Acceptable` - - Raise if the server can't return any content conforming to the - `Accept` headers of the client. - """ - - code = 406 - - description = ( - "The resource identified by the request is only capable of" - " generating response entities which have content" - " characteristics not acceptable according to the accept" - " headers sent in the request." - ) - - -class RequestTimeout(HTTPException): - """*408* `Request Timeout` - - Raise to signalize a timeout. - """ - - code = 408 - description = ( - "The server closed the network connection because the browser" - " didn't finish the request within the specified time." - ) - - -class Conflict(HTTPException): - """*409* `Conflict` - - Raise to signal that a request cannot be completed because it conflicts - with the current state on the server. - - .. 
versionadded:: 0.7 - """ - - code = 409 - description = ( - "A conflict happened while processing the request. The" - " resource might have been modified while the request was being" - " processed." - ) - - -class Gone(HTTPException): - """*410* `Gone` - - Raise if a resource existed previously and went away without new location. - """ - - code = 410 - description = ( - "The requested URL is no longer available on this server and" - " there is no forwarding address. If you followed a link from a" - " foreign page, please contact the author of this page." - ) - - -class LengthRequired(HTTPException): - """*411* `Length Required` - - Raise if the browser submitted data but no ``Content-Length`` header which - is required for the kind of processing the server does. - """ - - code = 411 - description = ( - "A request with this method requires a valid Content-" - "Length header." - ) - - -class PreconditionFailed(HTTPException): - """*412* `Precondition Failed` - - Status code used in combination with ``If-Match``, ``If-None-Match``, or - ``If-Unmodified-Since``. - """ - - code = 412 - description = ( - "The precondition on the request for the URL failed positive evaluation." - ) - - -class RequestEntityTooLarge(HTTPException): - """*413* `Request Entity Too Large` - - The status code one should return if the data submitted exceeded a given - limit. - """ - - code = 413 - description = "The data value transmitted exceeds the capacity limit." - - -class RequestURITooLarge(HTTPException): - """*414* `Request URI Too Large` - - Like *413* but for too long URLs. - """ - - code = 414 - description = ( - "The length of the requested URL exceeds the capacity limit for" - " this server. The request cannot be processed." - ) - - -class UnsupportedMediaType(HTTPException): - """*415* `Unsupported Media Type` - - The status code returned if the server is unable to handle the media type - the client transmitted. - """ - - code = 415 - description = ( - "The server does not support the media type transmitted in the request." - ) - - -class RequestedRangeNotSatisfiable(HTTPException): - """*416* `Requested Range Not Satisfiable` - - The client asked for an invalid part of the file. - - .. versionadded:: 0.7 - """ - - code = 416 - description = "The server cannot provide the requested range." - - def __init__(self, length=None, units="bytes", description=None): - """Takes an optional `Content-Range` header value based on ``length`` - parameter. - """ - HTTPException.__init__(self, description) - self.length = length - self.units = units - - def get_headers(self, environ=None): - headers = HTTPException.get_headers(self, environ) - if self.length is not None: - headers.append(("Content-Range", "%s */%d" % (self.units, self.length))) - return headers - - -class ExpectationFailed(HTTPException): - """*417* `Expectation Failed` - - The server cannot meet the requirements of the Expect request-header. - - .. versionadded:: 0.7 - """ - - code = 417 - description = "The server could not meet the requirements of the Expect header" - - -class ImATeapot(HTTPException): - """*418* `I'm a teapot` - - The server should return this if it is a teapot and someone attempted - to brew coffee with it. - - .. versionadded:: 0.7 - """ - - code = 418 - description = "This server is a teapot, not a coffee machine" - - -class UnprocessableEntity(HTTPException): - """*422* `Unprocessable Entity` - - Used if the request is well formed, but the instructions are otherwise - incorrect. 
- """ - - code = 422 - description = ( - "The request was well-formed but was unable to be followed due" - " to semantic errors." - ) - - -class Locked(HTTPException): - """*423* `Locked` - - Used if the resource that is being accessed is locked. - """ - - code = 423 - description = "The resource that is being accessed is locked." - - -class FailedDependency(HTTPException): - """*424* `Failed Dependency` - - Used if the method could not be performed on the resource - because the requested action depended on another action and that action failed. - """ - - code = 424 - description = ( - "The method could not be performed on the resource because the" - " requested action depended on another action and that action" - " failed." - ) - - -class PreconditionRequired(HTTPException): - """*428* `Precondition Required` - - The server requires this request to be conditional, typically to prevent - the lost update problem, which is a race condition between two or more - clients attempting to update a resource through PUT or DELETE. By requiring - each client to include a conditional header ("If-Match" or "If-Unmodified- - Since") with the proper value retained from a recent GET request, the - server ensures that each client has at least seen the previous revision of - the resource. - """ - - code = 428 - description = ( - "This request is required to be conditional; try using" - ' "If-Match" or "If-Unmodified-Since".' - ) - - -class TooManyRequests(HTTPException): - """*429* `Too Many Requests` - - The server is limiting the rate at which this user receives responses, and - this request exceeds that rate. (The server may use any convenient method - to identify users and their request rates). The server may include a - "Retry-After" header to indicate how long the user should wait before - retrying. - """ - - code = 429 - description = "This user has exceeded an allotted request count. Try again later." - - -class RequestHeaderFieldsTooLarge(HTTPException): - """*431* `Request Header Fields Too Large` - - The server refuses to process the request because the header fields are too - large. One or more individual fields may be too large, or the set of all - headers is too large. - """ - - code = 431 - description = "One or more header fields exceeds the maximum size." - - -class UnavailableForLegalReasons(HTTPException): - """*451* `Unavailable For Legal Reasons` - - This status code indicates that the server is denying access to the - resource as a consequence of a legal demand. - """ - - code = 451 - description = "Unavailable for legal reasons." - - -class InternalServerError(HTTPException): - """*500* `Internal Server Error` - - Raise if an internal server error occurred. This is a good fallback if an - unknown error occurred in the dispatcher. - """ - - code = 500 - description = ( - "The server encountered an internal error and was unable to" - " complete your request. Either the server is overloaded or" - " there is an error in the application." - ) - - -class NotImplemented(HTTPException): - """*501* `Not Implemented` - - Raise if the application does not support the action requested by the - browser. - """ - - code = 501 - description = "The server does not support the action requested by the browser." - - -class BadGateway(HTTPException): - """*502* `Bad Gateway` - - If you do proxying in your application you should return this status code - if you received an invalid response from the upstream server it accessed - in attempting to fulfill the request. 
- """ - - code = 502 - description = ( - "The proxy server received an invalid response from an upstream server." - ) - - -class ServiceUnavailable(HTTPException): - """*503* `Service Unavailable` - - Status code you should return if a service is temporarily unavailable. - """ - - code = 503 - description = ( - "The server is temporarily unable to service your request due" - " to maintenance downtime or capacity problems. Please try" - " again later." - ) - - -class GatewayTimeout(HTTPException): - """*504* `Gateway Timeout` - - Status code you should return if a connection to an upstream server - times out. - """ - - code = 504 - description = "The connection to an upstream server timed out." - - -class HTTPVersionNotSupported(HTTPException): - """*505* `HTTP Version Not Supported` - - The server does not support the HTTP protocol version used in the request. - """ - - code = 505 - description = ( - "The server does not support the HTTP protocol version used in the request." - ) - - -default_exceptions = {} -__all__ = ["HTTPException"] - - -def _find_exceptions(): - for _name, obj in iteritems(globals()): - try: - is_http_exception = issubclass(obj, HTTPException) - except TypeError: - is_http_exception = False - if not is_http_exception or obj.code is None: - continue - __all__.append(obj.__name__) - old_obj = default_exceptions.get(obj.code, None) - if old_obj is not None and issubclass(obj, old_obj): - continue - default_exceptions[obj.code] = obj - - -_find_exceptions() -del _find_exceptions - - -class Aborter(object): - """When passed a dict of code -> exception items it can be used as - callable that raises exceptions. If the first argument to the - callable is an integer it will be looked up in the mapping, if it's - a WSGI application it will be raised in a proxy exception. - - The rest of the arguments are forwarded to the exception constructor. - """ - - def __init__(self, mapping=None, extra=None): - if mapping is None: - mapping = default_exceptions - self.mapping = dict(mapping) - if extra is not None: - self.mapping.update(extra) - - def __call__(self, code, *args, **kwargs): - if not args and not kwargs and not isinstance(code, integer_types): - raise HTTPException(response=code) - if code not in self.mapping: - raise LookupError("no exception for %r" % code) - raise self.mapping[code](*args, **kwargs) - - -def abort(status, *args, **kwargs): - """Raises an :py:exc:`HTTPException` for the given status code or WSGI - application:: - - abort(404) # 404 Not Found - abort(Response('Hello World')) - - Can be passed a WSGI application or a status code. If a status code is - given it's looked up in the list of exceptions and will raise that - exception, if passed a WSGI application it will wrap it in a proxy WSGI - exception and raise that:: - - abort(404) - abort(Response('Hello World')) - - """ - return _aborter(status, *args, **kwargs) - - -_aborter = Aborter() - - -#: An exception that is used to signal both a :exc:`KeyError` and a -#: :exc:`BadRequest`. Used by many of the datastructures. 
-BadRequestKeyError = BadRequest.wrap(KeyError) - -# imported here because of circular dependencies of werkzeug.utils -from .http import HTTP_STATUS_CODES -from .utils import escape diff --git a/venv/lib/python3.6/site-packages/werkzeug/filesystem.py b/venv/lib/python3.6/site-packages/werkzeug/filesystem.py deleted file mode 100644 index d016cae..0000000 --- a/venv/lib/python3.6/site-packages/werkzeug/filesystem.py +++ /dev/null @@ -1,64 +0,0 @@ -# -*- coding: utf-8 -*- -""" - werkzeug.filesystem - ~~~~~~~~~~~~~~~~~~~ - - Various utilities for the local filesystem. - - :copyright: 2007 Pallets - :license: BSD-3-Clause -""" -import codecs -import sys -import warnings - -# We do not trust traditional unixes. -has_likely_buggy_unicode_filesystem = ( - sys.platform.startswith("linux") or "bsd" in sys.platform -) - - -def _is_ascii_encoding(encoding): - """Given an encoding this figures out if the encoding is actually ASCII (which - is something we don't actually want in most cases). This is necessary - because ASCII comes under many names such as ANSI_X3.4-1968. - """ - if encoding is None: - return False - try: - return codecs.lookup(encoding).name == "ascii" - except LookupError: - return False - - -class BrokenFilesystemWarning(RuntimeWarning, UnicodeWarning): - """The warning used by Werkzeug to signal a broken filesystem. Will only be - used once per runtime.""" - - -_warned_about_filesystem_encoding = False - - -def get_filesystem_encoding(): - """Returns the filesystem encoding that should be used. Note that this is - different from the Python understanding of the filesystem encoding which - might be deeply flawed. Do not use this value against Python's unicode APIs - because it might be different. See :ref:`filesystem-encoding` for the exact - behavior. - - The concept of a filesystem encoding in generally is not something you - should rely on. As such if you ever need to use this function except for - writing wrapper code reconsider. - """ - global _warned_about_filesystem_encoding - rv = sys.getfilesystemencoding() - if has_likely_buggy_unicode_filesystem and not rv or _is_ascii_encoding(rv): - if not _warned_about_filesystem_encoding: - warnings.warn( - "Detected a misconfigured UNIX filesystem: Will use" - " UTF-8 as filesystem encoding instead of {0!r}".format(rv), - BrokenFilesystemWarning, - ) - _warned_about_filesystem_encoding = True - return "utf-8" - return rv diff --git a/venv/lib/python3.6/site-packages/werkzeug/formparser.py b/venv/lib/python3.6/site-packages/werkzeug/formparser.py deleted file mode 100644 index 0ddc5c8..0000000 --- a/venv/lib/python3.6/site-packages/werkzeug/formparser.py +++ /dev/null @@ -1,586 +0,0 @@ -# -*- coding: utf-8 -*- -""" - werkzeug.formparser - ~~~~~~~~~~~~~~~~~~~ - - This module implements the form parsing. It supports url-encoded forms - as well as non-nested multipart uploads. 
- - :copyright: 2007 Pallets - :license: BSD-3-Clause -""" -import codecs -import re -from functools import update_wrapper -from itertools import chain -from itertools import repeat -from itertools import tee - -from ._compat import BytesIO -from ._compat import text_type -from ._compat import to_native -from .datastructures import FileStorage -from .datastructures import Headers -from .datastructures import MultiDict -from .http import parse_options_header -from .urls import url_decode_stream -from .wsgi import get_content_length -from .wsgi import get_input_stream -from .wsgi import make_line_iter - -# there are some platforms where SpooledTemporaryFile is not available. -# In that case we need to provide a fallback. -try: - from tempfile import SpooledTemporaryFile -except ImportError: - from tempfile import TemporaryFile - - SpooledTemporaryFile = None - - -#: an iterator that yields empty strings -_empty_string_iter = repeat("") - -#: a regular expression for multipart boundaries -_multipart_boundary_re = re.compile("^[ -~]{0,200}[!-~]$") - -#: supported http encodings that are also available in python we support -#: for multipart messages. -_supported_multipart_encodings = frozenset(["base64", "quoted-printable"]) - - -def default_stream_factory( - total_content_length, filename, content_type, content_length=None -): - """The stream factory that is used per default.""" - max_size = 1024 * 500 - if SpooledTemporaryFile is not None: - return SpooledTemporaryFile(max_size=max_size, mode="wb+") - if total_content_length is None or total_content_length > max_size: - return TemporaryFile("wb+") - return BytesIO() - - -def parse_form_data( - environ, - stream_factory=None, - charset="utf-8", - errors="replace", - max_form_memory_size=None, - max_content_length=None, - cls=None, - silent=True, -): - """Parse the form data in the environ and return it as tuple in the form - ``(stream, form, files)``. You should only call this method if the - transport method is `POST`, `PUT`, or `PATCH`. - - If the mimetype of the data transmitted is `multipart/form-data` the - files multidict will be filled with `FileStorage` objects. If the - mimetype is unknown the input stream is wrapped and returned as first - argument, else the stream is empty. - - This is a shortcut for the common usage of :class:`FormDataParser`. - - Have a look at :ref:`dealing-with-request-data` for more details. - - .. versionadded:: 0.5 - The `max_form_memory_size`, `max_content_length` and - `cls` parameters were added. - - .. versionadded:: 0.5.1 - The optional `silent` flag was added. - - :param environ: the WSGI environment to be used for parsing. - :param stream_factory: An optional callable that returns a new read and - writeable file descriptor. This callable works - the same as :meth:`~BaseResponse._get_file_stream`. - :param charset: The character set for URL and url encoded form data. - :param errors: The encoding error behavior. - :param max_form_memory_size: the maximum number of bytes to be accepted for - in-memory stored form data. If the data - exceeds the value specified an - :exc:`~exceptions.RequestEntityTooLarge` - exception is raised. - :param max_content_length: If this is provided and the transmitted data - is longer than this value an - :exc:`~exceptions.RequestEntityTooLarge` - exception is raised. - :param cls: an optional dict class to use. If this is not specified - or `None` the default :class:`MultiDict` is used. - :param silent: If set to False parsing errors will not be caught. 
- :return: A tuple in the form ``(stream, form, files)``. - """ - return FormDataParser( - stream_factory, - charset, - errors, - max_form_memory_size, - max_content_length, - cls, - silent, - ).parse_from_environ(environ) - - -def exhaust_stream(f): - """Helper decorator for methods that exhausts the stream on return.""" - - def wrapper(self, stream, *args, **kwargs): - try: - return f(self, stream, *args, **kwargs) - finally: - exhaust = getattr(stream, "exhaust", None) - if exhaust is not None: - exhaust() - else: - while 1: - chunk = stream.read(1024 * 64) - if not chunk: - break - - return update_wrapper(wrapper, f) - - -class FormDataParser(object): - """This class implements parsing of form data for Werkzeug. By itself - it can parse multipart and url encoded form data. It can be subclassed - and extended but for most mimetypes it is a better idea to use the - untouched stream and expose it as separate attributes on a request - object. - - .. versionadded:: 0.8 - - :param stream_factory: An optional callable that returns a new read and - writeable file descriptor. This callable works - the same as :meth:`~BaseResponse._get_file_stream`. - :param charset: The character set for URL and url encoded form data. - :param errors: The encoding error behavior. - :param max_form_memory_size: the maximum number of bytes to be accepted for - in-memory stored form data. If the data - exceeds the value specified an - :exc:`~exceptions.RequestEntityTooLarge` - exception is raised. - :param max_content_length: If this is provided and the transmitted data - is longer than this value an - :exc:`~exceptions.RequestEntityTooLarge` - exception is raised. - :param cls: an optional dict class to use. If this is not specified - or `None` the default :class:`MultiDict` is used. - :param silent: If set to False parsing errors will not be caught. - """ - - def __init__( - self, - stream_factory=None, - charset="utf-8", - errors="replace", - max_form_memory_size=None, - max_content_length=None, - cls=None, - silent=True, - ): - if stream_factory is None: - stream_factory = default_stream_factory - self.stream_factory = stream_factory - self.charset = charset - self.errors = errors - self.max_form_memory_size = max_form_memory_size - self.max_content_length = max_content_length - if cls is None: - cls = MultiDict - self.cls = cls - self.silent = silent - - def get_parse_func(self, mimetype, options): - return self.parse_functions.get(mimetype) - - def parse_from_environ(self, environ): - """Parses the information from the environment as form data. - - :param environ: the WSGI environment to be used for parsing. - :return: A tuple in the form ``(stream, form, files)``. - """ - content_type = environ.get("CONTENT_TYPE", "") - content_length = get_content_length(environ) - mimetype, options = parse_options_header(content_type) - return self.parse(get_input_stream(environ), mimetype, content_length, options) - - def parse(self, stream, mimetype, content_length, options=None): - """Parses the information from the given stream, mimetype, - content length and mimetype parameters. - - :param stream: an input stream - :param mimetype: the mimetype of the data - :param content_length: the content length of the incoming data - :param options: optional mimetype parameters (used for - the multipart boundary for instance) - :return: A tuple in the form ``(stream, form, files)``. 
- """ - if ( - self.max_content_length is not None - and content_length is not None - and content_length > self.max_content_length - ): - raise exceptions.RequestEntityTooLarge() - if options is None: - options = {} - - parse_func = self.get_parse_func(mimetype, options) - if parse_func is not None: - try: - return parse_func(self, stream, mimetype, content_length, options) - except ValueError: - if not self.silent: - raise - - return stream, self.cls(), self.cls() - - @exhaust_stream - def _parse_multipart(self, stream, mimetype, content_length, options): - parser = MultiPartParser( - self.stream_factory, - self.charset, - self.errors, - max_form_memory_size=self.max_form_memory_size, - cls=self.cls, - ) - boundary = options.get("boundary") - if boundary is None: - raise ValueError("Missing boundary") - if isinstance(boundary, text_type): - boundary = boundary.encode("ascii") - form, files = parser.parse(stream, boundary, content_length) - return stream, form, files - - @exhaust_stream - def _parse_urlencoded(self, stream, mimetype, content_length, options): - if ( - self.max_form_memory_size is not None - and content_length is not None - and content_length > self.max_form_memory_size - ): - raise exceptions.RequestEntityTooLarge() - form = url_decode_stream(stream, self.charset, errors=self.errors, cls=self.cls) - return stream, form, self.cls() - - #: mapping of mimetypes to parsing functions - parse_functions = { - "multipart/form-data": _parse_multipart, - "application/x-www-form-urlencoded": _parse_urlencoded, - "application/x-url-encoded": _parse_urlencoded, - } - - -def is_valid_multipart_boundary(boundary): - """Checks if the string given is a valid multipart boundary.""" - return _multipart_boundary_re.match(boundary) is not None - - -def _line_parse(line): - """Removes line ending characters and returns a tuple (`stripped_line`, - `is_terminated`). - """ - if line[-2:] in ["\r\n", b"\r\n"]: - return line[:-2], True - elif line[-1:] in ["\r", "\n", b"\r", b"\n"]: - return line[:-1], True - return line, False - - -def parse_multipart_headers(iterable): - """Parses multipart headers from an iterable that yields lines (including - the trailing newline symbol). The iterable has to be newline terminated. - - The iterable will stop at the line where the headers ended so it can be - further consumed. - - :param iterable: iterable of strings that are newline terminated - """ - result = [] - for line in iterable: - line = to_native(line) - line, line_terminated = _line_parse(line) - if not line_terminated: - raise ValueError("unexpected end of line in multipart header") - if not line: - break - elif line[0] in " \t" and result: - key, value = result[-1] - result[-1] = (key, value + "\n " + line[1:]) - else: - parts = line.split(":", 1) - if len(parts) == 2: - result.append((parts[0].strip(), parts[1].strip())) - - # we link the list to the headers, no need to create a copy, the - # list was not shared anyways. 
- return Headers(result) - - -_begin_form = "begin_form" -_begin_file = "begin_file" -_cont = "cont" -_end = "end" - - -class MultiPartParser(object): - def __init__( - self, - stream_factory=None, - charset="utf-8", - errors="replace", - max_form_memory_size=None, - cls=None, - buffer_size=64 * 1024, - ): - self.charset = charset - self.errors = errors - self.max_form_memory_size = max_form_memory_size - self.stream_factory = ( - default_stream_factory if stream_factory is None else stream_factory - ) - self.cls = MultiDict if cls is None else cls - - # make sure the buffer size is divisible by four so that we can base64 - # decode chunk by chunk - assert buffer_size % 4 == 0, "buffer size has to be divisible by 4" - # also the buffer size has to be at least 1024 bytes long or long headers - # will freak out the system - assert buffer_size >= 1024, "buffer size has to be at least 1KB" - - self.buffer_size = buffer_size - - def _fix_ie_filename(self, filename): - """Internet Explorer 6 transmits the full file name if a file is - uploaded. This function strips the full path if it thinks the - filename is Windows-like absolute. - """ - if filename[1:3] == ":\\" or filename[:2] == "\\\\": - return filename.split("\\")[-1] - return filename - - def _find_terminator(self, iterator): - """The terminator might have some additional newlines before it. - There is at least one application that sends additional newlines - before headers (the python setuptools package). - """ - for line in iterator: - if not line: - break - line = line.strip() - if line: - return line - return b"" - - def fail(self, message): - raise ValueError(message) - - def get_part_encoding(self, headers): - transfer_encoding = headers.get("content-transfer-encoding") - if ( - transfer_encoding is not None - and transfer_encoding in _supported_multipart_encodings - ): - return transfer_encoding - - def get_part_charset(self, headers): - # Figure out input charset for current part - content_type = headers.get("content-type") - if content_type: - mimetype, ct_params = parse_options_header(content_type) - return ct_params.get("charset", self.charset) - return self.charset - - def start_file_streaming(self, filename, headers, total_content_length): - if isinstance(filename, bytes): - filename = filename.decode(self.charset, self.errors) - filename = self._fix_ie_filename(filename) - content_type = headers.get("content-type") - try: - content_length = int(headers["content-length"]) - except (KeyError, ValueError): - content_length = 0 - container = self.stream_factory( - total_content_length=total_content_length, - filename=filename, - content_type=content_type, - content_length=content_length, - ) - return filename, container - - def in_memory_threshold_reached(self, bytes): - raise exceptions.RequestEntityTooLarge() - - def validate_boundary(self, boundary): - if not boundary: - self.fail("Missing boundary") - if not is_valid_multipart_boundary(boundary): - self.fail("Invalid boundary: %s" % boundary) - if len(boundary) > self.buffer_size: # pragma: no cover - # this should never happen because we check for a minimum size - # of 1024 and boundaries may not be longer than 200. The only - # situation when this happens is for non debug builds where - # the assert is skipped. 
- self.fail("Boundary longer than buffer size") - - def parse_lines(self, file, boundary, content_length, cap_at_buffer=True): - """Generate parts of - ``('begin_form', (headers, name))`` - ``('begin_file', (headers, name, filename))`` - ``('cont', bytestring)`` - ``('end', None)`` - - Always obeys the grammar - parts = ( begin_form cont* end | - begin_file cont* end )* - """ - next_part = b"--" + boundary - last_part = next_part + b"--" - - iterator = chain( - make_line_iter( - file, - limit=content_length, - buffer_size=self.buffer_size, - cap_at_buffer=cap_at_buffer, - ), - _empty_string_iter, - ) - - terminator = self._find_terminator(iterator) - - if terminator == last_part: - return - elif terminator != next_part: - self.fail("Expected boundary at start of multipart data") - - while terminator != last_part: - headers = parse_multipart_headers(iterator) - - disposition = headers.get("content-disposition") - if disposition is None: - self.fail("Missing Content-Disposition header") - disposition, extra = parse_options_header(disposition) - transfer_encoding = self.get_part_encoding(headers) - name = extra.get("name") - filename = extra.get("filename") - - # if no content type is given we stream into memory. A list is - # used as a temporary container. - if filename is None: - yield _begin_form, (headers, name) - - # otherwise we parse the rest of the headers and ask the stream - # factory for something we can write in. - else: - yield _begin_file, (headers, name, filename) - - buf = b"" - for line in iterator: - if not line: - self.fail("unexpected end of stream") - - if line[:2] == b"--": - terminator = line.rstrip() - if terminator in (next_part, last_part): - break - - if transfer_encoding is not None: - if transfer_encoding == "base64": - transfer_encoding = "base64_codec" - try: - line = codecs.decode(line, transfer_encoding) - except Exception: - self.fail("could not decode transfer encoded chunk") - - # we have something in the buffer from the last iteration. - # this is usually a newline delimiter. - if buf: - yield _cont, buf - buf = b"" - - # If the line ends with windows CRLF we write everything except - # the last two bytes. In all other cases however we write - # everything except the last byte. If it was a newline, that's - # fine, otherwise it does not matter because we will write it - # the next iteration. this ensures we do not write the - # final newline into the stream. That way we do not have to - # truncate the stream. However we do have to make sure that - # if something else than a newline is in there we write it - # out. - if line[-2:] == b"\r\n": - buf = b"\r\n" - cutoff = -2 - else: - buf = line[-1:] - cutoff = -1 - yield _cont, line[:cutoff] - - else: # pragma: no cover - raise ValueError("unexpected end of part") - - # if we have a leftover in the buffer that is not a newline - # character we have to flush it, otherwise we will chop of - # certain values. - if buf not in (b"", b"\r", b"\n", b"\r\n"): - yield _cont, buf - - yield _end, None - - def parse_parts(self, file, boundary, content_length): - """Generate ``('file', (name, val))`` and - ``('form', (name, val))`` parts. 
- """ - in_memory = 0 - - for ellt, ell in self.parse_lines(file, boundary, content_length): - if ellt == _begin_file: - headers, name, filename = ell - is_file = True - guard_memory = False - filename, container = self.start_file_streaming( - filename, headers, content_length - ) - _write = container.write - - elif ellt == _begin_form: - headers, name = ell - is_file = False - container = [] - _write = container.append - guard_memory = self.max_form_memory_size is not None - - elif ellt == _cont: - _write(ell) - # if we write into memory and there is a memory size limit we - # count the number of bytes in memory and raise an exception if - # there is too much data in memory. - if guard_memory: - in_memory += len(ell) - if in_memory > self.max_form_memory_size: - self.in_memory_threshold_reached(in_memory) - - elif ellt == _end: - if is_file: - container.seek(0) - yield ( - "file", - (name, FileStorage(container, filename, name, headers=headers)), - ) - else: - part_charset = self.get_part_charset(headers) - yield ( - "form", - (name, b"".join(container).decode(part_charset, self.errors)), - ) - - def parse(self, file, boundary, content_length): - formstream, filestream = tee( - self.parse_parts(file, boundary, content_length), 2 - ) - form = (p[1] for p in formstream if p[0] == "form") - files = (p[1] for p in filestream if p[0] == "file") - return self.cls(form), self.cls(files) - - -from . import exceptions diff --git a/venv/lib/python3.6/site-packages/werkzeug/http.py b/venv/lib/python3.6/site-packages/werkzeug/http.py deleted file mode 100644 index af32007..0000000 --- a/venv/lib/python3.6/site-packages/werkzeug/http.py +++ /dev/null @@ -1,1303 +0,0 @@ -# -*- coding: utf-8 -*- -""" - werkzeug.http - ~~~~~~~~~~~~~ - - Werkzeug comes with a bunch of utilities that help Werkzeug to deal with - HTTP data. Most of the classes and functions provided by this module are - used by the wrappers, but they are useful on their own, too, especially if - the response and request objects are not used. - - This covers some of the more HTTP centric features of WSGI, some other - utilities such as cookie handling are documented in the `werkzeug.utils` - module. - - - :copyright: 2007 Pallets - :license: BSD-3-Clause -""" -import base64 -import re -import warnings -from datetime import datetime -from datetime import timedelta -from hashlib import md5 -from time import gmtime -from time import time - -from ._compat import integer_types -from ._compat import iteritems -from ._compat import PY2 -from ._compat import string_types -from ._compat import text_type -from ._compat import to_bytes -from ._compat import to_unicode -from ._compat import try_coerce_native -from ._internal import _cookie_parse_impl -from ._internal import _cookie_quote -from ._internal import _make_cookie_domain - -try: - from email.utils import parsedate_tz -except ImportError: - from email.Utils import parsedate_tz - -try: - from urllib.request import parse_http_list as _parse_list_header - from urllib.parse import unquote_to_bytes as _unquote -except ImportError: - from urllib2 import parse_http_list as _parse_list_header - from urllib2 import unquote as _unquote - -_cookie_charset = "latin1" -_basic_auth_charset = "utf-8" -# for explanation of "media-range", etc. 
see Sections 5.3.{1,2} of RFC 7231 -_accept_re = re.compile( - r""" - ( # media-range capturing-parenthesis - [^\s;,]+ # type/subtype - (?:[ \t]*;[ \t]* # ";" - (?: # parameter non-capturing-parenthesis - [^\s;,q][^\s;,]* # token that doesn't start with "q" - | # or - q[^\s;,=][^\s;,]* # token that is more than just "q" - ) - )* # zero or more parameters - ) # end of media-range - (?:[ \t]*;[ \t]*q= # weight is a "q" parameter - (\d*(?:\.\d+)?) # qvalue capturing-parentheses - [^,]* # "extension" accept params: who cares? - )? # accept params are optional - """, - re.VERBOSE, -) -_token_chars = frozenset( - "!#$%&'*+-.0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ^_`abcdefghijklmnopqrstuvwxyz|~" -) -_etag_re = re.compile(r'([Ww]/)?(?:"(.*?)"|(.*?))(?:\s*,\s*|$)') -_unsafe_header_chars = set('()<>@,;:"/[]?={} \t') -_option_header_piece_re = re.compile( - r""" - ;\s*,?\s* # newlines were replaced with commas - (?P - "[^"\\]*(?:\\.[^"\\]*)*" # quoted string - | - [^\s;,=*]+ # token - ) - (?:\*(?P\d+))? # *1, optional continuation index - \s* - (?: # optionally followed by =value - (?: # equals sign, possibly with encoding - \*\s*=\s* # * indicates extended notation - (?: # optional encoding - (?P[^\s]+?) - '(?P[^\s]*?)' - )? - | - =\s* # basic notation - ) - (?P - "[^"\\]*(?:\\.[^"\\]*)*" # quoted string - | - [^;,]+ # token - )? - )? - \s* - """, - flags=re.VERBOSE, -) -_option_header_start_mime_type = re.compile(r",\s*([^;,\s]+)([;,]\s*.+)?") - -_entity_headers = frozenset( - [ - "allow", - "content-encoding", - "content-language", - "content-length", - "content-location", - "content-md5", - "content-range", - "content-type", - "expires", - "last-modified", - ] -) -_hop_by_hop_headers = frozenset( - [ - "connection", - "keep-alive", - "proxy-authenticate", - "proxy-authorization", - "te", - "trailer", - "transfer-encoding", - "upgrade", - ] -) - - -HTTP_STATUS_CODES = { - 100: "Continue", - 101: "Switching Protocols", - 102: "Processing", - 200: "OK", - 201: "Created", - 202: "Accepted", - 203: "Non Authoritative Information", - 204: "No Content", - 205: "Reset Content", - 206: "Partial Content", - 207: "Multi Status", - 226: "IM Used", # see RFC 3229 - 300: "Multiple Choices", - 301: "Moved Permanently", - 302: "Found", - 303: "See Other", - 304: "Not Modified", - 305: "Use Proxy", - 307: "Temporary Redirect", - 308: "Permanent Redirect", - 400: "Bad Request", - 401: "Unauthorized", - 402: "Payment Required", # unused - 403: "Forbidden", - 404: "Not Found", - 405: "Method Not Allowed", - 406: "Not Acceptable", - 407: "Proxy Authentication Required", - 408: "Request Timeout", - 409: "Conflict", - 410: "Gone", - 411: "Length Required", - 412: "Precondition Failed", - 413: "Request Entity Too Large", - 414: "Request URI Too Long", - 415: "Unsupported Media Type", - 416: "Requested Range Not Satisfiable", - 417: "Expectation Failed", - 418: "I'm a teapot", # see RFC 2324 - 421: "Misdirected Request", # see RFC 7540 - 422: "Unprocessable Entity", - 423: "Locked", - 424: "Failed Dependency", - 426: "Upgrade Required", - 428: "Precondition Required", # see RFC 6585 - 429: "Too Many Requests", - 431: "Request Header Fields Too Large", - 449: "Retry With", # proprietary MS extension - 451: "Unavailable For Legal Reasons", - 500: "Internal Server Error", - 501: "Not Implemented", - 502: "Bad Gateway", - 503: "Service Unavailable", - 504: "Gateway Timeout", - 505: "HTTP Version Not Supported", - 507: "Insufficient Storage", - 510: "Not Extended", -} - - -def wsgi_to_bytes(data): - """coerce wsgi unicode 
represented bytes to real ones""" - if isinstance(data, bytes): - return data - return data.encode("latin1") # XXX: utf8 fallback? - - -def bytes_to_wsgi(data): - assert isinstance(data, bytes), "data must be bytes" - if isinstance(data, str): - return data - else: - return data.decode("latin1") - - -def quote_header_value(value, extra_chars="", allow_token=True): - """Quote a header value if necessary. - - .. versionadded:: 0.5 - - :param value: the value to quote. - :param extra_chars: a list of extra characters to skip quoting. - :param allow_token: if this is enabled token values are returned - unchanged. - """ - if isinstance(value, bytes): - value = bytes_to_wsgi(value) - value = str(value) - if allow_token: - token_chars = _token_chars | set(extra_chars) - if set(value).issubset(token_chars): - return value - return '"%s"' % value.replace("\\", "\\\\").replace('"', '\\"') - - -def unquote_header_value(value, is_filename=False): - r"""Unquotes a header value. (Reversal of :func:`quote_header_value`). - This does not use the real unquoting but what browsers are actually - using for quoting. - - .. versionadded:: 0.5 - - :param value: the header value to unquote. - """ - if value and value[0] == value[-1] == '"': - # this is not the real unquoting, but fixing this so that the - # RFC is met will result in bugs with internet explorer and - # probably some other browsers as well. IE for example is - # uploading files with "C:\foo\bar.txt" as filename - value = value[1:-1] - - # if this is a filename and the starting characters look like - # a UNC path, then just return the value without quotes. Using the - # replace sequence below on a UNC path has the effect of turning - # the leading double slash into a single slash and then - # _fix_ie_filename() doesn't work correctly. See #458. - if not is_filename or value[:2] != "\\\\": - return value.replace("\\\\", "\\").replace('\\"', '"') - return value - - -def dump_options_header(header, options): - """The reverse function to :func:`parse_options_header`. - - :param header: the header to dump - :param options: a dict of options to append. - """ - segments = [] - if header is not None: - segments.append(header) - for key, value in iteritems(options): - if value is None: - segments.append(key) - else: - segments.append("%s=%s" % (key, quote_header_value(value))) - return "; ".join(segments) - - -def dump_header(iterable, allow_token=True): - """Dump an HTTP header again. This is the reversal of - :func:`parse_list_header`, :func:`parse_set_header` and - :func:`parse_dict_header`. This also quotes strings that include an - equals sign unless you pass it as dict of key, value pairs. - - >>> dump_header({'foo': 'bar baz'}) - 'foo="bar baz"' - >>> dump_header(('foo', 'bar baz')) - 'foo, "bar baz"' - - :param iterable: the iterable or dict of values to quote. - :param allow_token: if set to `False` tokens as values are disallowed. - See :func:`quote_header_value` for more details. - """ - if isinstance(iterable, dict): - items = [] - for key, value in iteritems(iterable): - if value is None: - items.append(key) - else: - items.append( - "%s=%s" % (key, quote_header_value(value, allow_token=allow_token)) - ) - else: - items = [quote_header_value(x, allow_token=allow_token) for x in iterable] - return ", ".join(items) - - -def parse_list_header(value): - """Parse lists as described by RFC 2068 Section 2. - - In particular, parse comma-separated lists where the elements of - the list may include quoted-strings. A quoted-string could - contain a comma. 
A non-quoted string could have quotes in the - middle. Quotes are removed automatically after parsing. - - It basically works like :func:`parse_set_header` just that items - may appear multiple times and case sensitivity is preserved. - - The return value is a standard :class:`list`: - - >>> parse_list_header('token, "quoted value"') - ['token', 'quoted value'] - - To create a header from the :class:`list` again, use the - :func:`dump_header` function. - - :param value: a string with a list header. - :return: :class:`list` - """ - result = [] - for item in _parse_list_header(value): - if item[:1] == item[-1:] == '"': - item = unquote_header_value(item[1:-1]) - result.append(item) - return result - - -def parse_dict_header(value, cls=dict): - """Parse lists of key, value pairs as described by RFC 2068 Section 2 and - convert them into a python dict (or any other mapping object created from - the type with a dict like interface provided by the `cls` argument): - - >>> d = parse_dict_header('foo="is a fish", bar="as well"') - >>> type(d) is dict - True - >>> sorted(d.items()) - [('bar', 'as well'), ('foo', 'is a fish')] - - If there is no value for a key it will be `None`: - - >>> parse_dict_header('key_without_value') - {'key_without_value': None} - - To create a header from the :class:`dict` again, use the - :func:`dump_header` function. - - .. versionchanged:: 0.9 - Added support for `cls` argument. - - :param value: a string with a dict header. - :param cls: callable to use for storage of parsed results. - :return: an instance of `cls` - """ - result = cls() - if not isinstance(value, text_type): - # XXX: validate - value = bytes_to_wsgi(value) - for item in _parse_list_header(value): - if "=" not in item: - result[item] = None - continue - name, value = item.split("=", 1) - if value[:1] == value[-1:] == '"': - value = unquote_header_value(value[1:-1]) - result[name] = value - return result - - -def parse_options_header(value, multiple=False): - """Parse a ``Content-Type`` like header into a tuple with the content - type and the options: - - >>> parse_options_header('text/html; charset=utf8') - ('text/html', {'charset': 'utf8'}) - - This should not be used to parse ``Cache-Control`` like headers that use - a slightly different format. For these headers use the - :func:`parse_dict_header` function. - - .. versionchanged:: 0.15 - :rfc:`2231` parameter continuations are handled. - - .. versionadded:: 0.5 - - :param value: the header to parse. - :param multiple: Whether try to parse and return multiple MIME types - :return: (mimetype, options) or (mimetype, options, mimetype, options, …) - if multiple=True - """ - if not value: - return "", {} - - result = [] - - value = "," + value.replace("\n", ",") - while value: - match = _option_header_start_mime_type.match(value) - if not match: - break - result.append(match.group(1)) # mimetype - options = {} - # Parse options - rest = match.group(2) - continued_encoding = None - while rest: - optmatch = _option_header_piece_re.match(rest) - if not optmatch: - break - option, count, encoding, language, option_value = optmatch.groups() - # Continuations don't have to supply the encoding after the - # first line. If we're in a continuation, track the current - # encoding to use for subsequent lines. Reset it when the - # continuation ends. 
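# Not part of the patch: the header parsers above in action, mirroring the
# doctests embedded in their docstrings (assumes Werkzeug ~0.15 is installed
# and importable as werkzeug.http).
from werkzeug.http import parse_list_header, parse_dict_header, parse_options_header

print(parse_options_header("text/html; charset=utf8"))
# ('text/html', {'charset': 'utf8'})
print(parse_dict_header('foo="is a fish", bar="as well"'))
# {'foo': 'is a fish', 'bar': 'as well'}
print(parse_list_header('token, "quoted value"'))
# ['token', 'quoted value']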
- if not count: - continued_encoding = None - else: - if not encoding: - encoding = continued_encoding - continued_encoding = encoding - option = unquote_header_value(option) - if option_value is not None: - option_value = unquote_header_value(option_value, option == "filename") - if encoding is not None: - option_value = _unquote(option_value).decode(encoding) - if count: - # Continuations append to the existing value. For - # simplicity, this ignores the possibility of - # out-of-order indices, which shouldn't happen anyway. - options[option] = options.get(option, "") + option_value - else: - options[option] = option_value - rest = rest[optmatch.end() :] - result.append(options) - if multiple is False: - return tuple(result) - value = rest - - return tuple(result) if result else ("", {}) - - -def parse_accept_header(value, cls=None): - """Parses an HTTP Accept-* header. This does not implement a complete - valid algorithm but one that supports at least value and quality - extraction. - - Returns a new :class:`Accept` object (basically a list of ``(value, quality)`` - tuples sorted by the quality with some additional accessor methods). - - The second parameter can be a subclass of :class:`Accept` that is created - with the parsed values and returned. - - :param value: the accept header string to be parsed. - :param cls: the wrapper class for the return value (can be - :class:`Accept` or a subclass thereof) - :return: an instance of `cls`. - """ - if cls is None: - cls = Accept - - if not value: - return cls(None) - - result = [] - for match in _accept_re.finditer(value): - quality = match.group(2) - if not quality: - quality = 1 - else: - quality = max(min(float(quality), 1), 0) - result.append((match.group(1), quality)) - return cls(result) - - -def parse_cache_control_header(value, on_update=None, cls=None): - """Parse a cache control header. The RFC differs between response and - request cache control, this method does not. It's your responsibility - to not use the wrong control statements. - - .. versionadded:: 0.5 - The `cls` was added. If not specified an immutable - :class:`~werkzeug.datastructures.RequestCacheControl` is returned. - - :param value: a cache control header to be parsed. - :param on_update: an optional callable that is called every time a value - on the :class:`~werkzeug.datastructures.CacheControl` - object is changed. - :param cls: the class for the returned object. By default - :class:`~werkzeug.datastructures.RequestCacheControl` is used. - :return: a `cls` object. - """ - if cls is None: - cls = RequestCacheControl - if not value: - return cls(None, on_update) - return cls(parse_dict_header(value), on_update) - - -def parse_set_header(value, on_update=None): - """Parse a set-like header and return a - :class:`~werkzeug.datastructures.HeaderSet` object: - - >>> hs = parse_set_header('token, "quoted value"') - - The return value is an object that treats the items case-insensitively - and keeps the order of the items: - - >>> 'TOKEN' in hs - True - >>> hs.index('quoted value') - 1 - >>> hs - HeaderSet(['token', 'quoted value']) - - To create a header from the :class:`HeaderSet` again, use the - :func:`dump_header` function. - - :param value: a set header to be parsed. - :param on_update: an optional callable that is called every time a - value on the :class:`~werkzeug.datastructures.HeaderSet` - object is changed. 
- :return: a :class:`~werkzeug.datastructures.HeaderSet` - """ - if not value: - return HeaderSet(None, on_update) - return HeaderSet(parse_list_header(value), on_update) - - -def parse_authorization_header(value): - """Parse an HTTP basic/digest authorization header transmitted by the web - browser. The return value is either `None` if the header was invalid or - not given, otherwise an :class:`~werkzeug.datastructures.Authorization` - object. - - :param value: the authorization header to parse. - :return: a :class:`~werkzeug.datastructures.Authorization` object or `None`. - """ - if not value: - return - value = wsgi_to_bytes(value) - try: - auth_type, auth_info = value.split(None, 1) - auth_type = auth_type.lower() - except ValueError: - return - if auth_type == b"basic": - try: - username, password = base64.b64decode(auth_info).split(b":", 1) - except Exception: - return - return Authorization( - "basic", - { - "username": to_unicode(username, _basic_auth_charset), - "password": to_unicode(password, _basic_auth_charset), - }, - ) - elif auth_type == b"digest": - auth_map = parse_dict_header(auth_info) - for key in "username", "realm", "nonce", "uri", "response": - if key not in auth_map: - return - if "qop" in auth_map: - if not auth_map.get("nc") or not auth_map.get("cnonce"): - return - return Authorization("digest", auth_map) - - -def parse_www_authenticate_header(value, on_update=None): - """Parse an HTTP WWW-Authenticate header into a - :class:`~werkzeug.datastructures.WWWAuthenticate` object. - - :param value: a WWW-Authenticate header to parse. - :param on_update: an optional callable that is called every time a value - on the :class:`~werkzeug.datastructures.WWWAuthenticate` - object is changed. - :return: a :class:`~werkzeug.datastructures.WWWAuthenticate` object. - """ - if not value: - return WWWAuthenticate(on_update=on_update) - try: - auth_type, auth_info = value.split(None, 1) - auth_type = auth_type.lower() - except (ValueError, AttributeError): - return WWWAuthenticate(value.strip().lower(), on_update=on_update) - return WWWAuthenticate(auth_type, parse_dict_header(auth_info), on_update) - - -def parse_if_range_header(value): - """Parses an if-range header which can be an etag or a date. Returns - a :class:`~werkzeug.datastructures.IfRange` object. - - .. versionadded:: 0.7 - """ - if not value: - return IfRange() - date = parse_date(value) - if date is not None: - return IfRange(date=date) - # drop weakness information - return IfRange(unquote_etag(value)[0]) - - -def parse_range_header(value, make_inclusive=True): - """Parses a range header into a :class:`~werkzeug.datastructures.Range` - object. If the header is missing or malformed `None` is returned. - `ranges` is a list of ``(start, stop)`` tuples where the ranges are - non-inclusive. - - .. 
versionadded:: 0.7 - """ - if not value or "=" not in value: - return None - - ranges = [] - last_end = 0 - units, rng = value.split("=", 1) - units = units.strip().lower() - - for item in rng.split(","): - item = item.strip() - if "-" not in item: - return None - if item.startswith("-"): - if last_end < 0: - return None - try: - begin = int(item) - except ValueError: - return None - end = None - last_end = -1 - elif "-" in item: - begin, end = item.split("-", 1) - begin = begin.strip() - end = end.strip() - if not begin.isdigit(): - return None - begin = int(begin) - if begin < last_end or last_end < 0: - return None - if end: - if not end.isdigit(): - return None - end = int(end) + 1 - if begin >= end: - return None - else: - end = None - last_end = end - ranges.append((begin, end)) - - return Range(units, ranges) - - -def parse_content_range_header(value, on_update=None): - """Parses a range header into a - :class:`~werkzeug.datastructures.ContentRange` object or `None` if - parsing is not possible. - - .. versionadded:: 0.7 - - :param value: a content range header to be parsed. - :param on_update: an optional callable that is called every time a value - on the :class:`~werkzeug.datastructures.ContentRange` - object is changed. - """ - if value is None: - return None - try: - units, rangedef = (value or "").strip().split(None, 1) - except ValueError: - return None - - if "/" not in rangedef: - return None - rng, length = rangedef.split("/", 1) - if length == "*": - length = None - elif length.isdigit(): - length = int(length) - else: - return None - - if rng == "*": - return ContentRange(units, None, None, length, on_update=on_update) - elif "-" not in rng: - return None - - start, stop = rng.split("-", 1) - try: - start = int(start) - stop = int(stop) + 1 - except ValueError: - return None - - if is_byte_range_valid(start, stop, length): - return ContentRange(units, start, stop, length, on_update=on_update) - - -def quote_etag(etag, weak=False): - """Quote an etag. - - :param etag: the etag to quote. - :param weak: set to `True` to tag it "weak". - """ - if '"' in etag: - raise ValueError("invalid etag") - etag = '"%s"' % etag - if weak: - etag = "W/" + etag - return etag - - -def unquote_etag(etag): - """Unquote a single etag: - - >>> unquote_etag('W/"bar"') - ('bar', True) - >>> unquote_etag('"bar"') - ('bar', False) - - :param etag: the etag identifier to unquote. - :return: a ``(etag, weak)`` tuple. - """ - if not etag: - return None, None - etag = etag.strip() - weak = False - if etag.startswith(("W/", "w/")): - weak = True - etag = etag[2:] - if etag[:1] == etag[-1:] == '"': - etag = etag[1:-1] - return etag, weak - - -def parse_etags(value): - """Parse an etag header. - - :param value: the tag header to parse - :return: an :class:`~werkzeug.datastructures.ETags` object. - """ - if not value: - return ETags() - strong = [] - weak = [] - end = len(value) - pos = 0 - while pos < end: - match = _etag_re.match(value, pos) - if match is None: - break - is_weak, quoted, raw = match.groups() - if raw == "*": - return ETags(star_tag=True) - elif quoted: - raw = quoted - if is_weak: - weak.append(raw) - else: - strong.append(raw) - pos = match.end() - return ETags(strong, weak) - - -def generate_etag(data): - """Generate an etag for some data.""" - return md5(data).hexdigest() - - -def parse_date(value): - """Parse one of the following date formats into a datetime object: - - .. 
sourcecode:: text - - Sun, 06 Nov 1994 08:49:37 GMT ; RFC 822, updated by RFC 1123 - Sunday, 06-Nov-94 08:49:37 GMT ; RFC 850, obsoleted by RFC 1036 - Sun Nov 6 08:49:37 1994 ; ANSI C's asctime() format - - If parsing fails the return value is `None`. - - :param value: a string with a supported date format. - :return: a :class:`datetime.datetime` object. - """ - if value: - t = parsedate_tz(value.strip()) - if t is not None: - try: - year = t[0] - # unfortunately that function does not tell us if two digit - # years were part of the string, or if they were prefixed - # with two zeroes. So what we do is to assume that 69-99 - # refer to 1900, and everything below to 2000 - if year >= 0 and year <= 68: - year += 2000 - elif year >= 69 and year <= 99: - year += 1900 - return datetime(*((year,) + t[1:7])) - timedelta(seconds=t[-1] or 0) - except (ValueError, OverflowError): - return None - - -def _dump_date(d, delim): - """Used for `http_date` and `cookie_date`.""" - if d is None: - d = gmtime() - elif isinstance(d, datetime): - d = d.utctimetuple() - elif isinstance(d, (integer_types, float)): - d = gmtime(d) - return "%s, %02d%s%s%s%s %02d:%02d:%02d GMT" % ( - ("Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun")[d.tm_wday], - d.tm_mday, - delim, - ( - "Jan", - "Feb", - "Mar", - "Apr", - "May", - "Jun", - "Jul", - "Aug", - "Sep", - "Oct", - "Nov", - "Dec", - )[d.tm_mon - 1], - delim, - str(d.tm_year), - d.tm_hour, - d.tm_min, - d.tm_sec, - ) - - -def cookie_date(expires=None): - """Formats the time to ensure compatibility with Netscape's cookie - standard. - - Accepts a floating point number expressed in seconds since the epoch in, a - datetime object or a timetuple. All times in UTC. The :func:`parse_date` - function can be used to parse such a date. - - Outputs a string in the format ``Wdy, DD-Mon-YYYY HH:MM:SS GMT``. - - :param expires: If provided that date is used, otherwise the current. - """ - return _dump_date(expires, "-") - - -def http_date(timestamp=None): - """Formats the time to match the RFC1123 date format. - - Accepts a floating point number expressed in seconds since the epoch in, a - datetime object or a timetuple. All times in UTC. The :func:`parse_date` - function can be used to parse such a date. - - Outputs a string in the format ``Wdy, DD Mon YYYY HH:MM:SS GMT``. - - :param timestamp: If provided that date is used, otherwise the current. - """ - return _dump_date(timestamp, " ") - - -def parse_age(value=None): - """Parses a base-10 integer count of seconds into a timedelta. - - If parsing fails, the return value is `None`. - - :param value: a string consisting of an integer represented in base-10 - :return: a :class:`datetime.timedelta` object or `None`. - """ - if not value: - return None - try: - seconds = int(value) - except ValueError: - return None - if seconds < 0: - return None - try: - return timedelta(seconds=seconds) - except OverflowError: - return None - - -def dump_age(age=None): - """Formats the duration as a base-10 integer. - - :param age: should be an integer number of seconds, - a :class:`datetime.timedelta` object, or, - if the age is unknown, `None` (default). 
- """ - if age is None: - return - if isinstance(age, timedelta): - # do the equivalent of Python 2.7's timedelta.total_seconds(), - # but disregarding fractional seconds - age = age.seconds + (age.days * 24 * 3600) - - age = int(age) - if age < 0: - raise ValueError("age cannot be negative") - - return str(age) - - -def is_resource_modified( - environ, etag=None, data=None, last_modified=None, ignore_if_range=True -): - """Convenience method for conditional requests. - - :param environ: the WSGI environment of the request to be checked. - :param etag: the etag for the response for comparison. - :param data: or alternatively the data of the response to automatically - generate an etag using :func:`generate_etag`. - :param last_modified: an optional date of the last modification. - :param ignore_if_range: If `False`, `If-Range` header will be taken into - account. - :return: `True` if the resource was modified, otherwise `False`. - """ - if etag is None and data is not None: - etag = generate_etag(data) - elif data is not None: - raise TypeError("both data and etag given") - if environ["REQUEST_METHOD"] not in ("GET", "HEAD"): - return False - - unmodified = False - if isinstance(last_modified, string_types): - last_modified = parse_date(last_modified) - - # ensure that microsecond is zero because the HTTP spec does not transmit - # that either and we might have some false positives. See issue #39 - if last_modified is not None: - last_modified = last_modified.replace(microsecond=0) - - if_range = None - if not ignore_if_range and "HTTP_RANGE" in environ: - # https://tools.ietf.org/html/rfc7233#section-3.2 - # A server MUST ignore an If-Range header field received in a request - # that does not contain a Range header field. - if_range = parse_if_range_header(environ.get("HTTP_IF_RANGE")) - - if if_range is not None and if_range.date is not None: - modified_since = if_range.date - else: - modified_since = parse_date(environ.get("HTTP_IF_MODIFIED_SINCE")) - - if modified_since and last_modified and last_modified <= modified_since: - unmodified = True - - if etag: - etag, _ = unquote_etag(etag) - if if_range is not None and if_range.etag is not None: - unmodified = parse_etags(if_range.etag).contains(etag) - else: - if_none_match = parse_etags(environ.get("HTTP_IF_NONE_MATCH")) - if if_none_match: - # https://tools.ietf.org/html/rfc7232#section-3.2 - # "A recipient MUST use the weak comparison function when comparing - # entity-tags for If-None-Match" - unmodified = if_none_match.contains_weak(etag) - - # https://tools.ietf.org/html/rfc7232#section-3.1 - # "Origin server MUST use the strong comparison function when - # comparing entity-tags for If-Match" - if_match = parse_etags(environ.get("HTTP_IF_MATCH")) - if if_match: - unmodified = not if_match.is_strong(etag) - - return not unmodified - - -def remove_entity_headers(headers, allowed=("expires", "content-location")): - """Remove all entity headers from a list or :class:`Headers` object. This - operation works in-place. `Expires` and `Content-Location` headers are - by default not removed. The reason for this is :rfc:`2616` section - 10.3.5 which specifies some entity headers that should be sent. - - .. versionchanged:: 0.5 - added `allowed` parameter. - - :param headers: a list or :class:`Headers` object. - :param allowed: a list of headers that should still be allowed even though - they are entity headers. 
- """ - allowed = set(x.lower() for x in allowed) - headers[:] = [ - (key, value) - for key, value in headers - if not is_entity_header(key) or key.lower() in allowed - ] - - -def remove_hop_by_hop_headers(headers): - """Remove all HTTP/1.1 "Hop-by-Hop" headers from a list or - :class:`Headers` object. This operation works in-place. - - .. versionadded:: 0.5 - - :param headers: a list or :class:`Headers` object. - """ - headers[:] = [ - (key, value) for key, value in headers if not is_hop_by_hop_header(key) - ] - - -def is_entity_header(header): - """Check if a header is an entity header. - - .. versionadded:: 0.5 - - :param header: the header to test. - :return: `True` if it's an entity header, `False` otherwise. - """ - return header.lower() in _entity_headers - - -def is_hop_by_hop_header(header): - """Check if a header is an HTTP/1.1 "Hop-by-Hop" header. - - .. versionadded:: 0.5 - - :param header: the header to test. - :return: `True` if it's an HTTP/1.1 "Hop-by-Hop" header, `False` otherwise. - """ - return header.lower() in _hop_by_hop_headers - - -def parse_cookie(header, charset="utf-8", errors="replace", cls=None): - """Parse a cookie. Either from a string or WSGI environ. - - Per default encoding errors are ignored. If you want a different behavior - you can set `errors` to ``'replace'`` or ``'strict'``. In strict mode a - :exc:`HTTPUnicodeError` is raised. - - .. versionchanged:: 0.5 - This function now returns a :class:`TypeConversionDict` instead of a - regular dict. The `cls` parameter was added. - - :param header: the header to be used to parse the cookie. Alternatively - this can be a WSGI environment. - :param charset: the charset for the cookie values. - :param errors: the error behavior for the charset decoding. - :param cls: an optional dict class to use. If this is not specified - or `None` the default :class:`TypeConversionDict` is - used. - """ - if isinstance(header, dict): - header = header.get("HTTP_COOKIE", "") - elif header is None: - header = "" - - # If the value is an unicode string it's mangled through latin1. This - # is done because on PEP 3333 on Python 3 all headers are assumed latin1 - # which however is incorrect for cookies, which are sent in page encoding. - # As a result we - if isinstance(header, text_type): - header = header.encode("latin1", "replace") - - if cls is None: - cls = TypeConversionDict - - def _parse_pairs(): - for key, val in _cookie_parse_impl(header): - key = to_unicode(key, charset, errors, allow_none_charset=True) - if not key: - continue - val = to_unicode(val, charset, errors, allow_none_charset=True) - yield try_coerce_native(key), val - - return cls(_parse_pairs()) - - -def dump_cookie( - key, - value="", - max_age=None, - expires=None, - path="/", - domain=None, - secure=False, - httponly=False, - charset="utf-8", - sync_expires=True, - max_size=4093, - samesite=None, -): - """Creates a new Set-Cookie header without the ``Set-Cookie`` prefix - The parameters are the same as in the cookie Morsel object in the - Python standard library but it accepts unicode data, too. - - On Python 3 the return value of this function will be a unicode - string, on Python 2 it will be a native string. In both cases the - return value is usually restricted to ascii as the vast majority of - values are properly escaped, but that is no guarantee. If a unicode - string is returned it's tunneled through latin1 as required by - PEP 3333. - - The return value is not ASCII safe if the key contains unicode - characters. 
This is technically against the specification but - happens in the wild. It's strongly recommended to not use - non-ASCII values for the keys. - - :param max_age: should be a number of seconds, or `None` (default) if - the cookie should last only as long as the client's - browser session. Additionally `timedelta` objects - are accepted, too. - :param expires: should be a `datetime` object or unix timestamp. - :param path: limits the cookie to a given path, per default it will - span the whole domain. - :param domain: Use this if you want to set a cross-domain cookie. For - example, ``domain=".example.com"`` will set a cookie - that is readable by the domain ``www.example.com``, - ``foo.example.com`` etc. Otherwise, a cookie will only - be readable by the domain that set it. - :param secure: The cookie will only be available via HTTPS - :param httponly: disallow JavaScript to access the cookie. This is an - extension to the cookie standard and probably not - supported by all browsers. - :param charset: the encoding for unicode values. - :param sync_expires: automatically set expires if max_age is defined - but expires not. - :param max_size: Warn if the final header value exceeds this size. The - default, 4093, should be safely `supported by most browsers - `_. Set to 0 to disable this check. - :param samesite: Limits the scope of the cookie such that it will only - be attached to requests if those requests are "same-site". - - .. _`cookie`: http://browsercookielimits.squawky.net/ - """ - key = to_bytes(key, charset) - value = to_bytes(value, charset) - - if path is not None: - path = iri_to_uri(path, charset) - domain = _make_cookie_domain(domain) - if isinstance(max_age, timedelta): - max_age = (max_age.days * 60 * 60 * 24) + max_age.seconds - if expires is not None: - if not isinstance(expires, string_types): - expires = cookie_date(expires) - elif max_age is not None and sync_expires: - expires = to_bytes(cookie_date(time() + max_age)) - - samesite = samesite.title() if samesite else None - if samesite not in ("Strict", "Lax", None): - raise ValueError("invalid SameSite value; must be 'Strict', 'Lax' or None") - - buf = [key + b"=" + _cookie_quote(value)] - - # XXX: In theory all of these parameters that are not marked with `None` - # should be quoted. Because stdlib did not quote it before I did not - # want to introduce quoting there now. - for k, v, q in ( - (b"Domain", domain, True), - (b"Expires", expires, False), - (b"Max-Age", max_age, False), - (b"Secure", secure, None), - (b"HttpOnly", httponly, None), - (b"Path", path, False), - (b"SameSite", samesite, False), - ): - if q is None: - if v: - buf.append(k) - continue - - if v is None: - continue - - tmp = bytearray(k) - if not isinstance(v, (bytes, bytearray)): - v = to_bytes(text_type(v), charset) - if q: - v = _cookie_quote(v) - tmp += b"=" + v - buf.append(bytes(tmp)) - - # The return value will be an incorrectly encoded latin1 header on - # Python 3 for consistency with the headers object and a bytestring - # on Python 2 because that's how the API makes more sense. - rv = b"; ".join(buf) - if not PY2: - rv = rv.decode("latin1") - - # Warn if the final value of the cookie is less than the limit. If the - # cookie is too large, then it may be silently ignored, which can be quite - # hard to debug. 
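# Editor's note: for reference, a hedged sketch of calling dump_cookie (the
# function whose body continues below) to build a Set-Cookie header value.
# All names and values are illustrative.
from werkzeug.http import dump_cookie

set_cookie_value = dump_cookie(
    "session",
    "abc123",
    max_age=3600,      # one hour; a timedelta would also be accepted
    path="/",
    httponly=True,
    samesite="Lax",
)
# The result is sent as ("Set-Cookie", set_cookie_value) in the response headers.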
- cookie_size = len(rv) - - if max_size and cookie_size > max_size: - value_size = len(value) - warnings.warn( - 'The "{key}" cookie is too large: the value was {value_size} bytes' - " but the header required {extra_size} extra bytes. The final size" - " was {cookie_size} bytes but the limit is {max_size} bytes." - " Browsers may silently ignore cookies larger than this.".format( - key=key, - value_size=value_size, - extra_size=cookie_size - value_size, - cookie_size=cookie_size, - max_size=max_size, - ), - stacklevel=2, - ) - - return rv - - -def is_byte_range_valid(start, stop, length): - """Checks if a given byte content range is valid for the given length. - - .. versionadded:: 0.7 - """ - if (start is None) != (stop is None): - return False - elif start is None: - return length is None or length >= 0 - elif length is None: - return 0 <= start < stop - elif start >= stop: - return False - return 0 <= start < length - - -# circular dependency fun -from .datastructures import Accept -from .datastructures import Authorization -from .datastructures import ContentRange -from .datastructures import ETags -from .datastructures import HeaderSet -from .datastructures import IfRange -from .datastructures import Range -from .datastructures import RequestCacheControl -from .datastructures import TypeConversionDict -from .datastructures import WWWAuthenticate -from .urls import iri_to_uri - -# DEPRECATED -from .datastructures import CharsetAccept as _CharsetAccept -from .datastructures import Headers as _Headers -from .datastructures import LanguageAccept as _LanguageAccept -from .datastructures import MIMEAccept as _MIMEAccept - - -class MIMEAccept(_MIMEAccept): - def __init__(self, *args, **kwargs): - warnings.warn( - "'werkzeug.http.MIMEAccept' has moved to 'werkzeug" - ".datastructures.MIMEAccept' as of version 0.5. This old" - " import will be removed in version 1.0.", - DeprecationWarning, - stacklevel=2, - ) - super(MIMEAccept, self).__init__(*args, **kwargs) - - -class CharsetAccept(_CharsetAccept): - def __init__(self, *args, **kwargs): - warnings.warn( - "'werkzeug.http.CharsetAccept' has moved to 'werkzeug" - ".datastructures.CharsetAccept' as of version 0.5. This old" - " import will be removed in version 1.0.", - DeprecationWarning, - stacklevel=2, - ) - super(CharsetAccept, self).__init__(*args, **kwargs) - - -class LanguageAccept(_LanguageAccept): - def __init__(self, *args, **kwargs): - warnings.warn( - "'werkzeug.http.LanguageAccept' has moved to 'werkzeug" - ".datastructures.LanguageAccept' as of version 0.5. This" - " old import will be removed in version 1.0.", - DeprecationWarning, - stacklevel=2, - ) - super(LanguageAccept, self).__init__(*args, **kwargs) - - -class Headers(_Headers): - def __init__(self, *args, **kwargs): - warnings.warn( - "'werkzeug.http.Headers' has moved to 'werkzeug" - ".datastructures.Headers' as of version 0.5. This old" - " import will be removed in version 1.0.", - DeprecationWarning, - stacklevel=2, - ) - super(Headers, self).__init__(*args, **kwargs) diff --git a/venv/lib/python3.6/site-packages/werkzeug/local.py b/venv/lib/python3.6/site-packages/werkzeug/local.py deleted file mode 100644 index 9a6088c..0000000 --- a/venv/lib/python3.6/site-packages/werkzeug/local.py +++ /dev/null @@ -1,421 +0,0 @@ -# -*- coding: utf-8 -*- -""" - werkzeug.local - ~~~~~~~~~~~~~~ - - This module implements context-local objects. 
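# Editor's note: a minimal sketch of the context locals this module provides,
# assuming the Local/release_local API defined below. Attribute values are
# illustrative; each thread or greenlet sees its own data.
from werkzeug.local import Local, release_local

request_ctx = Local()
request_ctx.user = "alice"        # visible only in the current thread/greenlet
assert request_ctx.user == "alice"
release_local(request_ctx)        # hasattr(request_ctx, "user") is now False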
- - :copyright: 2007 Pallets - :license: BSD-3-Clause -""" -import copy -from functools import update_wrapper - -from ._compat import implements_bool -from ._compat import PY2 -from .wsgi import ClosingIterator - -# since each thread has its own greenlet we can just use those as identifiers -# for the context. If greenlets are not available we fall back to the -# current thread ident depending on where it is. -try: - from greenlet import getcurrent as get_ident -except ImportError: - try: - from thread import get_ident - except ImportError: - from _thread import get_ident - - -def release_local(local): - """Releases the contents of the local for the current context. - This makes it possible to use locals without a manager. - - Example:: - - >>> loc = Local() - >>> loc.foo = 42 - >>> release_local(loc) - >>> hasattr(loc, 'foo') - False - - With this function one can release :class:`Local` objects as well - as :class:`LocalStack` objects. However it is not possible to - release data held by proxies that way, one always has to retain - a reference to the underlying local object in order to be able - to release it. - - .. versionadded:: 0.6.1 - """ - local.__release_local__() - - -class Local(object): - __slots__ = ("__storage__", "__ident_func__") - - def __init__(self): - object.__setattr__(self, "__storage__", {}) - object.__setattr__(self, "__ident_func__", get_ident) - - def __iter__(self): - return iter(self.__storage__.items()) - - def __call__(self, proxy): - """Create a proxy for a name.""" - return LocalProxy(self, proxy) - - def __release_local__(self): - self.__storage__.pop(self.__ident_func__(), None) - - def __getattr__(self, name): - try: - return self.__storage__[self.__ident_func__()][name] - except KeyError: - raise AttributeError(name) - - def __setattr__(self, name, value): - ident = self.__ident_func__() - storage = self.__storage__ - try: - storage[ident][name] = value - except KeyError: - storage[ident] = {name: value} - - def __delattr__(self, name): - try: - del self.__storage__[self.__ident_func__()][name] - except KeyError: - raise AttributeError(name) - - -class LocalStack(object): - """This class works similar to a :class:`Local` but keeps a stack - of objects instead. This is best explained with an example:: - - >>> ls = LocalStack() - >>> ls.push(42) - >>> ls.top - 42 - >>> ls.push(23) - >>> ls.top - 23 - >>> ls.pop() - 23 - >>> ls.top - 42 - - They can be force released by using a :class:`LocalManager` or with - the :func:`release_local` function but the correct way is to pop the - item from the stack after using. When the stack is empty it will - no longer be bound to the current context (and as such released). - - By calling the stack without arguments it returns a proxy that resolves to - the topmost item on the stack. - - .. 
versionadded:: 0.6.1 - """ - - def __init__(self): - self._local = Local() - - def __release_local__(self): - self._local.__release_local__() - - def _get__ident_func__(self): - return self._local.__ident_func__ - - def _set__ident_func__(self, value): - object.__setattr__(self._local, "__ident_func__", value) - - __ident_func__ = property(_get__ident_func__, _set__ident_func__) - del _get__ident_func__, _set__ident_func__ - - def __call__(self): - def _lookup(): - rv = self.top - if rv is None: - raise RuntimeError("object unbound") - return rv - - return LocalProxy(_lookup) - - def push(self, obj): - """Pushes a new item to the stack""" - rv = getattr(self._local, "stack", None) - if rv is None: - self._local.stack = rv = [] - rv.append(obj) - return rv - - def pop(self): - """Removes the topmost item from the stack, will return the - old value or `None` if the stack was already empty. - """ - stack = getattr(self._local, "stack", None) - if stack is None: - return None - elif len(stack) == 1: - release_local(self._local) - return stack[-1] - else: - return stack.pop() - - @property - def top(self): - """The topmost item on the stack. If the stack is empty, - `None` is returned. - """ - try: - return self._local.stack[-1] - except (AttributeError, IndexError): - return None - - -class LocalManager(object): - """Local objects cannot manage themselves. For that you need a local - manager. You can pass a local manager multiple locals or add them later - by appending them to `manager.locals`. Every time the manager cleans up, - it will clean up all the data left in the locals for this context. - - The `ident_func` parameter can be added to override the default ident - function for the wrapped locals. - - .. versionchanged:: 0.6.1 - Instead of a manager the :func:`release_local` function can be used - as well. - - .. versionchanged:: 0.7 - `ident_func` was added. - """ - - def __init__(self, locals=None, ident_func=None): - if locals is None: - self.locals = [] - elif isinstance(locals, Local): - self.locals = [locals] - else: - self.locals = list(locals) - if ident_func is not None: - self.ident_func = ident_func - for local in self.locals: - object.__setattr__(local, "__ident_func__", ident_func) - else: - self.ident_func = get_ident - - def get_ident(self): - """Return the context identifier the local objects use internally for - this context. You cannot override this method to change the behavior - but use it to link other context local objects (such as SQLAlchemy's - scoped sessions) to the Werkzeug locals. - - .. versionchanged:: 0.7 - You can pass a different ident function to the local manager that - will then be propagated to all the locals passed to the - constructor. - """ - return self.ident_func() - - def cleanup(self): - """Manually clean up the data in the locals for this context. Call - this at the end of the request or use `make_middleware()`. - """ - for local in self.locals: - release_local(local) - - def make_middleware(self, app): - """Wrap a WSGI application so that cleaning up happens after - request end. - """ - - def application(environ, start_response): - return ClosingIterator(app(environ, start_response), self.cleanup) - - return application - - def middleware(self, func): - """Like `make_middleware` but for decorating functions. - - Example usage:: - - @manager.middleware - def application(environ, start_response): - ... 
- - The difference to `make_middleware` is that the function passed - will have all the arguments copied from the inner application - (name, docstring, module). - """ - return update_wrapper(self.make_middleware(func), func) - - def __repr__(self): - return "<%s storages: %d>" % (self.__class__.__name__, len(self.locals)) - - -@implements_bool -class LocalProxy(object): - """Acts as a proxy for a werkzeug local. Forwards all operations to - a proxied object. The only operations not supported for forwarding - are right handed operands and any kind of assignment. - - Example usage:: - - from werkzeug.local import Local - l = Local() - - # these are proxies - request = l('request') - user = l('user') - - - from werkzeug.local import LocalStack - _response_local = LocalStack() - - # this is a proxy - response = _response_local() - - Whenever something is bound to l.user / l.request the proxy objects - will forward all operations. If no object is bound a :exc:`RuntimeError` - will be raised. - - To create proxies to :class:`Local` or :class:`LocalStack` objects, - call the object as shown above. If you want to have a proxy to an - object looked up by a function, you can (as of Werkzeug 0.6.1) pass - a function to the :class:`LocalProxy` constructor:: - - session = LocalProxy(lambda: get_current_request().session) - - .. versionchanged:: 0.6.1 - The class can be instantiated with a callable as well now. - """ - - __slots__ = ("__local", "__dict__", "__name__", "__wrapped__") - - def __init__(self, local, name=None): - object.__setattr__(self, "_LocalProxy__local", local) - object.__setattr__(self, "__name__", name) - if callable(local) and not hasattr(local, "__release_local__"): - # "local" is a callable that is not an instance of Local or - # LocalManager: mark it as a wrapped function. - object.__setattr__(self, "__wrapped__", local) - - def _get_current_object(self): - """Return the current object. This is useful if you want the real - object behind the proxy at a time for performance reasons or because - you want to pass the object into a different context. 
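# Editor's note: a sketch combining LocalStack with the proxy behaviour
# described above -- calling the stack returns a LocalProxy that always
# resolves to the topmost item. The pushed dict is a stand-in for a request.
from werkzeug.local import LocalStack

_request_stack = LocalStack()
current_request = _request_stack()      # LocalProxy to the top of the stack

_request_stack.push({"path": "/index"})
assert current_request["path"] == "/index"
_request_stack.pop()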
- """ - if not hasattr(self.__local, "__release_local__"): - return self.__local() - try: - return getattr(self.__local, self.__name__) - except AttributeError: - raise RuntimeError("no object bound to %s" % self.__name__) - - @property - def __dict__(self): - try: - return self._get_current_object().__dict__ - except RuntimeError: - raise AttributeError("__dict__") - - def __repr__(self): - try: - obj = self._get_current_object() - except RuntimeError: - return "<%s unbound>" % self.__class__.__name__ - return repr(obj) - - def __bool__(self): - try: - return bool(self._get_current_object()) - except RuntimeError: - return False - - def __unicode__(self): - try: - return unicode(self._get_current_object()) # noqa - except RuntimeError: - return repr(self) - - def __dir__(self): - try: - return dir(self._get_current_object()) - except RuntimeError: - return [] - - def __getattr__(self, name): - if name == "__members__": - return dir(self._get_current_object()) - return getattr(self._get_current_object(), name) - - def __setitem__(self, key, value): - self._get_current_object()[key] = value - - def __delitem__(self, key): - del self._get_current_object()[key] - - if PY2: - __getslice__ = lambda x, i, j: x._get_current_object()[i:j] - - def __setslice__(self, i, j, seq): - self._get_current_object()[i:j] = seq - - def __delslice__(self, i, j): - del self._get_current_object()[i:j] - - __setattr__ = lambda x, n, v: setattr(x._get_current_object(), n, v) - __delattr__ = lambda x, n: delattr(x._get_current_object(), n) - __str__ = lambda x: str(x._get_current_object()) - __lt__ = lambda x, o: x._get_current_object() < o - __le__ = lambda x, o: x._get_current_object() <= o - __eq__ = lambda x, o: x._get_current_object() == o - __ne__ = lambda x, o: x._get_current_object() != o - __gt__ = lambda x, o: x._get_current_object() > o - __ge__ = lambda x, o: x._get_current_object() >= o - __cmp__ = lambda x, o: cmp(x._get_current_object(), o) # noqa - __hash__ = lambda x: hash(x._get_current_object()) - __call__ = lambda x, *a, **kw: x._get_current_object()(*a, **kw) - __len__ = lambda x: len(x._get_current_object()) - __getitem__ = lambda x, i: x._get_current_object()[i] - __iter__ = lambda x: iter(x._get_current_object()) - __contains__ = lambda x, i: i in x._get_current_object() - __add__ = lambda x, o: x._get_current_object() + o - __sub__ = lambda x, o: x._get_current_object() - o - __mul__ = lambda x, o: x._get_current_object() * o - __floordiv__ = lambda x, o: x._get_current_object() // o - __mod__ = lambda x, o: x._get_current_object() % o - __divmod__ = lambda x, o: x._get_current_object().__divmod__(o) - __pow__ = lambda x, o: x._get_current_object() ** o - __lshift__ = lambda x, o: x._get_current_object() << o - __rshift__ = lambda x, o: x._get_current_object() >> o - __and__ = lambda x, o: x._get_current_object() & o - __xor__ = lambda x, o: x._get_current_object() ^ o - __or__ = lambda x, o: x._get_current_object() | o - __div__ = lambda x, o: x._get_current_object().__div__(o) - __truediv__ = lambda x, o: x._get_current_object().__truediv__(o) - __neg__ = lambda x: -(x._get_current_object()) - __pos__ = lambda x: +(x._get_current_object()) - __abs__ = lambda x: abs(x._get_current_object()) - __invert__ = lambda x: ~(x._get_current_object()) - __complex__ = lambda x: complex(x._get_current_object()) - __int__ = lambda x: int(x._get_current_object()) - __long__ = lambda x: long(x._get_current_object()) # noqa - __float__ = lambda x: float(x._get_current_object()) - __oct__ = lambda x: 
oct(x._get_current_object()) - __hex__ = lambda x: hex(x._get_current_object()) - __index__ = lambda x: x._get_current_object().__index__() - __coerce__ = lambda x, o: x._get_current_object().__coerce__(x, o) - __enter__ = lambda x: x._get_current_object().__enter__() - __exit__ = lambda x, *a, **kw: x._get_current_object().__exit__(*a, **kw) - __radd__ = lambda x, o: o + x._get_current_object() - __rsub__ = lambda x, o: o - x._get_current_object() - __rmul__ = lambda x, o: o * x._get_current_object() - __rdiv__ = lambda x, o: o / x._get_current_object() - if PY2: - __rtruediv__ = lambda x, o: x._get_current_object().__rtruediv__(o) - else: - __rtruediv__ = __rdiv__ - __rfloordiv__ = lambda x, o: o // x._get_current_object() - __rmod__ = lambda x, o: o % x._get_current_object() - __rdivmod__ = lambda x, o: x._get_current_object().__rdivmod__(o) - __copy__ = lambda x: copy.copy(x._get_current_object()) - __deepcopy__ = lambda x, memo: copy.deepcopy(x._get_current_object(), memo) diff --git a/venv/lib/python3.6/site-packages/werkzeug/middleware/__init__.py b/venv/lib/python3.6/site-packages/werkzeug/middleware/__init__.py deleted file mode 100644 index 5e049f5..0000000 --- a/venv/lib/python3.6/site-packages/werkzeug/middleware/__init__.py +++ /dev/null @@ -1,25 +0,0 @@ -""" -Middleware -========== - -A WSGI middleware is a WSGI application that wraps another application -in order to observe or change its behavior. Werkzeug provides some -middleware for common use cases. - -.. toctree:: - :maxdepth: 1 - - proxy_fix - shared_data - dispatcher - http_proxy - lint - profiler - -The :doc:`interactive debugger ` is also a middleware that can -be applied manually, although it is typically used automatically with -the :doc:`development server `. - -:copyright: 2007 Pallets -:license: BSD-3-Clause -""" diff --git a/venv/lib/python3.6/site-packages/werkzeug/middleware/dispatcher.py b/venv/lib/python3.6/site-packages/werkzeug/middleware/dispatcher.py deleted file mode 100644 index 2eb173e..0000000 --- a/venv/lib/python3.6/site-packages/werkzeug/middleware/dispatcher.py +++ /dev/null @@ -1,66 +0,0 @@ -""" -Application Dispatcher -====================== - -This middleware creates a single WSGI application that dispatches to -multiple other WSGI applications mounted at different URL paths. - -A common example is writing a Single Page Application, where you have a -backend API and a frontend written in JavaScript that does the routing -in the browser rather than requesting different pages from the server. -The frontend is a single HTML and JS file that should be served for any -path besides "/api". - -This example dispatches to an API app under "/api", an admin app -under "/admin", and an app that serves frontend files for all other -requests:: - - app = DispatcherMiddleware(serve_frontend, { - '/api': api_app, - '/admin': admin_app, - }) - -In production, you might instead handle this at the HTTP server level, -serving files or proxying to application servers based on location. The -API and admin apps would each be deployed with a separate WSGI server, -and the static files would be served directly by the HTTP server. - -.. autoclass:: DispatcherMiddleware - -:copyright: 2007 Pallets -:license: BSD-3-Clause -""" - - -class DispatcherMiddleware(object): - """Combine multiple applications as a single WSGI application. - Requests are dispatched to an application based on the path it is - mounted under. - - :param app: The WSGI application to dispatch to if the request - doesn't match a mounted path. 
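# Editor's note: an illustrative sketch of the dispatching described above,
# using two trivial WSGI apps (names are placeholders). A request to
# /api/users reaches `api` with SCRIPT_NAME="/api" and PATH_INFO="/users".
from werkzeug.middleware.dispatcher import DispatcherMiddleware
from werkzeug.wrappers import Response

def frontend(environ, start_response):
    return Response("frontend")(environ, start_response)

def api(environ, start_response):
    return Response("api root")(environ, start_response)

application = DispatcherMiddleware(frontend, {"/api": api})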
- :param mounts: Maps path prefixes to applications for dispatching. - """ - - def __init__(self, app, mounts=None): - self.app = app - self.mounts = mounts or {} - - def __call__(self, environ, start_response): - script = environ.get("PATH_INFO", "") - path_info = "" - - while "/" in script: - if script in self.mounts: - app = self.mounts[script] - break - - script, last_item = script.rsplit("/", 1) - path_info = "/%s%s" % (last_item, path_info) - else: - app = self.mounts.get(script, self.app) - - original_script_name = environ.get("SCRIPT_NAME", "") - environ["SCRIPT_NAME"] = original_script_name + script - environ["PATH_INFO"] = path_info - return app(environ, start_response) diff --git a/venv/lib/python3.6/site-packages/werkzeug/middleware/http_proxy.py b/venv/lib/python3.6/site-packages/werkzeug/middleware/http_proxy.py deleted file mode 100644 index bfdc071..0000000 --- a/venv/lib/python3.6/site-packages/werkzeug/middleware/http_proxy.py +++ /dev/null @@ -1,219 +0,0 @@ -""" -Basic HTTP Proxy -================ - -.. autoclass:: ProxyMiddleware - -:copyright: 2007 Pallets -:license: BSD-3-Clause -""" -import socket - -from ..datastructures import EnvironHeaders -from ..http import is_hop_by_hop_header -from ..urls import url_parse -from ..urls import url_quote -from ..wsgi import get_input_stream - -try: - from http import client -except ImportError: - import httplib as client - - -class ProxyMiddleware(object): - """Proxy requests under a path to an external server, routing other - requests to the app. - - This middleware can only proxy HTTP requests, as that is the only - protocol handled by the WSGI server. Other protocols, such as - websocket requests, cannot be proxied at this layer. This should - only be used for development, in production a real proxying server - should be used. - - The middleware takes a dict that maps a path prefix to a dict - describing the host to be proxied to:: - - app = ProxyMiddleware(app, { - "/static/": { - "target": "http://127.0.0.1:5001/", - } - }) - - Each host has the following options: - - ``target``: - The target URL to dispatch to. This is required. - ``remove_prefix``: - Whether to remove the prefix from the URL before dispatching it - to the target. The default is ``False``. - ``host``: - ``""`` (default): - The host header is automatically rewritten to the URL of the - target. - ``None``: - The host header is unmodified from the client request. - Any other value: - The host header is overwritten with the value. - ``headers``: - A dictionary of headers to be sent with the request to the - target. The default is ``{}``. - ``ssl_context``: - A :class:`ssl.SSLContext` defining how to verify requests if the - target is HTTPS. The default is ``None``. - - In the example above, everything under ``"/static/"`` is proxied to - the server on port 5001. The host header is rewritten to the target, - and the ``"/static/"`` prefix is removed from the URLs. - - :param app: The WSGI application to wrap. - :param targets: Proxy target configurations. See description above. - :param chunk_size: Size of chunks to read from input stream and - write to target. - :param timeout: Seconds before an operation to a target fails. - - .. 
versionadded:: 0.14 - """ - - def __init__(self, app, targets, chunk_size=2 << 13, timeout=10): - def _set_defaults(opts): - opts.setdefault("remove_prefix", False) - opts.setdefault("host", "") - opts.setdefault("headers", {}) - opts.setdefault("ssl_context", None) - return opts - - self.app = app - self.targets = dict( - ("/%s/" % k.strip("/"), _set_defaults(v)) for k, v in targets.items() - ) - self.chunk_size = chunk_size - self.timeout = timeout - - def proxy_to(self, opts, path, prefix): - target = url_parse(opts["target"]) - - def application(environ, start_response): - headers = list(EnvironHeaders(environ).items()) - headers[:] = [ - (k, v) - for k, v in headers - if not is_hop_by_hop_header(k) - and k.lower() not in ("content-length", "host") - ] - headers.append(("Connection", "close")) - - if opts["host"] == "": - headers.append(("Host", target.ascii_host)) - elif opts["host"] is None: - headers.append(("Host", environ["HTTP_HOST"])) - else: - headers.append(("Host", opts["host"])) - - headers.extend(opts["headers"].items()) - remote_path = path - - if opts["remove_prefix"]: - remote_path = "%s/%s" % ( - target.path.rstrip("/"), - remote_path[len(prefix) :].lstrip("/"), - ) - - content_length = environ.get("CONTENT_LENGTH") - chunked = False - - if content_length not in ("", None): - headers.append(("Content-Length", content_length)) - elif content_length is not None: - headers.append(("Transfer-Encoding", "chunked")) - chunked = True - - try: - if target.scheme == "http": - con = client.HTTPConnection( - target.ascii_host, target.port or 80, timeout=self.timeout - ) - elif target.scheme == "https": - con = client.HTTPSConnection( - target.ascii_host, - target.port or 443, - timeout=self.timeout, - context=opts["ssl_context"], - ) - else: - raise RuntimeError( - "Target scheme must be 'http' or 'https', got '{}'.".format( - target.scheme - ) - ) - - con.connect() - remote_url = url_quote(remote_path) - querystring = environ["QUERY_STRING"] - - if querystring: - remote_url = remote_url + "?" 
+ querystring - - con.putrequest(environ["REQUEST_METHOD"], remote_url, skip_host=True) - - for k, v in headers: - if k.lower() == "connection": - v = "close" - - con.putheader(k, v) - - con.endheaders() - stream = get_input_stream(environ) - - while 1: - data = stream.read(self.chunk_size) - - if not data: - break - - if chunked: - con.send(b"%x\r\n%s\r\n" % (len(data), data)) - else: - con.send(data) - - resp = con.getresponse() - except socket.error: - from ..exceptions import BadGateway - - return BadGateway()(environ, start_response) - - start_response( - "%d %s" % (resp.status, resp.reason), - [ - (k.title(), v) - for k, v in resp.getheaders() - if not is_hop_by_hop_header(k) - ], - ) - - def read(): - while 1: - try: - data = resp.read(self.chunk_size) - except socket.error: - break - - if not data: - break - - yield data - - return read() - - return application - - def __call__(self, environ, start_response): - path = environ["PATH_INFO"] - app = self.app - - for prefix, opts in self.targets.items(): - if path.startswith(prefix): - app = self.proxy_to(opts, path, prefix) - break - - return app(environ, start_response) diff --git a/venv/lib/python3.6/site-packages/werkzeug/middleware/lint.py b/venv/lib/python3.6/site-packages/werkzeug/middleware/lint.py deleted file mode 100644 index 98f9581..0000000 --- a/venv/lib/python3.6/site-packages/werkzeug/middleware/lint.py +++ /dev/null @@ -1,408 +0,0 @@ -""" -WSGI Protocol Linter -==================== - -This module provides a middleware that performs sanity checks on the -behavior of the WSGI server and application. It checks that the -:pep:`3333` WSGI spec is properly implemented. It also warns on some -common HTTP errors such as non-empty responses for 304 status codes. - -.. autoclass:: LintMiddleware - -:copyright: 2007 Pallets -:license: BSD-3-Clause -""" -from warnings import warn - -from .._compat import implements_iterator -from .._compat import PY2 -from .._compat import string_types -from ..datastructures import Headers -from ..http import is_entity_header -from ..wsgi import FileWrapper - -try: - from urllib.parse import urlparse -except ImportError: - from urlparse import urlparse - - -class WSGIWarning(Warning): - """Warning class for WSGI warnings.""" - - -class HTTPWarning(Warning): - """Warning class for HTTP warnings.""" - - -def check_string(context, obj, stacklevel=3): - if type(obj) is not str: - warn( - "'%s' requires strings, got '%s'" % (context, type(obj).__name__), - WSGIWarning, - ) - - -class InputStream(object): - def __init__(self, stream): - self._stream = stream - - def read(self, *args): - if len(args) == 0: - warn( - "WSGI does not guarantee an EOF marker on the input stream, thus making" - " calls to 'wsgi.input.read()' unsafe. Conforming servers may never" - " return from this call.", - WSGIWarning, - stacklevel=2, - ) - elif len(args) != 1: - warn( - "Too many parameters passed to 'wsgi.input.read()'.", - WSGIWarning, - stacklevel=2, - ) - return self._stream.read(*args) - - def readline(self, *args): - if len(args) == 0: - warn( - "Calls to 'wsgi.input.readline()' without arguments are unsafe. Use" - " 'wsgi.input.read()' instead.", - WSGIWarning, - stacklevel=2, - ) - elif len(args) == 1: - warn( - "'wsgi.input.readline()' was called with a size hint. 
WSGI does not" - " support this, although it's available on all major servers.", - WSGIWarning, - stacklevel=2, - ) - else: - raise TypeError("Too many arguments passed to 'wsgi.input.readline()'.") - return self._stream.readline(*args) - - def __iter__(self): - try: - return iter(self._stream) - except TypeError: - warn("'wsgi.input' is not iterable.", WSGIWarning, stacklevel=2) - return iter(()) - - def close(self): - warn("The application closed the input stream!", WSGIWarning, stacklevel=2) - self._stream.close() - - -class ErrorStream(object): - def __init__(self, stream): - self._stream = stream - - def write(self, s): - check_string("wsgi.error.write()", s) - self._stream.write(s) - - def flush(self): - self._stream.flush() - - def writelines(self, seq): - for line in seq: - self.write(line) - - def close(self): - warn("The application closed the error stream!", WSGIWarning, stacklevel=2) - self._stream.close() - - -class GuardedWrite(object): - def __init__(self, write, chunks): - self._write = write - self._chunks = chunks - - def __call__(self, s): - check_string("write()", s) - self._write.write(s) - self._chunks.append(len(s)) - - -@implements_iterator -class GuardedIterator(object): - def __init__(self, iterator, headers_set, chunks): - self._iterator = iterator - if PY2: - self._next = iter(iterator).next - else: - self._next = iter(iterator).__next__ - self.closed = False - self.headers_set = headers_set - self.chunks = chunks - - def __iter__(self): - return self - - def __next__(self): - if self.closed: - warn("Iterated over closed 'app_iter'.", WSGIWarning, stacklevel=2) - - rv = self._next() - - if not self.headers_set: - warn( - "The application returned before it started the response.", - WSGIWarning, - stacklevel=2, - ) - - check_string("application iterator items", rv) - self.chunks.append(len(rv)) - return rv - - def close(self): - self.closed = True - - if hasattr(self._iterator, "close"): - self._iterator.close() - - if self.headers_set: - status_code, headers = self.headers_set - bytes_sent = sum(self.chunks) - content_length = headers.get("content-length", type=int) - - if status_code == 304: - for key, _value in headers: - key = key.lower() - if key not in ("expires", "content-location") and is_entity_header( - key - ): - warn( - "Entity header %r found in 304 response." % key, HTTPWarning - ) - if bytes_sent: - warn("304 responses must not have a body.", HTTPWarning) - elif 100 <= status_code < 200 or status_code == 204: - if content_length != 0: - warn( - "%r responses must have an empty content length." % status_code, - HTTPWarning, - ) - if bytes_sent: - warn( - "%r responses must not have a body." % status_code, HTTPWarning - ) - elif content_length is not None and content_length != bytes_sent: - warn( - "Content-Length and the number of bytes sent to the client do not" - " match.", - WSGIWarning, - ) - - def __del__(self): - if not self.closed: - try: - warn( - "Iterator was garbage collected before it was closed.", WSGIWarning - ) - except Exception: - pass - - -class LintMiddleware(object): - """Warns about common errors in the WSGI and HTTP behavior of the - server and wrapped application. Some of the issues it check are: - - - invalid status codes - - non-bytestrings sent to the WSGI server - - strings returned from the WSGI application - - non-empty conditional responses - - unquoted etags - - relative URLs in the Location header - - unsafe calls to wsgi.input - - unclosed iterators - - Error information is emitted using the :mod:`warnings` module. 
- - :param app: The WSGI application to wrap. - - .. code-block:: python - - from werkzeug.middleware.lint import LintMiddleware - app = LintMiddleware(app) - """ - - def __init__(self, app): - self.app = app - - def check_environ(self, environ): - if type(environ) is not dict: - warn( - "WSGI environment is not a standard Python dict.", - WSGIWarning, - stacklevel=4, - ) - for key in ( - "REQUEST_METHOD", - "SERVER_NAME", - "SERVER_PORT", - "wsgi.version", - "wsgi.input", - "wsgi.errors", - "wsgi.multithread", - "wsgi.multiprocess", - "wsgi.run_once", - ): - if key not in environ: - warn( - "Required environment key %r not found" % key, - WSGIWarning, - stacklevel=3, - ) - if environ["wsgi.version"] != (1, 0): - warn("Environ is not a WSGI 1.0 environ.", WSGIWarning, stacklevel=3) - - script_name = environ.get("SCRIPT_NAME", "") - path_info = environ.get("PATH_INFO", "") - - if script_name and script_name[0] != "/": - warn( - "'SCRIPT_NAME' does not start with a slash: %r" % script_name, - WSGIWarning, - stacklevel=3, - ) - - if path_info and path_info[0] != "/": - warn( - "'PATH_INFO' does not start with a slash: %r" % path_info, - WSGIWarning, - stacklevel=3, - ) - - def check_start_response(self, status, headers, exc_info): - check_string("status", status) - status_code = status.split(None, 1)[0] - - if len(status_code) != 3 or not status_code.isdigit(): - warn(WSGIWarning("Status code must be three digits"), stacklevel=3) - - if len(status) < 4 or status[3] != " ": - warn( - WSGIWarning( - "Invalid value for status %r. Valid " - "status strings are three digits, a space " - "and a status explanation" - ), - stacklevel=3, - ) - - status_code = int(status_code) - - if status_code < 100: - warn(WSGIWarning("status code < 100 detected"), stacklevel=3) - - if type(headers) is not list: - warn(WSGIWarning("header list is not a list"), stacklevel=3) - - for item in headers: - if type(item) is not tuple or len(item) != 2: - warn(WSGIWarning("Headers must tuple 2-item tuples"), stacklevel=3) - name, value = item - if type(name) is not str or type(value) is not str: - warn(WSGIWarning("header items must be strings"), stacklevel=3) - if name.lower() == "status": - warn( - WSGIWarning( - "The status header is not supported due to " - "conflicts with the CGI spec." - ), - stacklevel=3, - ) - - if exc_info is not None and not isinstance(exc_info, tuple): - warn(WSGIWarning("invalid value for exc_info"), stacklevel=3) - - headers = Headers(headers) - self.check_headers(headers) - - return status_code, headers - - def check_headers(self, headers): - etag = headers.get("etag") - - if etag is not None: - if etag.startswith(("W/", "w/")): - if etag.startswith("w/"): - warn( - HTTPWarning("weak etag indicator should be upcase."), - stacklevel=4, - ) - - etag = etag[2:] - - if not (etag[:1] == etag[-1:] == '"'): - warn(HTTPWarning("unquoted etag emitted."), stacklevel=4) - - location = headers.get("location") - - if location is not None: - if not urlparse(location).netloc: - warn( - HTTPWarning("absolute URLs required for location header"), - stacklevel=4, - ) - - def check_iterator(self, app_iter): - if isinstance(app_iter, string_types): - warn( - "The application returned astring. The response will send one character" - " at a time to the client, which will kill performance. 
Return a list" - " or iterable instead.", - WSGIWarning, - stacklevel=3, - ) - - def __call__(self, *args, **kwargs): - if len(args) != 2: - warn("A WSGI app takes two arguments.", WSGIWarning, stacklevel=2) - - if kwargs: - warn( - "A WSGI app does not take keyword arguments.", WSGIWarning, stacklevel=2 - ) - - environ, start_response = args - - self.check_environ(environ) - environ["wsgi.input"] = InputStream(environ["wsgi.input"]) - environ["wsgi.errors"] = ErrorStream(environ["wsgi.errors"]) - - # Hook our own file wrapper in so that applications will always - # iterate to the end and we can check the content length. - environ["wsgi.file_wrapper"] = FileWrapper - - headers_set = [] - chunks = [] - - def checking_start_response(*args, **kwargs): - if len(args) not in (2, 3): - warn( - "Invalid number of arguments: %s, expected 2 or 3." % len(args), - WSGIWarning, - stacklevel=2, - ) - - if kwargs: - warn("'start_response' does not take keyword arguments.", WSGIWarning) - - status, headers = args[:2] - - if len(args) == 3: - exc_info = args[2] - else: - exc_info = None - - headers_set[:] = self.check_start_response(status, headers, exc_info) - return GuardedWrite(start_response(status, headers, exc_info), chunks) - - app_iter = self.app(environ, checking_start_response) - self.check_iterator(app_iter) - return GuardedIterator(app_iter, headers_set, chunks) diff --git a/venv/lib/python3.6/site-packages/werkzeug/middleware/profiler.py b/venv/lib/python3.6/site-packages/werkzeug/middleware/profiler.py deleted file mode 100644 index 32a14d9..0000000 --- a/venv/lib/python3.6/site-packages/werkzeug/middleware/profiler.py +++ /dev/null @@ -1,132 +0,0 @@ -""" -Application Profiler -==================== - -This module provides a middleware that profiles each request with the -:mod:`cProfile` module. This can help identify bottlenecks in your code -that may be slowing down your application. - -.. autoclass:: ProfilerMiddleware - -:copyright: 2007 Pallets -:license: BSD-3-Clause -""" -from __future__ import print_function - -import os.path -import sys -import time -from pstats import Stats - -try: - from cProfile import Profile -except ImportError: - from profile import Profile - - -class ProfilerMiddleware(object): - """Wrap a WSGI application and profile the execution of each - request. Responses are buffered so that timings are more exact. - - If ``stream`` is given, :class:`pstats.Stats` are written to it - after each request. If ``profile_dir`` is given, :mod:`cProfile` - data files are saved to that directory, one file per request. - - The filename can be customized by passing ``filename_format``. If - it is a string, it will be formatted using :meth:`str.format` with - the following fields available: - - - ``{method}`` - The request method; GET, POST, etc. - - ``{path}`` - The request path or 'root' should one not exist. - - ``{elapsed}`` - The elapsed time of the request. - - ``{time}`` - The time of the request. - - If it is a callable, it will be called with the WSGI ``environ`` - dict and should return a filename. - - :param app: The WSGI application to wrap. - :param stream: Write stats to this stream. Disable with ``None``. - :param sort_by: A tuple of columns to sort stats by. See - :meth:`pstats.Stats.sort_stats`. - :param restrictions: A tuple of restrictions to filter stats by. See - :meth:`pstats.Stats.print_stats`. - :param profile_dir: Save profile data files to this directory. - :param filename_format: Format string for profile data file names, - or a callable returning a name. 
See explanation above. - - .. code-block:: python - - from werkzeug.middleware.profiler import ProfilerMiddleware - app = ProfilerMiddleware(app) - - .. versionchanged:: 0.15 - Stats are written even if ``profile_dir`` is given, and can be - disable by passing ``stream=None``. - - .. versionadded:: 0.15 - Added ``filename_format``. - - .. versionadded:: 0.9 - Added ``restrictions`` and ``profile_dir``. - """ - - def __init__( - self, - app, - stream=sys.stdout, - sort_by=("time", "calls"), - restrictions=(), - profile_dir=None, - filename_format="{method}.{path}.{elapsed:.0f}ms.{time:.0f}.prof", - ): - self._app = app - self._stream = stream - self._sort_by = sort_by - self._restrictions = restrictions - self._profile_dir = profile_dir - self._filename_format = filename_format - - def __call__(self, environ, start_response): - response_body = [] - - def catching_start_response(status, headers, exc_info=None): - start_response(status, headers, exc_info) - return response_body.append - - def runapp(): - app_iter = self._app(environ, catching_start_response) - response_body.extend(app_iter) - - if hasattr(app_iter, "close"): - app_iter.close() - - profile = Profile() - start = time.time() - profile.runcall(runapp) - body = b"".join(response_body) - elapsed = time.time() - start - - if self._profile_dir is not None: - if callable(self._filename_format): - filename = self._filename_format(environ) - else: - filename = self._filename_format.format( - method=environ["REQUEST_METHOD"], - path=( - environ.get("PATH_INFO").strip("/").replace("/", ".") or "root" - ), - elapsed=elapsed * 1000.0, - time=time.time(), - ) - filename = os.path.join(self._profile_dir, filename) - profile.dump_stats(filename) - - if self._stream is not None: - stats = Stats(profile, stream=self._stream) - stats.sort_stats(*self._sort_by) - print("-" * 80, file=self._stream) - print("PATH: {!r}".format(environ.get("PATH_INFO", "")), file=self._stream) - stats.print_stats(*self._restrictions) - print("-" * 80 + "\n", file=self._stream) - - return [body] diff --git a/venv/lib/python3.6/site-packages/werkzeug/middleware/proxy_fix.py b/venv/lib/python3.6/site-packages/werkzeug/middleware/proxy_fix.py deleted file mode 100644 index dc1dacc..0000000 --- a/venv/lib/python3.6/site-packages/werkzeug/middleware/proxy_fix.py +++ /dev/null @@ -1,228 +0,0 @@ -""" -X-Forwarded-For Proxy Fix -========================= - -This module provides a middleware that adjusts the WSGI environ based on -``X-Forwarded-`` headers that proxies in front of an application may -set. - -When an application is running behind a proxy server, WSGI may see the -request as coming from that server rather than the real client. Proxies -set various headers to track where the request actually came from. - -This middleware should only be applied if the application is actually -behind such a proxy, and should be configured with the number of proxies -that are chained in front of it. Not all proxies set all the headers. -Since incoming headers can be faked, you must set how many proxies are -setting each header so the middleware knows what to trust. - -.. autoclass:: ProxyFix - -:copyright: 2007 Pallets -:license: BSD-3-Clause -""" -import warnings - - -class ProxyFix(object): - """Adjust the WSGI environ based on ``X-Forwarded-`` that proxies in - front of the application may set. - - - ``X-Forwarded-For`` sets ``REMOTE_ADDR``. - - ``X-Forwarded-Proto`` sets ``wsgi.url_scheme``. - - ``X-Forwarded-Host`` sets ``HTTP_HOST``, ``SERVER_NAME``, and - ``SERVER_PORT``. 
- - ``X-Forwarded-Port`` sets ``HTTP_HOST`` and ``SERVER_PORT``. - - ``X-Forwarded-Prefix`` sets ``SCRIPT_NAME``. - - You must tell the middleware how many proxies set each header so it - knows what values to trust. It is a security issue to trust values - that came from the client rather than a proxy. - - The original values of the headers are stored in the WSGI - environ as ``werkzeug.proxy_fix.orig``, a dict. - - :param app: The WSGI application to wrap. - :param x_for: Number of values to trust for ``X-Forwarded-For``. - :param x_proto: Number of values to trust for ``X-Forwarded-Proto``. - :param x_host: Number of values to trust for ``X-Forwarded-Host``. - :param x_port: Number of values to trust for ``X-Forwarded-Port``. - :param x_prefix: Number of values to trust for - ``X-Forwarded-Prefix``. - :param num_proxies: Deprecated, use ``x_for`` instead. - - .. code-block:: python - - from werkzeug.middleware.proxy_fix import ProxyFix - # App is behind one proxy that sets the -For and -Host headers. - app = ProxyFix(app, x_for=1, x_host=1) - - .. versionchanged:: 0.15 - All headers support multiple values. The ``num_proxies`` - argument is deprecated. Each header is configured with a - separate number of trusted proxies. - - .. versionchanged:: 0.15 - Original WSGI environ values are stored in the - ``werkzeug.proxy_fix.orig`` dict. ``orig_remote_addr``, - ``orig_wsgi_url_scheme``, and ``orig_http_host`` are deprecated - and will be removed in 1.0. - - .. versionchanged:: 0.15 - Support ``X-Forwarded-Port`` and ``X-Forwarded-Prefix``. - - .. versionchanged:: 0.15 - ``X-Fowarded-Host`` and ``X-Forwarded-Port`` modify - ``SERVER_NAME`` and ``SERVER_PORT``. - """ - - def __init__( - self, app, num_proxies=None, x_for=1, x_proto=0, x_host=0, x_port=0, x_prefix=0 - ): - self.app = app - self.x_for = x_for - self.x_proto = x_proto - self.x_host = x_host - self.x_port = x_port - self.x_prefix = x_prefix - self.num_proxies = num_proxies - - @property - def num_proxies(self): - """The number of proxies setting ``X-Forwarded-For`` in front - of the application. - - .. deprecated:: 0.15 - A separate number of trusted proxies is configured for each - header. ``num_proxies`` maps to ``x_for``. This method will - be removed in 1.0. - - :internal: - """ - warnings.warn( - "'num_proxies' is deprecated as of version 0.15 and will be" - " removed in version 1.0. Use 'x_for' instead.", - DeprecationWarning, - stacklevel=2, - ) - return self.x_for - - @num_proxies.setter - def num_proxies(self, value): - if value is not None: - warnings.warn( - "'num_proxies' is deprecated as of version 0.15 and" - " will be removed in version 1.0. Use 'x_for' instead.", - DeprecationWarning, - stacklevel=2, - ) - self.x_for = value - - def get_remote_addr(self, forwarded_for): - """Get the real ``remote_addr`` by looking backwards ``x_for`` - number of values in the ``X-Forwarded-For`` header. - - :param forwarded_for: List of values parsed from the - ``X-Forwarded-For`` header. - :return: The real ``remote_addr``, or ``None`` if there were not - at least ``x_for`` values. - - .. deprecated:: 0.15 - This is handled internally for each header. This method will - be removed in 1.0. - - .. versionchanged:: 0.9 - Use ``num_proxies`` instead of always picking the first - value. - - .. versionadded:: 0.8 - """ - warnings.warn( - "'get_remote_addr' is deprecated as of version 0.15 and" - " will be removed in version 1.0. 
It is now handled" - " internally for each header.", - DeprecationWarning, - ) - return self._get_trusted_comma(self.x_for, ",".join(forwarded_for)) - - def _get_trusted_comma(self, trusted, value): - """Get the real value from a comma-separated header based on the - configured number of trusted proxies. - - :param trusted: Number of values to trust in the header. - :param value: Header value to parse. - :return: The real value, or ``None`` if there are fewer values - than the number of trusted proxies. - - .. versionadded:: 0.15 - """ - if not (trusted and value): - return - values = [x.strip() for x in value.split(",")] - if len(values) >= trusted: - return values[-trusted] - - def __call__(self, environ, start_response): - """Modify the WSGI environ based on the various ``Forwarded`` - headers before calling the wrapped application. Store the - original environ values in ``werkzeug.proxy_fix.orig_{key}``. - """ - environ_get = environ.get - orig_remote_addr = environ_get("REMOTE_ADDR") - orig_wsgi_url_scheme = environ_get("wsgi.url_scheme") - orig_http_host = environ_get("HTTP_HOST") - environ.update( - { - "werkzeug.proxy_fix.orig": { - "REMOTE_ADDR": orig_remote_addr, - "wsgi.url_scheme": orig_wsgi_url_scheme, - "HTTP_HOST": orig_http_host, - "SERVER_NAME": environ_get("SERVER_NAME"), - "SERVER_PORT": environ_get("SERVER_PORT"), - "SCRIPT_NAME": environ_get("SCRIPT_NAME"), - }, - # todo: remove deprecated keys - "werkzeug.proxy_fix.orig_remote_addr": orig_remote_addr, - "werkzeug.proxy_fix.orig_wsgi_url_scheme": orig_wsgi_url_scheme, - "werkzeug.proxy_fix.orig_http_host": orig_http_host, - } - ) - - x_for = self._get_trusted_comma(self.x_for, environ_get("HTTP_X_FORWARDED_FOR")) - if x_for: - environ["REMOTE_ADDR"] = x_for - - x_proto = self._get_trusted_comma( - self.x_proto, environ_get("HTTP_X_FORWARDED_PROTO") - ) - if x_proto: - environ["wsgi.url_scheme"] = x_proto - - x_host = self._get_trusted_comma( - self.x_host, environ_get("HTTP_X_FORWARDED_HOST") - ) - if x_host: - environ["HTTP_HOST"] = x_host - parts = x_host.split(":", 1) - environ["SERVER_NAME"] = parts[0] - if len(parts) == 2: - environ["SERVER_PORT"] = parts[1] - - x_port = self._get_trusted_comma( - self.x_port, environ_get("HTTP_X_FORWARDED_PORT") - ) - if x_port: - host = environ.get("HTTP_HOST") - if host: - parts = host.split(":", 1) - host = parts[0] if len(parts) == 2 else host - environ["HTTP_HOST"] = "%s:%s" % (host, x_port) - environ["SERVER_PORT"] = x_port - - x_prefix = self._get_trusted_comma( - self.x_prefix, environ_get("HTTP_X_FORWARDED_PREFIX") - ) - if x_prefix: - environ["SCRIPT_NAME"] = x_prefix - - return self.app(environ, start_response) diff --git a/venv/lib/python3.6/site-packages/werkzeug/middleware/shared_data.py b/venv/lib/python3.6/site-packages/werkzeug/middleware/shared_data.py deleted file mode 100644 index 088504a..0000000 --- a/venv/lib/python3.6/site-packages/werkzeug/middleware/shared_data.py +++ /dev/null @@ -1,253 +0,0 @@ -""" -Serve Shared Static Files -========================= - -.. 
autoclass:: SharedDataMiddleware - :members: is_allowed - -:copyright: 2007 Pallets -:license: BSD-3-Clause -""" -import mimetypes -import os -import posixpath -from datetime import datetime -from io import BytesIO -from time import mktime -from time import time -from zlib import adler32 - -from .._compat import PY2 -from .._compat import string_types -from ..filesystem import get_filesystem_encoding -from ..http import http_date -from ..http import is_resource_modified -from ..security import safe_join -from ..wsgi import get_path_info -from ..wsgi import wrap_file - - -class SharedDataMiddleware(object): - - """A WSGI middleware that provides static content for development - environments or simple server setups. Usage is quite simple:: - - import os - from werkzeug.wsgi import SharedDataMiddleware - - app = SharedDataMiddleware(app, { - '/static': os.path.join(os.path.dirname(__file__), 'static') - }) - - The contents of the folder ``./shared`` will now be available on - ``http://example.com/shared/``. This is pretty useful during development - because a standalone media server is not required. One can also mount - files on the root folder and still continue to use the application because - the shared data middleware forwards all unhandled requests to the - application, even if the requests are below one of the shared folders. - - If `pkg_resources` is available you can also tell the middleware to serve - files from package data:: - - app = SharedDataMiddleware(app, { - '/static': ('myapplication', 'static') - }) - - This will then serve the ``static`` folder in the `myapplication` - Python package. - - The optional `disallow` parameter can be a list of :func:`~fnmatch.fnmatch` - rules for files that are not accessible from the web. If `cache` is set to - `False` no caching headers are sent. - - Currently the middleware does not support non ASCII filenames. If the - encoding on the file system happens to be the encoding of the URI it may - work but this could also be by accident. We strongly suggest using ASCII - only file names for static files. - - The middleware will guess the mimetype using the Python `mimetype` - module. If it's unable to figure out the charset it will fall back - to `fallback_mimetype`. - - .. versionchanged:: 0.5 - The cache timeout is configurable now. - - .. versionadded:: 0.6 - The `fallback_mimetype` parameter was added. - - :param app: the application to wrap. If you don't want to wrap an - application you can pass it :exc:`NotFound`. - :param exports: a list or dict of exported files and folders. - :param disallow: a list of :func:`~fnmatch.fnmatch` rules. - :param fallback_mimetype: the fallback mimetype for unknown files. - :param cache: enable or disable caching headers. - :param cache_timeout: the cache timeout in seconds for the headers. 
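# Editor's note: a hedged configuration sketch tying the parameters above
# together -- serve ./static under /static with one hour of caching headers and
# an fnmatch rule blocking some files. Paths and the pattern are illustrative;
# NotFound() stands in for a wrapped application, as the docstring suggests.
import os
from werkzeug.exceptions import NotFound
from werkzeug.middleware.shared_data import SharedDataMiddleware

static_app = SharedDataMiddleware(
    NotFound(),
    {"/static": os.path.join(os.path.dirname(__file__), "static")},
    disallow="*.secret",      # hypothetical fnmatch rule for blocked files
    cache_timeout=3600,       # seconds of caching headers
)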
- """ - - def __init__( - self, - app, - exports, - disallow=None, - cache=True, - cache_timeout=60 * 60 * 12, - fallback_mimetype="text/plain", - ): - self.app = app - self.exports = [] - self.cache = cache - self.cache_timeout = cache_timeout - - if hasattr(exports, "items"): - exports = exports.items() - - for key, value in exports: - if isinstance(value, tuple): - loader = self.get_package_loader(*value) - elif isinstance(value, string_types): - if os.path.isfile(value): - loader = self.get_file_loader(value) - else: - loader = self.get_directory_loader(value) - else: - raise TypeError("unknown def %r" % value) - - self.exports.append((key, loader)) - - if disallow is not None: - from fnmatch import fnmatch - - self.is_allowed = lambda x: not fnmatch(x, disallow) - - self.fallback_mimetype = fallback_mimetype - - def is_allowed(self, filename): - """Subclasses can override this method to disallow the access to - certain files. However by providing `disallow` in the constructor - this method is overwritten. - """ - return True - - def _opener(self, filename): - return lambda: ( - open(filename, "rb"), - datetime.utcfromtimestamp(os.path.getmtime(filename)), - int(os.path.getsize(filename)), - ) - - def get_file_loader(self, filename): - return lambda x: (os.path.basename(filename), self._opener(filename)) - - def get_package_loader(self, package, package_path): - from pkg_resources import DefaultProvider, ResourceManager, get_provider - - loadtime = datetime.utcnow() - provider = get_provider(package) - manager = ResourceManager() - filesystem_bound = isinstance(provider, DefaultProvider) - - def loader(path): - if path is None: - return None, None - - path = safe_join(package_path, path) - - if not provider.has_resource(path): - return None, None - - basename = posixpath.basename(path) - - if filesystem_bound: - return ( - basename, - self._opener(provider.get_resource_filename(manager, path)), - ) - - s = provider.get_resource_string(manager, path) - return basename, lambda: (BytesIO(s), loadtime, len(s)) - - return loader - - def get_directory_loader(self, directory): - def loader(path): - if path is not None: - path = safe_join(directory, path) - else: - path = directory - - if os.path.isfile(path): - return os.path.basename(path), self._opener(path) - - return None, None - - return loader - - def generate_etag(self, mtime, file_size, real_filename): - if not isinstance(real_filename, bytes): - real_filename = real_filename.encode(get_filesystem_encoding()) - - return "wzsdm-%d-%s-%s" % ( - mktime(mtime.timetuple()), - file_size, - adler32(real_filename) & 0xFFFFFFFF, - ) - - def __call__(self, environ, start_response): - path = get_path_info(environ) - - if PY2: - path = path.encode(get_filesystem_encoding()) - - file_loader = None - - for search_path, loader in self.exports: - if search_path == path: - real_filename, file_loader = loader(None) - - if file_loader is not None: - break - - if not search_path.endswith("/"): - search_path += "/" - - if path.startswith(search_path): - real_filename, file_loader = loader(path[len(search_path) :]) - - if file_loader is not None: - break - - if file_loader is None or not self.is_allowed(real_filename): - return self.app(environ, start_response) - - guessed_type = mimetypes.guess_type(real_filename) - mime_type = guessed_type[0] or self.fallback_mimetype - f, mtime, file_size = file_loader() - - headers = [("Date", http_date())] - - if self.cache: - timeout = self.cache_timeout - etag = self.generate_etag(mtime, file_size, real_filename) - 
headers += [ - ("Etag", '"%s"' % etag), - ("Cache-Control", "max-age=%d, public" % timeout), - ] - - if not is_resource_modified(environ, etag, last_modified=mtime): - f.close() - start_response("304 Not Modified", headers) - return [] - - headers.append(("Expires", http_date(time() + timeout))) - else: - headers.append(("Cache-Control", "public")) - - headers.extend( - ( - ("Content-Type", mime_type), - ("Content-Length", str(file_size)), - ("Last-Modified", http_date(mtime)), - ) - ) - start_response("200 OK", headers) - return wrap_file(environ, f) diff --git a/venv/lib/python3.6/site-packages/werkzeug/posixemulation.py b/venv/lib/python3.6/site-packages/werkzeug/posixemulation.py deleted file mode 100644 index 696b456..0000000 --- a/venv/lib/python3.6/site-packages/werkzeug/posixemulation.py +++ /dev/null @@ -1,117 +0,0 @@ -# -*- coding: utf-8 -*- -r""" - werkzeug.posixemulation - ~~~~~~~~~~~~~~~~~~~~~~~ - - Provides a POSIX emulation for some features that are relevant to - web applications. The main purpose is to simplify support for - systems such as Windows NT that are not 100% POSIX compatible. - - Currently this only implements a :func:`rename` function that - follows POSIX semantics. Eg: if the target file already exists it - will be replaced without asking. - - This module was introduced in 0.6.1 and is not a public interface. - It might become one in later versions of Werkzeug. - - :copyright: 2007 Pallets - :license: BSD-3-Clause -""" -import errno -import os -import random -import sys -import time - -from ._compat import to_unicode -from .filesystem import get_filesystem_encoding - -can_rename_open_file = False - -if os.name == "nt": - try: - import ctypes - - _MOVEFILE_REPLACE_EXISTING = 0x1 - _MOVEFILE_WRITE_THROUGH = 0x8 - _MoveFileEx = ctypes.windll.kernel32.MoveFileExW - - def _rename(src, dst): - src = to_unicode(src, get_filesystem_encoding()) - dst = to_unicode(dst, get_filesystem_encoding()) - if _rename_atomic(src, dst): - return True - retry = 0 - rv = False - while not rv and retry < 100: - rv = _MoveFileEx( - src, dst, _MOVEFILE_REPLACE_EXISTING | _MOVEFILE_WRITE_THROUGH - ) - if not rv: - time.sleep(0.001) - retry += 1 - return rv - - # new in Vista and Windows Server 2008 - _CreateTransaction = ctypes.windll.ktmw32.CreateTransaction - _CommitTransaction = ctypes.windll.ktmw32.CommitTransaction - _MoveFileTransacted = ctypes.windll.kernel32.MoveFileTransactedW - _CloseHandle = ctypes.windll.kernel32.CloseHandle - can_rename_open_file = True - - def _rename_atomic(src, dst): - ta = _CreateTransaction(None, 0, 0, 0, 0, 1000, "Werkzeug rename") - if ta == -1: - return False - try: - retry = 0 - rv = False - while not rv and retry < 100: - rv = _MoveFileTransacted( - src, - dst, - None, - None, - _MOVEFILE_REPLACE_EXISTING | _MOVEFILE_WRITE_THROUGH, - ta, - ) - if rv: - rv = _CommitTransaction(ta) - break - else: - time.sleep(0.001) - retry += 1 - return rv - finally: - _CloseHandle(ta) - - except Exception: - - def _rename(src, dst): - return False - - def _rename_atomic(src, dst): - return False - - def rename(src, dst): - # Try atomic or pseudo-atomic rename - if _rename(src, dst): - return - # Fall back to "move away and replace" - try: - os.rename(src, dst) - except OSError as e: - if e.errno != errno.EEXIST: - raise - old = "%s-%08x" % (dst, random.randint(0, sys.maxsize)) - os.rename(dst, old) - os.rename(src, dst) - try: - os.unlink(old) - except Exception: - pass - - -else: - rename = os.rename - can_rename_open_file = True diff --git 
a/venv/lib/python3.6/site-packages/werkzeug/routing.py b/venv/lib/python3.6/site-packages/werkzeug/routing.py deleted file mode 100644 index 8ff7df1..0000000 --- a/venv/lib/python3.6/site-packages/werkzeug/routing.py +++ /dev/null @@ -1,2039 +0,0 @@ -# -*- coding: utf-8 -*- -""" - werkzeug.routing - ~~~~~~~~~~~~~~~~ - - When it comes to combining multiple controller or view functions (however - you want to call them) you need a dispatcher. A simple way would be - applying regular expression tests on the ``PATH_INFO`` and calling - registered callback functions that return the value then. - - This module implements a much more powerful system than simple regular - expression matching because it can also convert values in the URLs and - build URLs. - - Here a simple example that creates an URL map for an application with - two subdomains (www and kb) and some URL rules: - - >>> m = Map([ - ... # Static URLs - ... Rule('/', endpoint='static/index'), - ... Rule('/about', endpoint='static/about'), - ... Rule('/help', endpoint='static/help'), - ... # Knowledge Base - ... Subdomain('kb', [ - ... Rule('/', endpoint='kb/index'), - ... Rule('/browse/', endpoint='kb/browse'), - ... Rule('/browse//', endpoint='kb/browse'), - ... Rule('/browse//', endpoint='kb/browse') - ... ]) - ... ], default_subdomain='www') - - If the application doesn't use subdomains it's perfectly fine to not set - the default subdomain and not use the `Subdomain` rule factory. The endpoint - in the rules can be anything, for example import paths or unique - identifiers. The WSGI application can use those endpoints to get the - handler for that URL. It doesn't have to be a string at all but it's - recommended. - - Now it's possible to create a URL adapter for one of the subdomains and - build URLs: - - >>> c = m.bind('example.com') - >>> c.build("kb/browse", dict(id=42)) - 'http://kb.example.com/browse/42/' - >>> c.build("kb/browse", dict()) - 'http://kb.example.com/browse/' - >>> c.build("kb/browse", dict(id=42, page=3)) - 'http://kb.example.com/browse/42/3' - >>> c.build("static/about") - '/about' - >>> c.build("static/index", force_external=True) - 'http://www.example.com/' - - >>> c = m.bind('example.com', subdomain='kb') - >>> c.build("static/about") - 'http://www.example.com/about' - - The first argument to bind is the server name *without* the subdomain. - Per default it will assume that the script is mounted on the root, but - often that's not the case so you can provide the real mount point as - second argument: - - >>> c = m.bind('example.com', '/applications/example') - - The third argument can be the subdomain, if not given the default - subdomain is used. For more details about binding have a look at the - documentation of the `MapAdapter`. - - And here is how you can match URLs: - - >>> c = m.bind('example.com') - >>> c.match("/") - ('static/index', {}) - >>> c.match("/about") - ('static/about', {}) - >>> c = m.bind('example.com', '/', 'kb') - >>> c.match("/") - ('kb/index', {}) - >>> c.match("/browse/42/23") - ('kb/browse', {'id': 42, 'page': 23}) - - If matching fails you get a `NotFound` exception, if the rule thinks - it's a good idea to redirect (for example because the URL was defined - to have a slash at the end but the request was missing that slash) it - will raise a `RequestRedirect` exception. Both are subclasses of the - `HTTPException` so you can use those errors as responses in the - application. 
- - If matching succeeded but the URL rule was incompatible to the given - method (for example there were only rules for `GET` and `HEAD` and - routing system tried to match a `POST` request) a `MethodNotAllowed` - exception is raised. - - - :copyright: 2007 Pallets - :license: BSD-3-Clause -""" -import ast -import difflib -import posixpath -import re -import uuid -from pprint import pformat -from threading import Lock - -from ._compat import implements_to_string -from ._compat import iteritems -from ._compat import itervalues -from ._compat import native_string_result -from ._compat import string_types -from ._compat import text_type -from ._compat import to_bytes -from ._compat import to_unicode -from ._compat import wsgi_decoding_dance -from ._internal import _encode_idna -from ._internal import _get_environ -from .datastructures import ImmutableDict -from .datastructures import MultiDict -from .exceptions import BadHost -from .exceptions import HTTPException -from .exceptions import MethodNotAllowed -from .exceptions import NotFound -from .urls import _fast_url_quote -from .urls import url_encode -from .urls import url_join -from .urls import url_quote -from .utils import cached_property -from .utils import format_string -from .utils import redirect -from .wsgi import get_host - -_rule_re = re.compile( - r""" - (?P[^<]*) # static rule data - < - (?: - (?P[a-zA-Z_][a-zA-Z0-9_]*) # converter name - (?:\((?P.*?)\))? # converter arguments - \: # variable delimiter - )? - (?P[a-zA-Z_][a-zA-Z0-9_]*) # variable name - > - """, - re.VERBOSE, -) -_simple_rule_re = re.compile(r"<([^>]+)>") -_converter_args_re = re.compile( - r""" - ((?P\w+)\s*=\s*)? - (?P - True|False| - \d+.\d+| - \d+.| - \d+| - [\w\d_.]+| - [urUR]?(?P"[^"]*?"|'[^']*') - )\s*, - """, - re.VERBOSE | re.UNICODE, -) - - -_PYTHON_CONSTANTS = {"None": None, "True": True, "False": False} - - -def _pythonize(value): - if value in _PYTHON_CONSTANTS: - return _PYTHON_CONSTANTS[value] - for convert in int, float: - try: - return convert(value) - except ValueError: - pass - if value[:1] == value[-1:] and value[0] in "\"'": - value = value[1:-1] - return text_type(value) - - -def parse_converter_args(argstr): - argstr += "," - args = [] - kwargs = {} - - for item in _converter_args_re.finditer(argstr): - value = item.group("stringval") - if value is None: - value = item.group("value") - value = _pythonize(value) - if not item.group("name"): - args.append(value) - else: - name = item.group("name") - kwargs[name] = value - - return tuple(args), kwargs - - -def parse_rule(rule): - """Parse a rule and return it as generator. Each iteration yields tuples - in the form ``(converter, arguments, variable)``. If the converter is - `None` it's a static url part, otherwise it's a dynamic one. - - :internal: - """ - pos = 0 - end = len(rule) - do_match = _rule_re.match - used_names = set() - while pos < end: - m = do_match(rule, pos) - if m is None: - break - data = m.groupdict() - if data["static"]: - yield None, None, data["static"] - variable = data["variable"] - converter = data["converter"] or "default" - if variable in used_names: - raise ValueError("variable name %r used twice." 
% variable) - used_names.add(variable) - yield converter, data["args"] or None, variable - pos = m.end() - if pos < end: - remaining = rule[pos:] - if ">" in remaining or "<" in remaining: - raise ValueError("malformed url rule: %r" % rule) - yield None, None, remaining - - -class RoutingException(Exception): - """Special exceptions that require the application to redirect, notifying - about missing urls, etc. - - :internal: - """ - - -class RequestRedirect(HTTPException, RoutingException): - """Raise if the map requests a redirect. This is for example the case if - `strict_slashes` are activated and an url that requires a trailing slash. - - The attribute `new_url` contains the absolute destination url. - """ - - code = 308 - - def __init__(self, new_url): - RoutingException.__init__(self, new_url) - self.new_url = new_url - - def get_response(self, environ): - return redirect(self.new_url, self.code) - - -class RequestSlash(RoutingException): - """Internal exception.""" - - -class RequestAliasRedirect(RoutingException): # noqa: B903 - """This rule is an alias and wants to redirect to the canonical URL.""" - - def __init__(self, matched_values): - self.matched_values = matched_values - - -@implements_to_string -class BuildError(RoutingException, LookupError): - """Raised if the build system cannot find a URL for an endpoint with the - values provided. - """ - - def __init__(self, endpoint, values, method, adapter=None): - LookupError.__init__(self, endpoint, values, method) - self.endpoint = endpoint - self.values = values - self.method = method - self.adapter = adapter - - @cached_property - def suggested(self): - return self.closest_rule(self.adapter) - - def closest_rule(self, adapter): - def _score_rule(rule): - return sum( - [ - 0.98 - * difflib.SequenceMatcher( - None, rule.endpoint, self.endpoint - ).ratio(), - 0.01 * bool(set(self.values or ()).issubset(rule.arguments)), - 0.01 * bool(rule.methods and self.method in rule.methods), - ] - ) - - if adapter and adapter.map._rules: - return max(adapter.map._rules, key=_score_rule) - - def __str__(self): - message = [] - message.append("Could not build url for endpoint %r" % self.endpoint) - if self.method: - message.append(" (%r)" % self.method) - if self.values: - message.append(" with values %r" % sorted(self.values.keys())) - message.append(".") - if self.suggested: - if self.endpoint == self.suggested.endpoint: - if self.method and self.method not in self.suggested.methods: - message.append( - " Did you mean to use methods %r?" - % sorted(self.suggested.methods) - ) - missing_values = self.suggested.arguments.union( - set(self.suggested.defaults or ()) - ) - set(self.values.keys()) - if missing_values: - message.append( - " Did you forget to specify values %r?" % sorted(missing_values) - ) - else: - message.append(" Did you mean %r instead?" % self.suggested.endpoint) - return u"".join(message) - - -class ValidationError(ValueError): - """Validation error. If a rule converter raises this exception the rule - does not match the current URL and the next URL is tried. - """ - - -class RuleFactory(object): - """As soon as you have more complex URL setups it's a good idea to use rule - factories to avoid repetitive tasks. Some of them are builtin, others can - be added by subclassing `RuleFactory` and overriding `get_rules`. 
- """ - - def get_rules(self, map): - """Subclasses of `RuleFactory` have to override this method and return - an iterable of rules.""" - raise NotImplementedError() - - -class Subdomain(RuleFactory): - """All URLs provided by this factory have the subdomain set to a - specific domain. For example if you want to use the subdomain for - the current language this can be a good setup:: - - url_map = Map([ - Rule('/', endpoint='#select_language'), - Subdomain('', [ - Rule('/', endpoint='index'), - Rule('/about', endpoint='about'), - Rule('/help', endpoint='help') - ]) - ]) - - All the rules except for the ``'#select_language'`` endpoint will now - listen on a two letter long subdomain that holds the language code - for the current request. - """ - - def __init__(self, subdomain, rules): - self.subdomain = subdomain - self.rules = rules - - def get_rules(self, map): - for rulefactory in self.rules: - for rule in rulefactory.get_rules(map): - rule = rule.empty() - rule.subdomain = self.subdomain - yield rule - - -class Submount(RuleFactory): - """Like `Subdomain` but prefixes the URL rule with a given string:: - - url_map = Map([ - Rule('/', endpoint='index'), - Submount('/blog', [ - Rule('/', endpoint='blog/index'), - Rule('/entry/', endpoint='blog/show') - ]) - ]) - - Now the rule ``'blog/show'`` matches ``/blog/entry/``. - """ - - def __init__(self, path, rules): - self.path = path.rstrip("/") - self.rules = rules - - def get_rules(self, map): - for rulefactory in self.rules: - for rule in rulefactory.get_rules(map): - rule = rule.empty() - rule.rule = self.path + rule.rule - yield rule - - -class EndpointPrefix(RuleFactory): - """Prefixes all endpoints (which must be strings for this factory) with - another string. This can be useful for sub applications:: - - url_map = Map([ - Rule('/', endpoint='index'), - EndpointPrefix('blog/', [Submount('/blog', [ - Rule('/', endpoint='index'), - Rule('/entry/', endpoint='show') - ])]) - ]) - """ - - def __init__(self, prefix, rules): - self.prefix = prefix - self.rules = rules - - def get_rules(self, map): - for rulefactory in self.rules: - for rule in rulefactory.get_rules(map): - rule = rule.empty() - rule.endpoint = self.prefix + rule.endpoint - yield rule - - -class RuleTemplate(object): - """Returns copies of the rules wrapped and expands string templates in - the endpoint, rule, defaults or subdomain sections. - - Here a small example for such a rule template:: - - from werkzeug.routing import Map, Rule, RuleTemplate - - resource = RuleTemplate([ - Rule('/$name/', endpoint='$name.list'), - Rule('/$name/', endpoint='$name.show') - ]) - - url_map = Map([resource(name='user'), resource(name='page')]) - - When a rule template is called the keyword arguments are used to - replace the placeholders in all the string parameters. - """ - - def __init__(self, rules): - self.rules = list(rules) - - def __call__(self, *args, **kwargs): - return RuleTemplateFactory(self.rules, dict(*args, **kwargs)) - - -class RuleTemplateFactory(RuleFactory): - """A factory that fills in template variables into rules. Used by - `RuleTemplate` internally. 
- - :internal: - """ - - def __init__(self, rules, context): - self.rules = rules - self.context = context - - def get_rules(self, map): - for rulefactory in self.rules: - for rule in rulefactory.get_rules(map): - new_defaults = subdomain = None - if rule.defaults: - new_defaults = {} - for key, value in iteritems(rule.defaults): - if isinstance(value, string_types): - value = format_string(value, self.context) - new_defaults[key] = value - if rule.subdomain is not None: - subdomain = format_string(rule.subdomain, self.context) - new_endpoint = rule.endpoint - if isinstance(new_endpoint, string_types): - new_endpoint = format_string(new_endpoint, self.context) - yield Rule( - format_string(rule.rule, self.context), - new_defaults, - subdomain, - rule.methods, - rule.build_only, - new_endpoint, - rule.strict_slashes, - ) - - -def _prefix_names(src): - """ast parse and prefix names with `.` to avoid collision with user vars""" - tree = ast.parse(src).body[0] - if isinstance(tree, ast.Expr): - tree = tree.value - for node in ast.walk(tree): - if isinstance(node, ast.Name): - node.id = "." + node.id - return tree - - -_CALL_CONVERTER_CODE_FMT = "self._converters[{elem!r}].to_url()" -_IF_KWARGS_URL_ENCODE_CODE = """\ -if kwargs: - q = '?' - params = self._encode_query_vars(kwargs) -else: - q = params = '' -""" -_IF_KWARGS_URL_ENCODE_AST = _prefix_names(_IF_KWARGS_URL_ENCODE_CODE) -_URL_ENCODE_AST_NAMES = (_prefix_names("q"), _prefix_names("params")) - - -@implements_to_string -class Rule(RuleFactory): - """A Rule represents one URL pattern. There are some options for `Rule` - that change the way it behaves and are passed to the `Rule` constructor. - Note that besides the rule-string all arguments *must* be keyword arguments - in order to not break the application on Werkzeug upgrades. - - `string` - Rule strings basically are just normal URL paths with placeholders in - the format ```` where the converter and the - arguments are optional. If no converter is defined the `default` - converter is used which means `string` in the normal configuration. - - URL rules that end with a slash are branch URLs, others are leaves. - If you have `strict_slashes` enabled (which is the default), all - branch URLs that are matched without a trailing slash will trigger a - redirect to the same URL with the missing slash appended. - - The converters are defined on the `Map`. - - `endpoint` - The endpoint for this rule. This can be anything. A reference to a - function, a string, a number etc. The preferred way is using a string - because the endpoint is used for URL generation. - - `defaults` - An optional dict with defaults for other rules with the same endpoint. - This is a bit tricky but useful if you want to have unique URLs:: - - url_map = Map([ - Rule('/all/', defaults={'page': 1}, endpoint='all_entries'), - Rule('/all/page/', endpoint='all_entries') - ]) - - If a user now visits ``http://example.com/all/page/1`` he will be - redirected to ``http://example.com/all/``. If `redirect_defaults` is - disabled on the `Map` instance this will only affect the URL - generation. - - `subdomain` - The subdomain rule string for this rule. If not specified the rule - only matches for the `default_subdomain` of the map. If the map is - not bound to a subdomain this feature is disabled. 
- - Can be useful if you want to have user profiles on different subdomains - and all subdomains are forwarded to your application:: - - url_map = Map([ - Rule('/', subdomain='', endpoint='user/homepage'), - Rule('/stats', subdomain='', endpoint='user/stats') - ]) - - `methods` - A sequence of http methods this rule applies to. If not specified, all - methods are allowed. For example this can be useful if you want different - endpoints for `POST` and `GET`. If methods are defined and the path - matches but the method matched against is not in this list or in the - list of another rule for that path the error raised is of the type - `MethodNotAllowed` rather than `NotFound`. If `GET` is present in the - list of methods and `HEAD` is not, `HEAD` is added automatically. - - .. versionchanged:: 0.6.1 - `HEAD` is now automatically added to the methods if `GET` is - present. The reason for this is that existing code often did not - work properly in servers not rewriting `HEAD` to `GET` - automatically and it was not documented how `HEAD` should be - treated. This was considered a bug in Werkzeug because of that. - - `strict_slashes` - Override the `Map` setting for `strict_slashes` only for this rule. If - not specified the `Map` setting is used. - - `build_only` - Set this to True and the rule will never match but will create a URL - that can be build. This is useful if you have resources on a subdomain - or folder that are not handled by the WSGI application (like static data) - - `redirect_to` - If given this must be either a string or callable. In case of a - callable it's called with the url adapter that triggered the match and - the values of the URL as keyword arguments and has to return the target - for the redirect, otherwise it has to be a string with placeholders in - rule syntax:: - - def foo_with_slug(adapter, id): - # ask the database for the slug for the old id. this of - # course has nothing to do with werkzeug. - return 'foo/' + Foo.get_slug_for_id(id) - - url_map = Map([ - Rule('/foo/', endpoint='foo'), - Rule('/some/old/url/', redirect_to='foo/'), - Rule('/other/old/url/', redirect_to=foo_with_slug) - ]) - - When the rule is matched the routing system will raise a - `RequestRedirect` exception with the target for the redirect. - - Keep in mind that the URL will be joined against the URL root of the - script so don't use a leading slash on the target URL unless you - really mean root of that domain. - - `alias` - If enabled this rule serves as an alias for another rule with the same - endpoint and arguments. - - `host` - If provided and the URL map has host matching enabled this can be - used to provide a match rule for the whole host. This also means - that the subdomain feature is disabled. - - .. versionadded:: 0.7 - The `alias` and `host` parameters were added. 
- """ - - def __init__( - self, - string, - defaults=None, - subdomain=None, - methods=None, - build_only=False, - endpoint=None, - strict_slashes=None, - redirect_to=None, - alias=False, - host=None, - ): - if not string.startswith("/"): - raise ValueError("urls must start with a leading slash") - self.rule = string - self.is_leaf = not string.endswith("/") - - self.map = None - self.strict_slashes = strict_slashes - self.subdomain = subdomain - self.host = host - self.defaults = defaults - self.build_only = build_only - self.alias = alias - if methods is None: - self.methods = None - else: - if isinstance(methods, str): - raise TypeError("param `methods` should be `Iterable[str]`, not `str`") - self.methods = set([x.upper() for x in methods]) - if "HEAD" not in self.methods and "GET" in self.methods: - self.methods.add("HEAD") - self.endpoint = endpoint - self.redirect_to = redirect_to - - if defaults: - self.arguments = set(map(str, defaults)) - else: - self.arguments = set() - self._trace = self._converters = self._regex = self._argument_weights = None - - def empty(self): - """ - Return an unbound copy of this rule. - - This can be useful if want to reuse an already bound URL for another - map. See ``get_empty_kwargs`` to override what keyword arguments are - provided to the new copy. - """ - return type(self)(self.rule, **self.get_empty_kwargs()) - - def get_empty_kwargs(self): - """ - Provides kwargs for instantiating empty copy with empty() - - Use this method to provide custom keyword arguments to the subclass of - ``Rule`` when calling ``some_rule.empty()``. Helpful when the subclass - has custom keyword arguments that are needed at instantiation. - - Must return a ``dict`` that will be provided as kwargs to the new - instance of ``Rule``, following the initial ``self.rule`` value which - is always provided as the first, required positional argument. - """ - defaults = None - if self.defaults: - defaults = dict(self.defaults) - return dict( - defaults=defaults, - subdomain=self.subdomain, - methods=self.methods, - build_only=self.build_only, - endpoint=self.endpoint, - strict_slashes=self.strict_slashes, - redirect_to=self.redirect_to, - alias=self.alias, - host=self.host, - ) - - def get_rules(self, map): - yield self - - def refresh(self): - """Rebinds and refreshes the URL. Call this if you modified the - rule in place. - - :internal: - """ - self.bind(self.map, rebind=True) - - def bind(self, map, rebind=False): - """Bind the url to a map and create a regular expression based on - the information from the rule itself and the defaults from the map. - - :internal: - """ - if self.map is not None and not rebind: - raise RuntimeError("url rule %r already bound to map %r" % (self, self.map)) - self.map = map - if self.strict_slashes is None: - self.strict_slashes = map.strict_slashes - if self.subdomain is None: - self.subdomain = map.default_subdomain - self.compile() - - def get_converter(self, variable_name, converter_name, args, kwargs): - """Looks up the converter for the given parameter. - - .. 
versionadded:: 0.9 - """ - if converter_name not in self.map.converters: - raise LookupError("the converter %r does not exist" % converter_name) - return self.map.converters[converter_name](self.map, *args, **kwargs) - - def _encode_query_vars(self, query_vars): - return url_encode( - query_vars, - charset=self.map.charset, - sort=self.map.sort_parameters, - key=self.map.sort_key, - ) - - def compile(self): - """Compiles the regular expression and stores it.""" - assert self.map is not None, "rule not bound" - - if self.map.host_matching: - domain_rule = self.host or "" - else: - domain_rule = self.subdomain or "" - - self._trace = [] - self._converters = {} - self._static_weights = [] - self._argument_weights = [] - regex_parts = [] - - def _build_regex(rule): - index = 0 - for converter, arguments, variable in parse_rule(rule): - if converter is None: - regex_parts.append(re.escape(variable)) - self._trace.append((False, variable)) - for part in variable.split("/"): - if part: - self._static_weights.append((index, -len(part))) - else: - if arguments: - c_args, c_kwargs = parse_converter_args(arguments) - else: - c_args = () - c_kwargs = {} - convobj = self.get_converter(variable, converter, c_args, c_kwargs) - regex_parts.append("(?P<%s>%s)" % (variable, convobj.regex)) - self._converters[variable] = convobj - self._trace.append((True, variable)) - self._argument_weights.append(convobj.weight) - self.arguments.add(str(variable)) - index = index + 1 - - _build_regex(domain_rule) - regex_parts.append("\\|") - self._trace.append((False, "|")) - _build_regex(self.rule if self.is_leaf else self.rule.rstrip("/")) - if not self.is_leaf: - self._trace.append((False, "/")) - - self._build = self._compile_builder(False).__get__(self, None) - self._build_unknown = self._compile_builder(True).__get__(self, None) - - if self.build_only: - return - regex = r"^%s%s$" % ( - u"".join(regex_parts), - (not self.is_leaf or not self.strict_slashes) - and "(?/?)" - or "", - ) - self._regex = re.compile(regex, re.UNICODE) - - def match(self, path, method=None): - """Check if the rule matches a given path. Path is a string in the - form ``"subdomain|/path"`` and is assembled by the map. If - the map is doing host matching the subdomain part will be the host - instead. - - If the rule matches a dict with the converted values is returned, - otherwise the return value is `None`. - - :internal: - """ - if not self.build_only: - m = self._regex.search(path) - if m is not None: - groups = m.groupdict() - # we have a folder like part of the url without a trailing - # slash and strict slashes enabled. 
raise an exception that - # tells the map to redirect to the same url but with a - # trailing slash - if ( - self.strict_slashes - and not self.is_leaf - and not groups.pop("__suffix__") - and ( - method is None or self.methods is None or method in self.methods - ) - ): - raise RequestSlash() - # if we are not in strict slashes mode we have to remove - # a __suffix__ - elif not self.strict_slashes: - del groups["__suffix__"] - - result = {} - for name, value in iteritems(groups): - try: - value = self._converters[name].to_python(value) - except ValidationError: - return - result[str(name)] = value - if self.defaults: - result.update(self.defaults) - - if self.alias and self.map.redirect_defaults: - raise RequestAliasRedirect(result) - - return result - - @staticmethod - def _get_func_code(code, name): - globs, locs = {}, {} - exec(code, globs, locs) - return locs[name] - - def _compile_builder(self, append_unknown=True): - defaults = self.defaults or {} - dom_ops = [] - url_ops = [] - - opl = dom_ops - for is_dynamic, data in self._trace: - if data == "|" and opl is dom_ops: - opl = url_ops - continue - # this seems like a silly case to ever come up but: - # if a default is given for a value that appears in the rule, - # resolve it to a constant ahead of time - if is_dynamic and data in defaults: - data = self._converters[data].to_url(defaults[data]) - opl.append((False, data)) - elif not is_dynamic: - opl.append( - (False, url_quote(to_bytes(data, self.map.charset), safe="/:|+")) - ) - else: - opl.append((True, data)) - - def _convert(elem): - ret = _prefix_names(_CALL_CONVERTER_CODE_FMT.format(elem=elem)) - ret.args = [ast.Name(str(elem), ast.Load())] # str for py2 - return ret - - def _parts(ops): - parts = [ - _convert(elem) if is_dynamic else ast.Str(s=elem) - for is_dynamic, elem in ops - ] - parts = parts or [ast.Str("")] - # constant fold - ret = [parts[0]] - for p in parts[1:]: - if isinstance(p, ast.Str) and isinstance(ret[-1], ast.Str): - ret[-1] = ast.Str(ret[-1].s + p.s) - else: - ret.append(p) - return ret - - dom_parts = _parts(dom_ops) - url_parts = _parts(url_ops) - if not append_unknown: - body = [] - else: - body = [_IF_KWARGS_URL_ENCODE_AST] - url_parts.extend(_URL_ENCODE_AST_NAMES) - - def _join(parts): - if len(parts) == 1: # shortcut - return parts[0] - elif hasattr(ast, "JoinedStr"): # py36+ - return ast.JoinedStr(parts) - else: - call = _prefix_names('"".join()') - call.args = [ast.Tuple(parts, ast.Load())] - return call - - body.append( - ast.Return(ast.Tuple([_join(dom_parts), _join(url_parts)], ast.Load())) - ) - - # str is necessary for python2 - pargs = [ - str(elem) - for is_dynamic, elem in dom_ops + url_ops - if is_dynamic and elem not in defaults - ] - kargs = [str(k) for k in defaults] - - func_ast = _prefix_names("def _(): pass") - func_ast.name = "".format(self.rule) - if hasattr(ast, "arg"): # py3 - func_ast.args.args.append(ast.arg(".self", None)) - for arg in pargs + kargs: - func_ast.args.args.append(ast.arg(arg, None)) - func_ast.args.kwarg = ast.arg(".kwargs", None) - else: - func_ast.args.args.append(ast.Name(".self", ast.Param())) - for arg in pargs + kargs: - func_ast.args.args.append(ast.Name(arg, ast.Param())) - func_ast.args.kwarg = ".kwargs" - for _ in kargs: - func_ast.args.defaults.append(ast.Str("")) - func_ast.body = body - - # use `ast.parse` instead of `ast.Module` for better portability - # python3.8 changes the signature of `ast.Module` - module = ast.parse("") - module.body = [func_ast] - - # mark everything as on line 1, offset 0 
- # less error-prone than `ast.fix_missing_locations` - # bad line numbers cause an assert to fail in debug builds - for node in ast.walk(module): - if "lineno" in node._attributes: - node.lineno = 1 - if "col_offset" in node._attributes: - node.col_offset = 0 - - code = compile(module, "", "exec") - return self._get_func_code(code, func_ast.name) - - def build(self, values, append_unknown=True): - """Assembles the relative url for that rule and the subdomain. - If building doesn't work for some reasons `None` is returned. - - :internal: - """ - try: - if append_unknown: - return self._build_unknown(**values) - else: - return self._build(**values) - except ValidationError: - return None - - def provides_defaults_for(self, rule): - """Check if this rule has defaults for a given rule. - - :internal: - """ - return ( - not self.build_only - and self.defaults - and self.endpoint == rule.endpoint - and self != rule - and self.arguments == rule.arguments - ) - - def suitable_for(self, values, method=None): - """Check if the dict of values has enough data for url generation. - - :internal: - """ - # if a method was given explicitly and that method is not supported - # by this rule, this rule is not suitable. - if ( - method is not None - and self.methods is not None - and method not in self.methods - ): - return False - - defaults = self.defaults or () - - # all arguments required must be either in the defaults dict or - # the value dictionary otherwise it's not suitable - for key in self.arguments: - if key not in defaults and key not in values: - return False - - # in case defaults are given we ensure that either the value was - # skipped or the value is the same as the default value. - if defaults: - for key, value in iteritems(defaults): - if key in values and value != values[key]: - return False - - return True - - def match_compare_key(self): - """The match compare key for sorting. - - Current implementation: - - 1. rules without any arguments come first for performance - reasons only as we expect them to match faster and some - common ones usually don't have any arguments (index pages etc.) - 2. rules with more static parts come first so the second argument - is the negative length of the number of the static weights. - 3. we order by static weights, which is a combination of index - and length - 4. The more complex rules come first so the next argument is the - negative length of the number of argument weights. - 5. lastly we order by the actual argument weights. - - :internal: - """ - return ( - bool(self.arguments), - -len(self._static_weights), - self._static_weights, - -len(self._argument_weights), - self._argument_weights, - ) - - def build_compare_key(self): - """The build compare key for sorting. 
- - :internal: - """ - return 1 if self.alias else 0, -len(self.arguments), -len(self.defaults or ()) - - def __eq__(self, other): - return self.__class__ is other.__class__ and self._trace == other._trace - - __hash__ = None - - def __ne__(self, other): - return not self.__eq__(other) - - def __str__(self): - return self.rule - - @native_string_result - def __repr__(self): - if self.map is None: - return u"<%s (unbound)>" % self.__class__.__name__ - tmp = [] - for is_dynamic, data in self._trace: - if is_dynamic: - tmp.append(u"<%s>" % data) - else: - tmp.append(data) - return u"<%s %s%s -> %s>" % ( - self.__class__.__name__, - repr((u"".join(tmp)).lstrip(u"|")).lstrip(u"u"), - self.methods is not None and u" (%s)" % u", ".join(self.methods) or u"", - self.endpoint, - ) - - -class BaseConverter(object): - """Base class for all converters.""" - - regex = "[^/]+" - weight = 100 - - def __init__(self, map): - self.map = map - - def to_python(self, value): - return value - - def to_url(self, value): - if isinstance(value, (bytes, bytearray)): - return _fast_url_quote(value) - return _fast_url_quote(text_type(value).encode(self.map.charset)) - - -class UnicodeConverter(BaseConverter): - """This converter is the default converter and accepts any string but - only one path segment. Thus the string can not include a slash. - - This is the default validator. - - Example:: - - Rule('/pages/'), - Rule('/') - - :param map: the :class:`Map`. - :param minlength: the minimum length of the string. Must be greater - or equal 1. - :param maxlength: the maximum length of the string. - :param length: the exact length of the string. - """ - - def __init__(self, map, minlength=1, maxlength=None, length=None): - BaseConverter.__init__(self, map) - if length is not None: - length = "{%d}" % int(length) - else: - if maxlength is None: - maxlength = "" - else: - maxlength = int(maxlength) - length = "{%s,%s}" % (int(minlength), maxlength) - self.regex = "[^/]" + length - - -class AnyConverter(BaseConverter): - """Matches one of the items provided. Items can either be Python - identifiers or strings:: - - Rule('/') - - :param map: the :class:`Map`. - :param items: this function accepts the possible items as positional - arguments. - """ - - def __init__(self, map, *items): - BaseConverter.__init__(self, map) - self.regex = "(?:%s)" % "|".join([re.escape(x) for x in items]) - - -class PathConverter(BaseConverter): - """Like the default :class:`UnicodeConverter`, but it also matches - slashes. This is useful for wikis and similar applications:: - - Rule('/') - Rule('//edit') - - :param map: the :class:`Map`. - """ - - regex = "[^/].*?" - weight = 200 - - -class NumberConverter(BaseConverter): - """Baseclass for `IntegerConverter` and `FloatConverter`. 
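# The converter classes above plug into routing by defining ``regex`` plus the
# ``to_python``/``to_url`` hooks and being registered on the Map. A minimal
# sketch (the YearConverter name and the example rule are illustrative only):
from werkzeug.routing import BaseConverter, Map, Rule

class YearConverter(BaseConverter):
    regex = r"\d{4}"  # match exactly four digits in the URL

    def to_python(self, value):
        return int(value)  # "2019" -> 2019 in the matched values

    def to_url(self, value):
        return "%04d" % value  # 2019 -> "2019" when building URLs

url_map = Map(
    [Rule("/archive/<year:y>/", endpoint="archive")],
    converters={"year": YearConverter},
)
adapter = url_map.bind("example.com")
assert adapter.match("/archive/2019/") == ("archive", {"y": 2019})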
- - :internal: - """ - - weight = 50 - - def __init__(self, map, fixed_digits=0, min=None, max=None, signed=False): - if signed: - self.regex = self.signed_regex - BaseConverter.__init__(self, map) - self.fixed_digits = fixed_digits - self.min = min - self.max = max - self.signed = signed - - def to_python(self, value): - if self.fixed_digits and len(value) != self.fixed_digits: - raise ValidationError() - value = self.num_convert(value) - if (self.min is not None and value < self.min) or ( - self.max is not None and value > self.max - ): - raise ValidationError() - return value - - def to_url(self, value): - value = self.num_convert(value) - if self.fixed_digits: - value = ("%%0%sd" % self.fixed_digits) % value - return str(value) - - @property - def signed_regex(self): - return r"-?" + self.regex - - -class IntegerConverter(NumberConverter): - """This converter only accepts integer values:: - - Rule("/page/") - - By default it only accepts unsigned, positive values. The ``signed`` - parameter will enable signed, negative values. :: - - Rule("/page/") - - :param map: The :class:`Map`. - :param fixed_digits: The number of fixed digits in the URL. If you - set this to ``4`` for example, the rule will only match if the - URL looks like ``/0001/``. The default is variable length. - :param min: The minimal value. - :param max: The maximal value. - :param signed: Allow signed (negative) values. - - .. versionadded:: 0.15 - The ``signed`` parameter. - """ - - regex = r"\d+" - num_convert = int - - -class FloatConverter(NumberConverter): - """This converter only accepts floating point values:: - - Rule("/probability/") - - By default it only accepts unsigned, positive values. The ``signed`` - parameter will enable signed, negative values. :: - - Rule("/offset/") - - :param map: The :class:`Map`. - :param min: The minimal value. - :param max: The maximal value. - :param signed: Allow signed (negative) values. - - .. versionadded:: 0.15 - The ``signed`` parameter. - """ - - regex = r"\d+\.\d+" - num_convert = float - - def __init__(self, map, min=None, max=None, signed=False): - NumberConverter.__init__(self, map, min=min, max=max, signed=signed) - - -class UUIDConverter(BaseConverter): - """This converter only accepts UUID strings:: - - Rule('/object/') - - .. versionadded:: 0.10 - - :param map: the :class:`Map`. - """ - - regex = ( - r"[A-Fa-f0-9]{8}-[A-Fa-f0-9]{4}-" - r"[A-Fa-f0-9]{4}-[A-Fa-f0-9]{4}-[A-Fa-f0-9]{12}" - ) - - def to_python(self, value): - return uuid.UUID(value) - - def to_url(self, value): - return str(value) - - -#: the default converter mapping for the map. -DEFAULT_CONVERTERS = { - "default": UnicodeConverter, - "string": UnicodeConverter, - "any": AnyConverter, - "path": PathConverter, - "int": IntegerConverter, - "float": FloatConverter, - "uuid": UUIDConverter, -} - - -class Map(object): - """The map class stores all the URL rules and some configuration - parameters. Some of the configuration values are only stored on the - `Map` instance since those affect all rules, others are just defaults - and can be overridden for each rule. Note that you have to specify all - arguments besides the `rules` as keyword arguments! - - :param rules: sequence of url rules for this map. - :param default_subdomain: The default subdomain for rules without a - subdomain defined. - :param charset: charset of the url. defaults to ``"utf-8"`` - :param strict_slashes: Take care of trailing slashes. - :param redirect_defaults: This will redirect to the default rule if it - wasn't visited that way. 
This helps creating - unique URLs. - :param converters: A dict of converters that adds additional converters - to the list of converters. If you redefine one - converter this will override the original one. - :param sort_parameters: If set to `True` the url parameters are sorted. - See `url_encode` for more details. - :param sort_key: The sort key function for `url_encode`. - :param encoding_errors: the error method to use for decoding - :param host_matching: if set to `True` it enables the host matching - feature and disables the subdomain one. If - enabled the `host` parameter to rules is used - instead of the `subdomain` one. - - .. versionadded:: 0.5 - `sort_parameters` and `sort_key` was added. - - .. versionadded:: 0.7 - `encoding_errors` and `host_matching` was added. - """ - - #: A dict of default converters to be used. - default_converters = ImmutableDict(DEFAULT_CONVERTERS) - - def __init__( - self, - rules=None, - default_subdomain="", - charset="utf-8", - strict_slashes=True, - redirect_defaults=True, - converters=None, - sort_parameters=False, - sort_key=None, - encoding_errors="replace", - host_matching=False, - ): - self._rules = [] - self._rules_by_endpoint = {} - self._remap = True - self._remap_lock = Lock() - - self.default_subdomain = default_subdomain - self.charset = charset - self.encoding_errors = encoding_errors - self.strict_slashes = strict_slashes - self.redirect_defaults = redirect_defaults - self.host_matching = host_matching - - self.converters = self.default_converters.copy() - if converters: - self.converters.update(converters) - - self.sort_parameters = sort_parameters - self.sort_key = sort_key - - for rulefactory in rules or (): - self.add(rulefactory) - - def is_endpoint_expecting(self, endpoint, *arguments): - """Iterate over all rules and check if the endpoint expects - the arguments provided. This is for example useful if you have - some URLs that expect a language code and others that do not and - you want to wrap the builder a bit so that the current language - code is automatically added if not provided but endpoints expect - it. - - :param endpoint: the endpoint to check. - :param arguments: this function accepts one or more arguments - as positional arguments. Each one of them is - checked. - """ - self.update() - arguments = set(arguments) - for rule in self._rules_by_endpoint[endpoint]: - if arguments.issubset(rule.arguments): - return True - return False - - def iter_rules(self, endpoint=None): - """Iterate over all rules or the rules of an endpoint. - - :param endpoint: if provided only the rules for that endpoint - are returned. - :return: an iterator - """ - self.update() - if endpoint is not None: - return iter(self._rules_by_endpoint[endpoint]) - return iter(self._rules) - - def add(self, rulefactory): - """Add a new rule or factory to the map and bind it. Requires that the - rule is not bound to another map. - - :param rulefactory: a :class:`Rule` or :class:`RuleFactory` - """ - for rule in rulefactory.get_rules(self): - rule.bind(self) - self._rules.append(rule) - self._rules_by_endpoint.setdefault(rule.endpoint, []).append(rule) - self._remap = True - - def bind( - self, - server_name, - script_name=None, - subdomain=None, - url_scheme="http", - default_method="GET", - path_info=None, - query_args=None, - ): - """Return a new :class:`MapAdapter` with the details specified to the - call. Note that `script_name` will default to ``'/'`` if not further - specified or `None`. 
The `server_name` at least is a requirement - because the HTTP RFC requires absolute URLs for redirects and so all - redirect exceptions raised by Werkzeug will contain the full canonical - URL. - - If no path_info is passed to :meth:`match` it will use the default path - info passed to bind. While this doesn't really make sense for - manual bind calls, it's useful if you bind a map to a WSGI - environment which already contains the path info. - - `subdomain` will default to the `default_subdomain` for this map if - no defined. If there is no `default_subdomain` you cannot use the - subdomain feature. - - .. versionadded:: 0.7 - `query_args` added - - .. versionadded:: 0.8 - `query_args` can now also be a string. - - .. versionchanged:: 0.15 - ``path_info`` defaults to ``'/'`` if ``None``. - """ - server_name = server_name.lower() - if self.host_matching: - if subdomain is not None: - raise RuntimeError("host matching enabled and a subdomain was provided") - elif subdomain is None: - subdomain = self.default_subdomain - if script_name is None: - script_name = "/" - if path_info is None: - path_info = "/" - try: - server_name = _encode_idna(server_name) - except UnicodeError: - raise BadHost() - return MapAdapter( - self, - server_name, - script_name, - subdomain, - url_scheme, - path_info, - default_method, - query_args, - ) - - def bind_to_environ(self, environ, server_name=None, subdomain=None): - """Like :meth:`bind` but you can pass it an WSGI environment and it - will fetch the information from that dictionary. Note that because of - limitations in the protocol there is no way to get the current - subdomain and real `server_name` from the environment. If you don't - provide it, Werkzeug will use `SERVER_NAME` and `SERVER_PORT` (or - `HTTP_HOST` if provided) as used `server_name` with disabled subdomain - feature. - - If `subdomain` is `None` but an environment and a server name is - provided it will calculate the current subdomain automatically. - Example: `server_name` is ``'example.com'`` and the `SERVER_NAME` - in the wsgi `environ` is ``'staging.dev.example.com'`` the calculated - subdomain will be ``'staging.dev'``. - - If the object passed as environ has an environ attribute, the value of - this attribute is used instead. This allows you to pass request - objects. Additionally `PATH_INFO` added as a default of the - :class:`MapAdapter` so that you don't have to pass the path info to - the match method. - - .. versionchanged:: 0.5 - previously this method accepted a bogus `calculate_subdomain` - parameter that did not have any effect. It was removed because - of that. - - .. versionchanged:: 0.8 - This will no longer raise a ValueError when an unexpected server - name was passed. - - :param environ: a WSGI environment. - :param server_name: an optional server name hint (see above). - :param subdomain: optionally the current subdomain (see above). - """ - environ = _get_environ(environ) - - wsgi_server_name = get_host(environ).lower() - - if server_name is None: - server_name = wsgi_server_name - else: - server_name = server_name.lower() - - if subdomain is None and not self.host_matching: - cur_server_name = wsgi_server_name.split(".") - real_server_name = server_name.split(".") - offset = -len(real_server_name) - if cur_server_name[offset:] != real_server_name: - # This can happen even with valid configs if the server was - # accesssed directly by IP address under some situations. 
- # Instead of raising an exception like in Werkzeug 0.7 or - # earlier we go by an invalid subdomain which will result - # in a 404 error on matching. - subdomain = "" - else: - subdomain = ".".join(filter(None, cur_server_name[:offset])) - - def _get_wsgi_string(name): - val = environ.get(name) - if val is not None: - return wsgi_decoding_dance(val, self.charset) - - script_name = _get_wsgi_string("SCRIPT_NAME") - path_info = _get_wsgi_string("PATH_INFO") - query_args = _get_wsgi_string("QUERY_STRING") - return Map.bind( - self, - server_name, - script_name, - subdomain, - environ["wsgi.url_scheme"], - environ["REQUEST_METHOD"], - path_info, - query_args=query_args, - ) - - def update(self): - """Called before matching and building to keep the compiled rules - in the correct order after things changed. - """ - if not self._remap: - return - - with self._remap_lock: - if not self._remap: - return - - self._rules.sort(key=lambda x: x.match_compare_key()) - for rules in itervalues(self._rules_by_endpoint): - rules.sort(key=lambda x: x.build_compare_key()) - self._remap = False - - def __repr__(self): - rules = self.iter_rules() - return "%s(%s)" % (self.__class__.__name__, pformat(list(rules))) - - -class MapAdapter(object): - - """Returned by :meth:`Map.bind` or :meth:`Map.bind_to_environ` and does - the URL matching and building based on runtime information. - """ - - def __init__( - self, - map, - server_name, - script_name, - subdomain, - url_scheme, - path_info, - default_method, - query_args=None, - ): - self.map = map - self.server_name = to_unicode(server_name) - script_name = to_unicode(script_name) - if not script_name.endswith(u"/"): - script_name += u"/" - self.script_name = script_name - self.subdomain = to_unicode(subdomain) - self.url_scheme = to_unicode(url_scheme) - self.path_info = to_unicode(path_info) - self.default_method = to_unicode(default_method) - self.query_args = query_args - - def dispatch( - self, view_func, path_info=None, method=None, catch_http_exceptions=False - ): - """Does the complete dispatching process. `view_func` is called with - the endpoint and a dict with the values for the view. It should - look up the view function, call it, and return a response object - or WSGI application. http exceptions are not caught by default - so that applications can display nicer error messages by just - catching them by hand. If you want to stick with the default - error messages you can pass it ``catch_http_exceptions=True`` and - it will catch the http exceptions. - - Here a small example for the dispatch usage:: - - from werkzeug.wrappers import Request, Response - from werkzeug.wsgi import responder - from werkzeug.routing import Map, Rule - - def on_index(request): - return Response('Hello from the index') - - url_map = Map([Rule('/', endpoint='index')]) - views = {'index': on_index} - - @responder - def application(environ, start_response): - request = Request(environ) - urls = url_map.bind_to_environ(environ) - return urls.dispatch(lambda e, v: views[e](request, **v), - catch_http_exceptions=True) - - Keep in mind that this method might return exception objects, too, so - use :class:`Response.force_type` to get a response object. - - :param view_func: a function that is called with the endpoint as - first argument and the value dict as second. Has - to dispatch to the actual view function with this - information. (see above) - :param path_info: the path info to use for matching. Overrides the - path info specified on binding. 
- :param method: the HTTP method used for matching. Overrides the - method specified on binding. - :param catch_http_exceptions: set to `True` to catch any of the - werkzeug :class:`HTTPException`\\s. - """ - try: - try: - endpoint, args = self.match(path_info, method) - except RequestRedirect as e: - return e - return view_func(endpoint, args) - except HTTPException as e: - if catch_http_exceptions: - return e - raise - - def match(self, path_info=None, method=None, return_rule=False, query_args=None): - """The usage is simple: you just pass the match method the current - path info as well as the method (which defaults to `GET`). The - following things can then happen: - - - you receive a `NotFound` exception that indicates that no URL is - matching. A `NotFound` exception is also a WSGI application you - can call to get a default page not found page (happens to be the - same object as `werkzeug.exceptions.NotFound`) - - - you receive a `MethodNotAllowed` exception that indicates that there - is a match for this URL but not for the current request method. - This is useful for RESTful applications. - - - you receive a `RequestRedirect` exception with a `new_url` - attribute. This exception is used to notify you about a request - Werkzeug requests from your WSGI application. This is for example the - case if you request ``/foo`` although the correct URL is ``/foo/`` - You can use the `RequestRedirect` instance as response-like object - similar to all other subclasses of `HTTPException`. - - - you get a tuple in the form ``(endpoint, arguments)`` if there is - a match (unless `return_rule` is True, in which case you get a tuple - in the form ``(rule, arguments)``) - - If the path info is not passed to the match method the default path - info of the map is used (defaults to the root URL if not defined - explicitly). - - All of the exceptions raised are subclasses of `HTTPException` so they - can be used as WSGI responses. They will all render generic error or - redirect pages. - - Here is a small example for matching: - - >>> m = Map([ - ... Rule('/', endpoint='index'), - ... Rule('/downloads/', endpoint='downloads/index'), - ... Rule('/downloads/', endpoint='downloads/show') - ... ]) - >>> urls = m.bind("example.com", "/") - >>> urls.match("/", "GET") - ('index', {}) - >>> urls.match("/downloads/42") - ('downloads/show', {'id': 42}) - - And here is what happens on redirect and missing URLs: - - >>> urls.match("/downloads") - Traceback (most recent call last): - ... - RequestRedirect: http://example.com/downloads/ - >>> urls.match("/missing") - Traceback (most recent call last): - ... - NotFound: 404 Not Found - - :param path_info: the path info to use for matching. Overrides the - path info specified on binding. - :param method: the HTTP method used for matching. Overrides the - method specified on binding. - :param return_rule: return the rule that matched instead of just the - endpoint (defaults to `False`). - :param query_args: optional query arguments that are used for - automatic redirects as string or dictionary. It's - currently not possible to use the query arguments - for URL matching. - - .. versionadded:: 0.6 - `return_rule` was added. - - .. versionadded:: 0.7 - `query_args` was added. - - .. versionchanged:: 0.8 - `query_args` can now also be a string. 
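# Because all of the exceptions described above are themselves WSGI
# applications, a dispatcher can simply let them render the response. A small
# sketch (``url_map`` and the ``views`` mapping are assumed to exist elsewhere):
from werkzeug.exceptions import HTTPException

def application(environ, start_response):
    adapter = url_map.bind_to_environ(environ)
    try:
        endpoint, args = adapter.match()
    except HTTPException as e:
        # NotFound, MethodNotAllowed and RequestRedirect all produce a
        # reasonable default response when called as WSGI applications.
        return e(environ, start_response)
    return views[endpoint](environ, start_response, **args)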
- """ - self.map.update() - if path_info is None: - path_info = self.path_info - else: - path_info = to_unicode(path_info, self.map.charset) - if query_args is None: - query_args = self.query_args - method = (method or self.default_method).upper() - - path = u"%s|%s" % ( - self.map.host_matching and self.server_name or self.subdomain, - path_info and "/%s" % path_info.lstrip("/"), - ) - - have_match_for = set() - for rule in self.map._rules: - try: - rv = rule.match(path, method) - except RequestSlash: - raise RequestRedirect( - self.make_redirect_url( - url_quote(path_info, self.map.charset, safe="/:|+") + "/", - query_args, - ) - ) - except RequestAliasRedirect as e: - raise RequestRedirect( - self.make_alias_redirect_url( - path, rule.endpoint, e.matched_values, method, query_args - ) - ) - if rv is None: - continue - if rule.methods is not None and method not in rule.methods: - have_match_for.update(rule.methods) - continue - - if self.map.redirect_defaults: - redirect_url = self.get_default_redirect(rule, method, rv, query_args) - if redirect_url is not None: - raise RequestRedirect(redirect_url) - - if rule.redirect_to is not None: - if isinstance(rule.redirect_to, string_types): - - def _handle_match(match): - value = rv[match.group(1)] - return rule._converters[match.group(1)].to_url(value) - - redirect_url = _simple_rule_re.sub(_handle_match, rule.redirect_to) - else: - redirect_url = rule.redirect_to(self, **rv) - raise RequestRedirect( - str( - url_join( - "%s://%s%s%s" - % ( - self.url_scheme or "http", - self.subdomain + "." if self.subdomain else "", - self.server_name, - self.script_name, - ), - redirect_url, - ) - ) - ) - - if return_rule: - return rule, rv - else: - return rule.endpoint, rv - - if have_match_for: - raise MethodNotAllowed(valid_methods=list(have_match_for)) - raise NotFound() - - def test(self, path_info=None, method=None): - """Test if a rule would match. Works like `match` but returns `True` - if the URL matches, or `False` if it does not exist. - - :param path_info: the path info to use for matching. Overrides the - path info specified on binding. - :param method: the HTTP method used for matching. Overrides the - method specified on binding. - """ - try: - self.match(path_info, method) - except RequestRedirect: - pass - except HTTPException: - return False - return True - - def allowed_methods(self, path_info=None): - """Returns the valid methods that match for a given path. - - .. versionadded:: 0.7 - """ - try: - self.match(path_info, method="--") - except MethodNotAllowed as e: - return e.valid_methods - except HTTPException: - pass - return [] - - def get_host(self, domain_part): - """Figures out the full host name for the given domain part. The - domain part is a subdomain in case host matching is disabled or - a full host name. - """ - if self.map.host_matching: - if domain_part is None: - return self.server_name - return to_unicode(domain_part, "ascii") - subdomain = domain_part - if subdomain is None: - subdomain = self.subdomain - else: - subdomain = to_unicode(subdomain, "ascii") - return (subdomain + u"." if subdomain else u"") + self.server_name - - def get_default_redirect(self, rule, method, values, query_args): - """A helper that returns the URL to redirect to if it finds one. - This is used for default redirecting only. - - :internal: - """ - assert self.map.redirect_defaults - for r in self.map._rules_by_endpoint[rule.endpoint]: - # every rule that comes after this one, including ourself - # has a lower priority for the defaults. 
We order the ones - # with the highest priority up for building. - if r is rule: - break - if r.provides_defaults_for(rule) and r.suitable_for(values, method): - values.update(r.defaults) - domain_part, path = r.build(values) - return self.make_redirect_url(path, query_args, domain_part=domain_part) - - def encode_query_args(self, query_args): - if not isinstance(query_args, string_types): - query_args = url_encode(query_args, self.map.charset) - return query_args - - def make_redirect_url(self, path_info, query_args=None, domain_part=None): - """Creates a redirect URL. - - :internal: - """ - suffix = "" - if query_args: - suffix = "?" + self.encode_query_args(query_args) - return str( - "%s://%s/%s%s" - % ( - self.url_scheme or "http", - self.get_host(domain_part), - posixpath.join( - self.script_name[:-1].lstrip("/"), path_info.lstrip("/") - ), - suffix, - ) - ) - - def make_alias_redirect_url(self, path, endpoint, values, method, query_args): - """Internally called to make an alias redirect URL.""" - url = self.build( - endpoint, values, method, append_unknown=False, force_external=True - ) - if query_args: - url += "?" + self.encode_query_args(query_args) - assert url != path, "detected invalid alias setting. No canonical URL found" - return url - - def _partial_build(self, endpoint, values, method, append_unknown): - """Helper for :meth:`build`. Returns subdomain and path for the - rule that accepts this endpoint, values and method. - - :internal: - """ - # in case the method is none, try with the default method first - if method is None: - rv = self._partial_build( - endpoint, values, self.default_method, append_unknown - ) - if rv is not None: - return rv - - # default method did not match or a specific method is passed, - # check all and go with first result. - for rule in self.map._rules_by_endpoint.get(endpoint, ()): - if rule.suitable_for(values, method): - rv = rule.build(values, append_unknown) - if rv is not None: - return rv - - def build( - self, - endpoint, - values=None, - method=None, - force_external=False, - append_unknown=True, - ): - """Building URLs works pretty much the other way round. Instead of - `match` you call `build` and pass it the endpoint and a dict of - arguments for the placeholders. - - The `build` function also accepts an argument called `force_external` - which, if you set it to `True` will force external URLs. Per default - external URLs (include the server name) will only be used if the - target URL is on a different subdomain. - - >>> m = Map([ - ... Rule('/', endpoint='index'), - ... Rule('/downloads/', endpoint='downloads/index'), - ... Rule('/downloads/', endpoint='downloads/show') - ... ]) - >>> urls = m.bind("example.com", "/") - >>> urls.build("index", {}) - '/' - >>> urls.build("downloads/show", {'id': 42}) - '/downloads/42' - >>> urls.build("downloads/show", {'id': 42}, force_external=True) - 'http://example.com/downloads/42' - - Because URLs cannot contain non ASCII data you will always get - bytestrings back. Non ASCII characters are urlencoded with the - charset defined on the map instance. 
- - Additional values are converted to unicode and appended to the URL as - URL querystring parameters: - - >>> urls.build("index", {'q': 'My Searchstring'}) - '/?q=My+Searchstring' - - When processing those additional values, lists are furthermore - interpreted as multiple values (as per - :py:class:`werkzeug.datastructures.MultiDict`): - - >>> urls.build("index", {'q': ['a', 'b', 'c']}) - '/?q=a&q=b&q=c' - - Passing a ``MultiDict`` will also add multiple values: - - >>> urls.build("index", MultiDict((('p', 'z'), ('q', 'a'), ('q', 'b')))) - '/?p=z&q=a&q=b' - - If a rule does not exist when building a `BuildError` exception is - raised. - - The build method accepts an argument called `method` which allows you - to specify the method you want to have an URL built for if you have - different methods for the same endpoint specified. - - .. versionadded:: 0.6 - the `append_unknown` parameter was added. - - :param endpoint: the endpoint of the URL to build. - :param values: the values for the URL to build. Unhandled values are - appended to the URL as query parameters. - :param method: the HTTP method for the rule if there are different - URLs for different methods on the same endpoint. - :param force_external: enforce full canonical external URLs. If the URL - scheme is not provided, this will generate - a protocol-relative URL. - :param append_unknown: unknown parameters are appended to the generated - URL as query string argument. Disable this - if you want the builder to ignore those. - """ - self.map.update() - - if values: - if isinstance(values, MultiDict): - temp_values = {} - # iteritems(dict, values) is like `values.lists()` - # without the call or `list()` coercion overhead. - for key, value in iteritems(dict, values): - if not value: - continue - if len(value) == 1: # flatten single item lists - value = value[0] - if value is None: # drop None - continue - temp_values[key] = value - values = temp_values - else: - # drop None - values = dict(i for i in iteritems(values) if i[1] is not None) - else: - values = {} - - rv = self._partial_build(endpoint, values, method, append_unknown) - if rv is None: - raise BuildError(endpoint, values, method, self) - domain_part, path = rv - - host = self.get_host(domain_part) - - # shortcut this. - if not force_external and ( - (self.map.host_matching and host == self.server_name) - or (not self.map.host_matching and domain_part == self.subdomain) - ): - return "%s/%s" % (self.script_name.rstrip("/"), path.lstrip("/")) - return str( - "%s//%s%s/%s" - % ( - self.url_scheme + ":" if self.url_scheme else "", - host, - self.script_name[:-1], - path.lstrip("/"), - ) - ) diff --git a/venv/lib/python3.6/site-packages/werkzeug/security.py b/venv/lib/python3.6/site-packages/werkzeug/security.py deleted file mode 100644 index 2308040..0000000 --- a/venv/lib/python3.6/site-packages/werkzeug/security.py +++ /dev/null @@ -1,249 +0,0 @@ -# -*- coding: utf-8 -*- -""" - werkzeug.security - ~~~~~~~~~~~~~~~~~ - - Security related helpers such as secure password hashing tools. 
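To summarize the URL map API whose implementation is removed above, a small sketch assuming werkzeug is installed from PyPI instead of this vendored copy (routes and values are illustrative)::

    from werkzeug.routing import Map, Rule

    url_map = Map([
        Rule("/", endpoint="index"),
        Rule("/downloads/", endpoint="downloads/index"),
        Rule("/downloads/<int:id>", endpoint="downloads/show"),
    ])
    urls = url_map.bind("example.com", "/")

    urls.match("/downloads/42")        # ('downloads/show', {'id': 42})
    urls.build("downloads/show", {"id": 42})                       # '/downloads/42'
    urls.build("downloads/show", {"id": 42}, force_external=True)  # 'http://example.com/downloads/42'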
- - :copyright: 2007 Pallets - :license: BSD-3-Clause -""" -import codecs -import hashlib -import hmac -import os -import posixpath -from random import SystemRandom -from struct import Struct - -from ._compat import izip -from ._compat import PY2 -from ._compat import range_type -from ._compat import text_type -from ._compat import to_bytes -from ._compat import to_native - -SALT_CHARS = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789" -DEFAULT_PBKDF2_ITERATIONS = 150000 - -_pack_int = Struct(">I").pack -_builtin_safe_str_cmp = getattr(hmac, "compare_digest", None) -_sys_rng = SystemRandom() -_os_alt_seps = list( - sep for sep in [os.path.sep, os.path.altsep] if sep not in (None, "/") -) - - -def pbkdf2_hex( - data, salt, iterations=DEFAULT_PBKDF2_ITERATIONS, keylen=None, hashfunc=None -): - """Like :func:`pbkdf2_bin`, but returns a hex-encoded string. - - .. versionadded:: 0.9 - - :param data: the data to derive. - :param salt: the salt for the derivation. - :param iterations: the number of iterations. - :param keylen: the length of the resulting key. If not provided, - the digest size will be used. - :param hashfunc: the hash function to use. This can either be the - string name of a known hash function, or a function - from the hashlib module. Defaults to sha256. - """ - rv = pbkdf2_bin(data, salt, iterations, keylen, hashfunc) - return to_native(codecs.encode(rv, "hex_codec")) - - -def pbkdf2_bin( - data, salt, iterations=DEFAULT_PBKDF2_ITERATIONS, keylen=None, hashfunc=None -): - """Returns a binary digest for the PBKDF2 hash algorithm of `data` - with the given `salt`. It iterates `iterations` times and produces a - key of `keylen` bytes. By default, SHA-256 is used as hash function; - a different hashlib `hashfunc` can be provided. - - .. versionadded:: 0.9 - - :param data: the data to derive. - :param salt: the salt for the derivation. - :param iterations: the number of iterations. - :param keylen: the length of the resulting key. If not provided - the digest size will be used. - :param hashfunc: the hash function to use. This can either be the - string name of a known hash function or a function - from the hashlib module. Defaults to sha256. - """ - if not hashfunc: - hashfunc = "sha256" - - data = to_bytes(data) - salt = to_bytes(salt) - - if callable(hashfunc): - _test_hash = hashfunc() - hash_name = getattr(_test_hash, "name", None) - else: - hash_name = hashfunc - return hashlib.pbkdf2_hmac(hash_name, data, salt, iterations, keylen) - - -def safe_str_cmp(a, b): - """This function compares strings in somewhat constant time. This - requires that the length of at least one string is known in advance. - - Returns `True` if the two strings are equal, or `False` if they are not. - - .. versionadded:: 0.7 - """ - if isinstance(a, text_type): - a = a.encode("utf-8") - if isinstance(b, text_type): - b = b.encode("utf-8") - - if _builtin_safe_str_cmp is not None: - return _builtin_safe_str_cmp(a, b) - - if len(a) != len(b): - return False - - rv = 0 - if PY2: - for x, y in izip(a, b): - rv |= ord(x) ^ ord(y) - else: - for x, y in izip(a, b): - rv |= x ^ y - - return rv == 0 - - -def gen_salt(length): - """Generate a random string of SALT_CHARS with specified ``length``.""" - if length <= 0: - raise ValueError("Salt length must be positive") - return "".join(_sys_rng.choice(SALT_CHARS) for _ in range_type(length)) - - -def _hash_internal(method, salt, password): - """Internal password hash helper. Supports plaintext without salt, - unsalted and salted passwords. 
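A quick sketch of the low-level PBKDF2 helpers deleted above; the salt handling here is illustrative, since real callers normally go through ``generate_password_hash``::

    from werkzeug.security import gen_salt, pbkdf2_hex

    salt = gen_salt(8)
    # 150000 iterations of PBKDF2-HMAC-SHA256; keylen defaults to the digest size
    digest = pbkdf2_hex("my secret", salt, iterations=150000, hashfunc="sha256")
    assert len(digest) == 64  # hex-encoded 32-byte SHA-256 digest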
In case salted passwords are used - hmac is used. - """ - if method == "plain": - return password, method - - if isinstance(password, text_type): - password = password.encode("utf-8") - - if method.startswith("pbkdf2:"): - args = method[7:].split(":") - if len(args) not in (1, 2): - raise ValueError("Invalid number of arguments for PBKDF2") - method = args.pop(0) - iterations = args and int(args[0] or 0) or DEFAULT_PBKDF2_ITERATIONS - is_pbkdf2 = True - actual_method = "pbkdf2:%s:%d" % (method, iterations) - else: - is_pbkdf2 = False - actual_method = method - - if is_pbkdf2: - if not salt: - raise ValueError("Salt is required for PBKDF2") - rv = pbkdf2_hex(password, salt, iterations, hashfunc=method) - elif salt: - if isinstance(salt, text_type): - salt = salt.encode("utf-8") - mac = _create_mac(salt, password, method) - rv = mac.hexdigest() - else: - rv = hashlib.new(method, password).hexdigest() - return rv, actual_method - - -def _create_mac(key, msg, method): - if callable(method): - return hmac.HMAC(key, msg, method) - - def hashfunc(d=b""): - return hashlib.new(method, d) - - # Python 2.7 used ``hasattr(digestmod, '__call__')`` - # to detect if hashfunc is callable - hashfunc.__call__ = hashfunc - return hmac.HMAC(key, msg, hashfunc) - - -def generate_password_hash(password, method="pbkdf2:sha256", salt_length=8): - """Hash a password with the given method and salt with a string of - the given length. The format of the string returned includes the method - that was used so that :func:`check_password_hash` can check the hash. - - The format for the hashed string looks like this:: - - method$salt$hash - - This method can **not** generate unsalted passwords but it is possible - to set param method='plain' in order to enforce plaintext passwords. - If a salt is used, hmac is used internally to salt the password. - - If PBKDF2 is wanted it can be enabled by setting the method to - ``pbkdf2:method:iterations`` where iterations is optional:: - - pbkdf2:sha256:80000$salt$hash - pbkdf2:sha256$salt$hash - - :param password: the password to hash. - :param method: the hash method to use (one that hashlib supports). Can - optionally be in the format ``pbkdf2:[:iterations]`` - to enable PBKDF2. - :param salt_length: the length of the salt in letters. - """ - salt = gen_salt(salt_length) if method != "plain" else "" - h, actual_method = _hash_internal(method, salt, password) - return "%s$%s$%s" % (actual_method, salt, h) - - -def check_password_hash(pwhash, password): - """check a password against a given salted and hashed password value. - In order to support unsalted legacy passwords this method supports - plain text passwords, md5 and sha1 hashes (both salted and unsalted). - - Returns `True` if the password matched, `False` otherwise. - - :param pwhash: a hashed string like returned by - :func:`generate_password_hash`. - :param password: the plaintext password to compare against the hash. - """ - if pwhash.count("$") < 2: - return False - method, salt, hashval = pwhash.split("$", 2) - return safe_str_cmp(_hash_internal(method, salt, password)[0], hashval) - - -def safe_join(directory, *pathnames): - """Safely join zero or more untrusted path components to a base - directory to avoid escaping the base directory. - - :param directory: The trusted base directory. - :param pathnames: The untrusted path components relative to the - base directory. - :return: A safe path, otherwise ``None``. 
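The typical round trip through the two high-level hashing helpers removed above looks like this (the password and method string are illustrative)::

    from werkzeug.security import check_password_hash, generate_password_hash

    pwhash = generate_password_hash("s3cret", method="pbkdf2:sha256:150000")
    # stored string has the form "pbkdf2:sha256:150000$<salt>$<hexdigest>"
    assert check_password_hash(pwhash, "s3cret")
    assert not check_password_hash(pwhash, "wrong password")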
- """ - parts = [directory] - - for filename in pathnames: - if filename != "": - filename = posixpath.normpath(filename) - - if ( - any(sep in filename for sep in _os_alt_seps) - or os.path.isabs(filename) - or filename == ".." - or filename.startswith("../") - ): - return None - - parts.append(filename) - - return posixpath.join(*parts) diff --git a/venv/lib/python3.6/site-packages/werkzeug/serving.py b/venv/lib/python3.6/site-packages/werkzeug/serving.py deleted file mode 100644 index ff9f880..0000000 --- a/venv/lib/python3.6/site-packages/werkzeug/serving.py +++ /dev/null @@ -1,1074 +0,0 @@ -# -*- coding: utf-8 -*- -""" - werkzeug.serving - ~~~~~~~~~~~~~~~~ - - There are many ways to serve a WSGI application. While you're developing - it you usually don't want a full blown webserver like Apache but a simple - standalone one. From Python 2.5 onwards there is the `wsgiref`_ server in - the standard library. If you're using older versions of Python you can - download the package from the cheeseshop. - - However there are some caveats. Sourcecode won't reload itself when - changed and each time you kill the server using ``^C`` you get an - `KeyboardInterrupt` error. While the latter is easy to solve the first - one can be a pain in the ass in some situations. - - The easiest way is creating a small ``start-myproject.py`` that runs the - application:: - - #!/usr/bin/env python - # -*- coding: utf-8 -*- - from myproject import make_app - from werkzeug.serving import run_simple - - app = make_app(...) - run_simple('localhost', 8080, app, use_reloader=True) - - You can also pass it a `extra_files` keyword argument with a list of - additional files (like configuration files) you want to observe. - - For bigger applications you should consider using `click` - (http://click.pocoo.org) instead of a simple start file. - - - :copyright: 2007 Pallets - :license: BSD-3-Clause -""" -import io -import os -import signal -import socket -import sys - -import werkzeug -from ._compat import PY2 -from ._compat import reraise -from ._compat import WIN -from ._compat import wsgi_encoding_dance -from ._internal import _log -from .exceptions import InternalServerError -from .urls import uri_to_iri -from .urls import url_parse -from .urls import url_unquote - -try: - import socketserver - from http.server import BaseHTTPRequestHandler - from http.server import HTTPServer -except ImportError: - import SocketServer as socketserver - from BaseHTTPServer import HTTPServer - from BaseHTTPServer import BaseHTTPRequestHandler - -try: - import ssl -except ImportError: - - class _SslDummy(object): - def __getattr__(self, name): - raise RuntimeError("SSL support unavailable") - - ssl = _SslDummy() - -try: - import termcolor -except ImportError: - termcolor = None - - -def _get_openssl_crypto_module(): - try: - from OpenSSL import crypto - except ImportError: - raise TypeError("Using ad-hoc certificates requires the pyOpenSSL library.") - else: - return crypto - - -ThreadingMixIn = socketserver.ThreadingMixIn -can_fork = hasattr(os, "fork") - -if can_fork: - ForkingMixIn = socketserver.ForkingMixIn -else: - - class ForkingMixIn(object): - pass - - -try: - af_unix = socket.AF_UNIX -except AttributeError: - af_unix = None - - -LISTEN_QUEUE = 128 -can_open_by_fd = not WIN and hasattr(socket, "fromfd") - -# On Python 3, ConnectionError represents the same errnos as -# socket.error from Python 2, while socket.error is an alias for the -# more generic OSError. 
-if PY2: - _ConnectionError = socket.error -else: - _ConnectionError = ConnectionError - - -class DechunkedInput(io.RawIOBase): - """An input stream that handles Transfer-Encoding 'chunked'""" - - def __init__(self, rfile): - self._rfile = rfile - self._done = False - self._len = 0 - - def readable(self): - return True - - def read_chunk_len(self): - try: - line = self._rfile.readline().decode("latin1") - _len = int(line.strip(), 16) - except ValueError: - raise IOError("Invalid chunk header") - if _len < 0: - raise IOError("Negative chunk length not allowed") - return _len - - def readinto(self, buf): - read = 0 - while not self._done and read < len(buf): - if self._len == 0: - # This is the first chunk or we fully consumed the previous - # one. Read the next length of the next chunk - self._len = self.read_chunk_len() - - if self._len == 0: - # Found the final chunk of size 0. The stream is now exhausted, - # but there is still a final newline that should be consumed - self._done = True - - if self._len > 0: - # There is data (left) in this chunk, so append it to the - # buffer. If this operation fully consumes the chunk, this will - # reset self._len to 0. - n = min(len(buf), self._len) - buf[read : read + n] = self._rfile.read(n) - self._len -= n - read += n - - if self._len == 0: - # Skip the terminating newline of a chunk that has been fully - # consumed. This also applies to the 0-sized final chunk - terminator = self._rfile.readline() - if terminator not in (b"\n", b"\r\n", b"\r"): - raise IOError("Missing chunk terminating newline") - - return read - - -class WSGIRequestHandler(BaseHTTPRequestHandler, object): - - """A request handler that implements WSGI dispatching.""" - - @property - def server_version(self): - return "Werkzeug/" + werkzeug.__version__ - - def make_environ(self): - request_url = url_parse(self.path) - - def shutdown_server(): - self.server.shutdown_signal = True - - url_scheme = "http" if self.server.ssl_context is None else "https" - if not self.client_address: - self.client_address = "" - if isinstance(self.client_address, str): - self.client_address = (self.client_address, 0) - else: - pass - path_info = url_unquote(request_url.path) - - environ = { - "wsgi.version": (1, 0), - "wsgi.url_scheme": url_scheme, - "wsgi.input": self.rfile, - "wsgi.errors": sys.stderr, - "wsgi.multithread": self.server.multithread, - "wsgi.multiprocess": self.server.multiprocess, - "wsgi.run_once": False, - "werkzeug.server.shutdown": shutdown_server, - "SERVER_SOFTWARE": self.server_version, - "REQUEST_METHOD": self.command, - "SCRIPT_NAME": "", - "PATH_INFO": wsgi_encoding_dance(path_info), - "QUERY_STRING": wsgi_encoding_dance(request_url.query), - # Non-standard, added by mod_wsgi, uWSGI - "REQUEST_URI": wsgi_encoding_dance(self.path), - # Non-standard, added by gunicorn - "RAW_URI": wsgi_encoding_dance(self.path), - "REMOTE_ADDR": self.address_string(), - "REMOTE_PORT": self.port_integer(), - "SERVER_NAME": self.server.server_address[0], - "SERVER_PORT": str(self.server.server_address[1]), - "SERVER_PROTOCOL": self.request_version, - } - - for key, value in self.get_header_items(): - key = key.upper().replace("-", "_") - value = value.replace("\r\n", "") - if key not in ("CONTENT_TYPE", "CONTENT_LENGTH"): - key = "HTTP_" + key - if key in environ: - value = "{},{}".format(environ[key], value) - environ[key] = value - - if environ.get("HTTP_TRANSFER_ENCODING", "").strip().lower() == "chunked": - environ["wsgi.input_terminated"] = True - environ["wsgi.input"] = 
DechunkedInput(environ["wsgi.input"]) - - if request_url.scheme and request_url.netloc: - environ["HTTP_HOST"] = request_url.netloc - - return environ - - def run_wsgi(self): - if self.headers.get("Expect", "").lower().strip() == "100-continue": - self.wfile.write(b"HTTP/1.1 100 Continue\r\n\r\n") - - self.environ = environ = self.make_environ() - headers_set = [] - headers_sent = [] - - def write(data): - assert headers_set, "write() before start_response" - if not headers_sent: - status, response_headers = headers_sent[:] = headers_set - try: - code, msg = status.split(None, 1) - except ValueError: - code, msg = status, "" - code = int(code) - self.send_response(code, msg) - header_keys = set() - for key, value in response_headers: - self.send_header(key, value) - key = key.lower() - header_keys.add(key) - if not ( - "content-length" in header_keys - or environ["REQUEST_METHOD"] == "HEAD" - or code < 200 - or code in (204, 304) - ): - self.close_connection = True - self.send_header("Connection", "close") - if "server" not in header_keys: - self.send_header("Server", self.version_string()) - if "date" not in header_keys: - self.send_header("Date", self.date_time_string()) - self.end_headers() - - assert isinstance(data, bytes), "applications must write bytes" - self.wfile.write(data) - self.wfile.flush() - - def start_response(status, response_headers, exc_info=None): - if exc_info: - try: - if headers_sent: - reraise(*exc_info) - finally: - exc_info = None - elif headers_set: - raise AssertionError("Headers already set") - headers_set[:] = [status, response_headers] - return write - - def execute(app): - application_iter = app(environ, start_response) - try: - for data in application_iter: - write(data) - if not headers_sent: - write(b"") - finally: - if hasattr(application_iter, "close"): - application_iter.close() - application_iter = None - - try: - execute(self.server.app) - except (_ConnectionError, socket.timeout) as e: - self.connection_dropped(e, environ) - except Exception: - if self.server.passthrough_errors: - raise - from .debug.tbtools import get_current_traceback - - traceback = get_current_traceback(ignore_system_exceptions=True) - try: - # if we haven't yet sent the headers but they are set - # we roll back to be able to set them again. - if not headers_sent: - del headers_set[:] - execute(InternalServerError()) - except Exception: - pass - self.server.log("error", "Error on request:\n%s", traceback.plaintext) - - def handle(self): - """Handles a request ignoring dropped connections.""" - rv = None - try: - rv = BaseHTTPRequestHandler.handle(self) - except (_ConnectionError, socket.timeout) as e: - self.connection_dropped(e) - except Exception as e: - if self.server.ssl_context is None or not is_ssl_error(e): - raise - if self.server.shutdown_signal: - self.initiate_shutdown() - return rv - - def initiate_shutdown(self): - """A horrible, horrible way to kill the server for Python 2.6 and - later. It's the best we can do. - """ - # Windows does not provide SIGKILL, go with SIGTERM then. - sig = getattr(signal, "SIGKILL", signal.SIGTERM) - # reloader active - if is_running_from_reloader(): - os.kill(os.getpid(), sig) - # python 2.7 - self.server._BaseServer__shutdown_request = True - # python 2.6 - self.server._BaseServer__serving = False - - def connection_dropped(self, error, environ=None): - """Called if the connection was closed by the client. By default - nothing happens. 
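``WSGIRequestHandler`` is also the dev server's public extension point; a hypothetical subclass (``LoggingHandler`` is an assumed name) that hooks the no-op ``connection_dropped`` shown above might look like this::

    from werkzeug.serving import WSGIRequestHandler, run_simple

    class LoggingHandler(WSGIRequestHandler):
        # Record aborted requests instead of silently ignoring them.
        def connection_dropped(self, error, environ=None):
            self.log("info", "client dropped the connection: %s", error)

    def app(environ, start_response):
        start_response("200 OK", [("Content-Type", "text/plain")])
        return [b"hello\n"]

    # run_simple("localhost", 8080, app, request_handler=LoggingHandler)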
- """ - - def handle_one_request(self): - """Handle a single HTTP request.""" - self.raw_requestline = self.rfile.readline() - if not self.raw_requestline: - self.close_connection = 1 - elif self.parse_request(): - return self.run_wsgi() - - def send_response(self, code, message=None): - """Send the response header and log the response code.""" - self.log_request(code) - if message is None: - message = code in self.responses and self.responses[code][0] or "" - if self.request_version != "HTTP/0.9": - hdr = "%s %d %s\r\n" % (self.protocol_version, code, message) - self.wfile.write(hdr.encode("ascii")) - - def version_string(self): - return BaseHTTPRequestHandler.version_string(self).strip() - - def address_string(self): - if getattr(self, "environ", None): - return self.environ["REMOTE_ADDR"] - elif not self.client_address: - return "" - elif isinstance(self.client_address, str): - return self.client_address - else: - return self.client_address[0] - - def port_integer(self): - return self.client_address[1] - - def log_request(self, code="-", size="-"): - try: - path = uri_to_iri(self.path) - msg = "%s %s %s" % (self.command, path, self.request_version) - except AttributeError: - # path isn't set if the requestline was bad - msg = self.requestline - - code = str(code) - - if termcolor: - color = termcolor.colored - - if code[0] == "1": # 1xx - Informational - msg = color(msg, attrs=["bold"]) - elif code[0] == "2": # 2xx - Success - msg = color(msg, color="white") - elif code == "304": # 304 - Resource Not Modified - msg = color(msg, color="cyan") - elif code[0] == "3": # 3xx - Redirection - msg = color(msg, color="green") - elif code == "404": # 404 - Resource Not Found - msg = color(msg, color="yellow") - elif code[0] == "4": # 4xx - Client Error - msg = color(msg, color="red", attrs=["bold"]) - else: # 5xx, or any other response - msg = color(msg, color="magenta", attrs=["bold"]) - - self.log("info", '"%s" %s %s', msg, code, size) - - def log_error(self, *args): - self.log("error", *args) - - def log_message(self, format, *args): - self.log("info", format, *args) - - def log(self, type, message, *args): - _log( - type, - "%s - - [%s] %s\n" - % (self.address_string(), self.log_date_time_string(), message % args), - ) - - def get_header_items(self): - """ - Get an iterable list of key/value pairs representing headers. - - This function provides Python 2/3 compatibility as related to the - parsing of request headers. Python 2.7 is not compliant with - RFC 3875 Section 4.1.18 which requires multiple values for headers - to be provided or RFC 2616 which allows for folding of multi-line - headers. This function will return a matching list regardless - of Python version. It can be removed once Python 2.7 support - is dropped. - - :return: List of tuples containing header hey/value pairs - """ - if PY2: - # For Python 2, process the headers manually according to - # W3C RFC 2616 Section 4.2. - items = [] - for header in self.headers.headers: - # Remove "\r\n" from the header and split on ":" to get - # the field name and value. - try: - key, value = header[0:-2].split(":", 1) - except ValueError: - # If header could not be slit with : but starts with white - # space and it follows an existing header, it's a folded - # header. 
- if header[0] in ("\t", " ") and items: - # Pop off the last header - key, value = items.pop() - # Append the current header to the value of the last - # header which will be placed back on the end of the - # list - value = value + header - # Otherwise it's just a bad header and should error - else: - # Re-raise the value error - raise - - # Add the key and the value once stripped of leading - # white space. The specification allows for stripping - # trailing white space but the Python 3 code does not - # strip trailing white space. Therefore, trailing space - # will be left as is to match the Python 3 behavior. - items.append((key, value.lstrip())) - else: - items = self.headers.items() - - return items - - -#: backwards compatible name if someone is subclassing it -BaseRequestHandler = WSGIRequestHandler - - -def generate_adhoc_ssl_pair(cn=None): - from random import random - - crypto = _get_openssl_crypto_module() - - # pretty damn sure that this is not actually accepted by anyone - if cn is None: - cn = "*" - - cert = crypto.X509() - cert.set_serial_number(int(random() * sys.maxsize)) - cert.gmtime_adj_notBefore(0) - cert.gmtime_adj_notAfter(60 * 60 * 24 * 365) - - subject = cert.get_subject() - subject.CN = cn - subject.O = "Dummy Certificate" # noqa: E741 - - issuer = cert.get_issuer() - issuer.CN = subject.CN - issuer.O = subject.O # noqa: E741 - - pkey = crypto.PKey() - pkey.generate_key(crypto.TYPE_RSA, 2048) - cert.set_pubkey(pkey) - cert.sign(pkey, "sha256") - - return cert, pkey - - -def make_ssl_devcert(base_path, host=None, cn=None): - """Creates an SSL key for development. This should be used instead of - the ``'adhoc'`` key which generates a new cert on each server start. - It accepts a path for where it should store the key and cert and - either a host or CN. If a host is given it will use the CN - ``*.host/CN=host``. - - For more information see :func:`run_simple`. - - .. versionadded:: 0.9 - - :param base_path: the path to the certificate and key. The extension - ``.crt`` is added for the certificate, ``.key`` is - added for the key. - :param host: the name of the host. This can be used as an alternative - for the `cn`. - :param cn: the `CN` to use. - """ - from OpenSSL import crypto - - if host is not None: - cn = "*.%s/CN=%s" % (host, host) - cert, pkey = generate_adhoc_ssl_pair(cn=cn) - - cert_file = base_path + ".crt" - pkey_file = base_path + ".key" - - with open(cert_file, "wb") as f: - f.write(crypto.dump_certificate(crypto.FILETYPE_PEM, cert)) - with open(pkey_file, "wb") as f: - f.write(crypto.dump_privatekey(crypto.FILETYPE_PEM, pkey)) - - return cert_file, pkey_file - - -def generate_adhoc_ssl_context(): - """Generates an adhoc SSL context for the development server.""" - crypto = _get_openssl_crypto_module() - import tempfile - import atexit - - cert, pkey = generate_adhoc_ssl_pair() - cert_handle, cert_file = tempfile.mkstemp() - pkey_handle, pkey_file = tempfile.mkstemp() - atexit.register(os.remove, pkey_file) - atexit.register(os.remove, cert_file) - - os.write(cert_handle, crypto.dump_certificate(crypto.FILETYPE_PEM, cert)) - os.write(pkey_handle, crypto.dump_privatekey(crypto.FILETYPE_PEM, pkey)) - os.close(cert_handle) - os.close(pkey_handle) - ctx = load_ssl_context(cert_file, pkey_file) - return ctx - - -def load_ssl_context(cert_file, pkey_file=None, protocol=None): - """Loads SSL context from cert/private key files and optional protocol. - Many parameters are directly taken from the API of - :py:class:`ssl.SSLContext`. 
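The ad-hoc TLS helpers deleted above are typically used like this; pyOpenSSL is required, and the ``./dev`` base path is illustrative::

    from werkzeug.serving import make_ssl_devcert

    # Writes ./dev.crt and ./dev.key and returns both paths.
    cert_file, pkey_file = make_ssl_devcert("./dev", host="localhost")

    # The pair can then be fed straight to the dev server:
    # run_simple("localhost", 8443, app, ssl_context=(cert_file, pkey_file))
    # or, without generating any files:
    # run_simple("localhost", 8443, app, ssl_context="adhoc")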
- - :param cert_file: Path of the certificate to use. - :param pkey_file: Path of the private key to use. If not given, the key - will be obtained from the certificate file. - :param protocol: One of the ``PROTOCOL_*`` constants in the stdlib ``ssl`` - module. Defaults to ``PROTOCOL_SSLv23``. - """ - if protocol is None: - protocol = ssl.PROTOCOL_SSLv23 - ctx = _SSLContext(protocol) - ctx.load_cert_chain(cert_file, pkey_file) - return ctx - - -class _SSLContext(object): - - """A dummy class with a small subset of Python3's ``ssl.SSLContext``, only - intended to be used with and by Werkzeug.""" - - def __init__(self, protocol): - self._protocol = protocol - self._certfile = None - self._keyfile = None - self._password = None - - def load_cert_chain(self, certfile, keyfile=None, password=None): - self._certfile = certfile - self._keyfile = keyfile or certfile - self._password = password - - def wrap_socket(self, sock, **kwargs): - return ssl.wrap_socket( - sock, - keyfile=self._keyfile, - certfile=self._certfile, - ssl_version=self._protocol, - **kwargs - ) - - -def is_ssl_error(error=None): - """Checks if the given error (or the current one) is an SSL error.""" - exc_types = (ssl.SSLError,) - try: - from OpenSSL.SSL import Error - - exc_types += (Error,) - except ImportError: - pass - - if error is None: - error = sys.exc_info()[1] - return isinstance(error, exc_types) - - -def select_address_family(host, port): - """Return ``AF_INET4``, ``AF_INET6``, or ``AF_UNIX`` depending on - the host and port.""" - # disabled due to problems with current ipv6 implementations - # and various operating systems. Probably this code also is - # not supposed to work, but I can't come up with any other - # ways to implement this. - # try: - # info = socket.getaddrinfo(host, port, socket.AF_UNSPEC, - # socket.SOCK_STREAM, 0, - # socket.AI_PASSIVE) - # if info: - # return info[0][0] - # except socket.gaierror: - # pass - if host.startswith("unix://"): - return socket.AF_UNIX - elif ":" in host and hasattr(socket, "AF_INET6"): - return socket.AF_INET6 - return socket.AF_INET - - -def get_sockaddr(host, port, family): - """Return a fully qualified socket address that can be passed to - :func:`socket.bind`.""" - if family == af_unix: - return host.split("://", 1)[1] - try: - res = socket.getaddrinfo( - host, port, family, socket.SOCK_STREAM, socket.IPPROTO_TCP - ) - except socket.gaierror: - return host, port - return res[0][4] - - -class BaseWSGIServer(HTTPServer, object): - - """Simple single-threaded, single-process WSGI server.""" - - multithread = False - multiprocess = False - request_queue_size = LISTEN_QUEUE - - def __init__( - self, - host, - port, - app, - handler=None, - passthrough_errors=False, - ssl_context=None, - fd=None, - ): - if handler is None: - handler = WSGIRequestHandler - - self.address_family = select_address_family(host, port) - - if fd is not None: - real_sock = socket.fromfd(fd, self.address_family, socket.SOCK_STREAM) - port = 0 - - server_address = get_sockaddr(host, int(port), self.address_family) - - # remove socket file if it already exists - if self.address_family == af_unix and os.path.exists(server_address): - os.unlink(server_address) - HTTPServer.__init__(self, server_address, handler) - - self.app = app - self.passthrough_errors = passthrough_errors - self.shutdown_signal = False - self.host = host - self.port = self.socket.getsockname()[1] - - # Patch in the original socket. 
- if fd is not None: - self.socket.close() - self.socket = real_sock - self.server_address = self.socket.getsockname() - - if ssl_context is not None: - if isinstance(ssl_context, tuple): - ssl_context = load_ssl_context(*ssl_context) - if ssl_context == "adhoc": - ssl_context = generate_adhoc_ssl_context() - # If we are on Python 2 the return value from socket.fromfd - # is an internal socket object but what we need for ssl wrap - # is the wrapper around it :( - sock = self.socket - if PY2 and not isinstance(sock, socket.socket): - sock = socket.socket(sock.family, sock.type, sock.proto, sock) - self.socket = ssl_context.wrap_socket(sock, server_side=True) - self.ssl_context = ssl_context - else: - self.ssl_context = None - - def log(self, type, message, *args): - _log(type, message, *args) - - def serve_forever(self): - self.shutdown_signal = False - try: - HTTPServer.serve_forever(self) - except KeyboardInterrupt: - pass - finally: - self.server_close() - - def handle_error(self, request, client_address): - if self.passthrough_errors: - raise - # Python 2 still causes a socket.error after the earlier - # handling, so silence it here. - if isinstance(sys.exc_info()[1], _ConnectionError): - return - return HTTPServer.handle_error(self, request, client_address) - - def get_request(self): - con, info = self.socket.accept() - return con, info - - -class ThreadedWSGIServer(ThreadingMixIn, BaseWSGIServer): - - """A WSGI server that does threading.""" - - multithread = True - daemon_threads = True - - -class ForkingWSGIServer(ForkingMixIn, BaseWSGIServer): - - """A WSGI server that does forking.""" - - multiprocess = True - - def __init__( - self, - host, - port, - app, - processes=40, - handler=None, - passthrough_errors=False, - ssl_context=None, - fd=None, - ): - if not can_fork: - raise ValueError("Your platform does not support forking.") - BaseWSGIServer.__init__( - self, host, port, app, handler, passthrough_errors, ssl_context, fd - ) - self.max_children = processes - - -def make_server( - host=None, - port=None, - app=None, - threaded=False, - processes=1, - request_handler=None, - passthrough_errors=False, - ssl_context=None, - fd=None, -): - """Create a new server instance that is either threaded, or forks - or just processes one request after another. - """ - if threaded and processes > 1: - raise ValueError("cannot have a multithreaded and multi process server.") - elif threaded: - return ThreadedWSGIServer( - host, port, app, request_handler, passthrough_errors, ssl_context, fd=fd - ) - elif processes > 1: - return ForkingWSGIServer( - host, - port, - app, - processes, - request_handler, - passthrough_errors, - ssl_context, - fd=fd, - ) - else: - return BaseWSGIServer( - host, port, app, request_handler, passthrough_errors, ssl_context, fd=fd - ) - - -def is_running_from_reloader(): - """Checks if the application is running from within the Werkzeug - reloader subprocess. - - .. versionadded:: 0.10 - """ - return os.environ.get("WERKZEUG_RUN_MAIN") == "true" - - -def run_simple( - hostname, - port, - application, - use_reloader=False, - use_debugger=False, - use_evalex=True, - extra_files=None, - reloader_interval=1, - reloader_type="auto", - threaded=False, - processes=1, - request_handler=None, - static_files=None, - passthrough_errors=False, - ssl_context=None, -): - """Start a WSGI application. Optional features include a reloader, - multithreading and fork support. - - This function has a command-line interface too:: - - python -m werkzeug.serving --help - - .. 
versionadded:: 0.5 - `static_files` was added to simplify serving of static files as well - as `passthrough_errors`. - - .. versionadded:: 0.6 - support for SSL was added. - - .. versionadded:: 0.8 - Added support for automatically loading a SSL context from certificate - file and private key. - - .. versionadded:: 0.9 - Added command-line interface. - - .. versionadded:: 0.10 - Improved the reloader and added support for changing the backend - through the `reloader_type` parameter. See :ref:`reloader` - for more information. - - .. versionchanged:: 0.15 - Bind to a Unix socket by passing a path that starts with - ``unix://`` as the ``hostname``. - - :param hostname: The host to bind to, for example ``'localhost'``. - If the value is a path that starts with ``unix://`` it will bind - to a Unix socket instead of a TCP socket.. - :param port: The port for the server. eg: ``8080`` - :param application: the WSGI application to execute - :param use_reloader: should the server automatically restart the python - process if modules were changed? - :param use_debugger: should the werkzeug debugging system be used? - :param use_evalex: should the exception evaluation feature be enabled? - :param extra_files: a list of files the reloader should watch - additionally to the modules. For example configuration - files. - :param reloader_interval: the interval for the reloader in seconds. - :param reloader_type: the type of reloader to use. The default is - auto detection. Valid values are ``'stat'`` and - ``'watchdog'``. See :ref:`reloader` for more - information. - :param threaded: should the process handle each request in a separate - thread? - :param processes: if greater than 1 then handle each request in a new process - up to this maximum number of concurrent processes. - :param request_handler: optional parameter that can be used to replace - the default one. You can use this to replace it - with a different - :class:`~BaseHTTPServer.BaseHTTPRequestHandler` - subclass. - :param static_files: a list or dict of paths for static files. This works - exactly like :class:`SharedDataMiddleware`, it's actually - just wrapping the application in that middleware before - serving. - :param passthrough_errors: set this to `True` to disable the error catching. - This means that the server will die on errors but - it can be useful to hook debuggers in (pdb etc.) - :param ssl_context: an SSL context for the connection. Either an - :class:`ssl.SSLContext`, a tuple in the form - ``(cert_file, pkey_file)``, the string ``'adhoc'`` if - the server should automatically create one, or ``None`` - to disable SSL (which is the default). 
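Putting the parameters documented above together, a minimal development entry point might look like this (the module layout is illustrative)::

    from werkzeug.serving import run_simple
    from werkzeug.wrappers import Request, Response

    @Request.application
    def application(request):
        return Response("Hello from the Werkzeug dev server\n")

    if __name__ == "__main__":
        # Reloader plus in-browser debugger, as described in the docstring above.
        run_simple("localhost", 8080, application, use_reloader=True, use_debugger=True)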
- """ - if not isinstance(port, int): - raise TypeError("port must be an integer") - if use_debugger: - from .debug import DebuggedApplication - - application = DebuggedApplication(application, use_evalex) - if static_files: - from .middleware.shared_data import SharedDataMiddleware - - application = SharedDataMiddleware(application, static_files) - - def log_startup(sock): - display_hostname = hostname if hostname not in ("", "*") else "localhost" - quit_msg = "(Press CTRL+C to quit)" - if sock.family == af_unix: - _log("info", " * Running on %s %s", display_hostname, quit_msg) - else: - if ":" in display_hostname: - display_hostname = "[%s]" % display_hostname - port = sock.getsockname()[1] - _log( - "info", - " * Running on %s://%s:%d/ %s", - "http" if ssl_context is None else "https", - display_hostname, - port, - quit_msg, - ) - - def inner(): - try: - fd = int(os.environ["WERKZEUG_SERVER_FD"]) - except (LookupError, ValueError): - fd = None - srv = make_server( - hostname, - port, - application, - threaded, - processes, - request_handler, - passthrough_errors, - ssl_context, - fd=fd, - ) - if fd is None: - log_startup(srv.socket) - srv.serve_forever() - - if use_reloader: - # If we're not running already in the subprocess that is the - # reloader we want to open up a socket early to make sure the - # port is actually available. - if not is_running_from_reloader(): - if port == 0 and not can_open_by_fd: - raise ValueError( - "Cannot bind to a random port with enabled " - "reloader if the Python interpreter does " - "not support socket opening by fd." - ) - - # Create and destroy a socket so that any exceptions are - # raised before we spawn a separate Python interpreter and - # lose this ability. - address_family = select_address_family(hostname, port) - server_address = get_sockaddr(hostname, port, address_family) - s = socket.socket(address_family, socket.SOCK_STREAM) - s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) - s.bind(server_address) - if hasattr(s, "set_inheritable"): - s.set_inheritable(True) - - # If we can open the socket by file descriptor, then we can just - # reuse this one and our socket will survive the restarts. - if can_open_by_fd: - os.environ["WERKZEUG_SERVER_FD"] = str(s.fileno()) - s.listen(LISTEN_QUEUE) - log_startup(s) - else: - s.close() - if address_family == af_unix: - _log("info", "Unlinking %s" % server_address) - os.unlink(server_address) - - # Do not use relative imports, otherwise "python -m werkzeug.serving" - # breaks. - from ._reloader import run_with_reloader - - run_with_reloader(inner, extra_files, reloader_interval, reloader_type) - else: - inner() - - -def run_with_reloader(*args, **kwargs): - # People keep using undocumented APIs. Do not use this function - # please, we do not guarantee that it continues working. 
- from ._reloader import run_with_reloader - - return run_with_reloader(*args, **kwargs) - - -def main(): - """A simple command-line interface for :py:func:`run_simple`.""" - - # in contrast to argparse, this works at least under Python < 2.7 - import optparse - from .utils import import_string - - parser = optparse.OptionParser(usage="Usage: %prog [options] app_module:app_object") - parser.add_option( - "-b", - "--bind", - dest="address", - help="The hostname:port the app should listen on.", - ) - parser.add_option( - "-d", - "--debug", - dest="use_debugger", - action="store_true", - default=False, - help="Use Werkzeug's debugger.", - ) - parser.add_option( - "-r", - "--reload", - dest="use_reloader", - action="store_true", - default=False, - help="Reload Python process if modules change.", - ) - options, args = parser.parse_args() - - hostname, port = None, None - if options.address: - address = options.address.split(":") - hostname = address[0] - if len(address) > 1: - port = address[1] - - if len(args) != 1: - sys.stdout.write("No application supplied, or too much. See --help\n") - sys.exit(1) - app = import_string(args[0]) - - run_simple( - hostname=(hostname or "127.0.0.1"), - port=int(port or 5000), - application=app, - use_reloader=options.use_reloader, - use_debugger=options.use_debugger, - ) - - -if __name__ == "__main__": - main() diff --git a/venv/lib/python3.6/site-packages/werkzeug/test.py b/venv/lib/python3.6/site-packages/werkzeug/test.py deleted file mode 100644 index 6148665..0000000 --- a/venv/lib/python3.6/site-packages/werkzeug/test.py +++ /dev/null @@ -1,1146 +0,0 @@ -# -*- coding: utf-8 -*- -""" - werkzeug.test - ~~~~~~~~~~~~~ - - This module implements a client to WSGI applications for testing. - - :copyright: 2007 Pallets - :license: BSD-3-Clause -""" -import mimetypes -import sys -from io import BytesIO -from itertools import chain -from random import random -from tempfile import TemporaryFile -from time import time - -from ._compat import iteritems -from ._compat import iterlists -from ._compat import itervalues -from ._compat import make_literal_wrapper -from ._compat import reraise -from ._compat import string_types -from ._compat import text_type -from ._compat import to_bytes -from ._compat import wsgi_encoding_dance -from ._internal import _get_environ -from .datastructures import CallbackDict -from .datastructures import CombinedMultiDict -from .datastructures import EnvironHeaders -from .datastructures import FileMultiDict -from .datastructures import FileStorage -from .datastructures import Headers -from .datastructures import MultiDict -from .http import dump_cookie -from .http import dump_options_header -from .http import parse_options_header -from .urls import iri_to_uri -from .urls import url_encode -from .urls import url_fix -from .urls import url_parse -from .urls import url_unparse -from .urls import url_unquote -from .utils import get_content_type -from .wrappers import BaseRequest -from .wsgi import ClosingIterator -from .wsgi import get_current_url - -try: - from urllib.request import Request as U2Request -except ImportError: - from urllib2 import Request as U2Request - -try: - from http.cookiejar import CookieJar -except ImportError: - from cookielib import CookieJar - - -def stream_encode_multipart( - values, use_tempfile=True, threshold=1024 * 500, boundary=None, charset="utf-8" -): - """Encode a dict of values (either strings or file descriptors or - :class:`FileStorage` objects.) 
into a multipart encoded string stored - in a file descriptor. - """ - if boundary is None: - boundary = "---------------WerkzeugFormPart_%s%s" % (time(), random()) - _closure = [BytesIO(), 0, False] - - if use_tempfile: - - def write_binary(string): - stream, total_length, on_disk = _closure - if on_disk: - stream.write(string) - else: - length = len(string) - if length + _closure[1] <= threshold: - stream.write(string) - else: - new_stream = TemporaryFile("wb+") - new_stream.write(stream.getvalue()) - new_stream.write(string) - _closure[0] = new_stream - _closure[2] = True - _closure[1] = total_length + length - - else: - write_binary = _closure[0].write - - def write(string): - write_binary(string.encode(charset)) - - if not isinstance(values, MultiDict): - values = MultiDict(values) - - for key, values in iterlists(values): - for value in values: - write('--%s\r\nContent-Disposition: form-data; name="%s"' % (boundary, key)) - reader = getattr(value, "read", None) - if reader is not None: - filename = getattr(value, "filename", getattr(value, "name", None)) - content_type = getattr(value, "content_type", None) - if content_type is None: - content_type = ( - filename - and mimetypes.guess_type(filename)[0] - or "application/octet-stream" - ) - if filename is not None: - write('; filename="%s"\r\n' % filename) - else: - write("\r\n") - write("Content-Type: %s\r\n\r\n" % content_type) - while 1: - chunk = reader(16384) - if not chunk: - break - write_binary(chunk) - else: - if not isinstance(value, string_types): - value = str(value) - - value = to_bytes(value, charset) - write("\r\n\r\n") - write_binary(value) - write("\r\n") - write("--%s--\r\n" % boundary) - - length = int(_closure[0].tell()) - _closure[0].seek(0) - return _closure[0], length, boundary - - -def encode_multipart(values, boundary=None, charset="utf-8"): - """Like `stream_encode_multipart` but returns a tuple in the form - (``boundary``, ``data``) where data is a bytestring. - """ - stream, length, boundary = stream_encode_multipart( - values, use_tempfile=False, boundary=boundary, charset=charset - ) - return boundary, stream.read() - - -def File(fd, filename=None, mimetype=None): - """Backwards compat. - - .. deprecated:: 0.5 - """ - from warnings import warn - - warn( - "'werkzeug.test.File' is deprecated as of version 0.5 and will" - " be removed in version 1.0. Use 'EnvironBuilder' or" - " 'FileStorage' instead.", - DeprecationWarning, - stacklevel=2, - ) - return FileStorage(fd, filename=filename, content_type=mimetype) - - -class _TestCookieHeaders(object): - - """A headers adapter for cookielib - """ - - def __init__(self, headers): - self.headers = headers - - def getheaders(self, name): - headers = [] - name = name.lower() - for k, v in self.headers: - if k.lower() == name: - headers.append(v) - return headers - - def get_all(self, name, default=None): - rv = [] - for k, v in self.headers: - if k.lower() == name.lower(): - rv.append(v) - return rv or default or [] - - -class _TestCookieResponse(object): - - """Something that looks like a httplib.HTTPResponse, but is actually just an - adapter for our test responses to make them available for cookielib. - """ - - def __init__(self, headers): - self.headers = _TestCookieHeaders(headers) - - def info(self): - return self.headers - - -class _TestCookieJar(CookieJar): - - """A cookielib.CookieJar modified to inject and read cookie headers from - and to wsgi environments, and wsgi application responses. 
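The multipart helpers near the top of ``test.py`` are occasionally useful on their own; a small sketch in which the field names and contents are illustrative::

    from io import BytesIO

    from werkzeug.datastructures import FileStorage
    from werkzeug.test import encode_multipart

    boundary, body = encode_multipart({
        "name": "example",
        "upload": FileStorage(BytesIO(b"file contents"), filename="hello.txt"),
    })
    # body is a bytestring ready to be sent as a request body with
    # Content-Type: multipart/form-data; boundary=<boundary>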
- """ - - def inject_wsgi(self, environ): - """Inject the cookies as client headers into the server's wsgi - environment. - """ - cvals = ["%s=%s" % (c.name, c.value) for c in self] - - if cvals: - environ["HTTP_COOKIE"] = "; ".join(cvals) - else: - environ.pop("HTTP_COOKIE", None) - - def extract_wsgi(self, environ, headers): - """Extract the server's set-cookie headers as cookies into the - cookie jar. - """ - self.extract_cookies( - _TestCookieResponse(headers), U2Request(get_current_url(environ)) - ) - - -def _iter_data(data): - """Iterates over a `dict` or :class:`MultiDict` yielding all keys and - values. - This is used to iterate over the data passed to the - :class:`EnvironBuilder`. - """ - if isinstance(data, MultiDict): - for key, values in iterlists(data): - for value in values: - yield key, value - else: - for key, values in iteritems(data): - if isinstance(values, list): - for value in values: - yield key, value - else: - yield key, values - - -class EnvironBuilder(object): - """This class can be used to conveniently create a WSGI environment - for testing purposes. It can be used to quickly create WSGI environments - or request objects from arbitrary data. - - The signature of this class is also used in some other places as of - Werkzeug 0.5 (:func:`create_environ`, :meth:`BaseResponse.from_values`, - :meth:`Client.open`). Because of this most of the functionality is - available through the constructor alone. - - Files and regular form data can be manipulated independently of each - other with the :attr:`form` and :attr:`files` attributes, but are - passed with the same argument to the constructor: `data`. - - `data` can be any of these values: - - - a `str` or `bytes` object: The object is converted into an - :attr:`input_stream`, the :attr:`content_length` is set and you have to - provide a :attr:`content_type`. - - a `dict` or :class:`MultiDict`: The keys have to be strings. The values - have to be either any of the following objects, or a list of any of the - following objects: - - - a :class:`file`-like object: These are converted into - :class:`FileStorage` objects automatically. - - a `tuple`: The :meth:`~FileMultiDict.add_file` method is called - with the key and the unpacked `tuple` items as positional - arguments. - - a `str`: The string is set as form data for the associated key. - - a file-like object: The object content is loaded in memory and then - handled like a regular `str` or a `bytes`. - - :param path: the path of the request. In the WSGI environment this will - end up as `PATH_INFO`. If the `query_string` is not defined - and there is a question mark in the `path` everything after - it is used as query string. - :param base_url: the base URL is a URL that is used to extract the WSGI - URL scheme, host (server name + server port) and the - script root (`SCRIPT_NAME`). - :param query_string: an optional string or dict with URL parameters. - :param method: the HTTP method to use, defaults to `GET`. - :param input_stream: an optional input stream. Do not specify this and - `data`. As soon as an input stream is set you can't - modify :attr:`args` and :attr:`files` unless you - set the :attr:`input_stream` to `None` again. - :param content_type: The content type for the request. As of 0.5 you - don't have to provide this when specifying files - and form data via `data`. - :param content_length: The content length for the request. You don't - have to specify this when providing data via - `data`. 
- :param errors_stream: an optional error stream that is used for - `wsgi.errors`. Defaults to :data:`stderr`. - :param multithread: controls `wsgi.multithread`. Defaults to `False`. - :param multiprocess: controls `wsgi.multiprocess`. Defaults to `False`. - :param run_once: controls `wsgi.run_once`. Defaults to `False`. - :param headers: an optional list or :class:`Headers` object of headers. - :param data: a string or dict of form data or a file-object. - See explanation above. - :param json: An object to be serialized and assigned to ``data``. - Defaults the content type to ``"application/json"``. - Serialized with the function assigned to :attr:`json_dumps`. - :param environ_base: an optional dict of environment defaults. - :param environ_overrides: an optional dict of environment overrides. - :param charset: the charset used to encode unicode data. - - .. versionadded:: 0.15 - The ``json`` param and :meth:`json_dumps` method. - - .. versionadded:: 0.15 - The environ has keys ``REQUEST_URI`` and ``RAW_URI`` containing - the path before perecent-decoding. This is not part of the WSGI - PEP, but many WSGI servers include it. - - .. versionchanged:: 0.6 - ``path`` and ``base_url`` can now be unicode strings that are - encoded with :func:`iri_to_uri`. - """ - - #: the server protocol to use. defaults to HTTP/1.1 - server_protocol = "HTTP/1.1" - - #: the wsgi version to use. defaults to (1, 0) - wsgi_version = (1, 0) - - #: the default request class for :meth:`get_request` - request_class = BaseRequest - - import json - - #: The serialization function used when ``json`` is passed. - json_dumps = staticmethod(json.dumps) - del json - - def __init__( - self, - path="/", - base_url=None, - query_string=None, - method="GET", - input_stream=None, - content_type=None, - content_length=None, - errors_stream=None, - multithread=False, - multiprocess=False, - run_once=False, - headers=None, - data=None, - environ_base=None, - environ_overrides=None, - charset="utf-8", - mimetype=None, - json=None, - ): - path_s = make_literal_wrapper(path) - if query_string is not None and path_s("?") in path: - raise ValueError("Query string is defined in the path and as an argument") - if query_string is None and path_s("?") in path: - path, query_string = path.split(path_s("?"), 1) - self.charset = charset - self.path = iri_to_uri(path) - if base_url is not None: - base_url = url_fix(iri_to_uri(base_url, charset), charset) - self.base_url = base_url - if isinstance(query_string, (bytes, text_type)): - self.query_string = query_string - else: - if query_string is None: - query_string = MultiDict() - elif not isinstance(query_string, MultiDict): - query_string = MultiDict(query_string) - self.args = query_string - self.method = method - if headers is None: - headers = Headers() - elif not isinstance(headers, Headers): - headers = Headers(headers) - self.headers = headers - if content_type is not None: - self.content_type = content_type - if errors_stream is None: - errors_stream = sys.stderr - self.errors_stream = errors_stream - self.multithread = multithread - self.multiprocess = multiprocess - self.run_once = run_once - self.environ_base = environ_base - self.environ_overrides = environ_overrides - self.input_stream = input_stream - self.content_length = content_length - self.closed = False - - if json is not None: - if data is not None: - raise TypeError("can't provide both json and data") - - data = self.json_dumps(json) - - if self.content_type is None: - self.content_type = "application/json" - - if data: - 
if input_stream is not None: - raise TypeError("can't provide input stream and data") - if hasattr(data, "read"): - data = data.read() - if isinstance(data, text_type): - data = data.encode(self.charset) - if isinstance(data, bytes): - self.input_stream = BytesIO(data) - if self.content_length is None: - self.content_length = len(data) - else: - for key, value in _iter_data(data): - if isinstance(value, (tuple, dict)) or hasattr(value, "read"): - self._add_file_from_data(key, value) - else: - self.form.setlistdefault(key).append(value) - - if mimetype is not None: - self.mimetype = mimetype - - @classmethod - def from_environ(cls, environ, **kwargs): - """Turn an environ dict back into a builder. Any extra kwargs - override the args extracted from the environ. - - .. versionadded:: 0.15 - """ - headers = Headers(EnvironHeaders(environ)) - out = { - "path": environ["PATH_INFO"], - "base_url": cls._make_base_url( - environ["wsgi.url_scheme"], headers.pop("Host"), environ["SCRIPT_NAME"] - ), - "query_string": environ["QUERY_STRING"], - "method": environ["REQUEST_METHOD"], - "input_stream": environ["wsgi.input"], - "content_type": headers.pop("Content-Type", None), - "content_length": headers.pop("Content-Length", None), - "errors_stream": environ["wsgi.errors"], - "multithread": environ["wsgi.multithread"], - "multiprocess": environ["wsgi.multiprocess"], - "run_once": environ["wsgi.run_once"], - "headers": headers, - } - out.update(kwargs) - return cls(**out) - - def _add_file_from_data(self, key, value): - """Called in the EnvironBuilder to add files from the data dict.""" - if isinstance(value, tuple): - self.files.add_file(key, *value) - elif isinstance(value, dict): - from warnings import warn - - warn( - "Passing a dict as file data is deprecated as of" - " version 0.5 and will be removed in version 1.0. Use" - " a tuple or 'FileStorage' object instead.", - DeprecationWarning, - stacklevel=2, - ) - value = dict(value) - mimetype = value.pop("mimetype", None) - if mimetype is not None: - value["content_type"] = mimetype - self.files.add_file(key, **value) - else: - self.files.add_file(key, value) - - @staticmethod - def _make_base_url(scheme, host, script_root): - return url_unparse((scheme, host, script_root, "", "")).rstrip("/") + "/" - - @property - def base_url(self): - """The base URL is used to extract the URL scheme, host name, - port, and root path. - """ - return self._make_base_url(self.url_scheme, self.host, self.script_root) - - @base_url.setter - def base_url(self, value): - if value is None: - scheme = "http" - netloc = "localhost" - script_root = "" - else: - scheme, netloc, script_root, qs, anchor = url_parse(value) - if qs or anchor: - raise ValueError("base url must not contain a query string or fragment") - self.script_root = script_root.rstrip("/") - self.host = netloc - self.url_scheme = scheme - - def _get_content_type(self): - ct = self.headers.get("Content-Type") - if ct is None and not self._input_stream: - if self._files: - return "multipart/form-data" - elif self._form: - return "application/x-www-form-urlencoded" - return None - return ct - - def _set_content_type(self, value): - if value is None: - self.headers.pop("Content-Type", None) - else: - self.headers["Content-Type"] = value - - content_type = property( - _get_content_type, - _set_content_type, - doc="""The content type for the request. Reflected from and to - the :attr:`headers`. 
Do not set if you set :attr:`files` or - :attr:`form` for auto detection.""", - ) - del _get_content_type, _set_content_type - - def _get_content_length(self): - return self.headers.get("Content-Length", type=int) - - def _get_mimetype(self): - ct = self.content_type - if ct: - return ct.split(";")[0].strip() - - def _set_mimetype(self, value): - self.content_type = get_content_type(value, self.charset) - - def _get_mimetype_params(self): - def on_update(d): - self.headers["Content-Type"] = dump_options_header(self.mimetype, d) - - d = parse_options_header(self.headers.get("content-type", ""))[1] - return CallbackDict(d, on_update) - - mimetype = property( - _get_mimetype, - _set_mimetype, - doc="""The mimetype (content type without charset etc.) - - .. versionadded:: 0.14 - """, - ) - mimetype_params = property( - _get_mimetype_params, - doc=""" The mimetype parameters as dict. For example if the - content type is ``text/html; charset=utf-8`` the params would be - ``{'charset': 'utf-8'}``. - - .. versionadded:: 0.14 - """, - ) - del _get_mimetype, _set_mimetype, _get_mimetype_params - - def _set_content_length(self, value): - if value is None: - self.headers.pop("Content-Length", None) - else: - self.headers["Content-Length"] = str(value) - - content_length = property( - _get_content_length, - _set_content_length, - doc="""The content length as integer. Reflected from and to the - :attr:`headers`. Do not set if you set :attr:`files` or - :attr:`form` for auto detection.""", - ) - del _get_content_length, _set_content_length - - def form_property(name, storage, doc): # noqa: B902 - key = "_" + name - - def getter(self): - if self._input_stream is not None: - raise AttributeError("an input stream is defined") - rv = getattr(self, key) - if rv is None: - rv = storage() - setattr(self, key, rv) - - return rv - - def setter(self, value): - self._input_stream = None - setattr(self, key, value) - - return property(getter, setter, doc=doc) - - form = form_property("form", MultiDict, doc="A :class:`MultiDict` of form values.") - files = form_property( - "files", - FileMultiDict, - doc="""A :class:`FileMultiDict` of uploaded files. You can use - the :meth:`~FileMultiDict.add_file` method to add new files to - the dict.""", - ) - del form_property - - def _get_input_stream(self): - return self._input_stream - - def _set_input_stream(self, value): - self._input_stream = value - self._form = self._files = None - - input_stream = property( - _get_input_stream, - _set_input_stream, - doc="""An optional input stream. If you set this it will clear - :attr:`form` and :attr:`files`.""", - ) - del _get_input_stream, _set_input_stream - - def _get_query_string(self): - if self._query_string is None: - if self._args is not None: - return url_encode(self._args, charset=self.charset) - return "" - return self._query_string - - def _set_query_string(self, value): - self._query_string = value - self._args = None - - query_string = property( - _get_query_string, - _set_query_string, - doc="""The query string. If you set this to a string - :attr:`args` will no longer be available.""", - ) - del _get_query_string, _set_query_string - - def _get_args(self): - if self._query_string is not None: - raise AttributeError("a query string is defined") - if self._args is None: - self._args = MultiDict() - return self._args - - def _set_args(self, value): - self._query_string = None - self._args = value - - args = property( - _get_args, _set_args, doc="The URL arguments as :class:`MultiDict`." 
- ) - del _get_args, _set_args - - @property - def server_name(self): - """The server name (read-only, use :attr:`host` to set)""" - return self.host.split(":", 1)[0] - - @property - def server_port(self): - """The server port as integer (read-only, use :attr:`host` to set)""" - pieces = self.host.split(":", 1) - if len(pieces) == 2 and pieces[1].isdigit(): - return int(pieces[1]) - elif self.url_scheme == "https": - return 443 - return 80 - - def __del__(self): - try: - self.close() - except Exception: - pass - - def close(self): - """Closes all files. If you put real :class:`file` objects into the - :attr:`files` dict you can call this method to automatically close - them all in one go. - """ - if self.closed: - return - try: - files = itervalues(self.files) - except AttributeError: - files = () - for f in files: - try: - f.close() - except Exception: - pass - self.closed = True - - def get_environ(self): - """Return the built environ. - - .. versionchanged:: 0.15 - The content type and length headers are set based on - input stream detection. Previously this only set the WSGI - keys. - """ - input_stream = self.input_stream - content_length = self.content_length - - mimetype = self.mimetype - content_type = self.content_type - - if input_stream is not None: - start_pos = input_stream.tell() - input_stream.seek(0, 2) - end_pos = input_stream.tell() - input_stream.seek(start_pos) - content_length = end_pos - start_pos - elif mimetype == "multipart/form-data": - values = CombinedMultiDict([self.form, self.files]) - input_stream, content_length, boundary = stream_encode_multipart( - values, charset=self.charset - ) - content_type = mimetype + '; boundary="%s"' % boundary - elif mimetype == "application/x-www-form-urlencoded": - # XXX: py2v3 review - values = url_encode(self.form, charset=self.charset) - values = values.encode("ascii") - content_length = len(values) - input_stream = BytesIO(values) - else: - input_stream = BytesIO() - - result = {} - if self.environ_base: - result.update(self.environ_base) - - def _path_encode(x): - return wsgi_encoding_dance(url_unquote(x, self.charset), self.charset) - - qs = wsgi_encoding_dance(self.query_string) - - result.update( - { - "REQUEST_METHOD": self.method, - "SCRIPT_NAME": _path_encode(self.script_root), - "PATH_INFO": _path_encode(self.path), - "QUERY_STRING": qs, - # Non-standard, added by mod_wsgi, uWSGI - "REQUEST_URI": wsgi_encoding_dance(self.path), - # Non-standard, added by gunicorn - "RAW_URI": wsgi_encoding_dance(self.path), - "SERVER_NAME": self.server_name, - "SERVER_PORT": str(self.server_port), - "HTTP_HOST": self.host, - "SERVER_PROTOCOL": self.server_protocol, - "wsgi.version": self.wsgi_version, - "wsgi.url_scheme": self.url_scheme, - "wsgi.input": input_stream, - "wsgi.errors": self.errors_stream, - "wsgi.multithread": self.multithread, - "wsgi.multiprocess": self.multiprocess, - "wsgi.run_once": self.run_once, - } - ) - - headers = self.headers.copy() - - if content_type is not None: - result["CONTENT_TYPE"] = content_type - headers.set("Content-Type", content_type) - - if content_length is not None: - result["CONTENT_LENGTH"] = str(content_length) - headers.set("Content-Length", content_length) - - for key, value in headers.to_wsgi_list(): - result["HTTP_%s" % key.upper().replace("-", "_")] = value - - if self.environ_overrides: - result.update(self.environ_overrides) - - return result - - def get_request(self, cls=None): - """Returns a request with the data. 
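A brief usage sketch of the builder described above, assuming the Werkzeug 0.15.x tree being deleted here; the path and payload are illustrative only::

    from werkzeug.test import EnvironBuilder

    # ``json=`` (new in 0.15) serializes the payload and defaults the
    # content type to application/json.
    builder = EnvironBuilder(path="/api/items", method="POST", json={"name": "demo"})
    try:
        environ = builder.get_environ()
        assert environ["REQUEST_METHOD"] == "POST"
        assert environ["CONTENT_TYPE"] == "application/json"
    finally:
        builder.close()  # closes any file objects added to ``files``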
If the request class is not - specified :attr:`request_class` is used. - - :param cls: The request wrapper to use. - """ - if cls is None: - cls = self.request_class - return cls(self.get_environ()) - - -class ClientRedirectError(Exception): - """If a redirect loop is detected when using follow_redirects=True with - the :cls:`Client`, then this exception is raised. - """ - - -class Client(object): - """This class allows you to send requests to a wrapped application. - - The response wrapper can be a class or factory function that takes - three arguments: app_iter, status and headers. The default response - wrapper just returns a tuple. - - Example:: - - class ClientResponse(BaseResponse): - ... - - client = Client(MyApplication(), response_wrapper=ClientResponse) - - The use_cookies parameter indicates whether cookies should be stored and - sent for subsequent requests. This is True by default, but passing False - will disable this behaviour. - - If you want to request some subdomain of your application you may set - `allow_subdomain_redirects` to `True` as if not no external redirects - are allowed. - - .. versionadded:: 0.5 - `use_cookies` is new in this version. Older versions did not provide - builtin cookie support. - - .. versionadded:: 0.14 - The `mimetype` parameter was added. - - .. versionadded:: 0.15 - The ``json`` parameter. - """ - - def __init__( - self, - application, - response_wrapper=None, - use_cookies=True, - allow_subdomain_redirects=False, - ): - self.application = application - self.response_wrapper = response_wrapper - if use_cookies: - self.cookie_jar = _TestCookieJar() - else: - self.cookie_jar = None - self.allow_subdomain_redirects = allow_subdomain_redirects - - def set_cookie( - self, - server_name, - key, - value="", - max_age=None, - expires=None, - path="/", - domain=None, - secure=None, - httponly=False, - charset="utf-8", - ): - """Sets a cookie in the client's cookie jar. The server name - is required and has to match the one that is also passed to - the open call. - """ - assert self.cookie_jar is not None, "cookies disabled" - header = dump_cookie( - key, value, max_age, expires, path, domain, secure, httponly, charset - ) - environ = create_environ(path, base_url="http://" + server_name) - headers = [("Set-Cookie", header)] - self.cookie_jar.extract_wsgi(environ, headers) - - def delete_cookie(self, server_name, key, path="/", domain=None): - """Deletes a cookie in the test client.""" - self.set_cookie( - server_name, key, expires=0, max_age=0, path=path, domain=domain - ) - - def run_wsgi_app(self, environ, buffered=False): - """Runs the wrapped WSGI app with the given environment.""" - if self.cookie_jar is not None: - self.cookie_jar.inject_wsgi(environ) - rv = run_wsgi_app(self.application, environ, buffered=buffered) - if self.cookie_jar is not None: - self.cookie_jar.extract_wsgi(environ, rv[2]) - return rv - - def resolve_redirect(self, response, new_location, environ, buffered=False): - """Perform a new request to the location given by the redirect - response to the previous request. - """ - scheme, netloc, path, qs, anchor = url_parse(new_location) - builder = EnvironBuilder.from_environ(environ, query_string=qs) - - to_name_parts = netloc.split(":", 1)[0].split(".") - from_name_parts = builder.server_name.split(".") - - if to_name_parts != [""]: - # The new location has a host, use it for the base URL. 
- builder.url_scheme = scheme - builder.host = netloc - else: - # A local redirect with autocorrect_location_header=False - # doesn't have a host, so use the request's host. - to_name_parts = from_name_parts - - # Explain why a redirect to a different server name won't be followed. - if to_name_parts != from_name_parts: - if to_name_parts[-len(from_name_parts) :] == from_name_parts: - if not self.allow_subdomain_redirects: - raise RuntimeError("Following subdomain redirects is not enabled.") - else: - raise RuntimeError("Following external redirects is not supported.") - - path_parts = path.split("/") - root_parts = builder.script_root.split("/") - - if path_parts[: len(root_parts)] == root_parts: - # Strip the script root from the path. - builder.path = path[len(builder.script_root) :] - else: - # The new location is not under the script root, so use the - # whole path and clear the previous root. - builder.path = path - builder.script_root = "" - - status_code = int(response[1].split(None, 1)[0]) - - # Only 307 and 308 preserve all of the original request. - if status_code not in {307, 308}: - # HEAD is preserved, everything else becomes GET. - if builder.method != "HEAD": - builder.method = "GET" - - # Clear the body and the headers that describe it. - builder.input_stream = None - builder.content_type = None - builder.content_length = None - builder.headers.pop("Transfer-Encoding", None) - - # Disable the response wrapper while handling redirects. Not - # thread safe, but the client should not be shared anyway. - old_response_wrapper = self.response_wrapper - self.response_wrapper = None - - try: - return self.open(builder, as_tuple=True, buffered=buffered) - finally: - self.response_wrapper = old_response_wrapper - - def open(self, *args, **kwargs): - """Takes the same arguments as the :class:`EnvironBuilder` class with - some additions: You can provide a :class:`EnvironBuilder` or a WSGI - environment as only argument instead of the :class:`EnvironBuilder` - arguments and two optional keyword arguments (`as_tuple`, `buffered`) - that change the type of the return value or the way the application is - executed. - - .. versionchanged:: 0.5 - If a dict is provided as file in the dict for the `data` parameter - the content type has to be called `content_type` now instead of - `mimetype`. This change was made for consistency with - :class:`werkzeug.FileWrapper`. - - The `follow_redirects` parameter was added to :func:`open`. - - Additional parameters: - - :param as_tuple: Returns a tuple in the form ``(environ, result)`` - :param buffered: Set this to True to buffer the application run. - This will automatically close the application for - you as well. - :param follow_redirects: Set this to True if the `Client` should - follow HTTP redirects. 
- """ - as_tuple = kwargs.pop("as_tuple", False) - buffered = kwargs.pop("buffered", False) - follow_redirects = kwargs.pop("follow_redirects", False) - environ = None - if not kwargs and len(args) == 1: - if isinstance(args[0], EnvironBuilder): - environ = args[0].get_environ() - elif isinstance(args[0], dict): - environ = args[0] - if environ is None: - builder = EnvironBuilder(*args, **kwargs) - try: - environ = builder.get_environ() - finally: - builder.close() - - response = self.run_wsgi_app(environ.copy(), buffered=buffered) - - # handle redirects - redirect_chain = [] - while 1: - status_code = int(response[1].split(None, 1)[0]) - if ( - status_code not in {301, 302, 303, 305, 307, 308} - or not follow_redirects - ): - break - - # Exhaust intermediate response bodies to ensure middleware - # that returns an iterator runs any cleanup code. - if not buffered: - for _ in response[0]: - pass - - new_location = response[2]["location"] - new_redirect_entry = (new_location, status_code) - if new_redirect_entry in redirect_chain: - raise ClientRedirectError("loop detected") - redirect_chain.append(new_redirect_entry) - environ, response = self.resolve_redirect( - response, new_location, environ, buffered=buffered - ) - - if self.response_wrapper is not None: - response = self.response_wrapper(*response) - if as_tuple: - return environ, response - return response - - def get(self, *args, **kw): - """Like open but method is enforced to GET.""" - kw["method"] = "GET" - return self.open(*args, **kw) - - def patch(self, *args, **kw): - """Like open but method is enforced to PATCH.""" - kw["method"] = "PATCH" - return self.open(*args, **kw) - - def post(self, *args, **kw): - """Like open but method is enforced to POST.""" - kw["method"] = "POST" - return self.open(*args, **kw) - - def head(self, *args, **kw): - """Like open but method is enforced to HEAD.""" - kw["method"] = "HEAD" - return self.open(*args, **kw) - - def put(self, *args, **kw): - """Like open but method is enforced to PUT.""" - kw["method"] = "PUT" - return self.open(*args, **kw) - - def delete(self, *args, **kw): - """Like open but method is enforced to DELETE.""" - kw["method"] = "DELETE" - return self.open(*args, **kw) - - def options(self, *args, **kw): - """Like open but method is enforced to OPTIONS.""" - kw["method"] = "OPTIONS" - return self.open(*args, **kw) - - def trace(self, *args, **kw): - """Like open but method is enforced to TRACE.""" - kw["method"] = "TRACE" - return self.open(*args, **kw) - - def __repr__(self): - return "<%s %r>" % (self.__class__.__name__, self.application) - - -def create_environ(*args, **kwargs): - """Create a new WSGI environ dict based on the values passed. The first - parameter should be the path of the request which defaults to '/'. The - second one can either be an absolute path (in that case the host is - localhost:80) or a full path to the request with scheme, netloc port and - the path to the script. - - This accepts the same arguments as the :class:`EnvironBuilder` - constructor. - - .. versionchanged:: 0.5 - This function is now a thin wrapper over :class:`EnvironBuilder` which - was added in 0.5. The `headers`, `environ_base`, `environ_overrides` - and `charset` parameters were added. - """ - builder = EnvironBuilder(*args, **kwargs) - try: - return builder.get_environ() - finally: - builder.close() - - -def run_wsgi_app(app, environ, buffered=False): - """Return a tuple in the form (app_iter, status, headers) of the - application output. 
This works best if you pass it an application that - returns an iterator all the time. - - Sometimes applications may use the `write()` callable returned - by the `start_response` function. This tries to resolve such edge - cases automatically. But if you don't get the expected output you - should set `buffered` to `True` which enforces buffering. - - If passed an invalid WSGI application the behavior of this function is - undefined. Never pass non-conforming WSGI applications to this function. - - :param app: the application to execute. - :param buffered: set to `True` to enforce buffering. - :return: tuple in the form ``(app_iter, status, headers)`` - """ - environ = _get_environ(environ) - response = [] - buffer = [] - - def start_response(status, headers, exc_info=None): - if exc_info is not None: - reraise(*exc_info) - response[:] = [status, headers] - return buffer.append - - app_rv = app(environ, start_response) - close_func = getattr(app_rv, "close", None) - app_iter = iter(app_rv) - - # when buffering we emit the close call early and convert the - # application iterator into a regular list - if buffered: - try: - app_iter = list(app_iter) - finally: - if close_func is not None: - close_func() - - # otherwise we iterate the application iter until we have a response, chain - # the already received data with the already collected data and wrap it in - # a new `ClosingIterator` if we need to restore a `close` callable from the - # original return value. - else: - for item in app_iter: - buffer.append(item) - if response: - break - if buffer: - app_iter = chain(buffer, app_iter) - if close_func is not None and app_iter is not app_rv: - app_iter = ClosingIterator(app_iter, close_func) - - return app_iter, response[0], Headers(response[1]) diff --git a/venv/lib/python3.6/site-packages/werkzeug/testapp.py b/venv/lib/python3.6/site-packages/werkzeug/testapp.py deleted file mode 100644 index 8ea23be..0000000 --- a/venv/lib/python3.6/site-packages/werkzeug/testapp.py +++ /dev/null @@ -1,241 +0,0 @@ -# -*- coding: utf-8 -*- -""" - werkzeug.testapp - ~~~~~~~~~~~~~~~~ - - Provide a small test application that can be used to test a WSGI server - and check it for WSGI compliance. 
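A short sketch tying create_environ and run_wsgi_app together; the echo application is invented for illustration::

    from werkzeug.test import create_environ, run_wsgi_app

    def echo_path_app(environ, start_response):
        start_response("200 OK", [("Content-Type", "text/plain")])
        return [environ["PATH_INFO"].encode("utf-8")]

    environ = create_environ("/ping", "http://localhost/")
    app_iter, status, headers = run_wsgi_app(echo_path_app, environ, buffered=True)
    assert status == "200 OK"
    assert b"".join(app_iter) == b"/ping"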
- - :copyright: 2007 Pallets - :license: BSD-3-Clause -""" -import base64 -import os -import sys -from textwrap import wrap - -import werkzeug -from .utils import escape -from .wrappers import BaseRequest as Request -from .wrappers import BaseResponse as Response - -logo = Response( - base64.b64decode( - """ -R0lGODlhoACgAOMIAAEDACwpAEpCAGdgAJaKAM28AOnVAP3rAP///////// -//////////////////////yH5BAEKAAgALAAAAACgAKAAAAT+EMlJq704680R+F0ojmRpnuj0rWnrv -nB8rbRs33gu0bzu/0AObxgsGn3D5HHJbCUFyqZ0ukkSDlAidctNFg7gbI9LZlrBaHGtzAae0eloe25 -7w9EDOX2fst/xenyCIn5/gFqDiVVDV4aGeYiKkhSFjnCQY5OTlZaXgZp8nJ2ekaB0SQOjqphrpnOiq -ncEn65UsLGytLVmQ6m4sQazpbtLqL/HwpnER8bHyLrLOc3Oz8PRONPU1crXN9na263dMt/g4SzjMeX -m5yDpLqgG7OzJ4u8lT/P69ej3JPn69kHzN2OIAHkB9RUYSFCFQYQJFTIkCDBiwoXWGnowaLEjRm7+G -p9A7Hhx4rUkAUaSLJlxHMqVMD/aSycSZkyTplCqtGnRAM5NQ1Ly5OmzZc6gO4d6DGAUKA+hSocWYAo -SlM6oUWX2O/o0KdaVU5vuSQLAa0ADwQgMEMB2AIECZhVSnTno6spgbtXmHcBUrQACcc2FrTrWS8wAf -78cMFBgwIBgbN+qvTt3ayikRBk7BoyGAGABAdYyfdzRQGV3l4coxrqQ84GpUBmrdR3xNIDUPAKDBSA -ADIGDhhqTZIWaDcrVX8EsbNzbkvCOxG8bN5w8ly9H8jyTJHC6DFndQydbguh2e/ctZJFXRxMAqqPVA -tQH5E64SPr1f0zz7sQYjAHg0In+JQ11+N2B0XXBeeYZgBZFx4tqBToiTCPv0YBgQv8JqA6BEf6RhXx -w1ENhRBnWV8ctEX4Ul2zc3aVGcQNC2KElyTDYyYUWvShdjDyMOGMuFjqnII45aogPhz/CodUHFwaDx -lTgsaOjNyhGWJQd+lFoAGk8ObghI0kawg+EV5blH3dr+digkYuAGSaQZFHFz2P/cTaLmhF52QeSb45 -Jwxd+uSVGHlqOZpOeJpCFZ5J+rkAkFjQ0N1tah7JJSZUFNsrkeJUJMIBi8jyaEKIhKPomnC91Uo+NB -yyaJ5umnnpInIFh4t6ZSpGaAVmizqjpByDegYl8tPE0phCYrhcMWSv+uAqHfgH88ak5UXZmlKLVJhd -dj78s1Fxnzo6yUCrV6rrDOkluG+QzCAUTbCwf9SrmMLzK6p+OPHx7DF+bsfMRq7Ec61Av9i6GLw23r -idnZ+/OO0a99pbIrJkproCQMA17OPG6suq3cca5ruDfXCCDoS7BEdvmJn5otdqscn+uogRHHXs8cbh -EIfYaDY1AkrC0cqwcZpnM6ludx72x0p7Fo/hZAcpJDjax0UdHavMKAbiKltMWCF3xxh9k25N/Viud8 -ba78iCvUkt+V6BpwMlErmcgc502x+u1nSxJSJP9Mi52awD1V4yB/QHONsnU3L+A/zR4VL/indx/y64 -gqcj+qgTeweM86f0Qy1QVbvmWH1D9h+alqg254QD8HJXHvjQaGOqEqC22M54PcftZVKVSQG9jhkv7C -JyTyDoAJfPdu8v7DRZAxsP/ky9MJ3OL36DJfCFPASC3/aXlfLOOON9vGZZHydGf8LnxYJuuVIbl83y -Az5n/RPz07E+9+zw2A2ahz4HxHo9Kt79HTMx1Q7ma7zAzHgHqYH0SoZWyTuOLMiHwSfZDAQTn0ajk9 -YQqodnUYjByQZhZak9Wu4gYQsMyEpIOAOQKze8CmEF45KuAHTvIDOfHJNipwoHMuGHBnJElUoDmAyX -c2Qm/R8Ah/iILCCJOEokGowdhDYc/yoL+vpRGwyVSCWFYZNljkhEirGXsalWcAgOdeAdoXcktF2udb -qbUhjWyMQxYO01o6KYKOr6iK3fE4MaS+DsvBsGOBaMb0Y6IxADaJhFICaOLmiWTlDAnY1KzDG4ambL -cWBA8mUzjJsN2KjSaSXGqMCVXYpYkj33mcIApyhQf6YqgeNAmNvuC0t4CsDbSshZJkCS1eNisKqlyG -cF8G2JeiDX6tO6Mv0SmjCa3MFb0bJaGPMU0X7c8XcpvMaOQmCajwSeY9G0WqbBmKv34DsMIEztU6Y2 -KiDlFdt6jnCSqx7Dmt6XnqSKaFFHNO5+FmODxMCWBEaco77lNDGXBM0ECYB/+s7nKFdwSF5hgXumQe -EZ7amRg39RHy3zIjyRCykQh8Zo2iviRKyTDn/zx6EefptJj2Cw+Ep2FSc01U5ry4KLPYsTyWnVGnvb -UpyGlhjBUljyjHhWpf8OFaXwhp9O4T1gU9UeyPPa8A2l0p1kNqPXEVRm1AOs1oAGZU596t6SOR2mcB -Oco1srWtkaVrMUzIErrKri85keKqRQYX9VX0/eAUK1hrSu6HMEX3Qh2sCh0q0D2CtnUqS4hj62sE/z -aDs2Sg7MBS6xnQeooc2R2tC9YrKpEi9pLXfYXp20tDCpSP8rKlrD4axprb9u1Df5hSbz9QU0cRpfgn -kiIzwKucd0wsEHlLpe5yHXuc6FrNelOl7pY2+11kTWx7VpRu97dXA3DO1vbkhcb4zyvERYajQgAADs -=""" - ), - mimetype="image/png", -) - - -TEMPLATE = u"""\ - -WSGI Information - -
    This page displays all available information about the WSGI server and
    the underlying Python interpreter.

    Python Interpreter
      Python Version:    %(python_version)s
      Platform:          %(platform)s [%(os)s]
      API Version:       %(api_version)s
      Byteorder:         %(byteorder)s
      Werkzeug Version:  %(werkzeug_version)s

    WSGI Environment
      %(wsgi_env)s

    Installed Eggs

    The following python packages were installed on the system as
    Python eggs:
      %(python_eggs)s

    System Path

    The following paths are the current contents of the load path. The
    following entries are looked up for Python packages. Note that not
    all items in this path are folders. Gray and underlined items are
    entries pointing to invalid resources or used by custom import hooks
    such as the zip importer.

    Items with a bright background were expanded for display from a relative
    path. If you encounter such paths in the output you might want to check
    your setup as relative paths are usually problematic in multithreaded
    environments.
      %(sys_path)s
    -""" - - -def iter_sys_path(): - if os.name == "posix": - - def strip(x): - prefix = os.path.expanduser("~") - if x.startswith(prefix): - x = "~" + x[len(prefix) :] - return x - - else: - - def strip(x): - return x - - cwd = os.path.abspath(os.getcwd()) - for item in sys.path: - path = os.path.join(cwd, item or os.path.curdir) - yield strip(os.path.normpath(path)), not os.path.isdir(path), path != item - - -def render_testapp(req): - try: - import pkg_resources - except ImportError: - eggs = () - else: - eggs = sorted(pkg_resources.working_set, key=lambda x: x.project_name.lower()) - python_eggs = [] - for egg in eggs: - try: - version = egg.version - except (ValueError, AttributeError): - version = "unknown" - python_eggs.append( - "
  • %s [%s]" % (escape(egg.project_name), escape(version)) - ) - - wsgi_env = [] - sorted_environ = sorted(req.environ.items(), key=lambda x: repr(x[0]).lower()) - for key, value in sorted_environ: - wsgi_env.append( - "%s%s" - % (escape(str(key)), " ".join(wrap(escape(repr(value))))) - ) - - sys_path = [] - for item, virtual, expanded in iter_sys_path(): - class_ = [] - if virtual: - class_.append("virtual") - if expanded: - class_.append("exp") - sys_path.append( - "%s" - % (' class="%s"' % " ".join(class_) if class_ else "", escape(item)) - ) - - return ( - TEMPLATE - % { - "python_version": "
    ".join(escape(sys.version).splitlines()), - "platform": escape(sys.platform), - "os": escape(os.name), - "api_version": sys.api_version, - "byteorder": sys.byteorder, - "werkzeug_version": werkzeug.__version__, - "python_eggs": "\n".join(python_eggs), - "wsgi_env": "\n".join(wsgi_env), - "sys_path": "\n".join(sys_path), - } - ).encode("utf-8") - - -def test_app(environ, start_response): - """Simple test application that dumps the environment. You can use - it to check if Werkzeug is working properly: - - .. sourcecode:: pycon - - >>> from werkzeug.serving import run_simple - >>> from werkzeug.testapp import test_app - >>> run_simple('localhost', 3000, test_app) - * Running on http://localhost:3000/ - - The application displays important information from the WSGI environment, - the Python interpreter and the installed libraries. - """ - req = Request(environ, populate_request=False) - if req.args.get("resource") == "logo": - response = logo - else: - response = Response(render_testapp(req), mimetype="text/html") - return response(environ, start_response) - - -if __name__ == "__main__": - from .serving import run_simple - - run_simple("localhost", 5000, test_app, use_reloader=True) diff --git a/venv/lib/python3.6/site-packages/werkzeug/urls.py b/venv/lib/python3.6/site-packages/werkzeug/urls.py deleted file mode 100644 index 38e9e5a..0000000 --- a/venv/lib/python3.6/site-packages/werkzeug/urls.py +++ /dev/null @@ -1,1134 +0,0 @@ -# -*- coding: utf-8 -*- -""" - werkzeug.urls - ~~~~~~~~~~~~~ - - ``werkzeug.urls`` used to provide several wrapper functions for Python 2 - urlparse, whose main purpose were to work around the behavior of the Py2 - stdlib and its lack of unicode support. While this was already a somewhat - inconvenient situation, it got even more complicated because Python 3's - ``urllib.parse`` actually does handle unicode properly. In other words, - this module would wrap two libraries with completely different behavior. So - now this module contains a 2-and-3-compatible backport of Python 3's - ``urllib.parse``, which is mostly API-compatible. - - :copyright: 2007 Pallets - :license: BSD-3-Clause -""" -import codecs -import os -import re -from collections import namedtuple - -from ._compat import fix_tuple_repr -from ._compat import implements_to_string -from ._compat import make_literal_wrapper -from ._compat import normalize_string_tuple -from ._compat import PY2 -from ._compat import text_type -from ._compat import to_native -from ._compat import to_unicode -from ._compat import try_coerce_native -from ._internal import _decode_idna -from ._internal import _encode_idna -from .datastructures import iter_multi_items -from .datastructures import MultiDict - -# A regular expression for what a valid schema looks like -_scheme_re = re.compile(r"^[a-zA-Z0-9+-.]+$") - -# Characters that are safe in any part of an URL. 
-_always_safe = frozenset( - bytearray( - b"abcdefghijklmnopqrstuvwxyz" - b"ABCDEFGHIJKLMNOPQRSTUVWXYZ" - b"0123456789" - b"-._~" - ) -) - -_hexdigits = "0123456789ABCDEFabcdef" -_hextobyte = dict( - ((a + b).encode(), int(a + b, 16)) for a in _hexdigits for b in _hexdigits -) -_bytetohex = [("%%%02X" % char).encode("ascii") for char in range(256)] - - -_URLTuple = fix_tuple_repr( - namedtuple("_URLTuple", ["scheme", "netloc", "path", "query", "fragment"]) -) - - -class BaseURL(_URLTuple): - """Superclass of :py:class:`URL` and :py:class:`BytesURL`.""" - - __slots__ = () - - def replace(self, **kwargs): - """Return an URL with the same values, except for those parameters - given new values by whichever keyword arguments are specified.""" - return self._replace(**kwargs) - - @property - def host(self): - """The host part of the URL if available, otherwise `None`. The - host is either the hostname or the IP address mentioned in the - URL. It will not contain the port. - """ - return self._split_host()[0] - - @property - def ascii_host(self): - """Works exactly like :attr:`host` but will return a result that - is restricted to ASCII. If it finds a netloc that is not ASCII - it will attempt to idna decode it. This is useful for socket - operations when the URL might include internationalized characters. - """ - rv = self.host - if rv is not None and isinstance(rv, text_type): - try: - rv = _encode_idna(rv) - except UnicodeError: - rv = rv.encode("ascii", "ignore") - return to_native(rv, "ascii", "ignore") - - @property - def port(self): - """The port in the URL as an integer if it was present, `None` - otherwise. This does not fill in default ports. - """ - try: - rv = int(to_native(self._split_host()[1])) - if 0 <= rv <= 65535: - return rv - except (ValueError, TypeError): - pass - - @property - def auth(self): - """The authentication part in the URL if available, `None` - otherwise. - """ - return self._split_netloc()[0] - - @property - def username(self): - """The username if it was part of the URL, `None` otherwise. - This undergoes URL decoding and will always be a unicode string. - """ - rv = self._split_auth()[0] - if rv is not None: - return _url_unquote_legacy(rv) - - @property - def raw_username(self): - """The username if it was part of the URL, `None` otherwise. - Unlike :attr:`username` this one is not being decoded. - """ - return self._split_auth()[0] - - @property - def password(self): - """The password if it was part of the URL, `None` otherwise. - This undergoes URL decoding and will always be a unicode string. - """ - rv = self._split_auth()[1] - if rv is not None: - return _url_unquote_legacy(rv) - - @property - def raw_password(self): - """The password if it was part of the URL, `None` otherwise. - Unlike :attr:`password` this one is not being decoded. - """ - return self._split_auth()[1] - - def decode_query(self, *args, **kwargs): - """Decodes the query part of the URL. Ths is a shortcut for - calling :func:`url_decode` on the query argument. The arguments and - keyword arguments are forwarded to :func:`url_decode` unchanged. - """ - return url_decode(self.query, *args, **kwargs) - - def join(self, *args, **kwargs): - """Joins this URL with another one. This is just a convenience - function for calling into :meth:`url_join` and then parsing the - return value again. - """ - return url_parse(url_join(self, *args, **kwargs)) - - def to_url(self): - """Returns a URL string or bytes depending on the type of the - information stored. 
This is just a convenience function - for calling :meth:`url_unparse` for this URL. - """ - return url_unparse(self) - - def decode_netloc(self): - """Decodes the netloc part into a string.""" - rv = _decode_idna(self.host or "") - - if ":" in rv: - rv = "[%s]" % rv - port = self.port - if port is not None: - rv = "%s:%d" % (rv, port) - auth = ":".join( - filter( - None, - [ - _url_unquote_legacy(self.raw_username or "", "/:%@"), - _url_unquote_legacy(self.raw_password or "", "/:%@"), - ], - ) - ) - if auth: - rv = "%s@%s" % (auth, rv) - return rv - - def to_uri_tuple(self): - """Returns a :class:`BytesURL` tuple that holds a URI. This will - encode all the information in the URL properly to ASCII using the - rules a web browser would follow. - - It's usually more interesting to directly call :meth:`iri_to_uri` which - will return a string. - """ - return url_parse(iri_to_uri(self).encode("ascii")) - - def to_iri_tuple(self): - """Returns a :class:`URL` tuple that holds a IRI. This will try - to decode as much information as possible in the URL without - losing information similar to how a web browser does it for the - URL bar. - - It's usually more interesting to directly call :meth:`uri_to_iri` which - will return a string. - """ - return url_parse(uri_to_iri(self)) - - def get_file_location(self, pathformat=None): - """Returns a tuple with the location of the file in the form - ``(server, location)``. If the netloc is empty in the URL or - points to localhost, it's represented as ``None``. - - The `pathformat` by default is autodetection but needs to be set - when working with URLs of a specific system. The supported values - are ``'windows'`` when working with Windows or DOS paths and - ``'posix'`` when working with posix paths. - - If the URL does not point to a local file, the server and location - are both represented as ``None``. - - :param pathformat: The expected format of the path component. - Currently ``'windows'`` and ``'posix'`` are - supported. Defaults to ``None`` which is - autodetect. - """ - if self.scheme != "file": - return None, None - - path = url_unquote(self.path) - host = self.netloc or None - - if pathformat is None: - if os.name == "nt": - pathformat = "windows" - else: - pathformat = "posix" - - if pathformat == "windows": - if path[:1] == "/" and path[1:2].isalpha() and path[2:3] in "|:": - path = path[1:2] + ":" + path[3:] - windows_share = path[:3] in ("\\" * 3, "/" * 3) - import ntpath - - path = ntpath.normpath(path) - # Windows shared drives are represented as ``\\host\\directory``. - # That results in a URL like ``file://///host/directory``, and a - # path like ``///host/directory``. We need to special-case this - # because the path contains the hostname. 
- if windows_share and host is None: - parts = path.lstrip("\\").split("\\", 1) - if len(parts) == 2: - host, path = parts - else: - host = parts[0] - path = "" - elif pathformat == "posix": - import posixpath - - path = posixpath.normpath(path) - else: - raise TypeError("Invalid path format %s" % repr(pathformat)) - - if host in ("127.0.0.1", "::1", "localhost"): - host = None - - return host, path - - def _split_netloc(self): - if self._at in self.netloc: - return self.netloc.split(self._at, 1) - return None, self.netloc - - def _split_auth(self): - auth = self._split_netloc()[0] - if not auth: - return None, None - if self._colon not in auth: - return auth, None - return auth.split(self._colon, 1) - - def _split_host(self): - rv = self._split_netloc()[1] - if not rv: - return None, None - - if not rv.startswith(self._lbracket): - if self._colon in rv: - return rv.split(self._colon, 1) - return rv, None - - idx = rv.find(self._rbracket) - if idx < 0: - return rv, None - - host = rv[1:idx] - rest = rv[idx + 1 :] - if rest.startswith(self._colon): - return host, rest[1:] - return host, None - - -@implements_to_string -class URL(BaseURL): - """Represents a parsed URL. This behaves like a regular tuple but - also has some extra attributes that give further insight into the - URL. - """ - - __slots__ = () - _at = "@" - _colon = ":" - _lbracket = "[" - _rbracket = "]" - - def __str__(self): - return self.to_url() - - def encode_netloc(self): - """Encodes the netloc part to an ASCII safe URL as bytes.""" - rv = self.ascii_host or "" - if ":" in rv: - rv = "[%s]" % rv - port = self.port - if port is not None: - rv = "%s:%d" % (rv, port) - auth = ":".join( - filter( - None, - [ - url_quote(self.raw_username or "", "utf-8", "strict", "/:%"), - url_quote(self.raw_password or "", "utf-8", "strict", "/:%"), - ], - ) - ) - if auth: - rv = "%s@%s" % (auth, rv) - return to_native(rv) - - def encode(self, charset="utf-8", errors="replace"): - """Encodes the URL to a tuple made out of bytes. The charset is - only being used for the path, query and fragment. - """ - return BytesURL( - self.scheme.encode("ascii"), - self.encode_netloc(), - self.path.encode(charset, errors), - self.query.encode(charset, errors), - self.fragment.encode(charset, errors), - ) - - -class BytesURL(BaseURL): - """Represents a parsed URL in bytes.""" - - __slots__ = () - _at = b"@" - _colon = b":" - _lbracket = b"[" - _rbracket = b"]" - - def __str__(self): - return self.to_url().decode("utf-8", "replace") - - def encode_netloc(self): - """Returns the netloc unchanged as bytes.""" - return self.netloc - - def decode(self, charset="utf-8", errors="replace"): - """Decodes the URL to a tuple made out of strings. The charset is - only being used for the path, query and fragment. 
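A small sketch of the parsed-URL attributes described above; the host and credentials are made up::

    from werkzeug.urls import url_parse

    url = url_parse(u"http://user:secret@\u2603.net:8080/p\xe5th?q=1#top")
    assert url.host == u"\u2603.net"
    assert url.ascii_host == "xn--n3h.net"   # IDNA-encoded, suitable for sockets
    assert url.port == 8080
    assert url.username == u"user"
    assert url.fragment == u"top"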
- """ - return URL( - self.scheme.decode("ascii"), - self.decode_netloc(), - self.path.decode(charset, errors), - self.query.decode(charset, errors), - self.fragment.decode(charset, errors), - ) - - -_unquote_maps = {frozenset(): _hextobyte} - - -def _unquote_to_bytes(string, unsafe=""): - if isinstance(string, text_type): - string = string.encode("utf-8") - - if isinstance(unsafe, text_type): - unsafe = unsafe.encode("utf-8") - - unsafe = frozenset(bytearray(unsafe)) - groups = iter(string.split(b"%")) - result = bytearray(next(groups, b"")) - - try: - hex_to_byte = _unquote_maps[unsafe] - except KeyError: - hex_to_byte = _unquote_maps[unsafe] = { - h: b for h, b in _hextobyte.items() if b not in unsafe - } - - for group in groups: - code = group[:2] - - if code in hex_to_byte: - result.append(hex_to_byte[code]) - result.extend(group[2:]) - else: - result.append(37) # % - result.extend(group) - - return bytes(result) - - -def _url_encode_impl(obj, charset, encode_keys, sort, key): - iterable = iter_multi_items(obj) - if sort: - iterable = sorted(iterable, key=key) - for key, value in iterable: - if value is None: - continue - if not isinstance(key, bytes): - key = text_type(key).encode(charset) - if not isinstance(value, bytes): - value = text_type(value).encode(charset) - yield _fast_url_quote_plus(key) + "=" + _fast_url_quote_plus(value) - - -def _url_unquote_legacy(value, unsafe=""): - try: - return url_unquote(value, charset="utf-8", errors="strict", unsafe=unsafe) - except UnicodeError: - return url_unquote(value, charset="latin1", unsafe=unsafe) - - -def url_parse(url, scheme=None, allow_fragments=True): - """Parses a URL from a string into a :class:`URL` tuple. If the URL - is lacking a scheme it can be provided as second argument. Otherwise, - it is ignored. Optionally fragments can be stripped from the URL - by setting `allow_fragments` to `False`. - - The inverse of this function is :func:`url_unparse`. - - :param url: the URL to parse. - :param scheme: the default schema to use if the URL is schemaless. - :param allow_fragments: if set to `False` a fragment will be removed - from the URL. - """ - s = make_literal_wrapper(url) - is_text_based = isinstance(url, text_type) - - if scheme is None: - scheme = s("") - netloc = query = fragment = s("") - i = url.find(s(":")) - if i > 0 and _scheme_re.match(to_native(url[:i], errors="replace")): - # make sure "iri" is not actually a port number (in which case - # "scheme" is really part of the path) - rest = url[i + 1 :] - if not rest or any(c not in s("0123456789") for c in rest): - # not a port number - scheme, url = url[:i].lower(), rest - - if url[:2] == s("//"): - delim = len(url) - for c in s("/?#"): - wdelim = url.find(c, 2) - if wdelim >= 0: - delim = min(delim, wdelim) - netloc, url = url[2:delim], url[delim:] - if (s("[") in netloc and s("]") not in netloc) or ( - s("]") in netloc and s("[") not in netloc - ): - raise ValueError("Invalid IPv6 URL") - - if allow_fragments and s("#") in url: - url, fragment = url.split(s("#"), 1) - if s("?") in url: - url, query = url.split(s("?"), 1) - - result_type = URL if is_text_based else BytesURL - return result_type(scheme, netloc, url, query, fragment) - - -def _make_fast_url_quote(charset="utf-8", errors="strict", safe="/:", unsafe=""): - """Precompile the translation table for a URL encoding function. - - Unlike :func:`url_quote`, the generated function only takes the - string to quote. - - :param charset: The charset to encode the result with. 
- :param errors: How to handle encoding errors. - :param safe: An optional sequence of safe characters to never encode. - :param unsafe: An optional sequence of unsafe characters to always encode. - """ - if isinstance(safe, text_type): - safe = safe.encode(charset, errors) - - if isinstance(unsafe, text_type): - unsafe = unsafe.encode(charset, errors) - - safe = (frozenset(bytearray(safe)) | _always_safe) - frozenset(bytearray(unsafe)) - table = [chr(c) if c in safe else "%%%02X" % c for c in range(256)] - - if not PY2: - - def quote(string): - return "".join([table[c] for c in string]) - - else: - - def quote(string): - return "".join([table[c] for c in bytearray(string)]) - - return quote - - -_fast_url_quote = _make_fast_url_quote() -_fast_quote_plus = _make_fast_url_quote(safe=" ", unsafe="+") - - -def _fast_url_quote_plus(string): - return _fast_quote_plus(string).replace(" ", "+") - - -def url_quote(string, charset="utf-8", errors="strict", safe="/:", unsafe=""): - """URL encode a single string with a given encoding. - - :param s: the string to quote. - :param charset: the charset to be used. - :param safe: an optional sequence of safe characters. - :param unsafe: an optional sequence of unsafe characters. - - .. versionadded:: 0.9.2 - The `unsafe` parameter was added. - """ - if not isinstance(string, (text_type, bytes, bytearray)): - string = text_type(string) - if isinstance(string, text_type): - string = string.encode(charset, errors) - if isinstance(safe, text_type): - safe = safe.encode(charset, errors) - if isinstance(unsafe, text_type): - unsafe = unsafe.encode(charset, errors) - safe = (frozenset(bytearray(safe)) | _always_safe) - frozenset(bytearray(unsafe)) - rv = bytearray() - for char in bytearray(string): - if char in safe: - rv.append(char) - else: - rv.extend(_bytetohex[char]) - return to_native(bytes(rv)) - - -def url_quote_plus(string, charset="utf-8", errors="strict", safe=""): - """URL encode a single string with the given encoding and convert - whitespace to "+". - - :param s: The string to quote. - :param charset: The charset to be used. - :param safe: An optional sequence of safe characters. - """ - return url_quote(string, charset, errors, safe + " ", "+").replace(" ", "+") - - -def url_unparse(components): - """The reverse operation to :meth:`url_parse`. This accepts arbitrary - as well as :class:`URL` tuples and returns a URL as a string. - - :param components: the parsed URL as tuple which should be converted - into a URL string. - """ - scheme, netloc, path, query, fragment = normalize_string_tuple(components) - s = make_literal_wrapper(scheme) - url = s("") - - # We generally treat file:///x and file:/x the same which is also - # what browsers seem to do. This also allows us to ignore a schema - # register for netloc utilization or having to differenciate between - # empty and missing netloc. - if netloc or (scheme and path.startswith(s("/"))): - if path and path[:1] != s("/"): - path = s("/") + path - url = s("//") + (netloc or s("")) + path - elif path: - url += path - if scheme: - url = scheme + s(":") + url - if query: - url = url + s("?") + query - if fragment: - url = url + s("#") + fragment - return url - - -def url_unquote(string, charset="utf-8", errors="replace", unsafe=""): - """URL decode a single string with a given encoding. If the charset - is set to `None` no unicode decoding is performed and raw bytes - are returned. - - :param s: the string to unquote. - :param charset: the charset of the query string. 
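As a quick illustration of the quoting helpers above::

    from werkzeug.urls import url_quote, url_quote_plus

    assert url_quote(u"/docs/\xe9t\xe9") == "/docs/%C3%A9t%C3%A9"
    assert url_quote(u"a b", safe="") == "a%20b"
    assert url_quote_plus(u"a b&c") == "a+b%26c"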
If set to `None` - no unicode decoding will take place. - :param errors: the error handling for the charset decoding. - """ - rv = _unquote_to_bytes(string, unsafe) - if charset is not None: - rv = rv.decode(charset, errors) - return rv - - -def url_unquote_plus(s, charset="utf-8", errors="replace"): - """URL decode a single string with the given `charset` and decode "+" to - whitespace. - - Per default encoding errors are ignored. If you want a different behavior - you can set `errors` to ``'replace'`` or ``'strict'``. In strict mode a - :exc:`HTTPUnicodeError` is raised. - - :param s: The string to unquote. - :param charset: the charset of the query string. If set to `None` - no unicode decoding will take place. - :param errors: The error handling for the `charset` decoding. - """ - if isinstance(s, text_type): - s = s.replace(u"+", u" ") - else: - s = s.replace(b"+", b" ") - return url_unquote(s, charset, errors) - - -def url_fix(s, charset="utf-8"): - r"""Sometimes you get an URL by a user that just isn't a real URL because - it contains unsafe characters like ' ' and so on. This function can fix - some of the problems in a similar way browsers handle data entered by the - user: - - >>> url_fix(u'http://de.wikipedia.org/wiki/Elf (Begriffskl\xe4rung)') - 'http://de.wikipedia.org/wiki/Elf%20(Begriffskl%C3%A4rung)' - - :param s: the string with the URL to fix. - :param charset: The target charset for the URL if the url was given as - unicode string. - """ - # First step is to switch to unicode processing and to convert - # backslashes (which are invalid in URLs anyways) to slashes. This is - # consistent with what Chrome does. - s = to_unicode(s, charset, "replace").replace("\\", "/") - - # For the specific case that we look like a malformed windows URL - # we want to fix this up manually: - if s.startswith("file://") and s[7:8].isalpha() and s[8:10] in (":/", "|/"): - s = "file:///" + s[7:] - - url = url_parse(s) - path = url_quote(url.path, charset, safe="/%+$!*'(),") - qs = url_quote_plus(url.query, charset, safe=":&%=+$!*'(),") - anchor = url_quote_plus(url.fragment, charset, safe=":&%=+$!*'(),") - return to_native(url_unparse((url.scheme, url.encode_netloc(), path, qs, anchor))) - - -# not-unreserved characters remain quoted when unquoting to IRI -_to_iri_unsafe = "".join([chr(c) for c in range(128) if c not in _always_safe]) - - -def _codec_error_url_quote(e): - """Used in :func:`uri_to_iri` after unquoting to re-quote any - invalid bytes. - """ - out = _fast_url_quote(e.object[e.start : e.end]) - - if PY2: - out = out.decode("utf-8") - - return out, e.end - - -codecs.register_error("werkzeug.url_quote", _codec_error_url_quote) - - -def uri_to_iri(uri, charset="utf-8", errors="werkzeug.url_quote"): - """Convert a URI to an IRI. All valid UTF-8 characters are unquoted, - leaving all reserved and invalid characters quoted. If the URL has - a domain, it is decoded from Punycode. - - >>> uri_to_iri("http://xn--n3h.net/p%C3%A5th?q=%C3%A8ry%DF") - 'http://\\u2603.net/p\\xe5th?q=\\xe8ry%DF' - - :param uri: The URI to convert. - :param charset: The encoding to encode unquoted bytes with. - :param errors: Error handler to use during ``bytes.encode``. By - default, invalid bytes are left quoted. - - .. versionchanged:: 0.15 - All reserved and invalid characters remain quoted. Previously, - only some reserved characters were preserved, and invalid bytes - were replaced instead of left quoted. - - .. 
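And the corresponding decoding and fix-up helpers (the url_fix case repeats the docstring example)::

    from werkzeug.urls import url_unquote_plus, url_fix

    assert url_unquote_plus("a+b%26c") == u"a b&c"
    assert (
        url_fix(u"http://de.wikipedia.org/wiki/Elf (Begriffskl\xe4rung)")
        == "http://de.wikipedia.org/wiki/Elf%20(Begriffskl%C3%A4rung)"
    )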
versionadded:: 0.6 - """ - if isinstance(uri, tuple): - uri = url_unparse(uri) - - uri = url_parse(to_unicode(uri, charset)) - path = url_unquote(uri.path, charset, errors, _to_iri_unsafe) - query = url_unquote(uri.query, charset, errors, _to_iri_unsafe) - fragment = url_unquote(uri.fragment, charset, errors, _to_iri_unsafe) - return url_unparse((uri.scheme, uri.decode_netloc(), path, query, fragment)) - - -# reserved characters remain unquoted when quoting to URI -_to_uri_safe = ":/?#[]@!$&'()*+,;=%" - - -def iri_to_uri(iri, charset="utf-8", errors="strict", safe_conversion=False): - """Convert an IRI to a URI. All non-ASCII and unsafe characters are - quoted. If the URL has a domain, it is encoded to Punycode. - - >>> iri_to_uri('http://\\u2603.net/p\\xe5th?q=\\xe8ry%DF') - 'http://xn--n3h.net/p%C3%A5th?q=%C3%A8ry%DF' - - :param iri: The IRI to convert. - :param charset: The encoding of the IRI. - :param errors: Error handler to use during ``bytes.encode``. - :param safe_conversion: Return the URL unchanged if it only contains - ASCII characters and no whitespace. See the explanation below. - - There is a general problem with IRI conversion with some protocols - that are in violation of the URI specification. Consider the - following two IRIs:: - - magnet:?xt=uri:whatever - itms-services://?action=download-manifest - - After parsing, we don't know if the scheme requires the ``//``, - which is dropped if empty, but conveys different meanings in the - final URL if it's present or not. In this case, you can use - ``safe_conversion``, which will return the URL unchanged if it only - contains ASCII characters and no whitespace. This can result in a - URI with unquoted characters if it was not already quoted correctly, - but preserves the URL's semantics. Werkzeug uses this for the - ``Location`` header for redirects. - - .. versionchanged:: 0.15 - All reserved characters remain unquoted. Previously, only some - reserved characters were left unquoted. - - .. versionchanged:: 0.9.6 - The ``safe_conversion`` parameter was added. - - .. versionadded:: 0.6 - """ - if isinstance(iri, tuple): - iri = url_unparse(iri) - - if safe_conversion: - # If we're not sure if it's safe to convert the URL, and it only - # contains ASCII characters, return it unconverted. - try: - native_iri = to_native(iri) - ascii_iri = native_iri.encode("ascii") - - # Only return if it doesn't have whitespace. (Why?) - if len(ascii_iri.split()) == 1: - return native_iri - except UnicodeError: - pass - - iri = url_parse(to_unicode(iri, charset, errors)) - path = url_quote(iri.path, charset, errors, _to_uri_safe) - query = url_quote(iri.query, charset, errors, _to_uri_safe) - fragment = url_quote(iri.fragment, charset, errors, _to_uri_safe) - return to_native( - url_unparse((iri.scheme, iri.encode_netloc(), path, query, fragment)) - ) - - -def url_decode( - s, - charset="utf-8", - decode_keys=False, - include_empty=True, - errors="replace", - separator="&", - cls=None, -): - """ - Parse a querystring and return it as :class:`MultiDict`. There is a - difference in key decoding on different Python versions. On Python 3 - keys will always be fully decoded whereas on Python 2, keys will - remain bytestrings if they fit into ASCII. On 2.x keys can be forced - to be unicode by setting `decode_keys` to `True`. - - If the charset is set to `None` no unicode decoding will happen and - raw bytes will be returned. - - Per default a missing value for a key will default to an empty key. 
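The round trip from the docstring examples above::

    from werkzeug.urls import iri_to_uri, uri_to_iri

    uri = iri_to_uri(u"http://\u2603.net/p\xe5th?q=\xe8ry%DF")
    assert uri == "http://xn--n3h.net/p%C3%A5th?q=%C3%A8ry%DF"
    assert uri_to_iri(uri) == u"http://\u2603.net/p\xe5th?q=\xe8ry%DF"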
If - you don't want that behavior you can set `include_empty` to `False`. - - Per default encoding errors are ignored. If you want a different behavior - you can set `errors` to ``'replace'`` or ``'strict'``. In strict mode a - `HTTPUnicodeError` is raised. - - .. versionchanged:: 0.5 - In previous versions ";" and "&" could be used for url decoding. - This changed in 0.5 where only "&" is supported. If you want to - use ";" instead a different `separator` can be provided. - - The `cls` parameter was added. - - :param s: a string with the query string to decode. - :param charset: the charset of the query string. If set to `None` - no unicode decoding will take place. - :param decode_keys: Used on Python 2.x to control whether keys should - be forced to be unicode objects. If set to `True` - then keys will be unicode in all cases. Otherwise, - they remain `str` if they fit into ASCII. - :param include_empty: Set to `False` if you don't want empty values to - appear in the dict. - :param errors: the decoding error behavior. - :param separator: the pair separator to be used, defaults to ``&`` - :param cls: an optional dict class to use. If this is not specified - or `None` the default :class:`MultiDict` is used. - """ - if cls is None: - cls = MultiDict - if isinstance(s, text_type) and not isinstance(separator, text_type): - separator = separator.decode(charset or "ascii") - elif isinstance(s, bytes) and not isinstance(separator, bytes): - separator = separator.encode(charset or "ascii") - return cls( - _url_decode_impl( - s.split(separator), charset, decode_keys, include_empty, errors - ) - ) - - -def url_decode_stream( - stream, - charset="utf-8", - decode_keys=False, - include_empty=True, - errors="replace", - separator="&", - cls=None, - limit=None, - return_iterator=False, -): - """Works like :func:`url_decode` but decodes a stream. The behavior - of stream and limit follows functions like - :func:`~werkzeug.wsgi.make_line_iter`. The generator of pairs is - directly fed to the `cls` so you can consume the data while it's - parsed. - - .. versionadded:: 0.8 - - :param stream: a stream with the encoded querystring - :param charset: the charset of the query string. If set to `None` - no unicode decoding will take place. - :param decode_keys: Used on Python 2.x to control whether keys should - be forced to be unicode objects. If set to `True`, - keys will be unicode in all cases. Otherwise, they - remain `str` if they fit into ASCII. - :param include_empty: Set to `False` if you don't want empty values to - appear in the dict. - :param errors: the decoding error behavior. - :param separator: the pair separator to be used, defaults to ``&`` - :param cls: an optional dict class to use. If this is not specified - or `None` the default :class:`MultiDict` is used. - :param limit: the content length of the URL data. Not necessary if - a limited stream is provided. 
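For instance::

    from werkzeug.urls import url_decode

    args = url_decode("a=1&a=2&b&c=3")
    assert args.getlist("a") == ["1", "2"]
    assert args["b"] == ""   # a bare key is kept because include_empty=True
    assert args["c"] == "3"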
- :param return_iterator: if set to `True` the `cls` argument is ignored - and an iterator over all decoded pairs is - returned - """ - from .wsgi import make_chunk_iter - - pair_iter = make_chunk_iter(stream, separator, limit) - decoder = _url_decode_impl(pair_iter, charset, decode_keys, include_empty, errors) - - if return_iterator: - return decoder - - if cls is None: - cls = MultiDict - - return cls(decoder) - - -def _url_decode_impl(pair_iter, charset, decode_keys, include_empty, errors): - for pair in pair_iter: - if not pair: - continue - s = make_literal_wrapper(pair) - equal = s("=") - if equal in pair: - key, value = pair.split(equal, 1) - else: - if not include_empty: - continue - key = pair - value = s("") - key = url_unquote_plus(key, charset, errors) - if charset is not None and PY2 and not decode_keys: - key = try_coerce_native(key) - yield key, url_unquote_plus(value, charset, errors) - - -def url_encode( - obj, charset="utf-8", encode_keys=False, sort=False, key=None, separator=b"&" -): - """URL encode a dict/`MultiDict`. If a value is `None` it will not appear - in the result string. Per default only values are encoded into the target - charset strings. If `encode_keys` is set to ``True`` unicode keys are - supported too. - - If `sort` is set to `True` the items are sorted by `key` or the default - sorting algorithm. - - .. versionadded:: 0.5 - `sort`, `key`, and `separator` were added. - - :param obj: the object to encode into a query string. - :param charset: the charset of the query string. - :param encode_keys: set to `True` if you have unicode keys. (Ignored on - Python 3.x) - :param sort: set to `True` if you want parameters to be sorted by `key`. - :param separator: the separator to be used for the pairs. - :param key: an optional function to be used for sorting. For more details - check out the :func:`sorted` documentation. - """ - separator = to_native(separator, "ascii") - return separator.join(_url_encode_impl(obj, charset, encode_keys, sort, key)) - - -def url_encode_stream( - obj, - stream=None, - charset="utf-8", - encode_keys=False, - sort=False, - key=None, - separator=b"&", -): - """Like :meth:`url_encode` but writes the results to a stream - object. If the stream is `None` a generator over all encoded - pairs is returned. - - .. versionadded:: 0.8 - - :param obj: the object to encode into a query string. - :param stream: a stream to write the encoded object into or `None` if - an iterator over the encoded pairs should be returned. In - that case the separator argument is ignored. - :param charset: the charset of the query string. - :param encode_keys: set to `True` if you have unicode keys. (Ignored on - Python 3.x) - :param sort: set to `True` if you want parameters to be sorted by `key`. - :param separator: the separator to be used for the pairs. - :param key: an optional function to be used for sorting. For more details - check out the :func:`sorted` documentation. - """ - separator = to_native(separator, "ascii") - gen = _url_encode_impl(obj, charset, encode_keys, sort, key) - if stream is None: - return gen - for idx, chunk in enumerate(gen): - if idx: - stream.write(separator) - stream.write(chunk) - - -def url_join(base, url, allow_fragments=True): - """Join a base URL and a possibly relative URL to form an absolute - interpretation of the latter. - - :param base: the base URL for the join operation. - :param url: the URL to join. - :param allow_fragments: indicates whether fragments should be allowed. 
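And the inverse operation::

    from werkzeug.urls import url_encode

    assert url_encode({"q": u"caf\xe9", "page": 2}, sort=True) == "page=2&q=caf%C3%A9"
    assert url_encode({"a": 1, "b": None}) == "a=1"   # None values are skipped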
- """ - if isinstance(base, tuple): - base = url_unparse(base) - if isinstance(url, tuple): - url = url_unparse(url) - - base, url = normalize_string_tuple((base, url)) - s = make_literal_wrapper(base) - - if not base: - return url - if not url: - return base - - bscheme, bnetloc, bpath, bquery, bfragment = url_parse( - base, allow_fragments=allow_fragments - ) - scheme, netloc, path, query, fragment = url_parse(url, bscheme, allow_fragments) - if scheme != bscheme: - return url - if netloc: - return url_unparse((scheme, netloc, path, query, fragment)) - netloc = bnetloc - - if path[:1] == s("/"): - segments = path.split(s("/")) - elif not path: - segments = bpath.split(s("/")) - if not query: - query = bquery - else: - segments = bpath.split(s("/"))[:-1] + path.split(s("/")) - - # If the rightmost part is "./" we want to keep the slash but - # remove the dot. - if segments[-1] == s("."): - segments[-1] = s("") - - # Resolve ".." and "." - segments = [segment for segment in segments if segment != s(".")] - while 1: - i = 1 - n = len(segments) - 1 - while i < n: - if segments[i] == s("..") and segments[i - 1] not in (s(""), s("..")): - del segments[i - 1 : i + 1] - break - i += 1 - else: - break - - # Remove trailing ".." if the URL is absolute - unwanted_marker = [s(""), s("..")] - while segments[:2] == unwanted_marker: - del segments[1] - - path = s("/").join(segments) - return url_unparse((scheme, netloc, path, query, fragment)) - - -class Href(object): - """Implements a callable that constructs URLs with the given base. The - function can be called with any number of positional and keyword - arguments which than are used to assemble the URL. Works with URLs - and posix paths. - - Positional arguments are appended as individual segments to - the path of the URL: - - >>> href = Href('/foo') - >>> href('bar', 23) - '/foo/bar/23' - >>> href('foo', bar=23) - '/foo/foo?bar=23' - - If any of the arguments (positional or keyword) evaluates to `None` it - will be skipped. If no keyword arguments are given the last argument - can be a :class:`dict` or :class:`MultiDict` (or any other dict subclass), - otherwise the keyword arguments are used for the query parameters, cutting - off the first trailing underscore of the parameter name: - - >>> href(is_=42) - '/foo?is=42' - >>> href({'foo': 'bar'}) - '/foo?foo=bar' - - Combining of both methods is not allowed: - - >>> href({'foo': 'bar'}, bar=42) - Traceback (most recent call last): - ... - TypeError: keyword arguments and query-dicts can't be combined - - Accessing attributes on the href object creates a new href object with - the attribute name as prefix: - - >>> bar_href = href.bar - >>> bar_href("blub") - '/foo/bar/blub' - - If `sort` is set to `True` the items are sorted by `key` or the default - sorting algorithm: - - >>> href = Href("/", sort=True) - >>> href(a=1, b=2, c=3) - '/?a=1&b=2&c=3' - - .. versionadded:: 0.5 - `sort` and `key` were added. 
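An illustrative sketch of the relative-reference resolution performed by url_join above (assumes Werkzeug 0.15.x; the URLs are made up).

    # Illustrative only -- werkzeug.urls.url_join resolves "." and ".." segments.
    from werkzeug.urls import url_join

    assert url_join("http://example.com/a/b", "../c") == "http://example.com/c"
    assert url_join("http://example.com/a/", "d?x=1") == "http://example.com/a/d?x=1"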
- """ - - def __init__(self, base="./", charset="utf-8", sort=False, key=None): - if not base: - base = "./" - self.base = base - self.charset = charset - self.sort = sort - self.key = key - - def __getattr__(self, name): - if name[:2] == "__": - raise AttributeError(name) - base = self.base - if base[-1:] != "/": - base += "/" - return Href(url_join(base, name), self.charset, self.sort, self.key) - - def __call__(self, *path, **query): - if path and isinstance(path[-1], dict): - if query: - raise TypeError("keyword arguments and query-dicts can't be combined") - query, path = path[-1], path[:-1] - elif query: - query = dict( - [(k.endswith("_") and k[:-1] or k, v) for k, v in query.items()] - ) - path = "/".join( - [ - to_unicode(url_quote(x, self.charset), "ascii") - for x in path - if x is not None - ] - ).lstrip("/") - rv = self.base - if path: - if not rv.endswith("/"): - rv += "/" - rv = url_join(rv, "./" + path) - if query: - rv += "?" + to_unicode( - url_encode(query, self.charset, sort=self.sort, key=self.key), "ascii" - ) - return to_native(rv) diff --git a/venv/lib/python3.6/site-packages/werkzeug/useragents.py b/venv/lib/python3.6/site-packages/werkzeug/useragents.py deleted file mode 100644 index e265e09..0000000 --- a/venv/lib/python3.6/site-packages/werkzeug/useragents.py +++ /dev/null @@ -1,220 +0,0 @@ -# -*- coding: utf-8 -*- -""" - werkzeug.useragents - ~~~~~~~~~~~~~~~~~~~ - - This module provides a helper to inspect user agent strings. This module - is far from complete but should work for most of the currently available - browsers. - - - :copyright: 2007 Pallets - :license: BSD-3-Clause -""" -import re -import warnings - - -class UserAgentParser(object): - """A simple user agent parser. Used by the `UserAgent`.""" - - platforms = ( - ("cros", "chromeos"), - ("iphone|ios", "iphone"), - ("ipad", "ipad"), - (r"darwin|mac|os\s*x", "macos"), - ("win", "windows"), - (r"android", "android"), - ("netbsd", "netbsd"), - ("openbsd", "openbsd"), - ("freebsd", "freebsd"), - ("dragonfly", "dragonflybsd"), - ("(sun|i86)os", "solaris"), - (r"x11|lin(\b|ux)?", "linux"), - (r"nintendo\s+wii", "wii"), - ("irix", "irix"), - ("hp-?ux", "hpux"), - ("aix", "aix"), - ("sco|unix_sv", "sco"), - ("bsd", "bsd"), - ("amiga", "amiga"), - ("blackberry|playbook", "blackberry"), - ("symbian", "symbian"), - ) - browsers = ( - ("googlebot", "google"), - ("msnbot", "msn"), - ("yahoo", "yahoo"), - ("ask jeeves", "ask"), - (r"aol|america\s+online\s+browser", "aol"), - ("opera", "opera"), - ("edge", "edge"), - ("chrome|crios", "chrome"), - ("seamonkey", "seamonkey"), - ("firefox|firebird|phoenix|iceweasel", "firefox"), - ("galeon", "galeon"), - ("safari|version", "safari"), - ("webkit", "webkit"), - ("camino", "camino"), - ("konqueror", "konqueror"), - ("k-meleon", "kmeleon"), - ("netscape", "netscape"), - (r"msie|microsoft\s+internet\s+explorer|trident/.+? rv:", "msie"), - ("lynx", "lynx"), - ("links", "links"), - ("Baiduspider", "baidu"), - ("bingbot", "bing"), - ("mozilla", "mozilla"), - ) - - _browser_version_re = r"(?:%s)[/\sa-z(]*(\d+[.\da-z]+)?" 
- _language_re = re.compile( - r"(?:;\s*|\s+)(\b\w{2}\b(?:-\b\w{2}\b)?)\s*;|" - r"(?:\(|\[|;)\s*(\b\w{2}\b(?:-\b\w{2}\b)?)\s*(?:\]|\)|;)" - ) - - def __init__(self): - self.platforms = [(b, re.compile(a, re.I)) for a, b in self.platforms] - self.browsers = [ - (b, re.compile(self._browser_version_re % a, re.I)) - for a, b in self.browsers - ] - - def __call__(self, user_agent): - for platform, regex in self.platforms: # noqa: B007 - match = regex.search(user_agent) - if match is not None: - break - else: - platform = None - for browser, regex in self.browsers: # noqa: B007 - match = regex.search(user_agent) - if match is not None: - version = match.group(1) - break - else: - browser = version = None - match = self._language_re.search(user_agent) - if match is not None: - language = match.group(1) or match.group(2) - else: - language = None - return platform, browser, version, language - - -class UserAgent(object): - """Represents a user agent. Pass it a WSGI environment or a user agent - string and you can inspect some of the details from the user agent - string via the attributes. The following attributes exist: - - .. attribute:: string - - the raw user agent string - - .. attribute:: platform - - the browser platform. The following platforms are currently - recognized: - - - `aix` - - `amiga` - - `android` - - `blackberry` - - `bsd` - - `chromeos` - - `dragonflybsd` - - `freebsd` - - `hpux` - - `ipad` - - `iphone` - - `irix` - - `linux` - - `macos` - - `netbsd` - - `openbsd` - - `sco` - - `solaris` - - `symbian` - - `wii` - - `windows` - - .. attribute:: browser - - the name of the browser. The following browsers are currently - recognized: - - - `aol` * - - `ask` * - - `baidu` * - - `bing` * - - `camino` - - `chrome` - - `edge` - - `firefox` - - `galeon` - - `google` * - - `kmeleon` - - `konqueror` - - `links` - - `lynx` - - `mozilla` - - `msie` - - `msn` - - `netscape` - - `opera` - - `safari` - - `seamonkey` - - `webkit` - - `yahoo` * - - (Browsers marked with a star (``*``) are crawlers.) - - .. attribute:: version - - the version of the browser - - .. attribute:: language - - the language of the browser - """ - - _parser = UserAgentParser() - - def __init__(self, environ_or_string): - if isinstance(environ_or_string, dict): - environ_or_string = environ_or_string.get("HTTP_USER_AGENT", "") - self.string = environ_or_string - self.platform, self.browser, self.version, self.language = self._parser( - environ_or_string - ) - - def to_header(self): - return self.string - - def __str__(self): - return self.string - - def __nonzero__(self): - return bool(self.browser) - - __bool__ = __nonzero__ - - def __repr__(self): - return "<%s %r/%s>" % (self.__class__.__name__, self.browser, self.version) - - -# DEPRECATED -from .wrappers import UserAgentMixin as _UserAgentMixin - - -class UserAgentMixin(_UserAgentMixin): - @property - def user_agent(self, *args, **kwargs): - warnings.warn( - "'werkzeug.useragents.UserAgentMixin' should be imported" - " from 'werkzeug.wrappers.UserAgentMixin'. 
This old import" - " will be removed in version 1.0.", - DeprecationWarning, - stacklevel=2, - ) - return super(_UserAgentMixin, self).user_agent diff --git a/venv/lib/python3.6/site-packages/werkzeug/utils.py b/venv/lib/python3.6/site-packages/werkzeug/utils.py deleted file mode 100644 index 2062057..0000000 --- a/venv/lib/python3.6/site-packages/werkzeug/utils.py +++ /dev/null @@ -1,836 +0,0 @@ -# -*- coding: utf-8 -*- -""" - werkzeug.utils - ~~~~~~~~~~~~~~ - - This module implements various utilities for WSGI applications. Most of - them are used by the request and response wrappers but especially for - middleware development it makes sense to use them without the wrappers. - - :copyright: 2007 Pallets - :license: BSD-3-Clause -""" -import codecs -import os -import pkgutil -import re -import sys -import warnings - -from ._compat import iteritems -from ._compat import PY2 -from ._compat import reraise -from ._compat import string_types -from ._compat import text_type -from ._compat import unichr -from ._internal import _DictAccessorProperty -from ._internal import _missing -from ._internal import _parse_signature - -try: - from html.entities import name2codepoint -except ImportError: - from htmlentitydefs import name2codepoint - - -_format_re = re.compile(r"\$(?:(%s)|\{(%s)\})" % (("[a-zA-Z_][a-zA-Z0-9_]*",) * 2)) -_entity_re = re.compile(r"&([^;]+);") -_filename_ascii_strip_re = re.compile(r"[^A-Za-z0-9_.-]") -_windows_device_files = ( - "CON", - "AUX", - "COM1", - "COM2", - "COM3", - "COM4", - "LPT1", - "LPT2", - "LPT3", - "PRN", - "NUL", -) - - -class cached_property(property): - """A decorator that converts a function into a lazy property. The - function wrapped is called the first time to retrieve the result - and then that calculated result is used the next time you access - the value:: - - class Foo(object): - - @cached_property - def foo(self): - # calculate something important here - return 42 - - The class has to have a `__dict__` in order for this property to - work. - """ - - # implementation detail: A subclass of python's builtin property - # decorator, we override __get__ to check for a cached value. If one - # chooses to invoke __get__ by hand the property will still work as - # expected because the lookup logic is replicated in __get__ for - # manual invocation. - - def __init__(self, func, name=None, doc=None): - self.__name__ = name or func.__name__ - self.__module__ = func.__module__ - self.__doc__ = doc or func.__doc__ - self.func = func - - def __set__(self, obj, value): - obj.__dict__[self.__name__] = value - - def __get__(self, obj, type=None): - if obj is None: - return self - value = obj.__dict__.get(self.__name__, _missing) - if value is _missing: - value = self.func(obj) - obj.__dict__[self.__name__] = value - return value - - -class environ_property(_DictAccessorProperty): - """Maps request attributes to environment variables. This works not only - for the Werzeug request object, but also any other class with an - environ attribute: - - >>> class Test(object): - ... environ = {'key': 'value'} - ... test = environ_property('key') - >>> var = Test() - >>> var.test - 'value' - - If you pass it a second value it's used as default if the key does not - exist, the third one can be a converter that takes a value and converts - it. If it raises :exc:`ValueError` or :exc:`TypeError` the default value - is used. If no default value is provided `None` is used. - - Per default the property is read only. 
You have to explicitly enable it - by passing ``read_only=False`` to the constructor. - """ - - read_only = True - - def lookup(self, obj): - return obj.environ - - -class header_property(_DictAccessorProperty): - """Like `environ_property` but for headers.""" - - def lookup(self, obj): - return obj.headers - - -class HTMLBuilder(object): - """Helper object for HTML generation. - - Per default there are two instances of that class. The `html` one, and - the `xhtml` one for those two dialects. The class uses keyword parameters - and positional parameters to generate small snippets of HTML. - - Keyword parameters are converted to XML/SGML attributes, positional - arguments are used as children. Because Python accepts positional - arguments before keyword arguments it's a good idea to use a list with the - star-syntax for some children: - - >>> html.p(class_='foo', *[html.a('foo', href='foo.html'), ' ', - ... html.a('bar', href='bar.html')]) - u'

<p class="foo"><a href="foo.html">foo</a> <a href="bar.html">bar</a></p>' - - This class works around some browser limitations and can not be used for - arbitrary SGML/XML generation. For that purpose lxml and similar - libraries exist. - - Calling the builder escapes the string passed: - - >>> html.p(html("<foo>")) - u'<p>&lt;foo&gt;</p>
    ' - """ - - _entity_re = re.compile(r"&([^;]+);") - _entities = name2codepoint.copy() - _entities["apos"] = 39 - _empty_elements = { - "area", - "base", - "basefont", - "br", - "col", - "command", - "embed", - "frame", - "hr", - "img", - "input", - "keygen", - "isindex", - "link", - "meta", - "param", - "source", - "wbr", - } - _boolean_attributes = { - "selected", - "checked", - "compact", - "declare", - "defer", - "disabled", - "ismap", - "multiple", - "nohref", - "noresize", - "noshade", - "nowrap", - } - _plaintext_elements = {"textarea"} - _c_like_cdata = {"script", "style"} - - def __init__(self, dialect): - self._dialect = dialect - - def __call__(self, s): - return escape(s) - - def __getattr__(self, tag): - if tag[:2] == "__": - raise AttributeError(tag) - - def proxy(*children, **arguments): - buffer = "<" + tag - for key, value in iteritems(arguments): - if value is None: - continue - if key[-1] == "_": - key = key[:-1] - if key in self._boolean_attributes: - if not value: - continue - if self._dialect == "xhtml": - value = '="' + key + '"' - else: - value = "" - else: - value = '="' + escape(value) + '"' - buffer += " " + key + value - if not children and tag in self._empty_elements: - if self._dialect == "xhtml": - buffer += " />" - else: - buffer += ">" - return buffer - buffer += ">" - - children_as_string = "".join( - [text_type(x) for x in children if x is not None] - ) - - if children_as_string: - if tag in self._plaintext_elements: - children_as_string = escape(children_as_string) - elif tag in self._c_like_cdata and self._dialect == "xhtml": - children_as_string = ( - "/**/" - ) - buffer += children_as_string + "" - return buffer - - return proxy - - def __repr__(self): - return "<%s for %r>" % (self.__class__.__name__, self._dialect) - - -html = HTMLBuilder("html") -xhtml = HTMLBuilder("xhtml") - -# https://cgit.freedesktop.org/xdg/shared-mime-info/tree/freedesktop.org.xml.in -# https://www.iana.org/assignments/media-types/media-types.xhtml -# Types listed in the XDG mime info that have a charset in the IANA registration. -_charset_mimetypes = { - "application/ecmascript", - "application/javascript", - "application/sql", - "application/xml", - "application/xml-dtd", - "application/xml-external-parsed-entity", -} - - -def get_content_type(mimetype, charset): - """Returns the full content type string with charset for a mimetype. - - If the mimetype represents text, the charset parameter will be - appended, otherwise the mimetype is returned unchanged. - - :param mimetype: The mimetype to be used as content type. - :param charset: The charset to be appended for text mimetypes. - :return: The content type. - - .. verionchanged:: 0.15 - Any type that ends with ``+xml`` gets a charset, not just those - that start with ``application/``. Known text types such as - ``application/javascript`` are also given charsets. - """ - if ( - mimetype.startswith("text/") - or mimetype in _charset_mimetypes - or mimetype.endswith("+xml") - ): - mimetype += "; charset=" + charset - - return mimetype - - -def detect_utf_encoding(data): - """Detect which UTF encoding was used to encode the given bytes. - - The latest JSON standard (:rfc:`8259`) suggests that only UTF-8 is - accepted. Older documents allowed 8, 16, or 32. 16 and 32 can be big - or little endian. Some editors or libraries may prepend a BOM. - - :internal: - - :param data: Bytes in unknown UTF encoding. - :return: UTF encoding name - - .. 
versionadded:: 0.15 - """ - head = data[:4] - - if head[:3] == codecs.BOM_UTF8: - return "utf-8-sig" - - if b"\x00" not in head: - return "utf-8" - - if head in (codecs.BOM_UTF32_BE, codecs.BOM_UTF32_LE): - return "utf-32" - - if head[:2] in (codecs.BOM_UTF16_BE, codecs.BOM_UTF16_LE): - return "utf-16" - - if len(head) == 4: - if head[:3] == b"\x00\x00\x00": - return "utf-32-be" - - if head[::2] == b"\x00\x00": - return "utf-16-be" - - if head[1:] == b"\x00\x00\x00": - return "utf-32-le" - - if head[1::2] == b"\x00\x00": - return "utf-16-le" - - if len(head) == 2: - return "utf-16-be" if head.startswith(b"\x00") else "utf-16-le" - - return "utf-8" - - -def format_string(string, context): - """String-template format a string: - - >>> format_string('$foo and ${foo}s', dict(foo=42)) - '42 and 42s' - - This does not do any attribute lookup etc. For more advanced string - formattings have a look at the `werkzeug.template` module. - - :param string: the format string. - :param context: a dict with the variables to insert. - """ - - def lookup_arg(match): - x = context[match.group(1) or match.group(2)] - if not isinstance(x, string_types): - x = type(string)(x) - return x - - return _format_re.sub(lookup_arg, string) - - -def secure_filename(filename): - r"""Pass it a filename and it will return a secure version of it. This - filename can then safely be stored on a regular file system and passed - to :func:`os.path.join`. The filename returned is an ASCII only string - for maximum portability. - - On windows systems the function also makes sure that the file is not - named after one of the special device files. - - >>> secure_filename("My cool movie.mov") - 'My_cool_movie.mov' - >>> secure_filename("../../../etc/passwd") - 'etc_passwd' - >>> secure_filename(u'i contain cool \xfcml\xe4uts.txt') - 'i_contain_cool_umlauts.txt' - - The function might return an empty filename. It's your responsibility - to ensure that the filename is unique and that you abort or - generate a random filename if the function returned an empty one. - - .. versionadded:: 0.5 - - :param filename: the filename to secure - """ - if isinstance(filename, text_type): - from unicodedata import normalize - - filename = normalize("NFKD", filename).encode("ascii", "ignore") - if not PY2: - filename = filename.decode("ascii") - for sep in os.path.sep, os.path.altsep: - if sep: - filename = filename.replace(sep, " ") - filename = str(_filename_ascii_strip_re.sub("", "_".join(filename.split()))).strip( - "._" - ) - - # on nt a couple of special files are present in each folder. We - # have to ensure that the target file is not such a filename. In - # this case we prepend an underline - if ( - os.name == "nt" - and filename - and filename.split(".")[0].upper() in _windows_device_files - ): - filename = "_" + filename - - return filename - - -def escape(s, quote=None): - """Replace special characters "&", "<", ">" and (") to HTML-safe sequences. - - There is a special handling for `None` which escapes to an empty string. - - .. versionchanged:: 0.9 - `quote` is now implicitly on. - - :param s: the string to escape. - :param quote: ignored. 
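A small illustrative sketch of the escape helper documented above and its unescape counterpart defined just below it (assumes Werkzeug 0.15.x).

    # Illustrative only -- HTML escaping helpers from werkzeug.utils.
    from werkzeug.utils import escape, unescape

    safe = escape('<script>alert("hi")</script>')
    assert safe == '&lt;script&gt;alert(&quot;hi&quot;)&lt;/script&gt;'
    assert unescape(safe) == '<script>alert("hi")</script>'
    assert escape(None) == ""    # None escapes to an empty string, per the docstring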
- """ - if s is None: - return "" - elif hasattr(s, "__html__"): - return text_type(s.__html__()) - elif not isinstance(s, string_types): - s = text_type(s) - if quote is not None: - from warnings import warn - - warn( - "The 'quote' parameter is no longer used as of version 0.9" - " and will be removed in version 1.0.", - DeprecationWarning, - stacklevel=2, - ) - s = ( - s.replace("&", "&") - .replace("<", "<") - .replace(">", ">") - .replace('"', """) - ) - return s - - -def unescape(s): - """The reverse function of `escape`. This unescapes all the HTML - entities, not only the XML entities inserted by `escape`. - - :param s: the string to unescape. - """ - - def handle_match(m): - name = m.group(1) - if name in HTMLBuilder._entities: - return unichr(HTMLBuilder._entities[name]) - try: - if name[:2] in ("#x", "#X"): - return unichr(int(name[2:], 16)) - elif name.startswith("#"): - return unichr(int(name[1:])) - except ValueError: - pass - return u"" - - return _entity_re.sub(handle_match, s) - - -def redirect(location, code=302, Response=None): - """Returns a response object (a WSGI application) that, if called, - redirects the client to the target location. Supported codes are - 301, 302, 303, 305, 307, and 308. 300 is not supported because - it's not a real redirect and 304 because it's the answer for a - request with a request with defined If-Modified-Since headers. - - .. versionadded:: 0.6 - The location can now be a unicode string that is encoded using - the :func:`iri_to_uri` function. - - .. versionadded:: 0.10 - The class used for the Response object can now be passed in. - - :param location: the location the response should redirect to. - :param code: the redirect status code. defaults to 302. - :param class Response: a Response class to use when instantiating a - response. The default is :class:`werkzeug.wrappers.Response` if - unspecified. - """ - if Response is None: - from .wrappers import Response - - display_location = escape(location) - if isinstance(location, text_type): - # Safe conversion is necessary here as we might redirect - # to a broken URI scheme (for instance itms-services). - from .urls import iri_to_uri - - location = iri_to_uri(location, safe_conversion=True) - response = Response( - '\n' - "Redirecting...\n" - "

<h1>Redirecting...</h1>\n" - "<p>
    You should be redirected automatically to target URL: " - '%s. If not click the link.' - % (escape(location), display_location), - code, - mimetype="text/html", - ) - response.headers["Location"] = location - return response - - -def append_slash_redirect(environ, code=301): - """Redirects to the same URL but with a slash appended. The behavior - of this function is undefined if the path ends with a slash already. - - :param environ: the WSGI environment for the request that triggers - the redirect. - :param code: the status code for the redirect. - """ - new_path = environ["PATH_INFO"].strip("/") + "/" - query_string = environ.get("QUERY_STRING") - if query_string: - new_path += "?" + query_string - return redirect(new_path, code) - - -def import_string(import_name, silent=False): - """Imports an object based on a string. This is useful if you want to - use import paths as endpoints or something similar. An import path can - be specified either in dotted notation (``xml.sax.saxutils.escape``) - or with a colon as object delimiter (``xml.sax.saxutils:escape``). - - If `silent` is True the return value will be `None` if the import fails. - - :param import_name: the dotted name for the object to import. - :param silent: if set to `True` import errors are ignored and - `None` is returned instead. - :return: imported object - """ - # force the import name to automatically convert to strings - # __import__ is not able to handle unicode strings in the fromlist - # if the module is a package - import_name = str(import_name).replace(":", ".") - try: - try: - __import__(import_name) - except ImportError: - if "." not in import_name: - raise - else: - return sys.modules[import_name] - - module_name, obj_name = import_name.rsplit(".", 1) - module = __import__(module_name, globals(), locals(), [obj_name]) - try: - return getattr(module, obj_name) - except AttributeError as e: - raise ImportError(e) - - except ImportError as e: - if not silent: - reraise( - ImportStringError, ImportStringError(import_name, e), sys.exc_info()[2] - ) - - -def find_modules(import_path, include_packages=False, recursive=False): - """Finds all the modules below a package. This can be useful to - automatically import all views / controllers so that their metaclasses / - function decorators have a chance to register themselves on the - application. - - Packages are not returned unless `include_packages` is `True`. This can - also recursively list modules but in that case it will import all the - packages to get the correct load path of that module. - - :param import_path: the dotted name for the package to find child modules. - :param include_packages: set to `True` if packages should be returned, too. - :param recursive: set to `True` if recursion should happen. - :return: generator - """ - module = import_string(import_path) - path = getattr(module, "__path__", None) - if path is None: - raise ValueError("%r is not a package" % import_path) - basename = module.__name__ + "." - for _importer, modname, ispkg in pkgutil.iter_modules(path): - modname = basename + modname - if ispkg: - if include_packages: - yield modname - if recursive: - for item in find_modules(modname, include_packages, True): - yield item - else: - yield modname - - -def validate_arguments(func, args, kwargs, drop_extra=True): - """Checks if the function accepts the arguments and keyword arguments. 
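A minimal illustrative sketch of import_string as documented above (assumes Werkzeug 0.15.x; the import targets are standard-library names chosen only for the example).

    # Illustrative only -- dotted and colon notation resolve to the same object.
    from werkzeug.utils import import_string

    escape_fn = import_string("xml.sax.saxutils:escape")
    assert escape_fn is import_string("xml.sax.saxutils.escape")
    assert import_string("no.such.module", silent=True) is None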
- Returns a new ``(args, kwargs)`` tuple that can safely be passed to - the function without causing a `TypeError` because the function signature - is incompatible. If `drop_extra` is set to `True` (which is the default) - any extra positional or keyword arguments are dropped automatically. - - The exception raised provides three attributes: - - `missing` - A set of argument names that the function expected but where - missing. - - `extra` - A dict of keyword arguments that the function can not handle but - where provided. - - `extra_positional` - A list of values that where given by positional argument but the - function cannot accept. - - This can be useful for decorators that forward user submitted data to - a view function:: - - from werkzeug.utils import ArgumentValidationError, validate_arguments - - def sanitize(f): - def proxy(request): - data = request.values.to_dict() - try: - args, kwargs = validate_arguments(f, (request,), data) - except ArgumentValidationError: - raise BadRequest('The browser failed to transmit all ' - 'the data expected.') - return f(*args, **kwargs) - return proxy - - :param func: the function the validation is performed against. - :param args: a tuple of positional arguments. - :param kwargs: a dict of keyword arguments. - :param drop_extra: set to `False` if you don't want extra arguments - to be silently dropped. - :return: tuple in the form ``(args, kwargs)``. - """ - parser = _parse_signature(func) - args, kwargs, missing, extra, extra_positional = parser(args, kwargs)[:5] - if missing: - raise ArgumentValidationError(tuple(missing)) - elif (extra or extra_positional) and not drop_extra: - raise ArgumentValidationError(None, extra, extra_positional) - return tuple(args), kwargs - - -def bind_arguments(func, args, kwargs): - """Bind the arguments provided into a dict. When passed a function, - a tuple of arguments and a dict of keyword arguments `bind_arguments` - returns a dict of names as the function would see it. This can be useful - to implement a cache decorator that uses the function arguments to build - the cache key based on the values of the arguments. - - :param func: the function the arguments should be bound for. - :param args: tuple of positional arguments. - :param kwargs: a dict of keyword arguments. - :return: a :class:`dict` of bound keyword arguments. - """ - ( - args, - kwargs, - missing, - extra, - extra_positional, - arg_spec, - vararg_var, - kwarg_var, - ) = _parse_signature(func)(args, kwargs) - values = {} - for (name, _has_default, _default), value in zip(arg_spec, args): - values[name] = value - if vararg_var is not None: - values[vararg_var] = tuple(extra_positional) - elif extra_positional: - raise TypeError("too many positional arguments") - if kwarg_var is not None: - multikw = set(extra) & set([x[0] for x in arg_spec]) - if multikw: - raise TypeError( - "got multiple values for keyword argument " + repr(next(iter(multikw))) - ) - values[kwarg_var] = extra - elif extra: - raise TypeError("got unexpected keyword argument " + repr(next(iter(extra)))) - return values - - -class ArgumentValidationError(ValueError): - - """Raised if :func:`validate_arguments` fails to validate""" - - def __init__(self, missing=None, extra=None, extra_positional=None): - self.missing = set(missing or ()) - self.extra = extra or {} - self.extra_positional = extra_positional or [] - ValueError.__init__( - self, - "function arguments invalid. 
(%d missing, %d additional)" - % (len(self.missing), len(self.extra) + len(self.extra_positional)), - ) - - -class ImportStringError(ImportError): - """Provides information about a failed :func:`import_string` attempt.""" - - #: String in dotted notation that failed to be imported. - import_name = None - #: Wrapped exception. - exception = None - - def __init__(self, import_name, exception): - self.import_name = import_name - self.exception = exception - - msg = ( - "import_string() failed for %r. Possible reasons are:\n\n" - "- missing __init__.py in a package;\n" - "- package or module path not included in sys.path;\n" - "- duplicated package or module name taking precedence in " - "sys.path;\n" - "- missing module, class, function or variable;\n\n" - "Debugged import:\n\n%s\n\n" - "Original exception:\n\n%s: %s" - ) - - name = "" - tracked = [] - for part in import_name.replace(":", ".").split("."): - name += (name and ".") + part - imported = import_string(name, silent=True) - if imported: - tracked.append((name, getattr(imported, "__file__", None))) - else: - track = ["- %r found in %r." % (n, i) for n, i in tracked] - track.append("- %r not found." % name) - msg = msg % ( - import_name, - "\n".join(track), - exception.__class__.__name__, - str(exception), - ) - break - - ImportError.__init__(self, msg) - - def __repr__(self): - return "<%s(%r, %r)>" % ( - self.__class__.__name__, - self.import_name, - self.exception, - ) - - -# DEPRECATED -from .datastructures import CombinedMultiDict as _CombinedMultiDict -from .datastructures import EnvironHeaders as _EnvironHeaders -from .datastructures import Headers as _Headers -from .datastructures import MultiDict as _MultiDict -from .http import dump_cookie as _dump_cookie -from .http import parse_cookie as _parse_cookie - - -class MultiDict(_MultiDict): - def __init__(self, *args, **kwargs): - warnings.warn( - "'werkzeug.utils.MultiDict' has moved to 'werkzeug" - ".datastructures.MultiDict' as of version 0.5. This old" - " import will be removed in version 1.0.", - DeprecationWarning, - stacklevel=2, - ) - super(MultiDict, self).__init__(*args, **kwargs) - - -class CombinedMultiDict(_CombinedMultiDict): - def __init__(self, *args, **kwargs): - warnings.warn( - "'werkzeug.utils.CombinedMultiDict' has moved to 'werkzeug" - ".datastructures.CombinedMultiDict' as of version 0.5. This" - " old import will be removed in version 1.0.", - DeprecationWarning, - stacklevel=2, - ) - super(CombinedMultiDict, self).__init__(*args, **kwargs) - - -class Headers(_Headers): - def __init__(self, *args, **kwargs): - warnings.warn( - "'werkzeug.utils.Headers' has moved to 'werkzeug" - ".datastructures.Headers' as of version 0.5. This old" - " import will be removed in version 1.0.", - DeprecationWarning, - stacklevel=2, - ) - super(Headers, self).__init__(*args, **kwargs) - - -class EnvironHeaders(_EnvironHeaders): - def __init__(self, *args, **kwargs): - warnings.warn( - "'werkzeug.utils.EnvironHeaders' has moved to 'werkzeug" - ".datastructures.EnvironHeaders' as of version 0.5. This" - " old import will be removed in version 1.0.", - DeprecationWarning, - stacklevel=2, - ) - super(EnvironHeaders, self).__init__(*args, **kwargs) - - -def parse_cookie(*args, **kwargs): - warnings.warn( - "'werkzeug.utils.parse_cookie' as moved to 'werkzeug.http" - ".parse_cookie' as of version 0.5. 
This old import will be" - " removed in version 1.0.", - DeprecationWarning, - stacklevel=2, - ) - return _parse_cookie(*args, **kwargs) - - -def dump_cookie(*args, **kwargs): - warnings.warn( - "'werkzeug.utils.dump_cookie' as moved to 'werkzeug.http" - ".dump_cookie' as of version 0.5. This old import will be" - " removed in version 1.0.", - DeprecationWarning, - stacklevel=2, - ) - return _dump_cookie(*args, **kwargs) diff --git a/venv/lib/python3.6/site-packages/werkzeug/wrappers/__init__.py b/venv/lib/python3.6/site-packages/werkzeug/wrappers/__init__.py deleted file mode 100644 index 56c764a..0000000 --- a/venv/lib/python3.6/site-packages/werkzeug/wrappers/__init__.py +++ /dev/null @@ -1,36 +0,0 @@ -""" -werkzeug.wrappers -~~~~~~~~~~~~~~~~~ - -The wrappers are simple request and response objects which you can -subclass to do whatever you want them to do. The request object contains -the information transmitted by the client (webbrowser) and the response -object contains all the information sent back to the browser. - -An important detail is that the request object is created with the WSGI -environ and will act as high-level proxy whereas the response object is an -actual WSGI application. - -Like everything else in Werkzeug these objects will work correctly with -unicode data. Incoming form data parsed by the response object will be -decoded into an unicode object if possible and if it makes sense. - -:copyright: 2007 Pallets -:license: BSD-3-Clause -""" -from .accept import AcceptMixin -from .auth import AuthorizationMixin -from .auth import WWWAuthenticateMixin -from .base_request import BaseRequest -from .base_response import BaseResponse -from .common_descriptors import CommonRequestDescriptorsMixin -from .common_descriptors import CommonResponseDescriptorsMixin -from .etag import ETagRequestMixin -from .etag import ETagResponseMixin -from .request import PlainRequest -from .request import Request -from .request import StreamOnlyMixin -from .response import Response -from .response import ResponseStream -from .response import ResponseStreamMixin -from .user_agent import UserAgentMixin diff --git a/venv/lib/python3.6/site-packages/werkzeug/wrappers/accept.py b/venv/lib/python3.6/site-packages/werkzeug/wrappers/accept.py deleted file mode 100644 index d0620a0..0000000 --- a/venv/lib/python3.6/site-packages/werkzeug/wrappers/accept.py +++ /dev/null @@ -1,50 +0,0 @@ -from ..datastructures import CharsetAccept -from ..datastructures import LanguageAccept -from ..datastructures import MIMEAccept -from ..http import parse_accept_header -from ..utils import cached_property - - -class AcceptMixin(object): - """A mixin for classes with an :attr:`~BaseResponse.environ` attribute - to get all the HTTP accept headers as - :class:`~werkzeug.datastructures.Accept` objects (or subclasses - thereof). - """ - - @cached_property - def accept_mimetypes(self): - """List of mimetypes this client supports as - :class:`~werkzeug.datastructures.MIMEAccept` object. - """ - return parse_accept_header(self.environ.get("HTTP_ACCEPT"), MIMEAccept) - - @cached_property - def accept_charsets(self): - """List of charsets this client supports as - :class:`~werkzeug.datastructures.CharsetAccept` object. - """ - return parse_accept_header( - self.environ.get("HTTP_ACCEPT_CHARSET"), CharsetAccept - ) - - @cached_property - def accept_encodings(self): - """List of encodings this client accepts. Encodings in a HTTP term - are compression encodings such as gzip. For charsets have a look at - :attr:`accept_charset`. 
- """ - return parse_accept_header(self.environ.get("HTTP_ACCEPT_ENCODING")) - - @cached_property - def accept_languages(self): - """List of languages this client accepts as - :class:`~werkzeug.datastructures.LanguageAccept` object. - - .. versionchanged 0.5 - In previous versions this was a regular - :class:`~werkzeug.datastructures.Accept` object. - """ - return parse_accept_header( - self.environ.get("HTTP_ACCEPT_LANGUAGE"), LanguageAccept - ) diff --git a/venv/lib/python3.6/site-packages/werkzeug/wrappers/auth.py b/venv/lib/python3.6/site-packages/werkzeug/wrappers/auth.py deleted file mode 100644 index 714f755..0000000 --- a/venv/lib/python3.6/site-packages/werkzeug/wrappers/auth.py +++ /dev/null @@ -1,33 +0,0 @@ -from ..http import parse_authorization_header -from ..http import parse_www_authenticate_header -from ..utils import cached_property - - -class AuthorizationMixin(object): - """Adds an :attr:`authorization` property that represents the parsed - value of the `Authorization` header as - :class:`~werkzeug.datastructures.Authorization` object. - """ - - @cached_property - def authorization(self): - """The `Authorization` object in parsed form.""" - header = self.environ.get("HTTP_AUTHORIZATION") - return parse_authorization_header(header) - - -class WWWAuthenticateMixin(object): - """Adds a :attr:`www_authenticate` property to a response object.""" - - @property - def www_authenticate(self): - """The `WWW-Authenticate` header in a parsed form.""" - - def on_update(www_auth): - if not www_auth and "www-authenticate" in self.headers: - del self.headers["www-authenticate"] - elif www_auth: - self.headers["WWW-Authenticate"] = www_auth.to_header() - - header = self.headers.get("www-authenticate") - return parse_www_authenticate_header(header, on_update) diff --git a/venv/lib/python3.6/site-packages/werkzeug/wrappers/base_request.py b/venv/lib/python3.6/site-packages/werkzeug/wrappers/base_request.py deleted file mode 100644 index 05a634e..0000000 --- a/venv/lib/python3.6/site-packages/werkzeug/wrappers/base_request.py +++ /dev/null @@ -1,695 +0,0 @@ -import warnings -from functools import update_wrapper -from io import BytesIO - -from .._compat import to_native -from .._compat import to_unicode -from .._compat import wsgi_decoding_dance -from .._compat import wsgi_get_bytes -from ..datastructures import CombinedMultiDict -from ..datastructures import EnvironHeaders -from ..datastructures import ImmutableList -from ..datastructures import ImmutableMultiDict -from ..datastructures import ImmutableTypeConversionDict -from ..datastructures import iter_multi_items -from ..datastructures import MultiDict -from ..formparser import default_stream_factory -from ..formparser import FormDataParser -from ..http import parse_cookie -from ..http import parse_options_header -from ..urls import url_decode -from ..utils import cached_property -from ..utils import environ_property -from ..wsgi import get_content_length -from ..wsgi import get_current_url -from ..wsgi import get_host -from ..wsgi import get_input_stream - - -class BaseRequest(object): - """Very basic request object. This does not implement advanced stuff like - entity tag parsing or cache controls. The request object is created with - the WSGI environment as first argument and will add itself to the WSGI - environment as ``'werkzeug.request'`` unless it's created with - `populate_request` set to False. 
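To illustrate the AuthorizationMixin above, a sketch using the full Request wrapper, which mixes it in (assumes Werkzeug 0.15.x; the credentials are made up).

    # Illustrative only -- parsing a Basic Authorization header through the mixin.
    from werkzeug.wrappers import Request

    # "dXNlcjpzZWNyZXQ=" is base64 for the hypothetical credentials "user:secret".
    req = Request.from_values(headers={"Authorization": "Basic dXNlcjpzZWNyZXQ="})
    auth = req.authorization
    assert auth.username == "user" and auth.password == "secret"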
- - There are a couple of mixins available that add additional functionality - to the request object, there is also a class called `Request` which - subclasses `BaseRequest` and all the important mixins. - - It's a good idea to create a custom subclass of the :class:`BaseRequest` - and add missing functionality either via mixins or direct implementation. - Here an example for such subclasses:: - - from werkzeug.wrappers import BaseRequest, ETagRequestMixin - - class Request(BaseRequest, ETagRequestMixin): - pass - - Request objects are **read only**. As of 0.5 modifications are not - allowed in any place. Unlike the lower level parsing functions the - request object will use immutable objects everywhere possible. - - Per default the request object will assume all the text data is `utf-8` - encoded. Please refer to :doc:`the unicode chapter ` for more - details about customizing the behavior. - - Per default the request object will be added to the WSGI - environment as `werkzeug.request` to support the debugging system. - If you don't want that, set `populate_request` to `False`. - - If `shallow` is `True` the environment is initialized as shallow - object around the environ. Every operation that would modify the - environ in any way (such as consuming form data) raises an exception - unless the `shallow` attribute is explicitly set to `False`. This - is useful for middlewares where you don't want to consume the form - data by accident. A shallow request is not populated to the WSGI - environment. - - .. versionchanged:: 0.5 - read-only mode was enforced by using immutables classes for all - data. - """ - - #: the charset for the request, defaults to utf-8 - charset = "utf-8" - - #: the error handling procedure for errors, defaults to 'replace' - encoding_errors = "replace" - - #: the maximum content length. This is forwarded to the form data - #: parsing function (:func:`parse_form_data`). When set and the - #: :attr:`form` or :attr:`files` attribute is accessed and the - #: parsing fails because more than the specified value is transmitted - #: a :exc:`~werkzeug.exceptions.RequestEntityTooLarge` exception is raised. - #: - #: Have a look at :ref:`dealing-with-request-data` for more details. - #: - #: .. versionadded:: 0.5 - max_content_length = None - - #: the maximum form field size. This is forwarded to the form data - #: parsing function (:func:`parse_form_data`). When set and the - #: :attr:`form` or :attr:`files` attribute is accessed and the - #: data in memory for post data is longer than the specified value a - #: :exc:`~werkzeug.exceptions.RequestEntityTooLarge` exception is raised. - #: - #: Have a look at :ref:`dealing-with-request-data` for more details. - #: - #: .. versionadded:: 0.5 - max_form_memory_size = None - - #: the class to use for `args` and `form`. The default is an - #: :class:`~werkzeug.datastructures.ImmutableMultiDict` which supports - #: multiple values per key. alternatively it makes sense to use an - #: :class:`~werkzeug.datastructures.ImmutableOrderedMultiDict` which - #: preserves order or a :class:`~werkzeug.datastructures.ImmutableDict` - #: which is the fastest but only remembers the last key. It is also - #: possible to use mutable structures, but this is not recommended. - #: - #: .. versionadded:: 0.6 - parameter_storage_class = ImmutableMultiDict - - #: the type to be used for list values from the incoming WSGI environment. - #: By default an :class:`~werkzeug.datastructures.ImmutableList` is used - #: (for example for :attr:`access_list`). 
- #: - #: .. versionadded:: 0.6 - list_storage_class = ImmutableList - - #: the type to be used for dict values from the incoming WSGI environment. - #: By default an - #: :class:`~werkzeug.datastructures.ImmutableTypeConversionDict` is used - #: (for example for :attr:`cookies`). - #: - #: .. versionadded:: 0.6 - dict_storage_class = ImmutableTypeConversionDict - - #: The form data parser that shoud be used. Can be replaced to customize - #: the form date parsing. - form_data_parser_class = FormDataParser - - #: Optionally a list of hosts that is trusted by this request. By default - #: all hosts are trusted which means that whatever the client sends the - #: host is will be accepted. - #: - #: Because `Host` and `X-Forwarded-Host` headers can be set to any value by - #: a malicious client, it is recommended to either set this property or - #: implement similar validation in the proxy (if application is being run - #: behind one). - #: - #: .. versionadded:: 0.9 - trusted_hosts = None - - #: Indicates whether the data descriptor should be allowed to read and - #: buffer up the input stream. By default it's enabled. - #: - #: .. versionadded:: 0.9 - disable_data_descriptor = False - - def __init__(self, environ, populate_request=True, shallow=False): - self.environ = environ - if populate_request and not shallow: - self.environ["werkzeug.request"] = self - self.shallow = shallow - - def __repr__(self): - # make sure the __repr__ even works if the request was created - # from an invalid WSGI environment. If we display the request - # in a debug session we don't want the repr to blow up. - args = [] - try: - args.append("'%s'" % to_native(self.url, self.url_charset)) - args.append("[%s]" % self.method) - except Exception: - args.append("(invalid WSGI environ)") - - return "<%s %s>" % (self.__class__.__name__, " ".join(args)) - - @property - def url_charset(self): - """The charset that is assumed for URLs. Defaults to the value - of :attr:`charset`. - - .. versionadded:: 0.6 - """ - return self.charset - - @classmethod - def from_values(cls, *args, **kwargs): - """Create a new request object based on the values provided. If - environ is given missing values are filled from there. This method is - useful for small scripts when you need to simulate a request from an URL. - Do not use this method for unittesting, there is a full featured client - object (:class:`Client`) that allows to create multipart requests, - support for cookies etc. - - This accepts the same options as the - :class:`~werkzeug.test.EnvironBuilder`. - - .. versionchanged:: 0.5 - This method now accepts the same arguments as - :class:`~werkzeug.test.EnvironBuilder`. Because of this the - `environ` parameter is now called `environ_overrides`. - - :return: request object - """ - from ..test import EnvironBuilder - - charset = kwargs.pop("charset", cls.charset) - kwargs["charset"] = charset - builder = EnvironBuilder(*args, **kwargs) - try: - return builder.get_request(cls) - finally: - builder.close() - - @classmethod - def application(cls, f): - """Decorate a function as responder that accepts the request as - the last argument. This works like the :func:`responder` - decorator but the function is passed the request object as the - last argument and the request object will be closed - automatically:: - - @Request.application - def my_wsgi_app(request): - return Response('Hello World!') - - As of Werkzeug 0.14 HTTP exceptions are automatically caught and - converted to responses instead of failing. 
- - :param f: the WSGI callable to decorate - :return: a new WSGI callable - """ - #: return a callable that wraps the -2nd argument with the request - #: and calls the function with all the arguments up to that one and - #: the request. The return value is then called with the latest - #: two arguments. This makes it possible to use this decorator for - #: both standalone WSGI functions as well as bound methods and - #: partially applied functions. - from ..exceptions import HTTPException - - def application(*args): - request = cls(args[-2]) - with request: - try: - resp = f(*args[:-2] + (request,)) - except HTTPException as e: - resp = e.get_response(args[-2]) - return resp(*args[-2:]) - - return update_wrapper(application, f) - - def _get_file_stream( - self, total_content_length, content_type, filename=None, content_length=None - ): - """Called to get a stream for the file upload. - - This must provide a file-like class with `read()`, `readline()` - and `seek()` methods that is both writeable and readable. - - The default implementation returns a temporary file if the total - content length is higher than 500KB. Because many browsers do not - provide a content length for the files only the total content - length matters. - - :param total_content_length: the total content length of all the - data in the request combined. This value - is guaranteed to be there. - :param content_type: the mimetype of the uploaded file. - :param filename: the filename of the uploaded file. May be `None`. - :param content_length: the length of this file. This value is usually - not provided because webbrowsers do not provide - this value. - """ - return default_stream_factory( - total_content_length=total_content_length, - filename=filename, - content_type=content_type, - content_length=content_length, - ) - - @property - def want_form_data_parsed(self): - """Returns True if the request method carries content. As of - Werkzeug 0.9 this will be the case if a content type is transmitted. - - .. versionadded:: 0.8 - """ - return bool(self.environ.get("CONTENT_TYPE")) - - def make_form_data_parser(self): - """Creates the form data parser. Instantiates the - :attr:`form_data_parser_class` with some parameters. - - .. versionadded:: 0.8 - """ - return self.form_data_parser_class( - self._get_file_stream, - self.charset, - self.encoding_errors, - self.max_form_memory_size, - self.max_content_length, - self.parameter_storage_class, - ) - - def _load_form_data(self): - """Method used internally to retrieve submitted data. After calling - this sets `form` and `files` on the request object to multi dicts - filled with the incoming form data. As a matter of fact the input - stream will be empty afterwards. You can also call this method to - force the parsing of the form data. - - .. versionadded:: 0.8 - """ - # abort early if we have already consumed the stream - if "form" in self.__dict__: - return - - _assert_not_shallow(self) - - if self.want_form_data_parsed: - content_type = self.environ.get("CONTENT_TYPE", "") - content_length = get_content_length(self.environ) - mimetype, options = parse_options_header(content_type) - parser = self.make_form_data_parser() - data = parser.parse( - self._get_stream_for_parsing(), mimetype, content_length, options - ) - else: - data = ( - self.stream, - self.parameter_storage_class(), - self.parameter_storage_class(), - ) - - # inject the values into the instance dict so that we bypass - # our cached_property non-data descriptor. 
- d = self.__dict__ - d["stream"], d["form"], d["files"] = data - - def _get_stream_for_parsing(self): - """This is the same as accessing :attr:`stream` with the difference - that if it finds cached data from calling :meth:`get_data` first it - will create a new stream out of the cached data. - - .. versionadded:: 0.9.3 - """ - cached_data = getattr(self, "_cached_data", None) - if cached_data is not None: - return BytesIO(cached_data) - return self.stream - - def close(self): - """Closes associated resources of this request object. This - closes all file handles explicitly. You can also use the request - object in a with statement which will automatically close it. - - .. versionadded:: 0.9 - """ - files = self.__dict__.get("files") - for _key, value in iter_multi_items(files or ()): - value.close() - - def __enter__(self): - return self - - def __exit__(self, exc_type, exc_value, tb): - self.close() - - @cached_property - def stream(self): - """ - If the incoming form data was not encoded with a known mimetype - the data is stored unmodified in this stream for consumption. Most - of the time it is a better idea to use :attr:`data` which will give - you that data as a string. The stream only returns the data once. - - Unlike :attr:`input_stream` this stream is properly guarded that you - can't accidentally read past the length of the input. Werkzeug will - internally always refer to this stream to read data which makes it - possible to wrap this object with a stream that does filtering. - - .. versionchanged:: 0.9 - This stream is now always available but might be consumed by the - form parser later on. Previously the stream was only set if no - parsing happened. - """ - _assert_not_shallow(self) - return get_input_stream(self.environ) - - input_stream = environ_property( - "wsgi.input", - """The WSGI input stream. - - In general it's a bad idea to use this one because you can - easily read past the boundary. Use the :attr:`stream` - instead.""", - ) - - @cached_property - def args(self): - """The parsed URL parameters (the part in the URL after the question - mark). - - By default an - :class:`~werkzeug.datastructures.ImmutableMultiDict` - is returned from this function. This can be changed by setting - :attr:`parameter_storage_class` to a different type. This might - be necessary if the order of the form data is important. - """ - return url_decode( - wsgi_get_bytes(self.environ.get("QUERY_STRING", "")), - self.url_charset, - errors=self.encoding_errors, - cls=self.parameter_storage_class, - ) - - @cached_property - def data(self): - """ - Contains the incoming request data as string in case it came with - a mimetype Werkzeug does not handle. - """ - - if self.disable_data_descriptor: - raise AttributeError("data descriptor is disabled") - # XXX: this should eventually be deprecated. - - # We trigger form data parsing first which means that the descriptor - # will not cache the data that would otherwise be .form or .files - # data. This restores the behavior that was there in Werkzeug - # before 0.9. New code should use :meth:`get_data` explicitly as - # this will make behavior explicit. - return self.get_data(parse_form_data=True) - - def get_data(self, cache=True, as_text=False, parse_form_data=False): - """This reads the buffered incoming data from the client into one - bytestring. By default this is cached but that behavior can be - changed by setting `cache` to `False`. 
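A short illustrative sketch of get_data as documented above (assumes Werkzeug 0.15.x; the JSON payload is made up).

    # Illustrative only -- reading a non-form request body as text.
    from werkzeug.wrappers import Request

    req = Request.from_values(
        "/api", method="POST", data='{"x": 1}', content_type="application/json"
    )
    assert req.get_data(as_text=True) == '{"x": 1}'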
- - Usually it's a bad idea to call this method without checking the - content length first as a client could send dozens of megabytes or more - to cause memory problems on the server. - - Note that if the form data was already parsed this method will not - return anything as form data parsing does not cache the data like - this method does. To implicitly invoke form data parsing function - set `parse_form_data` to `True`. When this is done the return value - of this method will be an empty string if the form parser handles - the data. This generally is not necessary as if the whole data is - cached (which is the default) the form parser will used the cached - data to parse the form data. Please be generally aware of checking - the content length first in any case before calling this method - to avoid exhausting server memory. - - If `as_text` is set to `True` the return value will be a decoded - unicode string. - - .. versionadded:: 0.9 - """ - rv = getattr(self, "_cached_data", None) - if rv is None: - if parse_form_data: - self._load_form_data() - rv = self.stream.read() - if cache: - self._cached_data = rv - if as_text: - rv = rv.decode(self.charset, self.encoding_errors) - return rv - - @cached_property - def form(self): - """The form parameters. By default an - :class:`~werkzeug.datastructures.ImmutableMultiDict` - is returned from this function. This can be changed by setting - :attr:`parameter_storage_class` to a different type. This might - be necessary if the order of the form data is important. - - Please keep in mind that file uploads will not end up here, but instead - in the :attr:`files` attribute. - - .. versionchanged:: 0.9 - - Previous to Werkzeug 0.9 this would only contain form data for POST - and PUT requests. - """ - self._load_form_data() - return self.form - - @cached_property - def values(self): - """A :class:`werkzeug.datastructures.CombinedMultiDict` that combines - :attr:`args` and :attr:`form`.""" - args = [] - for d in self.args, self.form: - if not isinstance(d, MultiDict): - d = MultiDict(d) - args.append(d) - return CombinedMultiDict(args) - - @cached_property - def files(self): - """:class:`~werkzeug.datastructures.MultiDict` object containing - all uploaded files. Each key in :attr:`files` is the name from the - ````. Each value in :attr:`files` is a - Werkzeug :class:`~werkzeug.datastructures.FileStorage` object. - - It basically behaves like a standard file object you know from Python, - with the difference that it also has a - :meth:`~werkzeug.datastructures.FileStorage.save` function that can - store the file on the filesystem. - - Note that :attr:`files` will only contain data if the request method was - POST, PUT or PATCH and the ``
<form>
    `` that posted to the request had - ``enctype="multipart/form-data"``. It will be empty otherwise. - - See the :class:`~werkzeug.datastructures.MultiDict` / - :class:`~werkzeug.datastructures.FileStorage` documentation for - more details about the used data structure. - """ - self._load_form_data() - return self.files - - @cached_property - def cookies(self): - """A :class:`dict` with the contents of all cookies transmitted with - the request.""" - return parse_cookie( - self.environ, - self.charset, - self.encoding_errors, - cls=self.dict_storage_class, - ) - - @cached_property - def headers(self): - """The headers from the WSGI environ as immutable - :class:`~werkzeug.datastructures.EnvironHeaders`. - """ - return EnvironHeaders(self.environ) - - @cached_property - def path(self): - """Requested path as unicode. This works a bit like the regular path - info in the WSGI environment but will always include a leading slash, - even if the URL root is accessed. - """ - raw_path = wsgi_decoding_dance( - self.environ.get("PATH_INFO") or "", self.charset, self.encoding_errors - ) - return "/" + raw_path.lstrip("/") - - @cached_property - def full_path(self): - """Requested path as unicode, including the query string.""" - return self.path + u"?" + to_unicode(self.query_string, self.url_charset) - - @cached_property - def script_root(self): - """The root path of the script without the trailing slash.""" - raw_path = wsgi_decoding_dance( - self.environ.get("SCRIPT_NAME") or "", self.charset, self.encoding_errors - ) - return raw_path.rstrip("/") - - @cached_property - def url(self): - """The reconstructed current URL as IRI. - See also: :attr:`trusted_hosts`. - """ - return get_current_url(self.environ, trusted_hosts=self.trusted_hosts) - - @cached_property - def base_url(self): - """Like :attr:`url` but without the querystring - See also: :attr:`trusted_hosts`. - """ - return get_current_url( - self.environ, strip_querystring=True, trusted_hosts=self.trusted_hosts - ) - - @cached_property - def url_root(self): - """The full URL root (with hostname), this is the application - root as IRI. - See also: :attr:`trusted_hosts`. - """ - return get_current_url(self.environ, True, trusted_hosts=self.trusted_hosts) - - @cached_property - def host_url(self): - """Just the host with scheme as IRI. - See also: :attr:`trusted_hosts`. - """ - return get_current_url( - self.environ, host_only=True, trusted_hosts=self.trusted_hosts - ) - - @cached_property - def host(self): - """Just the host including the port if available. - See also: :attr:`trusted_hosts`. - """ - return get_host(self.environ, trusted_hosts=self.trusted_hosts) - - query_string = environ_property( - "QUERY_STRING", - "", - read_only=True, - load_func=wsgi_get_bytes, - doc="The URL parameters as raw bytestring.", - ) - method = environ_property( - "REQUEST_METHOD", - "GET", - read_only=True, - load_func=lambda x: x.upper(), - doc="The request method. (For example ``'GET'`` or ``'POST'``).", - ) - - @cached_property - def access_route(self): - """If a forwarded header exists this is a list of all ip addresses - from the client ip to the last proxy server. 
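An illustrative sketch tying together the form and cookies properties described above (assumes Werkzeug 0.15.x; the field name, value, and cookie are made up).

    # Illustrative only -- form data and cookies on a simulated POST request.
    from werkzeug.wrappers import Request

    req = Request.from_values(
        "/submit",
        method="POST",
        data={"name": "Ada"},                  # sent as application/x-www-form-urlencoded
        headers={"Cookie": "session=abc123"},
    )
    assert req.form["name"] == "Ada"
    assert req.cookies["session"] == "abc123"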
- """ - if "HTTP_X_FORWARDED_FOR" in self.environ: - addr = self.environ["HTTP_X_FORWARDED_FOR"].split(",") - return self.list_storage_class([x.strip() for x in addr]) - elif "REMOTE_ADDR" in self.environ: - return self.list_storage_class([self.environ["REMOTE_ADDR"]]) - return self.list_storage_class() - - @property - def remote_addr(self): - """The remote address of the client.""" - return self.environ.get("REMOTE_ADDR") - - remote_user = environ_property( - "REMOTE_USER", - doc="""If the server supports user authentication, and the - script is protected, this attribute contains the username the - user has authenticated as.""", - ) - - scheme = environ_property( - "wsgi.url_scheme", - doc=""" - URL scheme (http or https). - - .. versionadded:: 0.7""", - ) - - @property - def is_xhr(self): - """True if the request was triggered via a JavaScript XMLHttpRequest. - This only works with libraries that support the ``X-Requested-With`` - header and set it to "XMLHttpRequest". Libraries that do that are - prototype, jQuery and Mochikit and probably some more. - - .. deprecated:: 0.13 - ``X-Requested-With`` is not standard and is unreliable. You - may be able to use :attr:`AcceptMixin.accept_mimetypes` - instead. - """ - warnings.warn( - "'Request.is_xhr' is deprecated as of version 0.13 and will" - " be removed in version 1.0. The 'X-Requested-With' header" - " is not standard and is unreliable. You may be able to use" - " 'accept_mimetypes' instead.", - DeprecationWarning, - stacklevel=2, - ) - return self.environ.get("HTTP_X_REQUESTED_WITH", "").lower() == "xmlhttprequest" - - is_secure = property( - lambda self: self.environ["wsgi.url_scheme"] == "https", - doc="`True` if the request is secure.", - ) - is_multithread = environ_property( - "wsgi.multithread", - doc="""boolean that is `True` if the application is served by a - multithreaded WSGI server.""", - ) - is_multiprocess = environ_property( - "wsgi.multiprocess", - doc="""boolean that is `True` if the application is served by a - WSGI server that spawns multiple processes.""", - ) - is_run_once = environ_property( - "wsgi.run_once", - doc="""boolean that is `True` if the application will be - executed only once in a process lifetime. This is the case for - CGI for example, but it's not guaranteed that the execution only - happens one time.""", - ) - - -def _assert_not_shallow(request): - if request.shallow: - raise RuntimeError( - "A shallow request tried to consume form data. If you really" - " want to do that, set `shallow` to False." - ) diff --git a/venv/lib/python3.6/site-packages/werkzeug/wrappers/base_response.py b/venv/lib/python3.6/site-packages/werkzeug/wrappers/base_response.py deleted file mode 100644 index d944a7d..0000000 --- a/venv/lib/python3.6/site-packages/werkzeug/wrappers/base_response.py +++ /dev/null @@ -1,702 +0,0 @@ -import warnings - -from .._compat import integer_types -from .._compat import string_types -from .._compat import text_type -from .._compat import to_bytes -from .._compat import to_native -from ..datastructures import Headers -from ..http import dump_cookie -from ..http import HTTP_STATUS_CODES -from ..http import remove_entity_headers -from ..urls import iri_to_uri -from ..urls import url_join -from ..utils import get_content_type -from ..wsgi import ClosingIterator -from ..wsgi import get_current_url - - -def _run_wsgi_app(*args): - """This function replaces itself to ensure that the test module is not - imported unless required. DO NOT USE! 
- """ - global _run_wsgi_app - from ..test import run_wsgi_app as _run_wsgi_app - - return _run_wsgi_app(*args) - - -def _warn_if_string(iterable): - """Helper for the response objects to check if the iterable returned - to the WSGI server is not a string. - """ - if isinstance(iterable, string_types): - warnings.warn( - "Response iterable was set to a string. This will appear to" - " work but means that the server will send the data to the" - " client one character at a time. This is almost never" - " intended behavior, use 'response.data' to assign strings" - " to the response object.", - stacklevel=2, - ) - - -def _iter_encoded(iterable, charset): - for item in iterable: - if isinstance(item, text_type): - yield item.encode(charset) - else: - yield item - - -def _clean_accept_ranges(accept_ranges): - if accept_ranges is True: - return "bytes" - elif accept_ranges is False: - return "none" - elif isinstance(accept_ranges, text_type): - return to_native(accept_ranges) - raise ValueError("Invalid accept_ranges value") - - -class BaseResponse(object): - """Base response class. The most important fact about a response object - is that it's a regular WSGI application. It's initialized with a couple - of response parameters (headers, body, status code etc.) and will start a - valid WSGI response when called with the environ and start response - callable. - - Because it's a WSGI application itself processing usually ends before the - actual response is sent to the server. This helps debugging systems - because they can catch all the exceptions before responses are started. - - Here a small example WSGI application that takes advantage of the - response objects:: - - from werkzeug.wrappers import BaseResponse as Response - - def index(): - return Response('Index page') - - def application(environ, start_response): - path = environ.get('PATH_INFO') or '/' - if path == '/': - response = index() - else: - response = Response('Not Found', status=404) - return response(environ, start_response) - - Like :class:`BaseRequest` which object is lacking a lot of functionality - implemented in mixins. This gives you a better control about the actual - API of your response objects, so you can create subclasses and add custom - functionality. A full featured response object is available as - :class:`Response` which implements a couple of useful mixins. - - To enforce a new type of already existing responses you can use the - :meth:`force_type` method. This is useful if you're working with different - subclasses of response objects and you want to post process them with a - known interface. - - Per default the response object will assume all the text data is `utf-8` - encoded. Please refer to :doc:`the unicode chapter ` for more - details about customizing the behavior. - - Response can be any kind of iterable or string. If it's a string it's - considered being an iterable with one item which is the string passed. - Headers can be a list of tuples or a - :class:`~werkzeug.datastructures.Headers` object. - - Special note for `mimetype` and `content_type`: For most mime types - `mimetype` and `content_type` work the same, the difference affects - only 'text' mimetypes. If the mimetype passed with `mimetype` is a - mimetype starting with `text/`, the charset parameter of the response - object is appended to it. In contrast the `content_type` parameter is - always added as header unmodified. - - .. versionchanged:: 0.5 - the `direct_passthrough` parameter was added. 
- - :param response: a string or response iterable. - :param status: a string with a status or an integer with the status code. - :param headers: a list of headers or a - :class:`~werkzeug.datastructures.Headers` object. - :param mimetype: the mimetype for the response. See notice above. - :param content_type: the content type for the response. See notice above. - :param direct_passthrough: if set to `True` :meth:`iter_encoded` is not - called before iteration which makes it - possible to pass special iterators through - unchanged (see :func:`wrap_file` for more - details.) - """ - - #: the charset of the response. - charset = "utf-8" - - #: the default status if none is provided. - default_status = 200 - - #: the default mimetype if none is provided. - default_mimetype = "text/plain" - - #: if set to `False` accessing properties on the response object will - #: not try to consume the response iterator and convert it into a list. - #: - #: .. versionadded:: 0.6.2 - #: - #: That attribute was previously called `implicit_seqence_conversion`. - #: (Notice the typo). If you did use this feature, you have to adapt - #: your code to the name change. - implicit_sequence_conversion = True - - #: Should this response object correct the location header to be RFC - #: conformant? This is true by default. - #: - #: .. versionadded:: 0.8 - autocorrect_location_header = True - - #: Should this response object automatically set the content-length - #: header if possible? This is true by default. - #: - #: .. versionadded:: 0.8 - automatically_set_content_length = True - - #: Warn if a cookie header exceeds this size. The default, 4093, should be - #: safely `supported by most browsers `_. A cookie larger than - #: this size will still be sent, but it may be ignored or handled - #: incorrectly by some browsers. Set to 0 to disable this check. - #: - #: .. versionadded:: 0.13 - #: - #: .. _`cookie`: http://browsercookielimits.squawky.net/ - max_cookie_size = 4093 - - def __init__( - self, - response=None, - status=None, - headers=None, - mimetype=None, - content_type=None, - direct_passthrough=False, - ): - if isinstance(headers, Headers): - self.headers = headers - elif not headers: - self.headers = Headers() - else: - self.headers = Headers(headers) - - if content_type is None: - if mimetype is None and "content-type" not in self.headers: - mimetype = self.default_mimetype - if mimetype is not None: - mimetype = get_content_type(mimetype, self.charset) - content_type = mimetype - if content_type is not None: - self.headers["Content-Type"] = content_type - if status is None: - status = self.default_status - if isinstance(status, integer_types): - self.status_code = status - else: - self.status = status - - self.direct_passthrough = direct_passthrough - self._on_close = [] - - # we set the response after the headers so that if a class changes - # the charset attribute, the data is set in the correct charset. - if response is None: - self.response = [] - elif isinstance(response, (text_type, bytes, bytearray)): - self.set_data(response) - else: - self.response = response - - def call_on_close(self, func): - """Adds a function to the internal list of functions that should - be called as part of closing down the response. Since 0.7 this - function also returns the function that was passed so that this - can be used as a decorator. - - .. 
versionadded:: 0.6 - """ - self._on_close.append(func) - return func - - def __repr__(self): - if self.is_sequence: - body_info = "%d bytes" % sum(map(len, self.iter_encoded())) - else: - body_info = "streamed" if self.is_streamed else "likely-streamed" - return "<%s %s [%s]>" % (self.__class__.__name__, body_info, self.status) - - @classmethod - def force_type(cls, response, environ=None): - """Enforce that the WSGI response is a response object of the current - type. Werkzeug will use the :class:`BaseResponse` internally in many - situations like the exceptions. If you call :meth:`get_response` on an - exception you will get back a regular :class:`BaseResponse` object, even - if you are using a custom subclass. - - This method can enforce a given response type, and it will also - convert arbitrary WSGI callables into response objects if an environ - is provided:: - - # convert a Werkzeug response object into an instance of the - # MyResponseClass subclass. - response = MyResponseClass.force_type(response) - - # convert any WSGI application into a response object - response = MyResponseClass.force_type(response, environ) - - This is especially useful if you want to post-process responses in - the main dispatcher and use functionality provided by your subclass. - - Keep in mind that this will modify response objects in place if - possible! - - :param response: a response object or wsgi application. - :param environ: a WSGI environment object. - :return: a response object. - """ - if not isinstance(response, BaseResponse): - if environ is None: - raise TypeError( - "cannot convert WSGI application into response" - " objects without an environ" - ) - response = BaseResponse(*_run_wsgi_app(response, environ)) - response.__class__ = cls - return response - - @classmethod - def from_app(cls, app, environ, buffered=False): - """Create a new response object from an application output. This - works best if you pass it an application that returns a generator all - the time. Sometimes applications may use the `write()` callable - returned by the `start_response` function. This tries to resolve such - edge cases automatically. But if you don't get the expected output - you should set `buffered` to `True` which enforces buffering. - - :param app: the WSGI application to execute. - :param environ: the WSGI environment to execute against. - :param buffered: set to `True` to enforce buffering. - :return: a response object. - """ - return cls(*_run_wsgi_app(app, environ, buffered)) - - def _get_status_code(self): - return self._status_code - - def _set_status_code(self, code): - self._status_code = code - try: - self._status = "%d %s" % (code, HTTP_STATUS_CODES[code].upper()) - except KeyError: - self._status = "%d UNKNOWN" % code - - status_code = property( - _get_status_code, _set_status_code, doc="The HTTP Status code as number" - ) - del _get_status_code, _set_status_code - - def _get_status(self): - return self._status - - def _set_status(self, value): - try: - self._status = to_native(value) - except AttributeError: - raise TypeError("Invalid status argument") - - try: - self._status_code = int(self._status.split(None, 1)[0]) - except ValueError: - self._status_code = 0 - self._status = "0 %s" % self._status - except IndexError: - raise ValueError("Empty status argument") - - status = property(_get_status, _set_status, doc="The HTTP Status code") - del _get_status, _set_status - - def get_data(self, as_text=False): - """The string representation of the request body. 
Whenever you call - this property the request iterable is encoded and flattened. This - can lead to unwanted behavior if you stream big data. - - This behavior can be disabled by setting - :attr:`implicit_sequence_conversion` to `False`. - - If `as_text` is set to `True` the return value will be a decoded - unicode string. - - .. versionadded:: 0.9 - """ - self._ensure_sequence() - rv = b"".join(self.iter_encoded()) - if as_text: - rv = rv.decode(self.charset) - return rv - - def set_data(self, value): - """Sets a new string as response. The value set must either by a - unicode or bytestring. If a unicode string is set it's encoded - automatically to the charset of the response (utf-8 by default). - - .. versionadded:: 0.9 - """ - # if an unicode string is set, it's encoded directly so that we - # can set the content length - if isinstance(value, text_type): - value = value.encode(self.charset) - else: - value = bytes(value) - self.response = [value] - if self.automatically_set_content_length: - self.headers["Content-Length"] = str(len(value)) - - data = property( - get_data, - set_data, - doc="A descriptor that calls :meth:`get_data` and :meth:`set_data`.", - ) - - def calculate_content_length(self): - """Returns the content length if available or `None` otherwise.""" - try: - self._ensure_sequence() - except RuntimeError: - return None - return sum(len(x) for x in self.iter_encoded()) - - def _ensure_sequence(self, mutable=False): - """This method can be called by methods that need a sequence. If - `mutable` is true, it will also ensure that the response sequence - is a standard Python list. - - .. versionadded:: 0.6 - """ - if self.is_sequence: - # if we need a mutable object, we ensure it's a list. - if mutable and not isinstance(self.response, list): - self.response = list(self.response) - return - if self.direct_passthrough: - raise RuntimeError( - "Attempted implicit sequence conversion but the" - " response object is in direct passthrough mode." - ) - if not self.implicit_sequence_conversion: - raise RuntimeError( - "The response object required the iterable to be a" - " sequence, but the implicit conversion was disabled." - " Call make_sequence() yourself." - ) - self.make_sequence() - - def make_sequence(self): - """Converts the response iterator in a list. By default this happens - automatically if required. If `implicit_sequence_conversion` is - disabled, this method is not automatically called and some properties - might raise exceptions. This also encodes all the items. - - .. versionadded:: 0.6 - """ - if not self.is_sequence: - # if we consume an iterable we have to ensure that the close - # method of the iterable is called if available when we tear - # down the response - close = getattr(self.response, "close", None) - self.response = list(self.iter_encoded()) - if close is not None: - self.call_on_close(close) - - def iter_encoded(self): - """Iter the response encoded with the encoding of the response. - If the response object is invoked as WSGI application the return - value of this method is used as application iterator unless - :attr:`direct_passthrough` was activated. - """ - if __debug__: - _warn_if_string(self.response) - # Encode in a separate function so that self.response is fetched - # early. This allows us to wrap the response with the return - # value from get_app_iter or iter_encoded. 
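get_data/set_data and the data descriptor above also maintain Content-Length while automatically_set_content_length is enabled (the default); a short sketch under that assumption.

from werkzeug.wrappers import Response

resp = Response()
resp.set_data(u"Grüße")                  # unicode is encoded with the response charset
print(resp.headers["Content-Length"])    # byte length (7), not character length (5)
print(resp.get_data())                   # raw UTF-8 bytes
print(resp.get_data(as_text=True))       # decoded back to text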
- return _iter_encoded(self.response, self.charset) - - def set_cookie( - self, - key, - value="", - max_age=None, - expires=None, - path="/", - domain=None, - secure=False, - httponly=False, - samesite=None, - ): - """Sets a cookie. The parameters are the same as in the cookie `Morsel` - object in the Python standard library but it accepts unicode data, too. - - A warning is raised if the size of the cookie header exceeds - :attr:`max_cookie_size`, but the header will still be set. - - :param key: the key (name) of the cookie to be set. - :param value: the value of the cookie. - :param max_age: should be a number of seconds, or `None` (default) if - the cookie should last only as long as the client's - browser session. - :param expires: should be a `datetime` object or UNIX timestamp. - :param path: limits the cookie to a given path, per default it will - span the whole domain. - :param domain: if you want to set a cross-domain cookie. For example, - ``domain=".example.com"`` will set a cookie that is - readable by the domain ``www.example.com``, - ``foo.example.com`` etc. Otherwise, a cookie will only - be readable by the domain that set it. - :param secure: If `True`, the cookie will only be available via HTTPS - :param httponly: disallow JavaScript to access the cookie. This is an - extension to the cookie standard and probably not - supported by all browsers. - :param samesite: Limits the scope of the cookie such that it will only - be attached to requests if those requests are - "same-site". - """ - self.headers.add( - "Set-Cookie", - dump_cookie( - key, - value=value, - max_age=max_age, - expires=expires, - path=path, - domain=domain, - secure=secure, - httponly=httponly, - charset=self.charset, - max_size=self.max_cookie_size, - samesite=samesite, - ), - ) - - def delete_cookie(self, key, path="/", domain=None): - """Delete a cookie. Fails silently if key doesn't exist. - - :param key: the key (name) of the cookie to be deleted. - :param path: if the cookie that should be deleted was limited to a - path, the path has to be defined here. - :param domain: if the cookie that should be deleted was limited to a - domain, that domain has to be defined here. - """ - self.set_cookie(key, expires=0, max_age=0, path=path, domain=domain) - - @property - def is_streamed(self): - """If the response is streamed (the response is not an iterable with - a length information) this property is `True`. In this case streamed - means that there is no information about the number of iterations. - This is usually `True` if a generator is passed to the response object. - - This is useful for checking before applying some sort of post - filtering that should not take place for streamed responses. - """ - try: - len(self.response) - except (TypeError, AttributeError): - return True - return False - - @property - def is_sequence(self): - """If the iterator is buffered, this property will be `True`. A - response object will consider an iterator to be buffered if the - response attribute is a list or tuple. - - .. versionadded:: 0.6 - """ - return isinstance(self.response, (tuple, list)) - - def close(self): - """Close the wrapped response if possible. You can also use the object - in a with statement which will automatically close it. - - .. versionadded:: 0.9 - Can now be used in a with statement. 
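A usage sketch for the set_cookie/delete_cookie methods documented above; the cookie name and values are illustrative only.

from werkzeug.wrappers import Response

resp = Response("logged in")
resp.set_cookie(
    "session_id",          # hypothetical cookie name
    "abc123",
    max_age=3600,          # one hour; omit for a browser-session cookie
    secure=True,           # only send over HTTPS
    httponly=True,         # hide from JavaScript
    samesite="Lax",
)
print(resp.headers.getlist("Set-Cookie"))

# Deleting re-uses set_cookie with expires=0 and max_age=0 under the hood.
resp.delete_cookie("session_id")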
- """ - if hasattr(self.response, "close"): - self.response.close() - for func in self._on_close: - func() - - def __enter__(self): - return self - - def __exit__(self, exc_type, exc_value, tb): - self.close() - - def freeze(self): - """Call this method if you want to make your response object ready for - being pickled. This buffers the generator if there is one. It will - also set the `Content-Length` header to the length of the body. - - .. versionchanged:: 0.6 - The `Content-Length` header is now set. - """ - # we explicitly set the length to a list of the *encoded* response - # iterator. Even if the implicit sequence conversion is disabled. - self.response = list(self.iter_encoded()) - self.headers["Content-Length"] = str(sum(map(len, self.response))) - - def get_wsgi_headers(self, environ): - """This is automatically called right before the response is started - and returns headers modified for the given environment. It returns a - copy of the headers from the response with some modifications applied - if necessary. - - For example the location header (if present) is joined with the root - URL of the environment. Also the content length is automatically set - to zero here for certain status codes. - - .. versionchanged:: 0.6 - Previously that function was called `fix_headers` and modified - the response object in place. Also since 0.6, IRIs in location - and content-location headers are handled properly. - - Also starting with 0.6, Werkzeug will attempt to set the content - length if it is able to figure it out on its own. This is the - case if all the strings in the response iterable are already - encoded and the iterable is buffered. - - :param environ: the WSGI environment of the request. - :return: returns a new :class:`~werkzeug.datastructures.Headers` - object. - """ - headers = Headers(self.headers) - location = None - content_location = None - content_length = None - status = self.status_code - - # iterate over the headers to find all values in one go. Because - # get_wsgi_headers is used each response that gives us a tiny - # speedup. - for key, value in headers: - ikey = key.lower() - if ikey == u"location": - location = value - elif ikey == u"content-location": - content_location = value - elif ikey == u"content-length": - content_length = value - - # make sure the location header is an absolute URL - if location is not None: - old_location = location - if isinstance(location, text_type): - # Safe conversion is necessary here as we might redirect - # to a broken URI scheme (for instance itms-services). - location = iri_to_uri(location, safe_conversion=True) - - if self.autocorrect_location_header: - current_url = get_current_url(environ, strip_querystring=True) - if isinstance(current_url, text_type): - current_url = iri_to_uri(current_url) - location = url_join(current_url, location) - if location != old_location: - headers["Location"] = location - - # make sure the content location is a URL - if content_location is not None and isinstance(content_location, text_type): - headers["Content-Location"] = iri_to_uri(content_location) - - if 100 <= status < 200 or status == 204: - # Per section 3.3.2 of RFC 7230, "a server MUST NOT send a - # Content-Length header field in any response with a status - # code of 1xx (Informational) or 204 (No Content)." - headers.remove("Content-Length") - elif status == 304: - remove_entity_headers(headers) - - # if we can determine the content length automatically, we - # should try to do that. 
But only if this does not involve - # flattening the iterator or encoding of unicode strings in - # the response. We however should not do that if we have a 304 - # response. - if ( - self.automatically_set_content_length - and self.is_sequence - and content_length is None - and status not in (204, 304) - and not (100 <= status < 200) - ): - try: - content_length = sum(len(to_bytes(x, "ascii")) for x in self.response) - except UnicodeError: - # aha, something non-bytestringy in there, too bad, we - # can't safely figure out the length of the response. - pass - else: - headers["Content-Length"] = str(content_length) - - return headers - - def get_app_iter(self, environ): - """Returns the application iterator for the given environ. Depending - on the request method and the current status code the return value - might be an empty response rather than the one from the response. - - If the request method is `HEAD` or the status code is in a range - where the HTTP specification requires an empty response, an empty - iterable is returned. - - .. versionadded:: 0.6 - - :param environ: the WSGI environment of the request. - :return: a response iterable. - """ - status = self.status_code - if ( - environ["REQUEST_METHOD"] == "HEAD" - or 100 <= status < 200 - or status in (204, 304) - ): - iterable = () - elif self.direct_passthrough: - if __debug__: - _warn_if_string(self.response) - return self.response - else: - iterable = self.iter_encoded() - return ClosingIterator(iterable, self.close) - - def get_wsgi_response(self, environ): - """Returns the final WSGI response as tuple. The first item in - the tuple is the application iterator, the second the status and - the third the list of headers. The response returned is created - specially for the given environment. For example if the request - method in the WSGI environment is ``'HEAD'`` the response will - be empty and only the headers and status code will be present. - - .. versionadded:: 0.6 - - :param environ: the WSGI environment of the request. - :return: an ``(app_iter, status, headers)`` tuple. - """ - headers = self.get_wsgi_headers(environ) - app_iter = self.get_app_iter(environ) - return app_iter, self.status, headers.to_wsgi_list() - - def __call__(self, environ, start_response): - """Process this response as WSGI application. - - :param environ: the WSGI environment. - :param start_response: the response callable provided by the WSGI - server. 
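get_app_iter/get_wsgi_response above are what make the response object callable as a WSGI application; a small sketch of the HEAD behaviour, using werkzeug.test.create_environ from the same package.

from werkzeug.test import create_environ
from werkzeug.wrappers import Response

resp = Response("hello world", mimetype="text/plain")

def start_response(status, headers):
    print(status)                    # e.g. "200 OK"

# GET: the encoded body is returned as the application iterator.
print(list(resp(create_environ("/", method="GET"), start_response)))   # [b'hello world']

# HEAD: headers are produced but the body iterable is empty.
print(list(resp(create_environ("/", method="HEAD"), start_response)))  # []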
- :return: an application iterator - """ - app_iter, status, headers = self.get_wsgi_response(environ) - start_response(status, headers) - return app_iter diff --git a/venv/lib/python3.6/site-packages/werkzeug/wrappers/common_descriptors.py b/venv/lib/python3.6/site-packages/werkzeug/wrappers/common_descriptors.py deleted file mode 100644 index e4107ee..0000000 --- a/venv/lib/python3.6/site-packages/werkzeug/wrappers/common_descriptors.py +++ /dev/null @@ -1,322 +0,0 @@ -from datetime import datetime -from datetime import timedelta - -from .._compat import string_types -from ..datastructures import CallbackDict -from ..http import dump_age -from ..http import dump_header -from ..http import dump_options_header -from ..http import http_date -from ..http import parse_age -from ..http import parse_date -from ..http import parse_options_header -from ..http import parse_set_header -from ..utils import cached_property -from ..utils import environ_property -from ..utils import get_content_type -from ..utils import header_property -from ..wsgi import get_content_length - - -class CommonRequestDescriptorsMixin(object): - """A mixin for :class:`BaseRequest` subclasses. Request objects that - mix this class in will automatically get descriptors for a couple of - HTTP headers with automatic type conversion. - - .. versionadded:: 0.5 - """ - - content_type = environ_property( - "CONTENT_TYPE", - doc="""The Content-Type entity-header field indicates the media - type of the entity-body sent to the recipient or, in the case of - the HEAD method, the media type that would have been sent had - the request been a GET.""", - ) - - @cached_property - def content_length(self): - """The Content-Length entity-header field indicates the size of the - entity-body in bytes or, in the case of the HEAD method, the size of - the entity-body that would have been sent had the request been a - GET. - """ - return get_content_length(self.environ) - - content_encoding = environ_property( - "HTTP_CONTENT_ENCODING", - doc="""The Content-Encoding entity-header field is used as a - modifier to the media-type. When present, its value indicates - what additional content codings have been applied to the - entity-body, and thus what decoding mechanisms must be applied - in order to obtain the media-type referenced by the Content-Type - header field. - - .. versionadded:: 0.9""", - ) - content_md5 = environ_property( - "HTTP_CONTENT_MD5", - doc="""The Content-MD5 entity-header field, as defined in - RFC 1864, is an MD5 digest of the entity-body for the purpose of - providing an end-to-end message integrity check (MIC) of the - entity-body. (Note: a MIC is good for detecting accidental - modification of the entity-body in transit, but is not proof - against malicious attacks.) - - .. 
versionadded:: 0.9""", - ) - referrer = environ_property( - "HTTP_REFERER", - doc="""The Referer[sic] request-header field allows the client - to specify, for the server's benefit, the address (URI) of the - resource from which the Request-URI was obtained (the - "referrer", although the header field is misspelled).""", - ) - date = environ_property( - "HTTP_DATE", - None, - parse_date, - doc="""The Date general-header field represents the date and - time at which the message was originated, having the same - semantics as orig-date in RFC 822.""", - ) - max_forwards = environ_property( - "HTTP_MAX_FORWARDS", - None, - int, - doc="""The Max-Forwards request-header field provides a - mechanism with the TRACE and OPTIONS methods to limit the number - of proxies or gateways that can forward the request to the next - inbound server.""", - ) - - def _parse_content_type(self): - if not hasattr(self, "_parsed_content_type"): - self._parsed_content_type = parse_options_header( - self.environ.get("CONTENT_TYPE", "") - ) - - @property - def mimetype(self): - """Like :attr:`content_type`, but without parameters (eg, without - charset, type etc.) and always lowercase. For example if the content - type is ``text/HTML; charset=utf-8`` the mimetype would be - ``'text/html'``. - """ - self._parse_content_type() - return self._parsed_content_type[0].lower() - - @property - def mimetype_params(self): - """The mimetype parameters as dict. For example if the content - type is ``text/html; charset=utf-8`` the params would be - ``{'charset': 'utf-8'}``. - """ - self._parse_content_type() - return self._parsed_content_type[1] - - @cached_property - def pragma(self): - """The Pragma general-header field is used to include - implementation-specific directives that might apply to any recipient - along the request/response chain. All pragma directives specify - optional behavior from the viewpoint of the protocol; however, some - systems MAY require that behavior be consistent with the directives. - """ - return parse_set_header(self.environ.get("HTTP_PRAGMA", "")) - - -class CommonResponseDescriptorsMixin(object): - """A mixin for :class:`BaseResponse` subclasses. Response objects that - mix this class in will automatically get descriptors for a couple of - HTTP headers with automatic type conversion. - """ - - @property - def mimetype(self): - """The mimetype (content type without charset etc.)""" - ct = self.headers.get("content-type") - if ct: - return ct.split(";")[0].strip() - - @mimetype.setter - def mimetype(self, value): - self.headers["Content-Type"] = get_content_type(value, self.charset) - - @property - def mimetype_params(self): - """The mimetype parameters as dict. For example if the - content type is ``text/html; charset=utf-8`` the params would be - ``{'charset': 'utf-8'}``. - - .. versionadded:: 0.5 - """ - - def on_update(d): - self.headers["Content-Type"] = dump_options_header(self.mimetype, d) - - d = parse_options_header(self.headers.get("content-type", ""))[1] - return CallbackDict(d, on_update) - - location = header_property( - "Location", - doc="""The Location response-header field is used to redirect - the recipient to a location other than the Request-URI for - completion of the request or identification of a new - resource.""", - ) - age = header_property( - "Age", - None, - parse_age, - dump_age, - doc="""The Age response-header field conveys the sender's - estimate of the amount of time since the response (or its - revalidation) was generated at the origin server. 
- - Age values are non-negative decimal integers, representing time - in seconds.""", - ) - content_type = header_property( - "Content-Type", - doc="""The Content-Type entity-header field indicates the media - type of the entity-body sent to the recipient or, in the case of - the HEAD method, the media type that would have been sent had - the request been a GET.""", - ) - content_length = header_property( - "Content-Length", - None, - int, - str, - doc="""The Content-Length entity-header field indicates the size - of the entity-body, in decimal number of OCTETs, sent to the - recipient or, in the case of the HEAD method, the size of the - entity-body that would have been sent had the request been a - GET.""", - ) - content_location = header_property( - "Content-Location", - doc="""The Content-Location entity-header field MAY be used to - supply the resource location for the entity enclosed in the - message when that entity is accessible from a location separate - from the requested resource's URI.""", - ) - content_encoding = header_property( - "Content-Encoding", - doc="""The Content-Encoding entity-header field is used as a - modifier to the media-type. When present, its value indicates - what additional content codings have been applied to the - entity-body, and thus what decoding mechanisms must be applied - in order to obtain the media-type referenced by the Content-Type - header field.""", - ) - content_md5 = header_property( - "Content-MD5", - doc="""The Content-MD5 entity-header field, as defined in - RFC 1864, is an MD5 digest of the entity-body for the purpose of - providing an end-to-end message integrity check (MIC) of the - entity-body. (Note: a MIC is good for detecting accidental - modification of the entity-body in transit, but is not proof - against malicious attacks.)""", - ) - date = header_property( - "Date", - None, - parse_date, - http_date, - doc="""The Date general-header field represents the date and - time at which the message was originated, having the same - semantics as orig-date in RFC 822.""", - ) - expires = header_property( - "Expires", - None, - parse_date, - http_date, - doc="""The Expires entity-header field gives the date/time after - which the response is considered stale. A stale cache entry may - not normally be returned by a cache.""", - ) - last_modified = header_property( - "Last-Modified", - None, - parse_date, - http_date, - doc="""The Last-Modified entity-header field indicates the date - and time at which the origin server believes the variant was - last modified.""", - ) - - @property - def retry_after(self): - """The Retry-After response-header field can be used with a - 503 (Service Unavailable) response to indicate how long the - service is expected to be unavailable to the requesting client. - - Time in seconds until expiration or date. 
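The header_property descriptors above convert between Python values and header strings; a brief sketch for expires and retry_after (the values are arbitrary examples).

from datetime import datetime, timedelta
from werkzeug.wrappers import Response

resp = Response("try again later", status=503)
resp.retry_after = 120                                  # plain seconds
resp.expires = datetime.utcnow() + timedelta(hours=1)   # serialized via http_date
print(resp.headers["Retry-After"])   # "120"
print(resp.headers["Expires"])       # an HTTP date string
print(resp.retry_after)              # parsed back into a datetime roughly 120s from now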
- """ - value = self.headers.get("retry-after") - if value is None: - return - elif value.isdigit(): - return datetime.utcnow() + timedelta(seconds=int(value)) - return parse_date(value) - - @retry_after.setter - def retry_after(self, value): - if value is None: - if "retry-after" in self.headers: - del self.headers["retry-after"] - return - elif isinstance(value, datetime): - value = http_date(value) - else: - value = str(value) - self.headers["Retry-After"] = value - - def _set_property(name, doc=None): # noqa: B902 - def fget(self): - def on_update(header_set): - if not header_set and name in self.headers: - del self.headers[name] - elif header_set: - self.headers[name] = header_set.to_header() - - return parse_set_header(self.headers.get(name), on_update) - - def fset(self, value): - if not value: - del self.headers[name] - elif isinstance(value, string_types): - self.headers[name] = value - else: - self.headers[name] = dump_header(value) - - return property(fget, fset, doc=doc) - - vary = _set_property( - "Vary", - doc="""The Vary field value indicates the set of request-header - fields that fully determines, while the response is fresh, - whether a cache is permitted to use the response to reply to a - subsequent request without revalidation.""", - ) - content_language = _set_property( - "Content-Language", - doc="""The Content-Language entity-header field describes the - natural language(s) of the intended audience for the enclosed - entity. Note that this might not be equivalent to all the - languages used within the entity-body.""", - ) - allow = _set_property( - "Allow", - doc="""The Allow entity-header field lists the set of methods - supported by the resource identified by the Request-URI. The - purpose of this field is strictly to inform the recipient of - valid methods associated with the resource. An Allow header - field MUST be present in a 405 (Method Not Allowed) - response.""", - ) - - del _set_property diff --git a/venv/lib/python3.6/site-packages/werkzeug/wrappers/etag.py b/venv/lib/python3.6/site-packages/werkzeug/wrappers/etag.py deleted file mode 100644 index 0733506..0000000 --- a/venv/lib/python3.6/site-packages/werkzeug/wrappers/etag.py +++ /dev/null @@ -1,304 +0,0 @@ -from .._compat import string_types -from .._internal import _get_environ -from ..datastructures import ContentRange -from ..datastructures import RequestCacheControl -from ..datastructures import ResponseCacheControl -from ..http import generate_etag -from ..http import http_date -from ..http import is_resource_modified -from ..http import parse_cache_control_header -from ..http import parse_content_range_header -from ..http import parse_date -from ..http import parse_etags -from ..http import parse_if_range_header -from ..http import parse_range_header -from ..http import quote_etag -from ..http import unquote_etag -from ..utils import cached_property -from ..utils import header_property -from ..wrappers.base_response import _clean_accept_ranges -from ..wsgi import _RangeWrapper - - -class ETagRequestMixin(object): - """Add entity tag and cache descriptors to a request object or object with - a WSGI environment available as :attr:`~BaseRequest.environ`. This not - only provides access to etags but also to the cache control header. - """ - - @cached_property - def cache_control(self): - """A :class:`~werkzeug.datastructures.RequestCacheControl` object - for the incoming cache control headers. 
- """ - cache_control = self.environ.get("HTTP_CACHE_CONTROL") - return parse_cache_control_header(cache_control, None, RequestCacheControl) - - @cached_property - def if_match(self): - """An object containing all the etags in the `If-Match` header. - - :rtype: :class:`~werkzeug.datastructures.ETags` - """ - return parse_etags(self.environ.get("HTTP_IF_MATCH")) - - @cached_property - def if_none_match(self): - """An object containing all the etags in the `If-None-Match` header. - - :rtype: :class:`~werkzeug.datastructures.ETags` - """ - return parse_etags(self.environ.get("HTTP_IF_NONE_MATCH")) - - @cached_property - def if_modified_since(self): - """The parsed `If-Modified-Since` header as datetime object.""" - return parse_date(self.environ.get("HTTP_IF_MODIFIED_SINCE")) - - @cached_property - def if_unmodified_since(self): - """The parsed `If-Unmodified-Since` header as datetime object.""" - return parse_date(self.environ.get("HTTP_IF_UNMODIFIED_SINCE")) - - @cached_property - def if_range(self): - """The parsed `If-Range` header. - - .. versionadded:: 0.7 - - :rtype: :class:`~werkzeug.datastructures.IfRange` - """ - return parse_if_range_header(self.environ.get("HTTP_IF_RANGE")) - - @cached_property - def range(self): - """The parsed `Range` header. - - .. versionadded:: 0.7 - - :rtype: :class:`~werkzeug.datastructures.Range` - """ - return parse_range_header(self.environ.get("HTTP_RANGE")) - - -class ETagResponseMixin(object): - """Adds extra functionality to a response object for etag and cache - handling. This mixin requires an object with at least a `headers` - object that implements a dict like interface similar to - :class:`~werkzeug.datastructures.Headers`. - - If you want the :meth:`freeze` method to automatically add an etag, you - have to mixin this method before the response base class. The default - response class does not do that. - """ - - @property - def cache_control(self): - """The Cache-Control general-header field is used to specify - directives that MUST be obeyed by all caching mechanisms along the - request/response chain. - """ - - def on_update(cache_control): - if not cache_control and "cache-control" in self.headers: - del self.headers["cache-control"] - elif cache_control: - self.headers["Cache-Control"] = cache_control.to_header() - - return parse_cache_control_header( - self.headers.get("cache-control"), on_update, ResponseCacheControl - ) - - def _wrap_response(self, start, length): - """Wrap existing Response in case of Range Request context.""" - if self.status_code == 206: - self.response = _RangeWrapper(self.response, start, length) - - def _is_range_request_processable(self, environ): - """Return ``True`` if `Range` header is present and if underlying - resource is considered unchanged when compared with `If-Range` header. - """ - return ( - "HTTP_IF_RANGE" not in environ - or not is_resource_modified( - environ, - self.headers.get("etag"), - None, - self.headers.get("last-modified"), - ignore_if_range=False, - ) - ) and "HTTP_RANGE" in environ - - def _process_range_request(self, environ, complete_length=None, accept_ranges=None): - """Handle Range Request related headers (RFC7233). If `Accept-Ranges` - header is valid, and Range Request is processable, we set the headers - as described by the RFC, and wrap the underlying response in a - RangeWrapper. - - Returns ``True`` if Range Request can be fulfilled, ``False`` otherwise. - - :raises: :class:`~werkzeug.exceptions.RequestedRangeNotSatisfiable` - if `Range` header could not be parsed or satisfied. 
- """ - from ..exceptions import RequestedRangeNotSatisfiable - - if accept_ranges is None: - return False - self.headers["Accept-Ranges"] = accept_ranges - if not self._is_range_request_processable(environ) or complete_length is None: - return False - parsed_range = parse_range_header(environ.get("HTTP_RANGE")) - if parsed_range is None: - raise RequestedRangeNotSatisfiable(complete_length) - range_tuple = parsed_range.range_for_length(complete_length) - content_range_header = parsed_range.to_content_range_header(complete_length) - if range_tuple is None or content_range_header is None: - raise RequestedRangeNotSatisfiable(complete_length) - content_length = range_tuple[1] - range_tuple[0] - # Be sure not to send 206 response - # if requested range is the full content. - if content_length != complete_length: - self.headers["Content-Length"] = content_length - self.content_range = content_range_header - self.status_code = 206 - self._wrap_response(range_tuple[0], content_length) - return True - return False - - def make_conditional( - self, request_or_environ, accept_ranges=False, complete_length=None - ): - """Make the response conditional to the request. This method works - best if an etag was defined for the response already. The `add_etag` - method can be used to do that. If called without etag just the date - header is set. - - This does nothing if the request method in the request or environ is - anything but GET or HEAD. - - For optimal performance when handling range requests, it's recommended - that your response data object implements `seekable`, `seek` and `tell` - methods as described by :py:class:`io.IOBase`. Objects returned by - :meth:`~werkzeug.wsgi.wrap_file` automatically implement those methods. - - It does not remove the body of the response because that's something - the :meth:`__call__` function does for us automatically. - - Returns self so that you can do ``return resp.make_conditional(req)`` - but modifies the object in-place. - - :param request_or_environ: a request object or WSGI environment to be - used to make the response conditional - against. - :param accept_ranges: This parameter dictates the value of - `Accept-Ranges` header. If ``False`` (default), - the header is not set. If ``True``, it will be set - to ``"bytes"``. If ``None``, it will be set to - ``"none"``. If it's a string, it will use this - value. - :param complete_length: Will be used only in valid Range Requests. - It will set `Content-Range` complete length - value and compute `Content-Length` real value. - This parameter is mandatory for successful - Range Requests completion. - :raises: :class:`~werkzeug.exceptions.RequestedRangeNotSatisfiable` - if `Range` header could not be parsed or satisfied. - """ - environ = _get_environ(request_or_environ) - if environ["REQUEST_METHOD"] in ("GET", "HEAD"): - # if the date is not in the headers, add it now. We however - # will not override an already existing header. Unfortunately - # this header will be overriden by many WSGI servers including - # wsgiref. 
- if "date" not in self.headers: - self.headers["Date"] = http_date() - accept_ranges = _clean_accept_ranges(accept_ranges) - is206 = self._process_range_request(environ, complete_length, accept_ranges) - if not is206 and not is_resource_modified( - environ, - self.headers.get("etag"), - None, - self.headers.get("last-modified"), - ): - if parse_etags(environ.get("HTTP_IF_MATCH")): - self.status_code = 412 - else: - self.status_code = 304 - if ( - self.automatically_set_content_length - and "content-length" not in self.headers - ): - length = self.calculate_content_length() - if length is not None: - self.headers["Content-Length"] = length - return self - - def add_etag(self, overwrite=False, weak=False): - """Add an etag for the current response if there is none yet.""" - if overwrite or "etag" not in self.headers: - self.set_etag(generate_etag(self.get_data()), weak) - - def set_etag(self, etag, weak=False): - """Set the etag, and override the old one if there was one.""" - self.headers["ETag"] = quote_etag(etag, weak) - - def get_etag(self): - """Return a tuple in the form ``(etag, is_weak)``. If there is no - ETag the return value is ``(None, None)``. - """ - return unquote_etag(self.headers.get("ETag")) - - def freeze(self, no_etag=False): - """Call this method if you want to make your response object ready for - pickeling. This buffers the generator if there is one. This also - sets the etag unless `no_etag` is set to `True`. - """ - if not no_etag: - self.add_etag() - super(ETagResponseMixin, self).freeze() - - accept_ranges = header_property( - "Accept-Ranges", - doc="""The `Accept-Ranges` header. Even though the name would - indicate that multiple values are supported, it must be one - string token only. - - The values ``'bytes'`` and ``'none'`` are common. - - .. versionadded:: 0.7""", - ) - - def _get_content_range(self): - def on_update(rng): - if not rng: - del self.headers["content-range"] - else: - self.headers["Content-Range"] = rng.to_header() - - rv = parse_content_range_header(self.headers.get("content-range"), on_update) - # always provide a content range object to make the descriptor - # more user friendly. It provides an unset() method that can be - # used to remove the header quickly. - if rv is None: - rv = ContentRange(None, None, None, on_update=on_update) - return rv - - def _set_content_range(self, value): - if not value: - del self.headers["content-range"] - elif isinstance(value, string_types): - self.headers["Content-Range"] = value - else: - self.headers["Content-Range"] = value.to_header() - - content_range = property( - _get_content_range, - _set_content_range, - doc="""The ``Content-Range`` header as - :class:`~werkzeug.datastructures.ContentRange` object. Even if - the header is not set it wil provide such an object for easier - manipulation. - - .. 
versionadded:: 0.7""", - ) - del _get_content_range, _set_content_range diff --git a/venv/lib/python3.6/site-packages/werkzeug/wrappers/json.py b/venv/lib/python3.6/site-packages/werkzeug/wrappers/json.py deleted file mode 100644 index 6d5dc33..0000000 --- a/venv/lib/python3.6/site-packages/werkzeug/wrappers/json.py +++ /dev/null @@ -1,145 +0,0 @@ -from __future__ import absolute_import - -import datetime -import uuid - -from .._compat import text_type -from ..exceptions import BadRequest -from ..utils import detect_utf_encoding - -try: - import simplejson as _json -except ImportError: - import json as _json - - -class _JSONModule(object): - @staticmethod - def _default(o): - if isinstance(o, datetime.date): - return o.isoformat() - - if isinstance(o, uuid.UUID): - return str(o) - - if hasattr(o, "__html__"): - return text_type(o.__html__()) - - raise TypeError() - - @classmethod - def dumps(cls, obj, **kw): - kw.setdefault("separators", (",", ":")) - kw.setdefault("default", cls._default) - kw.setdefault("sort_keys", True) - return _json.dumps(obj, **kw) - - @staticmethod - def loads(s, **kw): - if isinstance(s, bytes): - # Needed for Python < 3.6 - encoding = detect_utf_encoding(s) - s = s.decode(encoding) - - return _json.loads(s, **kw) - - -class JSONMixin(object): - """Mixin to parse :attr:`data` as JSON. Can be mixed in for both - :class:`~werkzeug.wrappers.Request` and - :class:`~werkzeug.wrappers.Response` classes. - - If `simplejson`_ is installed it is preferred over Python's built-in - :mod:`json` module. - - .. _simplejson: https://simplejson.readthedocs.io/en/latest/ - """ - - #: A module or other object that has ``dumps`` and ``loads`` - #: functions that match the API of the built-in :mod:`json` module. - json_module = _JSONModule - - @property - def json(self): - """The parsed JSON data if :attr:`mimetype` indicates JSON - (:mimetype:`application/json`, see :meth:`is_json`). - - Calls :meth:`get_json` with default arguments. - """ - return self.get_json() - - @property - def is_json(self): - """Check if the mimetype indicates JSON data, either - :mimetype:`application/json` or :mimetype:`application/*+json`. - """ - mt = self.mimetype - return ( - mt == "application/json" - or mt.startswith("application/") - and mt.endswith("+json") - ) - - def _get_data_for_json(self, cache): - try: - return self.get_data(cache=cache) - except TypeError: - # Response doesn't have cache param. - return self.get_data() - - # Cached values for ``(silent=False, silent=True)``. Initialized - # with sentinel values. - _cached_json = (Ellipsis, Ellipsis) - - def get_json(self, force=False, silent=False, cache=True): - """Parse :attr:`data` as JSON. - - If the mimetype does not indicate JSON - (:mimetype:`application/json`, see :meth:`is_json`), this - returns ``None``. - - If parsing fails, :meth:`on_json_loading_failed` is called and - its return value is used as the return value. - - :param force: Ignore the mimetype and always try to parse JSON. - :param silent: Silence parsing errors and return ``None`` - instead. - :param cache: Store the parsed JSON to return for subsequent - calls. 
- """ - if cache and self._cached_json[silent] is not Ellipsis: - return self._cached_json[silent] - - if not (force or self.is_json): - return None - - data = self._get_data_for_json(cache=cache) - - try: - rv = self.json_module.loads(data) - except ValueError as e: - if silent: - rv = None - - if cache: - normal_rv, _ = self._cached_json - self._cached_json = (normal_rv, rv) - else: - rv = self.on_json_loading_failed(e) - - if cache: - _, silent_rv = self._cached_json - self._cached_json = (rv, silent_rv) - else: - if cache: - self._cached_json = (rv, rv) - - return rv - - def on_json_loading_failed(self, e): - """Called if :meth:`get_json` parsing fails and isn't silenced. - If this method returns a value, it is used as the return value - for :meth:`get_json`. The default implementation raises - :exc:`~werkzeug.exceptions.BadRequest`. - """ - raise BadRequest("Failed to decode JSON object: {0}".format(e)) diff --git a/venv/lib/python3.6/site-packages/werkzeug/wrappers/request.py b/venv/lib/python3.6/site-packages/werkzeug/wrappers/request.py deleted file mode 100644 index d1c71b6..0000000 --- a/venv/lib/python3.6/site-packages/werkzeug/wrappers/request.py +++ /dev/null @@ -1,44 +0,0 @@ -from .accept import AcceptMixin -from .auth import AuthorizationMixin -from .base_request import BaseRequest -from .common_descriptors import CommonRequestDescriptorsMixin -from .etag import ETagRequestMixin -from .user_agent import UserAgentMixin - - -class Request( - BaseRequest, - AcceptMixin, - ETagRequestMixin, - UserAgentMixin, - AuthorizationMixin, - CommonRequestDescriptorsMixin, -): - """Full featured request object implementing the following mixins: - - - :class:`AcceptMixin` for accept header parsing - - :class:`ETagRequestMixin` for etag and cache control handling - - :class:`UserAgentMixin` for user agent introspection - - :class:`AuthorizationMixin` for http auth handling - - :class:`CommonRequestDescriptorsMixin` for common headers - """ - - -class StreamOnlyMixin(object): - """If mixed in before the request object this will change the bahavior - of it to disable handling of form parsing. This disables the - :attr:`files`, :attr:`form` attributes and will just provide a - :attr:`stream` attribute that however is always available. - - .. versionadded:: 0.9 - """ - - disable_data_descriptor = True - want_form_data_parsed = False - - -class PlainRequest(StreamOnlyMixin, Request): - """A request object without special form parsing capabilities. - - .. versionadded:: 0.9 - """ diff --git a/venv/lib/python3.6/site-packages/werkzeug/wrappers/response.py b/venv/lib/python3.6/site-packages/werkzeug/wrappers/response.py deleted file mode 100644 index cd86cac..0000000 --- a/venv/lib/python3.6/site-packages/werkzeug/wrappers/response.py +++ /dev/null @@ -1,78 +0,0 @@ -from ..utils import cached_property -from .auth import WWWAuthenticateMixin -from .base_response import BaseResponse -from .common_descriptors import CommonResponseDescriptorsMixin -from .etag import ETagResponseMixin - - -class ResponseStream(object): - """A file descriptor like object used by the :class:`ResponseStreamMixin` to - represent the body of the stream. It directly pushes into the response - iterable of the response object. 
- """ - - mode = "wb+" - - def __init__(self, response): - self.response = response - self.closed = False - - def write(self, value): - if self.closed: - raise ValueError("I/O operation on closed file") - self.response._ensure_sequence(mutable=True) - self.response.response.append(value) - self.response.headers.pop("Content-Length", None) - return len(value) - - def writelines(self, seq): - for item in seq: - self.write(item) - - def close(self): - self.closed = True - - def flush(self): - if self.closed: - raise ValueError("I/O operation on closed file") - - def isatty(self): - if self.closed: - raise ValueError("I/O operation on closed file") - return False - - def tell(self): - self.response._ensure_sequence() - return sum(map(len, self.response.response)) - - @property - def encoding(self): - return self.response.charset - - -class ResponseStreamMixin(object): - """Mixin for :class:`BaseRequest` subclasses. Classes that inherit from - this mixin will automatically get a :attr:`stream` property that provides - a write-only interface to the response iterable. - """ - - @cached_property - def stream(self): - """The response iterable as write-only stream.""" - return ResponseStream(self) - - -class Response( - BaseResponse, - ETagResponseMixin, - ResponseStreamMixin, - CommonResponseDescriptorsMixin, - WWWAuthenticateMixin, -): - """Full featured response object implementing the following mixins: - - - :class:`ETagResponseMixin` for etag and cache control handling - - :class:`ResponseStreamMixin` to add support for the `stream` property - - :class:`CommonResponseDescriptorsMixin` for various HTTP descriptors - - :class:`WWWAuthenticateMixin` for HTTP authentication support - """ diff --git a/venv/lib/python3.6/site-packages/werkzeug/wrappers/user_agent.py b/venv/lib/python3.6/site-packages/werkzeug/wrappers/user_agent.py deleted file mode 100644 index 72588dd..0000000 --- a/venv/lib/python3.6/site-packages/werkzeug/wrappers/user_agent.py +++ /dev/null @@ -1,15 +0,0 @@ -from ..utils import cached_property - - -class UserAgentMixin(object): - """Adds a `user_agent` attribute to the request object which - contains the parsed user agent of the browser that triggered the - request as a :class:`~werkzeug.useragents.UserAgent` object. - """ - - @cached_property - def user_agent(self): - """The current user agent.""" - from ..useragents import UserAgent - - return UserAgent(self.environ) diff --git a/venv/lib/python3.6/site-packages/werkzeug/wsgi.py b/venv/lib/python3.6/site-packages/werkzeug/wsgi.py deleted file mode 100644 index f069f2d..0000000 --- a/venv/lib/python3.6/site-packages/werkzeug/wsgi.py +++ /dev/null @@ -1,1067 +0,0 @@ -# -*- coding: utf-8 -*- -""" - werkzeug.wsgi - ~~~~~~~~~~~~~ - - This module implements WSGI related helpers. - - :copyright: 2007 Pallets - :license: BSD-3-Clause -""" -import io -import re -import warnings -from functools import partial -from functools import update_wrapper -from itertools import chain - -from ._compat import BytesIO -from ._compat import implements_iterator -from ._compat import make_literal_wrapper -from ._compat import string_types -from ._compat import text_type -from ._compat import to_bytes -from ._compat import to_unicode -from ._compat import try_coerce_native -from ._compat import wsgi_get_bytes -from ._internal import _encode_idna -from .urls import uri_to_iri -from .urls import url_join -from .urls import url_parse -from .urls import url_quote - - -def responder(f): - """Marks a function as responder. 
Decorate a function with it and it - will automatically call the return value as WSGI application. - - Example:: - - @responder - def application(environ, start_response): - return Response('Hello World!') - """ - return update_wrapper(lambda *a: f(*a)(*a[-2:]), f) - - -def get_current_url( - environ, - root_only=False, - strip_querystring=False, - host_only=False, - trusted_hosts=None, -): - """A handy helper function that recreates the full URL as IRI for the - current request or parts of it. Here's an example: - - >>> from werkzeug.test import create_environ - >>> env = create_environ("/?param=foo", "http://localhost/script") - >>> get_current_url(env) - 'http://localhost/script/?param=foo' - >>> get_current_url(env, root_only=True) - 'http://localhost/script/' - >>> get_current_url(env, host_only=True) - 'http://localhost/' - >>> get_current_url(env, strip_querystring=True) - 'http://localhost/script/' - - This optionally it verifies that the host is in a list of trusted hosts. - If the host is not in there it will raise a - :exc:`~werkzeug.exceptions.SecurityError`. - - Note that the string returned might contain unicode characters as the - representation is an IRI not an URI. If you need an ASCII only - representation you can use the :func:`~werkzeug.urls.iri_to_uri` - function: - - >>> from werkzeug.urls import iri_to_uri - >>> iri_to_uri(get_current_url(env)) - 'http://localhost/script/?param=foo' - - :param environ: the WSGI environment to get the current URL from. - :param root_only: set `True` if you only want the root URL. - :param strip_querystring: set to `True` if you don't want the querystring. - :param host_only: set to `True` if the host URL should be returned. - :param trusted_hosts: a list of trusted hosts, see :func:`host_is_trusted` - for more information. - """ - tmp = [environ["wsgi.url_scheme"], "://", get_host(environ, trusted_hosts)] - cat = tmp.append - if host_only: - return uri_to_iri("".join(tmp) + "/") - cat(url_quote(wsgi_get_bytes(environ.get("SCRIPT_NAME", ""))).rstrip("/")) - cat("/") - if not root_only: - cat(url_quote(wsgi_get_bytes(environ.get("PATH_INFO", "")).lstrip(b"/"))) - if not strip_querystring: - qs = get_query_string(environ) - if qs: - cat("?" + qs) - return uri_to_iri("".join(tmp)) - - -def host_is_trusted(hostname, trusted_list): - """Checks if a host is trusted against a list. This also takes care - of port normalization. - - .. versionadded:: 0.9 - - :param hostname: the hostname to check - :param trusted_list: a list of hostnames to check against. If a - hostname starts with a dot it will match against - all subdomains as well. - """ - if not hostname: - return False - - if isinstance(trusted_list, string_types): - trusted_list = [trusted_list] - - def _normalize(hostname): - if ":" in hostname: - hostname = hostname.rsplit(":", 1)[0] - return _encode_idna(hostname) - - try: - hostname = _normalize(hostname) - except UnicodeError: - return False - for ref in trusted_list: - if ref.startswith("."): - ref = ref[1:] - suffix_match = True - else: - suffix_match = False - try: - ref = _normalize(ref) - except UnicodeError: - return False - if ref == hostname: - return True - if suffix_match and hostname.endswith(b"." + ref): - return True - return False - - -def get_host(environ, trusted_hosts=None): - """Return the host for the given WSGI environment. This first checks - the ``Host`` header. If it's not present, then ``SERVER_NAME`` and - ``SERVER_PORT`` are used. 
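A short sketch of the host_is_trusted helper documented above, assuming werkzeug 0.15.x; the hostnames are made up:

    from werkzeug.wsgi import host_is_trusted

    # A leading dot matches the domain and all of its subdomains; the port is normalized away.
    host_is_trusted("api.example.com:443", [".example.com"])  # True
    # Without the leading dot only an exact (IDNA-normalized) hostname is trusted.
    host_is_trusted("evil.example.net", ["example.com"])       # False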
The host will only contain the port if it - is different than the standard port for the protocol. - - Optionally, verify that the host is trusted using - :func:`host_is_trusted` and raise a - :exc:`~werkzeug.exceptions.SecurityError` if it is not. - - :param environ: The WSGI environment to get the host from. - :param trusted_hosts: A list of trusted hosts. - :return: Host, with port if necessary. - :raise ~werkzeug.exceptions.SecurityError: If the host is not - trusted. - """ - if "HTTP_HOST" in environ: - rv = environ["HTTP_HOST"] - if environ["wsgi.url_scheme"] == "http" and rv.endswith(":80"): - rv = rv[:-3] - elif environ["wsgi.url_scheme"] == "https" and rv.endswith(":443"): - rv = rv[:-4] - else: - rv = environ["SERVER_NAME"] - if (environ["wsgi.url_scheme"], environ["SERVER_PORT"]) not in ( - ("https", "443"), - ("http", "80"), - ): - rv += ":" + environ["SERVER_PORT"] - if trusted_hosts is not None: - if not host_is_trusted(rv, trusted_hosts): - from .exceptions import SecurityError - - raise SecurityError('Host "%s" is not trusted' % rv) - return rv - - -def get_content_length(environ): - """Returns the content length from the WSGI environment as - integer. If it's not available or chunked transfer encoding is used, - ``None`` is returned. - - .. versionadded:: 0.9 - - :param environ: the WSGI environ to fetch the content length from. - """ - if environ.get("HTTP_TRANSFER_ENCODING", "") == "chunked": - return None - - content_length = environ.get("CONTENT_LENGTH") - if content_length is not None: - try: - return max(0, int(content_length)) - except (ValueError, TypeError): - pass - - -def get_input_stream(environ, safe_fallback=True): - """Returns the input stream from the WSGI environment and wraps it - in the most sensible way possible. The stream returned is not the - raw WSGI stream in most cases but one that is safe to read from - without taking into account the content length. - - If content length is not set, the stream will be empty for safety reasons. - If the WSGI server supports chunked or infinite streams, it should set - the ``wsgi.input_terminated`` value in the WSGI environ to indicate that. - - .. versionadded:: 0.9 - - :param environ: the WSGI environ to fetch the stream from. - :param safe_fallback: use an empty stream as a safe fallback when the - content length is not set. Disabling this allows infinite streams, - which can be a denial-of-service risk. - """ - stream = environ["wsgi.input"] - content_length = get_content_length(environ) - - # A wsgi extension that tells us if the input is terminated. In - # that case we return the stream unchanged as we know we can safely - # read it until the end. - if environ.get("wsgi.input_terminated"): - return stream - - # If the request doesn't specify a content length, returning the stream is - # potentially dangerous because it could be infinite, malicious or not. If - # safe_fallback is true, return an empty stream instead for safety. - if content_length is None: - return BytesIO() if safe_fallback else stream - - # Otherwise limit the stream to the content length - return LimitedStream(stream, content_length) - - -def get_query_string(environ): - """Returns the `QUERY_STRING` from the WSGI environment. This also takes - care about the WSGI decoding dance on Python 3 environments as a - native string. The string returned will be restricted to ASCII - characters. - - .. versionadded:: 0.9 - - :param environ: the WSGI environment object to get the query string from. 
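To illustrate the two helpers above working together, a minimal sketch (assuming werkzeug 0.15.x; the request body is illustrative):

    from io import BytesIO
    from werkzeug.wsgi import get_content_length, get_input_stream

    environ = {
        "wsgi.input": BytesIO(b"payload"),
        "CONTENT_LENGTH": "7",
    }
    get_content_length(environ)        # 7
    # Returns a LimitedStream capped at CONTENT_LENGTH, so reads cannot run past the body.
    get_input_stream(environ).read()   # b'payload'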
- """ - qs = wsgi_get_bytes(environ.get("QUERY_STRING", "")) - # QUERY_STRING really should be ascii safe but some browsers - # will send us some unicode stuff (I am looking at you IE). - # In that case we want to urllib quote it badly. - return try_coerce_native(url_quote(qs, safe=":&%=+$!*'(),")) - - -def get_path_info(environ, charset="utf-8", errors="replace"): - """Returns the `PATH_INFO` from the WSGI environment and properly - decodes it. This also takes care about the WSGI decoding dance - on Python 3 environments. if the `charset` is set to `None` a - bytestring is returned. - - .. versionadded:: 0.9 - - :param environ: the WSGI environment object to get the path from. - :param charset: the charset for the path info, or `None` if no - decoding should be performed. - :param errors: the decoding error handling. - """ - path = wsgi_get_bytes(environ.get("PATH_INFO", "")) - return to_unicode(path, charset, errors, allow_none_charset=True) - - -def get_script_name(environ, charset="utf-8", errors="replace"): - """Returns the `SCRIPT_NAME` from the WSGI environment and properly - decodes it. This also takes care about the WSGI decoding dance - on Python 3 environments. if the `charset` is set to `None` a - bytestring is returned. - - .. versionadded:: 0.9 - - :param environ: the WSGI environment object to get the path from. - :param charset: the charset for the path, or `None` if no - decoding should be performed. - :param errors: the decoding error handling. - """ - path = wsgi_get_bytes(environ.get("SCRIPT_NAME", "")) - return to_unicode(path, charset, errors, allow_none_charset=True) - - -def pop_path_info(environ, charset="utf-8", errors="replace"): - """Removes and returns the next segment of `PATH_INFO`, pushing it onto - `SCRIPT_NAME`. Returns `None` if there is nothing left on `PATH_INFO`. - - If the `charset` is set to `None` a bytestring is returned. - - If there are empty segments (``'/foo//bar``) these are ignored but - properly pushed to the `SCRIPT_NAME`: - - >>> env = {'SCRIPT_NAME': '/foo', 'PATH_INFO': '/a/b'} - >>> pop_path_info(env) - 'a' - >>> env['SCRIPT_NAME'] - '/foo/a' - >>> pop_path_info(env) - 'b' - >>> env['SCRIPT_NAME'] - '/foo/a/b' - - .. versionadded:: 0.5 - - .. versionchanged:: 0.9 - The path is now decoded and a charset and encoding - parameter can be provided. - - :param environ: the WSGI environment that is modified. - """ - path = environ.get("PATH_INFO") - if not path: - return None - - script_name = environ.get("SCRIPT_NAME", "") - - # shift multiple leading slashes over - old_path = path - path = path.lstrip("/") - if path != old_path: - script_name += "/" * (len(old_path) - len(path)) - - if "/" not in path: - environ["PATH_INFO"] = "" - environ["SCRIPT_NAME"] = script_name + path - rv = wsgi_get_bytes(path) - else: - segment, path = path.split("/", 1) - environ["PATH_INFO"] = "/" + path - environ["SCRIPT_NAME"] = script_name + segment - rv = wsgi_get_bytes(segment) - - return to_unicode(rv, charset, errors, allow_none_charset=True) - - -def peek_path_info(environ, charset="utf-8", errors="replace"): - """Returns the next segment on the `PATH_INFO` or `None` if there - is none. Works like :func:`pop_path_info` without modifying the - environment: - - >>> env = {'SCRIPT_NAME': '/foo', 'PATH_INFO': '/a/b'} - >>> peek_path_info(env) - 'a' - >>> peek_path_info(env) - 'a' - - If the `charset` is set to `None` a bytestring is returned. - - .. versionadded:: 0.5 - - .. 
versionchanged:: 0.9 - The path is now decoded and a charset and encoding - parameter can be provided. - - :param environ: the WSGI environment that is checked. - """ - segments = environ.get("PATH_INFO", "").lstrip("/").split("/", 1) - if segments: - return to_unicode( - wsgi_get_bytes(segments[0]), charset, errors, allow_none_charset=True - ) - - -def extract_path_info( - environ_or_baseurl, - path_or_url, - charset="utf-8", - errors="werkzeug.url_quote", - collapse_http_schemes=True, -): - """Extracts the path info from the given URL (or WSGI environment) and - path. The path info returned is a unicode string, not a bytestring - suitable for a WSGI environment. The URLs might also be IRIs. - - If the path info could not be determined, `None` is returned. - - Some examples: - - >>> extract_path_info('http://example.com/app', '/app/hello') - u'/hello' - >>> extract_path_info('http://example.com/app', - ... 'https://example.com/app/hello') - u'/hello' - >>> extract_path_info('http://example.com/app', - ... 'https://example.com/app/hello', - ... collapse_http_schemes=False) is None - True - - Instead of providing a base URL you can also pass a WSGI environment. - - :param environ_or_baseurl: a WSGI environment dict, a base URL or - base IRI. This is the root of the - application. - :param path_or_url: an absolute path from the server root, a - relative path (in which case it's the path info) - or a full URL. Also accepts IRIs and unicode - parameters. - :param charset: the charset for byte data in URLs - :param errors: the error handling on decode - :param collapse_http_schemes: if set to `False` the algorithm does - not assume that http and https on the - same server point to the same - resource. - - .. versionchanged:: 0.15 - The ``errors`` parameter defaults to leaving invalid bytes - quoted instead of replacing them. - - .. versionadded:: 0.6 - """ - - def _normalize_netloc(scheme, netloc): - parts = netloc.split(u"@", 1)[-1].split(u":", 1) - if len(parts) == 2: - netloc, port = parts - if (scheme == u"http" and port == u"80") or ( - scheme == u"https" and port == u"443" - ): - port = None - else: - netloc = parts[0] - port = None - if port is not None: - netloc += u":" + port - return netloc - - # make sure whatever we are working on is a IRI and parse it - path = uri_to_iri(path_or_url, charset, errors) - if isinstance(environ_or_baseurl, dict): - environ_or_baseurl = get_current_url(environ_or_baseurl, root_only=True) - base_iri = uri_to_iri(environ_or_baseurl, charset, errors) - base_scheme, base_netloc, base_path = url_parse(base_iri)[:3] - cur_scheme, cur_netloc, cur_path, = url_parse(url_join(base_iri, path))[:3] - - # normalize the network location - base_netloc = _normalize_netloc(base_scheme, base_netloc) - cur_netloc = _normalize_netloc(cur_scheme, cur_netloc) - - # is that IRI even on a known HTTP scheme? - if collapse_http_schemes: - for scheme in base_scheme, cur_scheme: - if scheme not in (u"http", u"https"): - return None - else: - if not (base_scheme in (u"http", u"https") and base_scheme == cur_scheme): - return None - - # are the netlocs compatible? - if base_netloc != cur_netloc: - return None - - # are we below the application path? 
- base_path = base_path.rstrip(u"/") - if not cur_path.startswith(base_path): - return None - - return u"/" + cur_path[len(base_path) :].lstrip(u"/") - - -@implements_iterator -class ClosingIterator(object): - """The WSGI specification requires that all middlewares and gateways - respect the `close` callback of the iterable returned by the application. - Because it is useful to add another close action to a returned iterable - and adding a custom iterable is a boring task this class can be used for - that:: - - return ClosingIterator(app(environ, start_response), [cleanup_session, - cleanup_locals]) - - If there is just one close function it can be passed instead of the list. - - A closing iterator is not needed if the application uses response objects - and finishes the processing if the response is started:: - - try: - return response(environ, start_response) - finally: - cleanup_session() - cleanup_locals() - """ - - def __init__(self, iterable, callbacks=None): - iterator = iter(iterable) - self._next = partial(next, iterator) - if callbacks is None: - callbacks = [] - elif callable(callbacks): - callbacks = [callbacks] - else: - callbacks = list(callbacks) - iterable_close = getattr(iterable, "close", None) - if iterable_close: - callbacks.insert(0, iterable_close) - self._callbacks = callbacks - - def __iter__(self): - return self - - def __next__(self): - return self._next() - - def close(self): - for callback in self._callbacks: - callback() - - -def wrap_file(environ, file, buffer_size=8192): - """Wraps a file. This uses the WSGI server's file wrapper if available - or otherwise the generic :class:`FileWrapper`. - - .. versionadded:: 0.5 - - If the file wrapper from the WSGI server is used it's important to not - iterate over it from inside the application but to pass it through - unchanged. If you want to pass out a file wrapper inside a response - object you have to set :attr:`~BaseResponse.direct_passthrough` to `True`. - - More information about file wrappers are available in :pep:`333`. - - :param file: a :class:`file`-like object with a :meth:`~file.read` method. - :param buffer_size: number of bytes for one iteration. - """ - return environ.get("wsgi.file_wrapper", FileWrapper)(file, buffer_size) - - -@implements_iterator -class FileWrapper(object): - """This class can be used to convert a :class:`file`-like object into - an iterable. It yields `buffer_size` blocks until the file is fully - read. - - You should not use this class directly but rather use the - :func:`wrap_file` function that uses the WSGI server's file wrapper - support if it's available. - - .. versionadded:: 0.5 - - If you're using this object together with a :class:`BaseResponse` you have - to use the `direct_passthrough` mode. - - :param file: a :class:`file`-like object with a :meth:`~file.read` method. - :param buffer_size: number of bytes for one iteration. 
- """ - - def __init__(self, file, buffer_size=8192): - self.file = file - self.buffer_size = buffer_size - - def close(self): - if hasattr(self.file, "close"): - self.file.close() - - def seekable(self): - if hasattr(self.file, "seekable"): - return self.file.seekable() - if hasattr(self.file, "seek"): - return True - return False - - def seek(self, *args): - if hasattr(self.file, "seek"): - self.file.seek(*args) - - def tell(self): - if hasattr(self.file, "tell"): - return self.file.tell() - return None - - def __iter__(self): - return self - - def __next__(self): - data = self.file.read(self.buffer_size) - if data: - return data - raise StopIteration() - - -@implements_iterator -class _RangeWrapper(object): - # private for now, but should we make it public in the future ? - - """This class can be used to convert an iterable object into - an iterable that will only yield a piece of the underlying content. - It yields blocks until the underlying stream range is fully read. - The yielded blocks will have a size that can't exceed the original - iterator defined block size, but that can be smaller. - - If you're using this object together with a :class:`BaseResponse` you have - to use the `direct_passthrough` mode. - - :param iterable: an iterable object with a :meth:`__next__` method. - :param start_byte: byte from which read will start. - :param byte_range: how many bytes to read. - """ - - def __init__(self, iterable, start_byte=0, byte_range=None): - self.iterable = iter(iterable) - self.byte_range = byte_range - self.start_byte = start_byte - self.end_byte = None - if byte_range is not None: - self.end_byte = self.start_byte + self.byte_range - self.read_length = 0 - self.seekable = hasattr(iterable, "seekable") and iterable.seekable() - self.end_reached = False - - def __iter__(self): - return self - - def _next_chunk(self): - try: - chunk = next(self.iterable) - self.read_length += len(chunk) - return chunk - except StopIteration: - self.end_reached = True - raise - - def _first_iteration(self): - chunk = None - if self.seekable: - self.iterable.seek(self.start_byte) - self.read_length = self.iterable.tell() - contextual_read_length = self.read_length - else: - while self.read_length <= self.start_byte: - chunk = self._next_chunk() - if chunk is not None: - chunk = chunk[self.start_byte - self.read_length :] - contextual_read_length = self.start_byte - return chunk, contextual_read_length - - def _next(self): - if self.end_reached: - raise StopIteration() - chunk = None - contextual_read_length = self.read_length - if self.read_length == 0: - chunk, contextual_read_length = self._first_iteration() - if chunk is None: - chunk = self._next_chunk() - if self.end_byte is not None and self.read_length >= self.end_byte: - self.end_reached = True - return chunk[: self.end_byte - contextual_read_length] - return chunk - - def __next__(self): - chunk = self._next() - if chunk: - return chunk - self.end_reached = True - raise StopIteration() - - def close(self): - if hasattr(self.iterable, "close"): - self.iterable.close() - - -def _make_chunk_iter(stream, limit, buffer_size): - """Helper for the line and chunk iter functions.""" - if isinstance(stream, (bytes, bytearray, text_type)): - raise TypeError( - "Passed a string or byte object instead of true iterator or stream." 
- ) - if not hasattr(stream, "read"): - for item in stream: - if item: - yield item - return - if not isinstance(stream, LimitedStream) and limit is not None: - stream = LimitedStream(stream, limit) - _read = stream.read - while 1: - item = _read(buffer_size) - if not item: - break - yield item - - -def make_line_iter(stream, limit=None, buffer_size=10 * 1024, cap_at_buffer=False): - """Safely iterates line-based over an input stream. If the input stream - is not a :class:`LimitedStream` the `limit` parameter is mandatory. - - This uses the stream's :meth:`~file.read` method internally as opposite - to the :meth:`~file.readline` method that is unsafe and can only be used - in violation of the WSGI specification. The same problem applies to the - `__iter__` function of the input stream which calls :meth:`~file.readline` - without arguments. - - If you need line-by-line processing it's strongly recommended to iterate - over the input stream using this helper function. - - .. versionchanged:: 0.8 - This function now ensures that the limit was reached. - - .. versionadded:: 0.9 - added support for iterators as input stream. - - .. versionadded:: 0.11.10 - added support for the `cap_at_buffer` parameter. - - :param stream: the stream or iterate to iterate over. - :param limit: the limit in bytes for the stream. (Usually - content length. Not necessary if the `stream` - is a :class:`LimitedStream`. - :param buffer_size: The optional buffer size. - :param cap_at_buffer: if this is set chunks are split if they are longer - than the buffer size. Internally this is implemented - that the buffer size might be exhausted by a factor - of two however. - """ - _iter = _make_chunk_iter(stream, limit, buffer_size) - - first_item = next(_iter, "") - if not first_item: - return - - s = make_literal_wrapper(first_item) - empty = s("") - cr = s("\r") - lf = s("\n") - crlf = s("\r\n") - - _iter = chain((first_item,), _iter) - - def _iter_basic_lines(): - _join = empty.join - buffer = [] - while 1: - new_data = next(_iter, "") - if not new_data: - break - new_buf = [] - buf_size = 0 - for item in chain(buffer, new_data.splitlines(True)): - new_buf.append(item) - buf_size += len(item) - if item and item[-1:] in crlf: - yield _join(new_buf) - new_buf = [] - elif cap_at_buffer and buf_size >= buffer_size: - rv = _join(new_buf) - while len(rv) >= buffer_size: - yield rv[:buffer_size] - rv = rv[buffer_size:] - new_buf = [rv] - buffer = new_buf - if buffer: - yield _join(buffer) - - # This hackery is necessary to merge 'foo\r' and '\n' into one item - # of 'foo\r\n' if we were unlucky and we hit a chunk boundary. - previous = empty - for item in _iter_basic_lines(): - if item == lf and previous[-1:] == cr: - previous += item - item = empty - if previous: - yield previous - previous = item - if previous: - yield previous - - -def make_chunk_iter( - stream, separator, limit=None, buffer_size=10 * 1024, cap_at_buffer=False -): - """Works like :func:`make_line_iter` but accepts a separator - which divides chunks. If you want newline based processing - you should use :func:`make_line_iter` instead as it - supports arbitrary newline markers. - - .. versionadded:: 0.8 - - .. versionadded:: 0.9 - added support for iterators as input stream. - - .. versionadded:: 0.11.10 - added support for the `cap_at_buffer` parameter. - - :param stream: the stream or iterate to iterate over. - :param separator: the separator that divides chunks. - :param limit: the limit in bytes for the stream. (Usually - content length. 
Not necessary if the `stream` - is otherwise already limited). - :param buffer_size: The optional buffer size. - :param cap_at_buffer: if this is set chunks are split if they are longer - than the buffer size. Internally this is implemented - that the buffer size might be exhausted by a factor - of two however. - """ - _iter = _make_chunk_iter(stream, limit, buffer_size) - - first_item = next(_iter, "") - if not first_item: - return - - _iter = chain((first_item,), _iter) - if isinstance(first_item, text_type): - separator = to_unicode(separator) - _split = re.compile(r"(%s)" % re.escape(separator)).split - _join = u"".join - else: - separator = to_bytes(separator) - _split = re.compile(b"(" + re.escape(separator) + b")").split - _join = b"".join - - buffer = [] - while 1: - new_data = next(_iter, "") - if not new_data: - break - chunks = _split(new_data) - new_buf = [] - buf_size = 0 - for item in chain(buffer, chunks): - if item == separator: - yield _join(new_buf) - new_buf = [] - buf_size = 0 - else: - buf_size += len(item) - new_buf.append(item) - - if cap_at_buffer and buf_size >= buffer_size: - rv = _join(new_buf) - while len(rv) >= buffer_size: - yield rv[:buffer_size] - rv = rv[buffer_size:] - new_buf = [rv] - buf_size = len(rv) - - buffer = new_buf - if buffer: - yield _join(buffer) - - -@implements_iterator -class LimitedStream(io.IOBase): - """Wraps a stream so that it doesn't read more than n bytes. If the - stream is exhausted and the caller tries to get more bytes from it - :func:`on_exhausted` is called which by default returns an empty - string. The return value of that function is forwarded - to the reader function. So if it returns an empty string - :meth:`read` will return an empty string as well. - - The limit however must never be higher than what the stream can - output. Otherwise :meth:`readlines` will try to read past the - limit. - - .. admonition:: Note on WSGI compliance - - calls to :meth:`readline` and :meth:`readlines` are not - WSGI compliant because it passes a size argument to the - readline methods. Unfortunately the WSGI PEP is not safely - implementable without a size argument to :meth:`readline` - because there is no EOF marker in the stream. As a result - of that the use of :meth:`readline` is discouraged. - - For the same reason iterating over the :class:`LimitedStream` - is not portable. It internally calls :meth:`readline`. - - We strongly suggest using :meth:`read` only or using the - :func:`make_line_iter` which safely iterates line-based - over a WSGI input stream. - - :param stream: the stream to wrap. - :param limit: the limit for the stream, must not be longer than - what the string can provide if the stream does not - end with `EOF` (like `wsgi.input`) - """ - - def __init__(self, stream, limit): - self._read = stream.read - self._readline = stream.readline - self._pos = 0 - self.limit = limit - - def __iter__(self): - return self - - @property - def is_exhausted(self): - """If the stream is exhausted this attribute is `True`.""" - return self._pos >= self.limit - - def on_exhausted(self): - """This is called when the stream tries to read past the limit. - The return value of this function is returned from the reading - function. - """ - # Read null bytes from the stream so that we get the - # correct end of stream marker. - return self._read(0) - - def on_disconnect(self): - """What should happen if a disconnect is detected? The return - value of this function is returned from read functions in case - the client went away. 
By default a - :exc:`~werkzeug.exceptions.ClientDisconnected` exception is raised. - """ - from .exceptions import ClientDisconnected - - raise ClientDisconnected() - - def exhaust(self, chunk_size=1024 * 64): - """Exhaust the stream. This consumes all the data left until the - limit is reached. - - :param chunk_size: the size for a chunk. It will read the chunk - until the stream is exhausted and throw away - the results. - """ - to_read = self.limit - self._pos - chunk = chunk_size - while to_read > 0: - chunk = min(to_read, chunk) - self.read(chunk) - to_read -= chunk - - def read(self, size=None): - """Read `size` bytes or if size is not provided everything is read. - - :param size: the number of bytes read. - """ - if self._pos >= self.limit: - return self.on_exhausted() - if size is None or size == -1: # -1 is for consistence with file - size = self.limit - to_read = min(self.limit - self._pos, size) - try: - read = self._read(to_read) - except (IOError, ValueError): - return self.on_disconnect() - if to_read and len(read) != to_read: - return self.on_disconnect() - self._pos += len(read) - return read - - def readline(self, size=None): - """Reads one line from the stream.""" - if self._pos >= self.limit: - return self.on_exhausted() - if size is None: - size = self.limit - self._pos - else: - size = min(size, self.limit - self._pos) - try: - line = self._readline(size) - except (ValueError, IOError): - return self.on_disconnect() - if size and not line: - return self.on_disconnect() - self._pos += len(line) - return line - - def readlines(self, size=None): - """Reads a file into a list of strings. It calls :meth:`readline` - until the file is read to the end. It does support the optional - `size` argument if the underlaying stream supports it for - `readline`. - """ - last_pos = self._pos - result = [] - if size is not None: - end = min(self.limit, last_pos + size) - else: - end = self.limit - while 1: - if size is not None: - size -= last_pos - self._pos - if self._pos >= end: - break - result.append(self.readline(size)) - if size is not None: - last_pos = self._pos - return result - - def tell(self): - """Returns the position of the stream. - - .. versionadded:: 0.9 - """ - return self._pos - - def __next__(self): - line = self.readline() - if not line: - raise StopIteration() - return line - - def readable(self): - return True - - -# DEPRECATED -from .middleware.dispatcher import DispatcherMiddleware as _DispatcherMiddleware -from .middleware.http_proxy import ProxyMiddleware as _ProxyMiddleware -from .middleware.shared_data import SharedDataMiddleware as _SharedDataMiddleware - - -class ProxyMiddleware(_ProxyMiddleware): - """ - .. deprecated:: 0.15 - ``werkzeug.wsgi.ProxyMiddleware`` has moved to - :mod:`werkzeug.middleware.http_proxy`. This import will be - removed in 1.0. - """ - - def __init__(self, *args, **kwargs): - warnings.warn( - "'werkzeug.wsgi.ProxyMiddleware' has moved to 'werkzeug" - ".middleware.http_proxy.ProxyMiddleware'. This import is" - " deprecated as of version 0.15 and will be removed in" - " version 1.0.", - DeprecationWarning, - stacklevel=2, - ) - super(ProxyMiddleware, self).__init__(*args, **kwargs) - - -class SharedDataMiddleware(_SharedDataMiddleware): - """ - .. deprecated:: 0.15 - ``werkzeug.wsgi.SharedDataMiddleware`` has moved to - :mod:`werkzeug.middleware.shared_data`. This import will be - removed in 1.0. 
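A minimal sketch of the LimitedStream behaviour described above (werkzeug 0.15.x):

    from io import BytesIO
    from werkzeug.wsgi import LimitedStream

    stream = LimitedStream(BytesIO(b"hello world"), 5)
    stream.read()        # b'hello' - never reads past the limit
    stream.is_exhausted  # True
    stream.read()        # b'' - on_exhausted() returns the end-of-stream marker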
- """ - - def __init__(self, *args, **kwargs): - warnings.warn( - "'werkzeug.wsgi.SharedDataMiddleware' has moved to" - " 'werkzeug.middleware.shared_data.SharedDataMiddleware'." - " This import is deprecated as of version 0.15 and will be" - " removed in version 1.0.", - DeprecationWarning, - stacklevel=2, - ) - super(SharedDataMiddleware, self).__init__(*args, **kwargs) - - -class DispatcherMiddleware(_DispatcherMiddleware): - """ - .. deprecated:: 0.15 - ``werkzeug.wsgi.DispatcherMiddleware`` has moved to - :mod:`werkzeug.middleware.dispatcher`. This import will be - removed in 1.0. - """ - - def __init__(self, *args, **kwargs): - warnings.warn( - "'werkzeug.wsgi.DispatcherMiddleware' has moved to" - " 'werkzeug.middleware.dispatcher.DispatcherMiddleware'." - " This import is deprecated as of version 0.15 and will be" - " removed in version 1.0.", - DeprecationWarning, - stacklevel=2, - ) - super(DispatcherMiddleware, self).__init__(*args, **kwargs) diff --git a/venv/lib/python3.6/site-packages/yaml/__init__.py b/venv/lib/python3.6/site-packages/yaml/__init__.py deleted file mode 100644 index 3f499d3..0000000 --- a/venv/lib/python3.6/site-packages/yaml/__init__.py +++ /dev/null @@ -1,402 +0,0 @@ - -from .error import * - -from .tokens import * -from .events import * -from .nodes import * - -from .loader import * -from .dumper import * - -__version__ = '5.1.2' -try: - from .cyaml import * - __with_libyaml__ = True -except ImportError: - __with_libyaml__ = False - -import io - -#------------------------------------------------------------------------------ -# Warnings control -#------------------------------------------------------------------------------ - -# 'Global' warnings state: -_warnings_enabled = { - 'YAMLLoadWarning': True, -} - -# Get or set global warnings' state -def warnings(settings=None): - if settings is None: - return _warnings_enabled - - if type(settings) is dict: - for key in settings: - if key in _warnings_enabled: - _warnings_enabled[key] = settings[key] - -# Warn when load() is called without Loader=... -class YAMLLoadWarning(RuntimeWarning): - pass - -def load_warning(method): - if _warnings_enabled['YAMLLoadWarning'] is False: - return - - import warnings - - message = ( - "calling yaml.%s() without Loader=... is deprecated, as the " - "default Loader is unsafe. Please read " - "https://msg.pyyaml.org/load for full details." - ) % method - - warnings.warn(message, YAMLLoadWarning, stacklevel=3) - -#------------------------------------------------------------------------------ -def scan(stream, Loader=Loader): - """ - Scan a YAML stream and produce scanning tokens. - """ - loader = Loader(stream) - try: - while loader.check_token(): - yield loader.get_token() - finally: - loader.dispose() - -def parse(stream, Loader=Loader): - """ - Parse a YAML stream and produce parsing events. - """ - loader = Loader(stream) - try: - while loader.check_event(): - yield loader.get_event() - finally: - loader.dispose() - -def compose(stream, Loader=Loader): - """ - Parse the first YAML document in a stream - and produce the corresponding representation tree. - """ - loader = Loader(stream) - try: - return loader.get_single_node() - finally: - loader.dispose() - -def compose_all(stream, Loader=Loader): - """ - Parse all YAML documents in a stream - and produce corresponding representation trees. 
- """ - loader = Loader(stream) - try: - while loader.check_node(): - yield loader.get_node() - finally: - loader.dispose() - -def load(stream, Loader=None): - """ - Parse the first YAML document in a stream - and produce the corresponding Python object. - """ - if Loader is None: - load_warning('load') - Loader = FullLoader - - loader = Loader(stream) - try: - return loader.get_single_data() - finally: - loader.dispose() - -def load_all(stream, Loader=None): - """ - Parse all YAML documents in a stream - and produce corresponding Python objects. - """ - if Loader is None: - load_warning('load_all') - Loader = FullLoader - - loader = Loader(stream) - try: - while loader.check_data(): - yield loader.get_data() - finally: - loader.dispose() - -def full_load(stream): - """ - Parse the first YAML document in a stream - and produce the corresponding Python object. - - Resolve all tags except those known to be - unsafe on untrusted input. - """ - return load(stream, FullLoader) - -def full_load_all(stream): - """ - Parse all YAML documents in a stream - and produce corresponding Python objects. - - Resolve all tags except those known to be - unsafe on untrusted input. - """ - return load_all(stream, FullLoader) - -def safe_load(stream): - """ - Parse the first YAML document in a stream - and produce the corresponding Python object. - - Resolve only basic YAML tags. This is known - to be safe for untrusted input. - """ - return load(stream, SafeLoader) - -def safe_load_all(stream): - """ - Parse all YAML documents in a stream - and produce corresponding Python objects. - - Resolve only basic YAML tags. This is known - to be safe for untrusted input. - """ - return load_all(stream, SafeLoader) - -def unsafe_load(stream): - """ - Parse the first YAML document in a stream - and produce the corresponding Python object. - - Resolve all tags, even those known to be - unsafe on untrusted input. - """ - return load(stream, UnsafeLoader) - -def unsafe_load_all(stream): - """ - Parse all YAML documents in a stream - and produce corresponding Python objects. - - Resolve all tags, even those known to be - unsafe on untrusted input. - """ - return load_all(stream, UnsafeLoader) - -def emit(events, stream=None, Dumper=Dumper, - canonical=None, indent=None, width=None, - allow_unicode=None, line_break=None): - """ - Emit YAML parsing events into a stream. - If stream is None, return the produced string instead. - """ - getvalue = None - if stream is None: - stream = io.StringIO() - getvalue = stream.getvalue - dumper = Dumper(stream, canonical=canonical, indent=indent, width=width, - allow_unicode=allow_unicode, line_break=line_break) - try: - for event in events: - dumper.emit(event) - finally: - dumper.dispose() - if getvalue: - return getvalue() - -def serialize_all(nodes, stream=None, Dumper=Dumper, - canonical=None, indent=None, width=None, - allow_unicode=None, line_break=None, - encoding=None, explicit_start=None, explicit_end=None, - version=None, tags=None): - """ - Serialize a sequence of representation trees into a YAML stream. - If stream is None, return the produced string instead. 
- """ - getvalue = None - if stream is None: - if encoding is None: - stream = io.StringIO() - else: - stream = io.BytesIO() - getvalue = stream.getvalue - dumper = Dumper(stream, canonical=canonical, indent=indent, width=width, - allow_unicode=allow_unicode, line_break=line_break, - encoding=encoding, version=version, tags=tags, - explicit_start=explicit_start, explicit_end=explicit_end) - try: - dumper.open() - for node in nodes: - dumper.serialize(node) - dumper.close() - finally: - dumper.dispose() - if getvalue: - return getvalue() - -def serialize(node, stream=None, Dumper=Dumper, **kwds): - """ - Serialize a representation tree into a YAML stream. - If stream is None, return the produced string instead. - """ - return serialize_all([node], stream, Dumper=Dumper, **kwds) - -def dump_all(documents, stream=None, Dumper=Dumper, - default_style=None, default_flow_style=False, - canonical=None, indent=None, width=None, - allow_unicode=None, line_break=None, - encoding=None, explicit_start=None, explicit_end=None, - version=None, tags=None, sort_keys=True): - """ - Serialize a sequence of Python objects into a YAML stream. - If stream is None, return the produced string instead. - """ - getvalue = None - if stream is None: - if encoding is None: - stream = io.StringIO() - else: - stream = io.BytesIO() - getvalue = stream.getvalue - dumper = Dumper(stream, default_style=default_style, - default_flow_style=default_flow_style, - canonical=canonical, indent=indent, width=width, - allow_unicode=allow_unicode, line_break=line_break, - encoding=encoding, version=version, tags=tags, - explicit_start=explicit_start, explicit_end=explicit_end, sort_keys=sort_keys) - try: - dumper.open() - for data in documents: - dumper.represent(data) - dumper.close() - finally: - dumper.dispose() - if getvalue: - return getvalue() - -def dump(data, stream=None, Dumper=Dumper, **kwds): - """ - Serialize a Python object into a YAML stream. - If stream is None, return the produced string instead. - """ - return dump_all([data], stream, Dumper=Dumper, **kwds) - -def safe_dump_all(documents, stream=None, **kwds): - """ - Serialize a sequence of Python objects into a YAML stream. - Produce only basic YAML tags. - If stream is None, return the produced string instead. - """ - return dump_all(documents, stream, Dumper=SafeDumper, **kwds) - -def safe_dump(data, stream=None, **kwds): - """ - Serialize a Python object into a YAML stream. - Produce only basic YAML tags. - If stream is None, return the produced string instead. - """ - return dump_all([data], stream, Dumper=SafeDumper, **kwds) - -def add_implicit_resolver(tag, regexp, first=None, - Loader=Loader, Dumper=Dumper): - """ - Add an implicit scalar detector. - If an implicit scalar value matches the given regexp, - the corresponding tag is assigned to the scalar. - first is a sequence of possible initial characters or None. - """ - Loader.add_implicit_resolver(tag, regexp, first) - Dumper.add_implicit_resolver(tag, regexp, first) - -def add_path_resolver(tag, path, kind=None, Loader=Loader, Dumper=Dumper): - """ - Add a path based resolver for the given tag. - A path is a list of keys that forms a path - to a node in the representation tree. - Keys can be string values, integers, or None. - """ - Loader.add_path_resolver(tag, path, kind) - Dumper.add_path_resolver(tag, path, kind) - -def add_constructor(tag, constructor, Loader=Loader): - """ - Add a constructor for the given tag. 
- Constructor is a function that accepts a Loader instance - and a node object and produces the corresponding Python object. - """ - Loader.add_constructor(tag, constructor) - -def add_multi_constructor(tag_prefix, multi_constructor, Loader=Loader): - """ - Add a multi-constructor for the given tag prefix. - Multi-constructor is called for a node if its tag starts with tag_prefix. - Multi-constructor accepts a Loader instance, a tag suffix, - and a node object and produces the corresponding Python object. - """ - Loader.add_multi_constructor(tag_prefix, multi_constructor) - -def add_representer(data_type, representer, Dumper=Dumper): - """ - Add a representer for the given type. - Representer is a function accepting a Dumper instance - and an instance of the given data type - and producing the corresponding representation node. - """ - Dumper.add_representer(data_type, representer) - -def add_multi_representer(data_type, multi_representer, Dumper=Dumper): - """ - Add a representer for the given type. - Multi-representer is a function accepting a Dumper instance - and an instance of the given data type or subtype - and producing the corresponding representation node. - """ - Dumper.add_multi_representer(data_type, multi_representer) - -class YAMLObjectMetaclass(type): - """ - The metaclass for YAMLObject. - """ - def __init__(cls, name, bases, kwds): - super(YAMLObjectMetaclass, cls).__init__(name, bases, kwds) - if 'yaml_tag' in kwds and kwds['yaml_tag'] is not None: - cls.yaml_loader.add_constructor(cls.yaml_tag, cls.from_yaml) - cls.yaml_dumper.add_representer(cls, cls.to_yaml) - -class YAMLObject(metaclass=YAMLObjectMetaclass): - """ - An object that can dump itself to a YAML stream - and load itself from a YAML stream. - """ - - __slots__ = () # no direct instantiation, so allow immutable subclasses - - yaml_loader = Loader - yaml_dumper = Dumper - - yaml_tag = None - yaml_flow_style = None - - @classmethod - def from_yaml(cls, loader, node): - """ - Convert a representation node to a Python object. - """ - return loader.construct_yaml_object(node, cls) - - @classmethod - def to_yaml(cls, dumper, data): - """ - Convert a Python object to a representation node. - """ - return dumper.represent_yaml_object(cls.yaml_tag, data, cls, - flow_style=cls.yaml_flow_style) - diff --git a/venv/lib/python3.6/site-packages/yaml/composer.py b/venv/lib/python3.6/site-packages/yaml/composer.py deleted file mode 100644 index 6d15cb4..0000000 --- a/venv/lib/python3.6/site-packages/yaml/composer.py +++ /dev/null @@ -1,139 +0,0 @@ - -__all__ = ['Composer', 'ComposerError'] - -from .error import MarkedYAMLError -from .events import * -from .nodes import * - -class ComposerError(MarkedYAMLError): - pass - -class Composer: - - def __init__(self): - self.anchors = {} - - def check_node(self): - # Drop the STREAM-START event. - if self.check_event(StreamStartEvent): - self.get_event() - - # If there are more documents available? - return not self.check_event(StreamEndEvent) - - def get_node(self): - # Get the root node of the next document. - if not self.check_event(StreamEndEvent): - return self.compose_document() - - def get_single_node(self): - # Drop the STREAM-START event. - self.get_event() - - # Compose a document if the stream is not empty. - document = None - if not self.check_event(StreamEndEvent): - document = self.compose_document() - - # Ensure that the stream contains no more documents. 
- if not self.check_event(StreamEndEvent): - event = self.get_event() - raise ComposerError("expected a single document in the stream", - document.start_mark, "but found another document", - event.start_mark) - - # Drop the STREAM-END event. - self.get_event() - - return document - - def compose_document(self): - # Drop the DOCUMENT-START event. - self.get_event() - - # Compose the root node. - node = self.compose_node(None, None) - - # Drop the DOCUMENT-END event. - self.get_event() - - self.anchors = {} - return node - - def compose_node(self, parent, index): - if self.check_event(AliasEvent): - event = self.get_event() - anchor = event.anchor - if anchor not in self.anchors: - raise ComposerError(None, None, "found undefined alias %r" - % anchor, event.start_mark) - return self.anchors[anchor] - event = self.peek_event() - anchor = event.anchor - if anchor is not None: - if anchor in self.anchors: - raise ComposerError("found duplicate anchor %r; first occurrence" - % anchor, self.anchors[anchor].start_mark, - "second occurrence", event.start_mark) - self.descend_resolver(parent, index) - if self.check_event(ScalarEvent): - node = self.compose_scalar_node(anchor) - elif self.check_event(SequenceStartEvent): - node = self.compose_sequence_node(anchor) - elif self.check_event(MappingStartEvent): - node = self.compose_mapping_node(anchor) - self.ascend_resolver() - return node - - def compose_scalar_node(self, anchor): - event = self.get_event() - tag = event.tag - if tag is None or tag == '!': - tag = self.resolve(ScalarNode, event.value, event.implicit) - node = ScalarNode(tag, event.value, - event.start_mark, event.end_mark, style=event.style) - if anchor is not None: - self.anchors[anchor] = node - return node - - def compose_sequence_node(self, anchor): - start_event = self.get_event() - tag = start_event.tag - if tag is None or tag == '!': - tag = self.resolve(SequenceNode, None, start_event.implicit) - node = SequenceNode(tag, [], - start_event.start_mark, None, - flow_style=start_event.flow_style) - if anchor is not None: - self.anchors[anchor] = node - index = 0 - while not self.check_event(SequenceEndEvent): - node.value.append(self.compose_node(node, index)) - index += 1 - end_event = self.get_event() - node.end_mark = end_event.end_mark - return node - - def compose_mapping_node(self, anchor): - start_event = self.get_event() - tag = start_event.tag - if tag is None or tag == '!': - tag = self.resolve(MappingNode, None, start_event.implicit) - node = MappingNode(tag, [], - start_event.start_mark, None, - flow_style=start_event.flow_style) - if anchor is not None: - self.anchors[anchor] = node - while not self.check_event(MappingEndEvent): - #key_event = self.peek_event() - item_key = self.compose_node(node, None) - #if item_key in node.value: - # raise ComposerError("while composing a mapping", start_event.start_mark, - # "found duplicate key", key_event.start_mark) - item_value = self.compose_node(node, item_key) - #node.value[item_key] = item_value - node.value.append((item_key, item_value)) - end_event = self.get_event() - node.end_mark = end_event.end_mark - return node - diff --git a/venv/lib/python3.6/site-packages/yaml/constructor.py b/venv/lib/python3.6/site-packages/yaml/constructor.py deleted file mode 100644 index 34fc1ae..0000000 --- a/venv/lib/python3.6/site-packages/yaml/constructor.py +++ /dev/null @@ -1,720 +0,0 @@ - -__all__ = [ - 'BaseConstructor', - 'SafeConstructor', - 'FullConstructor', - 'UnsafeConstructor', - 'Constructor', - 'ConstructorError' -] - -from 
.error import * -from .nodes import * - -import collections.abc, datetime, base64, binascii, re, sys, types - -class ConstructorError(MarkedYAMLError): - pass - -class BaseConstructor: - - yaml_constructors = {} - yaml_multi_constructors = {} - - def __init__(self): - self.constructed_objects = {} - self.recursive_objects = {} - self.state_generators = [] - self.deep_construct = False - - def check_data(self): - # If there are more documents available? - return self.check_node() - - def get_data(self): - # Construct and return the next document. - if self.check_node(): - return self.construct_document(self.get_node()) - - def get_single_data(self): - # Ensure that the stream contains a single document and construct it. - node = self.get_single_node() - if node is not None: - return self.construct_document(node) - return None - - def construct_document(self, node): - data = self.construct_object(node) - while self.state_generators: - state_generators = self.state_generators - self.state_generators = [] - for generator in state_generators: - for dummy in generator: - pass - self.constructed_objects = {} - self.recursive_objects = {} - self.deep_construct = False - return data - - def construct_object(self, node, deep=False): - if node in self.constructed_objects: - return self.constructed_objects[node] - if deep: - old_deep = self.deep_construct - self.deep_construct = True - if node in self.recursive_objects: - raise ConstructorError(None, None, - "found unconstructable recursive node", node.start_mark) - self.recursive_objects[node] = None - constructor = None - tag_suffix = None - if node.tag in self.yaml_constructors: - constructor = self.yaml_constructors[node.tag] - else: - for tag_prefix in self.yaml_multi_constructors: - if node.tag.startswith(tag_prefix): - tag_suffix = node.tag[len(tag_prefix):] - constructor = self.yaml_multi_constructors[tag_prefix] - break - else: - if None in self.yaml_multi_constructors: - tag_suffix = node.tag - constructor = self.yaml_multi_constructors[None] - elif None in self.yaml_constructors: - constructor = self.yaml_constructors[None] - elif isinstance(node, ScalarNode): - constructor = self.__class__.construct_scalar - elif isinstance(node, SequenceNode): - constructor = self.__class__.construct_sequence - elif isinstance(node, MappingNode): - constructor = self.__class__.construct_mapping - if tag_suffix is None: - data = constructor(self, node) - else: - data = constructor(self, tag_suffix, node) - if isinstance(data, types.GeneratorType): - generator = data - data = next(generator) - if self.deep_construct: - for dummy in generator: - pass - else: - self.state_generators.append(generator) - self.constructed_objects[node] = data - del self.recursive_objects[node] - if deep: - self.deep_construct = old_deep - return data - - def construct_scalar(self, node): - if not isinstance(node, ScalarNode): - raise ConstructorError(None, None, - "expected a scalar node, but found %s" % node.id, - node.start_mark) - return node.value - - def construct_sequence(self, node, deep=False): - if not isinstance(node, SequenceNode): - raise ConstructorError(None, None, - "expected a sequence node, but found %s" % node.id, - node.start_mark) - return [self.construct_object(child, deep=deep) - for child in node.value] - - def construct_mapping(self, node, deep=False): - if not isinstance(node, MappingNode): - raise ConstructorError(None, None, - "expected a mapping node, but found %s" % node.id, - node.start_mark) - mapping = {} - for key_node, value_node in node.value: 
- key = self.construct_object(key_node, deep=deep) - if not isinstance(key, collections.abc.Hashable): - raise ConstructorError("while constructing a mapping", node.start_mark, - "found unhashable key", key_node.start_mark) - value = self.construct_object(value_node, deep=deep) - mapping[key] = value - return mapping - - def construct_pairs(self, node, deep=False): - if not isinstance(node, MappingNode): - raise ConstructorError(None, None, - "expected a mapping node, but found %s" % node.id, - node.start_mark) - pairs = [] - for key_node, value_node in node.value: - key = self.construct_object(key_node, deep=deep) - value = self.construct_object(value_node, deep=deep) - pairs.append((key, value)) - return pairs - - @classmethod - def add_constructor(cls, tag, constructor): - if not 'yaml_constructors' in cls.__dict__: - cls.yaml_constructors = cls.yaml_constructors.copy() - cls.yaml_constructors[tag] = constructor - - @classmethod - def add_multi_constructor(cls, tag_prefix, multi_constructor): - if not 'yaml_multi_constructors' in cls.__dict__: - cls.yaml_multi_constructors = cls.yaml_multi_constructors.copy() - cls.yaml_multi_constructors[tag_prefix] = multi_constructor - -class SafeConstructor(BaseConstructor): - - def construct_scalar(self, node): - if isinstance(node, MappingNode): - for key_node, value_node in node.value: - if key_node.tag == 'tag:yaml.org,2002:value': - return self.construct_scalar(value_node) - return super().construct_scalar(node) - - def flatten_mapping(self, node): - merge = [] - index = 0 - while index < len(node.value): - key_node, value_node = node.value[index] - if key_node.tag == 'tag:yaml.org,2002:merge': - del node.value[index] - if isinstance(value_node, MappingNode): - self.flatten_mapping(value_node) - merge.extend(value_node.value) - elif isinstance(value_node, SequenceNode): - submerge = [] - for subnode in value_node.value: - if not isinstance(subnode, MappingNode): - raise ConstructorError("while constructing a mapping", - node.start_mark, - "expected a mapping for merging, but found %s" - % subnode.id, subnode.start_mark) - self.flatten_mapping(subnode) - submerge.append(subnode.value) - submerge.reverse() - for value in submerge: - merge.extend(value) - else: - raise ConstructorError("while constructing a mapping", node.start_mark, - "expected a mapping or list of mappings for merging, but found %s" - % value_node.id, value_node.start_mark) - elif key_node.tag == 'tag:yaml.org,2002:value': - key_node.tag = 'tag:yaml.org,2002:str' - index += 1 - else: - index += 1 - if merge: - node.value = merge + node.value - - def construct_mapping(self, node, deep=False): - if isinstance(node, MappingNode): - self.flatten_mapping(node) - return super().construct_mapping(node, deep=deep) - - def construct_yaml_null(self, node): - self.construct_scalar(node) - return None - - bool_values = { - 'yes': True, - 'no': False, - 'true': True, - 'false': False, - 'on': True, - 'off': False, - } - - def construct_yaml_bool(self, node): - value = self.construct_scalar(node) - return self.bool_values[value.lower()] - - def construct_yaml_int(self, node): - value = self.construct_scalar(node) - value = value.replace('_', '') - sign = +1 - if value[0] == '-': - sign = -1 - if value[0] in '+-': - value = value[1:] - if value == '0': - return 0 - elif value.startswith('0b'): - return sign*int(value[2:], 2) - elif value.startswith('0x'): - return sign*int(value[2:], 16) - elif value[0] == '0': - return sign*int(value, 8) - elif ':' in value: - digits = [int(part) for part in 
value.split(':')] - digits.reverse() - base = 1 - value = 0 - for digit in digits: - value += digit*base - base *= 60 - return sign*value - else: - return sign*int(value) - - inf_value = 1e300 - while inf_value != inf_value*inf_value: - inf_value *= inf_value - nan_value = -inf_value/inf_value # Trying to make a quiet NaN (like C99). - - def construct_yaml_float(self, node): - value = self.construct_scalar(node) - value = value.replace('_', '').lower() - sign = +1 - if value[0] == '-': - sign = -1 - if value[0] in '+-': - value = value[1:] - if value == '.inf': - return sign*self.inf_value - elif value == '.nan': - return self.nan_value - elif ':' in value: - digits = [float(part) for part in value.split(':')] - digits.reverse() - base = 1 - value = 0.0 - for digit in digits: - value += digit*base - base *= 60 - return sign*value - else: - return sign*float(value) - - def construct_yaml_binary(self, node): - try: - value = self.construct_scalar(node).encode('ascii') - except UnicodeEncodeError as exc: - raise ConstructorError(None, None, - "failed to convert base64 data into ascii: %s" % exc, - node.start_mark) - try: - if hasattr(base64, 'decodebytes'): - return base64.decodebytes(value) - else: - return base64.decodestring(value) - except binascii.Error as exc: - raise ConstructorError(None, None, - "failed to decode base64 data: %s" % exc, node.start_mark) - - timestamp_regexp = re.compile( - r'''^(?P[0-9][0-9][0-9][0-9]) - -(?P[0-9][0-9]?) - -(?P[0-9][0-9]?) - (?:(?:[Tt]|[ \t]+) - (?P[0-9][0-9]?) - :(?P[0-9][0-9]) - :(?P[0-9][0-9]) - (?:\.(?P[0-9]*))? - (?:[ \t]*(?PZ|(?P[-+])(?P[0-9][0-9]?) - (?::(?P[0-9][0-9]))?))?)?$''', re.X) - - def construct_yaml_timestamp(self, node): - value = self.construct_scalar(node) - match = self.timestamp_regexp.match(node.value) - values = match.groupdict() - year = int(values['year']) - month = int(values['month']) - day = int(values['day']) - if not values['hour']: - return datetime.date(year, month, day) - hour = int(values['hour']) - minute = int(values['minute']) - second = int(values['second']) - fraction = 0 - if values['fraction']: - fraction = values['fraction'][:6] - while len(fraction) < 6: - fraction += '0' - fraction = int(fraction) - delta = None - if values['tz_sign']: - tz_hour = int(values['tz_hour']) - tz_minute = int(values['tz_minute'] or 0) - delta = datetime.timedelta(hours=tz_hour, minutes=tz_minute) - if values['tz_sign'] == '-': - delta = -delta - data = datetime.datetime(year, month, day, hour, minute, second, fraction) - if delta: - data -= delta - return data - - def construct_yaml_omap(self, node): - # Note: we do not check for duplicate keys, because it's too - # CPU-expensive. 
- omap = [] - yield omap - if not isinstance(node, SequenceNode): - raise ConstructorError("while constructing an ordered map", node.start_mark, - "expected a sequence, but found %s" % node.id, node.start_mark) - for subnode in node.value: - if not isinstance(subnode, MappingNode): - raise ConstructorError("while constructing an ordered map", node.start_mark, - "expected a mapping of length 1, but found %s" % subnode.id, - subnode.start_mark) - if len(subnode.value) != 1: - raise ConstructorError("while constructing an ordered map", node.start_mark, - "expected a single mapping item, but found %d items" % len(subnode.value), - subnode.start_mark) - key_node, value_node = subnode.value[0] - key = self.construct_object(key_node) - value = self.construct_object(value_node) - omap.append((key, value)) - - def construct_yaml_pairs(self, node): - # Note: the same code as `construct_yaml_omap`. - pairs = [] - yield pairs - if not isinstance(node, SequenceNode): - raise ConstructorError("while constructing pairs", node.start_mark, - "expected a sequence, but found %s" % node.id, node.start_mark) - for subnode in node.value: - if not isinstance(subnode, MappingNode): - raise ConstructorError("while constructing pairs", node.start_mark, - "expected a mapping of length 1, but found %s" % subnode.id, - subnode.start_mark) - if len(subnode.value) != 1: - raise ConstructorError("while constructing pairs", node.start_mark, - "expected a single mapping item, but found %d items" % len(subnode.value), - subnode.start_mark) - key_node, value_node = subnode.value[0] - key = self.construct_object(key_node) - value = self.construct_object(value_node) - pairs.append((key, value)) - - def construct_yaml_set(self, node): - data = set() - yield data - value = self.construct_mapping(node) - data.update(value) - - def construct_yaml_str(self, node): - return self.construct_scalar(node) - - def construct_yaml_seq(self, node): - data = [] - yield data - data.extend(self.construct_sequence(node)) - - def construct_yaml_map(self, node): - data = {} - yield data - value = self.construct_mapping(node) - data.update(value) - - def construct_yaml_object(self, node, cls): - data = cls.__new__(cls) - yield data - if hasattr(data, '__setstate__'): - state = self.construct_mapping(node, deep=True) - data.__setstate__(state) - else: - state = self.construct_mapping(node) - data.__dict__.update(state) - - def construct_undefined(self, node): - raise ConstructorError(None, None, - "could not determine a constructor for the tag %r" % node.tag, - node.start_mark) - -SafeConstructor.add_constructor( - 'tag:yaml.org,2002:null', - SafeConstructor.construct_yaml_null) - -SafeConstructor.add_constructor( - 'tag:yaml.org,2002:bool', - SafeConstructor.construct_yaml_bool) - -SafeConstructor.add_constructor( - 'tag:yaml.org,2002:int', - SafeConstructor.construct_yaml_int) - -SafeConstructor.add_constructor( - 'tag:yaml.org,2002:float', - SafeConstructor.construct_yaml_float) - -SafeConstructor.add_constructor( - 'tag:yaml.org,2002:binary', - SafeConstructor.construct_yaml_binary) - -SafeConstructor.add_constructor( - 'tag:yaml.org,2002:timestamp', - SafeConstructor.construct_yaml_timestamp) - -SafeConstructor.add_constructor( - 'tag:yaml.org,2002:omap', - SafeConstructor.construct_yaml_omap) - -SafeConstructor.add_constructor( - 'tag:yaml.org,2002:pairs', - SafeConstructor.construct_yaml_pairs) - -SafeConstructor.add_constructor( - 'tag:yaml.org,2002:set', - SafeConstructor.construct_yaml_set) - -SafeConstructor.add_constructor( - 
'tag:yaml.org,2002:str', - SafeConstructor.construct_yaml_str) - -SafeConstructor.add_constructor( - 'tag:yaml.org,2002:seq', - SafeConstructor.construct_yaml_seq) - -SafeConstructor.add_constructor( - 'tag:yaml.org,2002:map', - SafeConstructor.construct_yaml_map) - -SafeConstructor.add_constructor(None, - SafeConstructor.construct_undefined) - -class FullConstructor(SafeConstructor): - - def construct_python_str(self, node): - return self.construct_scalar(node) - - def construct_python_unicode(self, node): - return self.construct_scalar(node) - - def construct_python_bytes(self, node): - try: - value = self.construct_scalar(node).encode('ascii') - except UnicodeEncodeError as exc: - raise ConstructorError(None, None, - "failed to convert base64 data into ascii: %s" % exc, - node.start_mark) - try: - if hasattr(base64, 'decodebytes'): - return base64.decodebytes(value) - else: - return base64.decodestring(value) - except binascii.Error as exc: - raise ConstructorError(None, None, - "failed to decode base64 data: %s" % exc, node.start_mark) - - def construct_python_long(self, node): - return self.construct_yaml_int(node) - - def construct_python_complex(self, node): - return complex(self.construct_scalar(node)) - - def construct_python_tuple(self, node): - return tuple(self.construct_sequence(node)) - - def find_python_module(self, name, mark, unsafe=False): - if not name: - raise ConstructorError("while constructing a Python module", mark, - "expected non-empty name appended to the tag", mark) - if unsafe: - try: - __import__(name) - except ImportError as exc: - raise ConstructorError("while constructing a Python module", mark, - "cannot find module %r (%s)" % (name, exc), mark) - if not name in sys.modules: - raise ConstructorError("while constructing a Python module", mark, - "module %r is not imported" % name, mark) - return sys.modules[name] - - def find_python_name(self, name, mark, unsafe=False): - if not name: - raise ConstructorError("while constructing a Python object", mark, - "expected non-empty name appended to the tag", mark) - if '.' 
in name: - module_name, object_name = name.rsplit('.', 1) - else: - module_name = 'builtins' - object_name = name - if unsafe: - try: - __import__(module_name) - except ImportError as exc: - raise ConstructorError("while constructing a Python object", mark, - "cannot find module %r (%s)" % (module_name, exc), mark) - if not module_name in sys.modules: - raise ConstructorError("while constructing a Python object", mark, - "module %r is not imported" % module_name, mark) - module = sys.modules[module_name] - if not hasattr(module, object_name): - raise ConstructorError("while constructing a Python object", mark, - "cannot find %r in the module %r" - % (object_name, module.__name__), mark) - return getattr(module, object_name) - - def construct_python_name(self, suffix, node): - value = self.construct_scalar(node) - if value: - raise ConstructorError("while constructing a Python name", node.start_mark, - "expected the empty value, but found %r" % value, node.start_mark) - return self.find_python_name(suffix, node.start_mark) - - def construct_python_module(self, suffix, node): - value = self.construct_scalar(node) - if value: - raise ConstructorError("while constructing a Python module", node.start_mark, - "expected the empty value, but found %r" % value, node.start_mark) - return self.find_python_module(suffix, node.start_mark) - - def make_python_instance(self, suffix, node, - args=None, kwds=None, newobj=False, unsafe=False): - if not args: - args = [] - if not kwds: - kwds = {} - cls = self.find_python_name(suffix, node.start_mark) - if not (unsafe or isinstance(cls, type)): - raise ConstructorError("while constructing a Python instance", node.start_mark, - "expected a class, but found %r" % type(cls), - node.start_mark) - if newobj and isinstance(cls, type): - return cls.__new__(cls, *args, **kwds) - else: - return cls(*args, **kwds) - - def set_python_instance_state(self, instance, state): - if hasattr(instance, '__setstate__'): - instance.__setstate__(state) - else: - slotstate = {} - if isinstance(state, tuple) and len(state) == 2: - state, slotstate = state - if hasattr(instance, '__dict__'): - instance.__dict__.update(state) - elif state: - slotstate.update(state) - for key, value in slotstate.items(): - setattr(object, key, value) - - def construct_python_object(self, suffix, node): - # Format: - # !!python/object:module.name { ... state ... } - instance = self.make_python_instance(suffix, node, newobj=True) - yield instance - deep = hasattr(instance, '__setstate__') - state = self.construct_mapping(node, deep=deep) - self.set_python_instance_state(instance, state) - - def construct_python_object_apply(self, suffix, node, newobj=False): - # Format: - # !!python/object/apply # (or !!python/object/new) - # args: [ ... arguments ... ] - # kwds: { ... keywords ... } - # state: ... state ... - # listitems: [ ... listitems ... ] - # dictitems: { ... dictitems ... } - # or short format: - # !!python/object/apply [ ... arguments ... ] - # The difference between !!python/object/apply and !!python/object/new - # is how an object is created, check make_python_instance for details. 
- if isinstance(node, SequenceNode): - args = self.construct_sequence(node, deep=True) - kwds = {} - state = {} - listitems = [] - dictitems = {} - else: - value = self.construct_mapping(node, deep=True) - args = value.get('args', []) - kwds = value.get('kwds', {}) - state = value.get('state', {}) - listitems = value.get('listitems', []) - dictitems = value.get('dictitems', {}) - instance = self.make_python_instance(suffix, node, args, kwds, newobj) - if state: - self.set_python_instance_state(instance, state) - if listitems: - instance.extend(listitems) - if dictitems: - for key in dictitems: - instance[key] = dictitems[key] - return instance - - def construct_python_object_new(self, suffix, node): - return self.construct_python_object_apply(suffix, node, newobj=True) - -FullConstructor.add_constructor( - 'tag:yaml.org,2002:python/none', - FullConstructor.construct_yaml_null) - -FullConstructor.add_constructor( - 'tag:yaml.org,2002:python/bool', - FullConstructor.construct_yaml_bool) - -FullConstructor.add_constructor( - 'tag:yaml.org,2002:python/str', - FullConstructor.construct_python_str) - -FullConstructor.add_constructor( - 'tag:yaml.org,2002:python/unicode', - FullConstructor.construct_python_unicode) - -FullConstructor.add_constructor( - 'tag:yaml.org,2002:python/bytes', - FullConstructor.construct_python_bytes) - -FullConstructor.add_constructor( - 'tag:yaml.org,2002:python/int', - FullConstructor.construct_yaml_int) - -FullConstructor.add_constructor( - 'tag:yaml.org,2002:python/long', - FullConstructor.construct_python_long) - -FullConstructor.add_constructor( - 'tag:yaml.org,2002:python/float', - FullConstructor.construct_yaml_float) - -FullConstructor.add_constructor( - 'tag:yaml.org,2002:python/complex', - FullConstructor.construct_python_complex) - -FullConstructor.add_constructor( - 'tag:yaml.org,2002:python/list', - FullConstructor.construct_yaml_seq) - -FullConstructor.add_constructor( - 'tag:yaml.org,2002:python/tuple', - FullConstructor.construct_python_tuple) - -FullConstructor.add_constructor( - 'tag:yaml.org,2002:python/dict', - FullConstructor.construct_yaml_map) - -FullConstructor.add_multi_constructor( - 'tag:yaml.org,2002:python/name:', - FullConstructor.construct_python_name) - -FullConstructor.add_multi_constructor( - 'tag:yaml.org,2002:python/module:', - FullConstructor.construct_python_module) - -FullConstructor.add_multi_constructor( - 'tag:yaml.org,2002:python/object:', - FullConstructor.construct_python_object) - -FullConstructor.add_multi_constructor( - 'tag:yaml.org,2002:python/object/apply:', - FullConstructor.construct_python_object_apply) - -FullConstructor.add_multi_constructor( - 'tag:yaml.org,2002:python/object/new:', - FullConstructor.construct_python_object_new) - -class UnsafeConstructor(FullConstructor): - - def find_python_module(self, name, mark): - return super(UnsafeConstructor, self).find_python_module(name, mark, unsafe=True) - - def find_python_name(self, name, mark): - return super(UnsafeConstructor, self).find_python_name(name, mark, unsafe=True) - - def make_python_instance(self, suffix, node, args=None, kwds=None, newobj=False): - return super(UnsafeConstructor, self).make_python_instance( - suffix, node, args, kwds, newobj, unsafe=True) - -# Constructor is same as UnsafeConstructor. Need to leave this in place in case -# people have extended it directly. 
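
The comment above describes both spellings of the apply/new tags. As a hedged illustration, assuming this bundled PyYAML 5.1.x (where FullConstructor still resolves these tags for already-imported classes; later releases restrict them further):

    import yaml

    # Short form: positional arguments only.
    print(yaml.full_load("!!python/object/apply:datetime.date [2019, 8, 19]"))
    # 2019-08-19

    # Long form: explicit args/kwds keys, handled by construct_python_object_apply.
    print(yaml.full_load(
        "!!python/object/apply:datetime.timedelta\n"
        "kwds: {hours: 1, minutes: 30}\n"
    ))
    # 1:30:00

    # safe_load refuses the same documents: SafeConstructor registers no python/* tags,
    # so the lookup falls through to construct_undefined and raises a ConstructorError.
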
-class Constructor(UnsafeConstructor): - pass diff --git a/venv/lib/python3.6/site-packages/yaml/cyaml.py b/venv/lib/python3.6/site-packages/yaml/cyaml.py deleted file mode 100644 index 1e606c7..0000000 --- a/venv/lib/python3.6/site-packages/yaml/cyaml.py +++ /dev/null @@ -1,101 +0,0 @@ - -__all__ = [ - 'CBaseLoader', 'CSafeLoader', 'CFullLoader', 'CUnsafeLoader', 'CLoader', - 'CBaseDumper', 'CSafeDumper', 'CDumper' -] - -from _yaml import CParser, CEmitter - -from .constructor import * - -from .serializer import * -from .representer import * - -from .resolver import * - -class CBaseLoader(CParser, BaseConstructor, BaseResolver): - - def __init__(self, stream): - CParser.__init__(self, stream) - BaseConstructor.__init__(self) - BaseResolver.__init__(self) - -class CSafeLoader(CParser, SafeConstructor, Resolver): - - def __init__(self, stream): - CParser.__init__(self, stream) - SafeConstructor.__init__(self) - Resolver.__init__(self) - -class CFullLoader(CParser, FullConstructor, Resolver): - - def __init__(self, stream): - CParser.__init__(self, stream) - FullConstructor.__init__(self) - Resolver.__init__(self) - -class CUnsafeLoader(CParser, UnsafeConstructor, Resolver): - - def __init__(self, stream): - CParser.__init__(self, stream) - UnsafeConstructor.__init__(self) - Resolver.__init__(self) - -class CLoader(CParser, Constructor, Resolver): - - def __init__(self, stream): - CParser.__init__(self, stream) - Constructor.__init__(self) - Resolver.__init__(self) - -class CBaseDumper(CEmitter, BaseRepresenter, BaseResolver): - - def __init__(self, stream, - default_style=None, default_flow_style=False, - canonical=None, indent=None, width=None, - allow_unicode=None, line_break=None, - encoding=None, explicit_start=None, explicit_end=None, - version=None, tags=None, sort_keys=True): - CEmitter.__init__(self, stream, canonical=canonical, - indent=indent, width=width, encoding=encoding, - allow_unicode=allow_unicode, line_break=line_break, - explicit_start=explicit_start, explicit_end=explicit_end, - version=version, tags=tags) - Representer.__init__(self, default_style=default_style, - default_flow_style=default_flow_style, sort_keys=sort_keys) - Resolver.__init__(self) - -class CSafeDumper(CEmitter, SafeRepresenter, Resolver): - - def __init__(self, stream, - default_style=None, default_flow_style=False, - canonical=None, indent=None, width=None, - allow_unicode=None, line_break=None, - encoding=None, explicit_start=None, explicit_end=None, - version=None, tags=None, sort_keys=True): - CEmitter.__init__(self, stream, canonical=canonical, - indent=indent, width=width, encoding=encoding, - allow_unicode=allow_unicode, line_break=line_break, - explicit_start=explicit_start, explicit_end=explicit_end, - version=version, tags=tags) - SafeRepresenter.__init__(self, default_style=default_style, - default_flow_style=default_flow_style, sort_keys=sort_keys) - Resolver.__init__(self) - -class CDumper(CEmitter, Serializer, Representer, Resolver): - - def __init__(self, stream, - default_style=None, default_flow_style=False, - canonical=None, indent=None, width=None, - allow_unicode=None, line_break=None, - encoding=None, explicit_start=None, explicit_end=None, - version=None, tags=None, sort_keys=True): - CEmitter.__init__(self, stream, canonical=canonical, - indent=indent, width=width, encoding=encoding, - allow_unicode=allow_unicode, line_break=line_break, - explicit_start=explicit_start, explicit_end=explicit_end, - version=version, tags=tags) - Representer.__init__(self, 
default_style=default_style, - default_flow_style=default_flow_style, sort_keys=sort_keys) - Resolver.__init__(self) - diff --git a/venv/lib/python3.6/site-packages/yaml/dumper.py b/venv/lib/python3.6/site-packages/yaml/dumper.py deleted file mode 100644 index 6aadba5..0000000 --- a/venv/lib/python3.6/site-packages/yaml/dumper.py +++ /dev/null @@ -1,62 +0,0 @@ - -__all__ = ['BaseDumper', 'SafeDumper', 'Dumper'] - -from .emitter import * -from .serializer import * -from .representer import * -from .resolver import * - -class BaseDumper(Emitter, Serializer, BaseRepresenter, BaseResolver): - - def __init__(self, stream, - default_style=None, default_flow_style=False, - canonical=None, indent=None, width=None, - allow_unicode=None, line_break=None, - encoding=None, explicit_start=None, explicit_end=None, - version=None, tags=None, sort_keys=True): - Emitter.__init__(self, stream, canonical=canonical, - indent=indent, width=width, - allow_unicode=allow_unicode, line_break=line_break) - Serializer.__init__(self, encoding=encoding, - explicit_start=explicit_start, explicit_end=explicit_end, - version=version, tags=tags) - Representer.__init__(self, default_style=default_style, - default_flow_style=default_flow_style, sort_keys=sort_keys) - Resolver.__init__(self) - -class SafeDumper(Emitter, Serializer, SafeRepresenter, Resolver): - - def __init__(self, stream, - default_style=None, default_flow_style=False, - canonical=None, indent=None, width=None, - allow_unicode=None, line_break=None, - encoding=None, explicit_start=None, explicit_end=None, - version=None, tags=None, sort_keys=True): - Emitter.__init__(self, stream, canonical=canonical, - indent=indent, width=width, - allow_unicode=allow_unicode, line_break=line_break) - Serializer.__init__(self, encoding=encoding, - explicit_start=explicit_start, explicit_end=explicit_end, - version=version, tags=tags) - SafeRepresenter.__init__(self, default_style=default_style, - default_flow_style=default_flow_style, sort_keys=sort_keys) - Resolver.__init__(self) - -class Dumper(Emitter, Serializer, Representer, Resolver): - - def __init__(self, stream, - default_style=None, default_flow_style=False, - canonical=None, indent=None, width=None, - allow_unicode=None, line_break=None, - encoding=None, explicit_start=None, explicit_end=None, - version=None, tags=None, sort_keys=True): - Emitter.__init__(self, stream, canonical=canonical, - indent=indent, width=width, - allow_unicode=allow_unicode, line_break=line_break) - Serializer.__init__(self, encoding=encoding, - explicit_start=explicit_start, explicit_end=explicit_end, - version=version, tags=tags) - Representer.__init__(self, default_style=default_style, - default_flow_style=default_flow_style, sort_keys=sort_keys) - Resolver.__init__(self) - diff --git a/venv/lib/python3.6/site-packages/yaml/emitter.py b/venv/lib/python3.6/site-packages/yaml/emitter.py deleted file mode 100644 index a664d01..0000000 --- a/venv/lib/python3.6/site-packages/yaml/emitter.py +++ /dev/null @@ -1,1137 +0,0 @@ - -# Emitter expects events obeying the following grammar: -# stream ::= STREAM-START document* STREAM-END -# document ::= DOCUMENT-START node DOCUMENT-END -# node ::= SCALAR | sequence | mapping -# sequence ::= SEQUENCE-START node* SEQUENCE-END -# mapping ::= MAPPING-START (node node)* MAPPING-END - -__all__ = ['Emitter', 'EmitterError'] - -from .error import YAMLError -from .events import * - -class EmitterError(YAMLError): - pass - -class ScalarAnalysis: - def __init__(self, scalar, empty, multiline, - 
allow_flow_plain, allow_block_plain, - allow_single_quoted, allow_double_quoted, - allow_block): - self.scalar = scalar - self.empty = empty - self.multiline = multiline - self.allow_flow_plain = allow_flow_plain - self.allow_block_plain = allow_block_plain - self.allow_single_quoted = allow_single_quoted - self.allow_double_quoted = allow_double_quoted - self.allow_block = allow_block - -class Emitter: - - DEFAULT_TAG_PREFIXES = { - '!' : '!', - 'tag:yaml.org,2002:' : '!!', - } - - def __init__(self, stream, canonical=None, indent=None, width=None, - allow_unicode=None, line_break=None): - - # The stream should have the methods `write` and possibly `flush`. - self.stream = stream - - # Encoding can be overridden by STREAM-START. - self.encoding = None - - # Emitter is a state machine with a stack of states to handle nested - # structures. - self.states = [] - self.state = self.expect_stream_start - - # Current event and the event queue. - self.events = [] - self.event = None - - # The current indentation level and the stack of previous indents. - self.indents = [] - self.indent = None - - # Flow level. - self.flow_level = 0 - - # Contexts. - self.root_context = False - self.sequence_context = False - self.mapping_context = False - self.simple_key_context = False - - # Characteristics of the last emitted character: - # - current position. - # - is it a whitespace? - # - is it an indention character - # (indentation space, '-', '?', or ':')? - self.line = 0 - self.column = 0 - self.whitespace = True - self.indention = True - - # Whether the document requires an explicit document indicator - self.open_ended = False - - # Formatting details. - self.canonical = canonical - self.allow_unicode = allow_unicode - self.best_indent = 2 - if indent and 1 < indent < 10: - self.best_indent = indent - self.best_width = 80 - if width and width > self.best_indent*2: - self.best_width = width - self.best_line_break = '\n' - if line_break in ['\r', '\n', '\r\n']: - self.best_line_break = line_break - - # Tag prefixes. - self.tag_prefixes = None - - # Prepared anchor and tag. - self.prepared_anchor = None - self.prepared_tag = None - - # Scalar analysis and style. - self.analysis = None - self.style = None - - def dispose(self): - # Reset the state attributes (to clear self-references) - self.states = [] - self.state = None - - def emit(self, event): - self.events.append(event) - while not self.need_more_events(): - self.event = self.events.pop(0) - self.state() - self.event = None - - # In some cases, we wait for a few next events before emitting. - - def need_more_events(self): - if not self.events: - return True - event = self.events[0] - if isinstance(event, DocumentStartEvent): - return self.need_events(1) - elif isinstance(event, SequenceStartEvent): - return self.need_events(2) - elif isinstance(event, MappingStartEvent): - return self.need_events(3) - else: - return False - - def need_events(self, count): - level = 0 - for event in self.events[1:]: - if isinstance(event, (DocumentStartEvent, CollectionStartEvent)): - level += 1 - elif isinstance(event, (DocumentEndEvent, CollectionEndEvent)): - level -= 1 - elif isinstance(event, StreamEndEvent): - level = -1 - if level < 0: - return False - return (len(self.events) < count+1) - - def increase_indent(self, flow=False, indentless=False): - self.indents.append(self.indent) - if self.indent is None: - if flow: - self.indent = self.best_indent - else: - self.indent = 0 - elif not indentless: - self.indent += self.best_indent - - # States. 
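
The emitter is a push-style state machine: each emit() call appends an event and advances the current state once need_more_events() reports enough lookahead. A minimal sketch of driving it through the package-level yaml.emit() helper (output shown is approximate):

    import yaml
    from yaml.events import (StreamStartEvent, DocumentStartEvent, ScalarEvent,
                             DocumentEndEvent, StreamEndEvent)

    events = [
        StreamStartEvent(),
        DocumentStartEvent(explicit=False),
        # implicit=(True, True) lets choose_scalar_style pick a plain scalar
        ScalarEvent(anchor=None, tag=None, implicit=(True, True), value='hello'),
        DocumentEndEvent(explicit=False),
        StreamEndEvent(),
    ]
    print(yaml.emit(events))
    # hello
    # ...
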
- - # Stream handlers. - - def expect_stream_start(self): - if isinstance(self.event, StreamStartEvent): - if self.event.encoding and not hasattr(self.stream, 'encoding'): - self.encoding = self.event.encoding - self.write_stream_start() - self.state = self.expect_first_document_start - else: - raise EmitterError("expected StreamStartEvent, but got %s" - % self.event) - - def expect_nothing(self): - raise EmitterError("expected nothing, but got %s" % self.event) - - # Document handlers. - - def expect_first_document_start(self): - return self.expect_document_start(first=True) - - def expect_document_start(self, first=False): - if isinstance(self.event, DocumentStartEvent): - if (self.event.version or self.event.tags) and self.open_ended: - self.write_indicator('...', True) - self.write_indent() - if self.event.version: - version_text = self.prepare_version(self.event.version) - self.write_version_directive(version_text) - self.tag_prefixes = self.DEFAULT_TAG_PREFIXES.copy() - if self.event.tags: - handles = sorted(self.event.tags.keys()) - for handle in handles: - prefix = self.event.tags[handle] - self.tag_prefixes[prefix] = handle - handle_text = self.prepare_tag_handle(handle) - prefix_text = self.prepare_tag_prefix(prefix) - self.write_tag_directive(handle_text, prefix_text) - implicit = (first and not self.event.explicit and not self.canonical - and not self.event.version and not self.event.tags - and not self.check_empty_document()) - if not implicit: - self.write_indent() - self.write_indicator('---', True) - if self.canonical: - self.write_indent() - self.state = self.expect_document_root - elif isinstance(self.event, StreamEndEvent): - if self.open_ended: - self.write_indicator('...', True) - self.write_indent() - self.write_stream_end() - self.state = self.expect_nothing - else: - raise EmitterError("expected DocumentStartEvent, but got %s" - % self.event) - - def expect_document_end(self): - if isinstance(self.event, DocumentEndEvent): - self.write_indent() - if self.event.explicit: - self.write_indicator('...', True) - self.write_indent() - self.flush_stream() - self.state = self.expect_document_start - else: - raise EmitterError("expected DocumentEndEvent, but got %s" - % self.event) - - def expect_document_root(self): - self.states.append(self.expect_document_end) - self.expect_node(root=True) - - # Node handlers. 
- - def expect_node(self, root=False, sequence=False, mapping=False, - simple_key=False): - self.root_context = root - self.sequence_context = sequence - self.mapping_context = mapping - self.simple_key_context = simple_key - if isinstance(self.event, AliasEvent): - self.expect_alias() - elif isinstance(self.event, (ScalarEvent, CollectionStartEvent)): - self.process_anchor('&') - self.process_tag() - if isinstance(self.event, ScalarEvent): - self.expect_scalar() - elif isinstance(self.event, SequenceStartEvent): - if self.flow_level or self.canonical or self.event.flow_style \ - or self.check_empty_sequence(): - self.expect_flow_sequence() - else: - self.expect_block_sequence() - elif isinstance(self.event, MappingStartEvent): - if self.flow_level or self.canonical or self.event.flow_style \ - or self.check_empty_mapping(): - self.expect_flow_mapping() - else: - self.expect_block_mapping() - else: - raise EmitterError("expected NodeEvent, but got %s" % self.event) - - def expect_alias(self): - if self.event.anchor is None: - raise EmitterError("anchor is not specified for alias") - self.process_anchor('*') - self.state = self.states.pop() - - def expect_scalar(self): - self.increase_indent(flow=True) - self.process_scalar() - self.indent = self.indents.pop() - self.state = self.states.pop() - - # Flow sequence handlers. - - def expect_flow_sequence(self): - self.write_indicator('[', True, whitespace=True) - self.flow_level += 1 - self.increase_indent(flow=True) - self.state = self.expect_first_flow_sequence_item - - def expect_first_flow_sequence_item(self): - if isinstance(self.event, SequenceEndEvent): - self.indent = self.indents.pop() - self.flow_level -= 1 - self.write_indicator(']', False) - self.state = self.states.pop() - else: - if self.canonical or self.column > self.best_width: - self.write_indent() - self.states.append(self.expect_flow_sequence_item) - self.expect_node(sequence=True) - - def expect_flow_sequence_item(self): - if isinstance(self.event, SequenceEndEvent): - self.indent = self.indents.pop() - self.flow_level -= 1 - if self.canonical: - self.write_indicator(',', False) - self.write_indent() - self.write_indicator(']', False) - self.state = self.states.pop() - else: - self.write_indicator(',', False) - if self.canonical or self.column > self.best_width: - self.write_indent() - self.states.append(self.expect_flow_sequence_item) - self.expect_node(sequence=True) - - # Flow mapping handlers. 
- - def expect_flow_mapping(self): - self.write_indicator('{', True, whitespace=True) - self.flow_level += 1 - self.increase_indent(flow=True) - self.state = self.expect_first_flow_mapping_key - - def expect_first_flow_mapping_key(self): - if isinstance(self.event, MappingEndEvent): - self.indent = self.indents.pop() - self.flow_level -= 1 - self.write_indicator('}', False) - self.state = self.states.pop() - else: - if self.canonical or self.column > self.best_width: - self.write_indent() - if not self.canonical and self.check_simple_key(): - self.states.append(self.expect_flow_mapping_simple_value) - self.expect_node(mapping=True, simple_key=True) - else: - self.write_indicator('?', True) - self.states.append(self.expect_flow_mapping_value) - self.expect_node(mapping=True) - - def expect_flow_mapping_key(self): - if isinstance(self.event, MappingEndEvent): - self.indent = self.indents.pop() - self.flow_level -= 1 - if self.canonical: - self.write_indicator(',', False) - self.write_indent() - self.write_indicator('}', False) - self.state = self.states.pop() - else: - self.write_indicator(',', False) - if self.canonical or self.column > self.best_width: - self.write_indent() - if not self.canonical and self.check_simple_key(): - self.states.append(self.expect_flow_mapping_simple_value) - self.expect_node(mapping=True, simple_key=True) - else: - self.write_indicator('?', True) - self.states.append(self.expect_flow_mapping_value) - self.expect_node(mapping=True) - - def expect_flow_mapping_simple_value(self): - self.write_indicator(':', False) - self.states.append(self.expect_flow_mapping_key) - self.expect_node(mapping=True) - - def expect_flow_mapping_value(self): - if self.canonical or self.column > self.best_width: - self.write_indent() - self.write_indicator(':', True) - self.states.append(self.expect_flow_mapping_key) - self.expect_node(mapping=True) - - # Block sequence handlers. - - def expect_block_sequence(self): - indentless = (self.mapping_context and not self.indention) - self.increase_indent(flow=False, indentless=indentless) - self.state = self.expect_first_block_sequence_item - - def expect_first_block_sequence_item(self): - return self.expect_block_sequence_item(first=True) - - def expect_block_sequence_item(self, first=False): - if not first and isinstance(self.event, SequenceEndEvent): - self.indent = self.indents.pop() - self.state = self.states.pop() - else: - self.write_indent() - self.write_indicator('-', True, indention=True) - self.states.append(self.expect_block_sequence_item) - self.expect_node(sequence=True) - - # Block mapping handlers. 
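
The flow-collection handlers above and the block handlers that follow are chosen per collection node; the dumper's default_flow_style switch is the easiest way to see both code paths. A small sketch using the standard dump API:

    import yaml

    data = {'a': 1, 'b': [2, 3]}

    print(yaml.dump(data, default_flow_style=True), end='')
    # {a: 1, b: [2, 3]}

    print(yaml.dump(data, default_flow_style=False), end='')
    # a: 1
    # b:
    # - 2
    # - 3
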
- - def expect_block_mapping(self): - self.increase_indent(flow=False) - self.state = self.expect_first_block_mapping_key - - def expect_first_block_mapping_key(self): - return self.expect_block_mapping_key(first=True) - - def expect_block_mapping_key(self, first=False): - if not first and isinstance(self.event, MappingEndEvent): - self.indent = self.indents.pop() - self.state = self.states.pop() - else: - self.write_indent() - if self.check_simple_key(): - self.states.append(self.expect_block_mapping_simple_value) - self.expect_node(mapping=True, simple_key=True) - else: - self.write_indicator('?', True, indention=True) - self.states.append(self.expect_block_mapping_value) - self.expect_node(mapping=True) - - def expect_block_mapping_simple_value(self): - self.write_indicator(':', False) - self.states.append(self.expect_block_mapping_key) - self.expect_node(mapping=True) - - def expect_block_mapping_value(self): - self.write_indent() - self.write_indicator(':', True, indention=True) - self.states.append(self.expect_block_mapping_key) - self.expect_node(mapping=True) - - # Checkers. - - def check_empty_sequence(self): - return (isinstance(self.event, SequenceStartEvent) and self.events - and isinstance(self.events[0], SequenceEndEvent)) - - def check_empty_mapping(self): - return (isinstance(self.event, MappingStartEvent) and self.events - and isinstance(self.events[0], MappingEndEvent)) - - def check_empty_document(self): - if not isinstance(self.event, DocumentStartEvent) or not self.events: - return False - event = self.events[0] - return (isinstance(event, ScalarEvent) and event.anchor is None - and event.tag is None and event.implicit and event.value == '') - - def check_simple_key(self): - length = 0 - if isinstance(self.event, NodeEvent) and self.event.anchor is not None: - if self.prepared_anchor is None: - self.prepared_anchor = self.prepare_anchor(self.event.anchor) - length += len(self.prepared_anchor) - if isinstance(self.event, (ScalarEvent, CollectionStartEvent)) \ - and self.event.tag is not None: - if self.prepared_tag is None: - self.prepared_tag = self.prepare_tag(self.event.tag) - length += len(self.prepared_tag) - if isinstance(self.event, ScalarEvent): - if self.analysis is None: - self.analysis = self.analyze_scalar(self.event.value) - length += len(self.analysis.scalar) - return (length < 128 and (isinstance(self.event, AliasEvent) - or (isinstance(self.event, ScalarEvent) - and not self.analysis.empty and not self.analysis.multiline) - or self.check_empty_sequence() or self.check_empty_mapping())) - - # Anchor, Tag, and Scalar processors. - - def process_anchor(self, indicator): - if self.event.anchor is None: - self.prepared_anchor = None - return - if self.prepared_anchor is None: - self.prepared_anchor = self.prepare_anchor(self.event.anchor) - if self.prepared_anchor: - self.write_indicator(indicator+self.prepared_anchor, True) - self.prepared_anchor = None - - def process_tag(self): - tag = self.event.tag - if isinstance(self.event, ScalarEvent): - if self.style is None: - self.style = self.choose_scalar_style() - if ((not self.canonical or tag is None) and - ((self.style == '' and self.event.implicit[0]) - or (self.style != '' and self.event.implicit[1]))): - self.prepared_tag = None - return - if self.event.implicit[0] and tag is None: - tag = '!' 
- self.prepared_tag = None - else: - if (not self.canonical or tag is None) and self.event.implicit: - self.prepared_tag = None - return - if tag is None: - raise EmitterError("tag is not specified") - if self.prepared_tag is None: - self.prepared_tag = self.prepare_tag(tag) - if self.prepared_tag: - self.write_indicator(self.prepared_tag, True) - self.prepared_tag = None - - def choose_scalar_style(self): - if self.analysis is None: - self.analysis = self.analyze_scalar(self.event.value) - if self.event.style == '"' or self.canonical: - return '"' - if not self.event.style and self.event.implicit[0]: - if (not (self.simple_key_context and - (self.analysis.empty or self.analysis.multiline)) - and (self.flow_level and self.analysis.allow_flow_plain - or (not self.flow_level and self.analysis.allow_block_plain))): - return '' - if self.event.style and self.event.style in '|>': - if (not self.flow_level and not self.simple_key_context - and self.analysis.allow_block): - return self.event.style - if not self.event.style or self.event.style == '\'': - if (self.analysis.allow_single_quoted and - not (self.simple_key_context and self.analysis.multiline)): - return '\'' - return '"' - - def process_scalar(self): - if self.analysis is None: - self.analysis = self.analyze_scalar(self.event.value) - if self.style is None: - self.style = self.choose_scalar_style() - split = (not self.simple_key_context) - #if self.analysis.multiline and split \ - # and (not self.style or self.style in '\'\"'): - # self.write_indent() - if self.style == '"': - self.write_double_quoted(self.analysis.scalar, split) - elif self.style == '\'': - self.write_single_quoted(self.analysis.scalar, split) - elif self.style == '>': - self.write_folded(self.analysis.scalar) - elif self.style == '|': - self.write_literal(self.analysis.scalar) - else: - self.write_plain(self.analysis.scalar, split) - self.analysis = None - self.style = None - - # Analyzers. - - def prepare_version(self, version): - major, minor = version - if major != 1: - raise EmitterError("unsupported YAML version: %d.%d" % (major, minor)) - return '%d.%d' % (major, minor) - - def prepare_tag_handle(self, handle): - if not handle: - raise EmitterError("tag handle must not be empty") - if handle[0] != '!' or handle[-1] != '!': - raise EmitterError("tag handle must start and end with '!': %r" % handle) - for ch in handle[1:-1]: - if not ('0' <= ch <= '9' or 'A' <= ch <= 'Z' or 'a' <= ch <= 'z' \ - or ch in '-_'): - raise EmitterError("invalid character %r in the tag handle: %r" - % (ch, handle)) - return handle - - def prepare_tag_prefix(self, prefix): - if not prefix: - raise EmitterError("tag prefix must not be empty") - chunks = [] - start = end = 0 - if prefix[0] == '!': - end = 1 - while end < len(prefix): - ch = prefix[end] - if '0' <= ch <= '9' or 'A' <= ch <= 'Z' or 'a' <= ch <= 'z' \ - or ch in '-;/?!:@&=+$,_.~*\'()[]': - end += 1 - else: - if start < end: - chunks.append(prefix[start:end]) - start = end = end+1 - data = ch.encode('utf-8') - for ch in data: - chunks.append('%%%02X' % ord(ch)) - if start < end: - chunks.append(prefix[start:end]) - return ''.join(chunks) - - def prepare_tag(self, tag): - if not tag: - raise EmitterError("tag must not be empty") - if tag == '!': - return tag - handle = None - suffix = tag - prefixes = sorted(self.tag_prefixes.keys()) - for prefix in prefixes: - if tag.startswith(prefix) \ - and (prefix == '!' 
or len(prefix) < len(tag)): - handle = self.tag_prefixes[prefix] - suffix = tag[len(prefix):] - chunks = [] - start = end = 0 - while end < len(suffix): - ch = suffix[end] - if '0' <= ch <= '9' or 'A' <= ch <= 'Z' or 'a' <= ch <= 'z' \ - or ch in '-;/?:@&=+$,_.~*\'()[]' \ - or (ch == '!' and handle != '!'): - end += 1 - else: - if start < end: - chunks.append(suffix[start:end]) - start = end = end+1 - data = ch.encode('utf-8') - for ch in data: - chunks.append('%%%02X' % ch) - if start < end: - chunks.append(suffix[start:end]) - suffix_text = ''.join(chunks) - if handle: - return '%s%s' % (handle, suffix_text) - else: - return '!<%s>' % suffix_text - - def prepare_anchor(self, anchor): - if not anchor: - raise EmitterError("anchor must not be empty") - for ch in anchor: - if not ('0' <= ch <= '9' or 'A' <= ch <= 'Z' or 'a' <= ch <= 'z' \ - or ch in '-_'): - raise EmitterError("invalid character %r in the anchor: %r" - % (ch, anchor)) - return anchor - - def analyze_scalar(self, scalar): - - # Empty scalar is a special case. - if not scalar: - return ScalarAnalysis(scalar=scalar, empty=True, multiline=False, - allow_flow_plain=False, allow_block_plain=True, - allow_single_quoted=True, allow_double_quoted=True, - allow_block=False) - - # Indicators and special characters. - block_indicators = False - flow_indicators = False - line_breaks = False - special_characters = False - - # Important whitespace combinations. - leading_space = False - leading_break = False - trailing_space = False - trailing_break = False - break_space = False - space_break = False - - # Check document indicators. - if scalar.startswith('---') or scalar.startswith('...'): - block_indicators = True - flow_indicators = True - - # First character or preceded by a whitespace. - preceded_by_whitespace = True - - # Last character or followed by a whitespace. - followed_by_whitespace = (len(scalar) == 1 or - scalar[1] in '\0 \t\r\n\x85\u2028\u2029') - - # The previous character is a space. - previous_space = False - - # The previous character is a break. - previous_break = False - - index = 0 - while index < len(scalar): - ch = scalar[index] - - # Check for indicators. - if index == 0: - # Leading indicators are special characters. - if ch in '#,[]{}&*!|>\'\"%@`': - flow_indicators = True - block_indicators = True - if ch in '?:': - flow_indicators = True - if followed_by_whitespace: - block_indicators = True - if ch == '-' and followed_by_whitespace: - flow_indicators = True - block_indicators = True - else: - # Some indicators cannot appear within a scalar as well. - if ch in ',?[]{}': - flow_indicators = True - if ch == ':': - flow_indicators = True - if followed_by_whitespace: - block_indicators = True - if ch == '#' and preceded_by_whitespace: - flow_indicators = True - block_indicators = True - - # Check for line breaks, special, and unicode characters. - if ch in '\n\x85\u2028\u2029': - line_breaks = True - if not (ch == '\n' or '\x20' <= ch <= '\x7E'): - if (ch == '\x85' or '\xA0' <= ch <= '\uD7FF' - or '\uE000' <= ch <= '\uFFFD' - or '\U00010000' <= ch < '\U0010ffff') and ch != '\uFEFF': - unicode_characters = True - if not self.allow_unicode: - special_characters = True - else: - special_characters = True - - # Detect important whitespace combinations. 
- if ch == ' ': - if index == 0: - leading_space = True - if index == len(scalar)-1: - trailing_space = True - if previous_break: - break_space = True - previous_space = True - previous_break = False - elif ch in '\n\x85\u2028\u2029': - if index == 0: - leading_break = True - if index == len(scalar)-1: - trailing_break = True - if previous_space: - space_break = True - previous_space = False - previous_break = True - else: - previous_space = False - previous_break = False - - # Prepare for the next character. - index += 1 - preceded_by_whitespace = (ch in '\0 \t\r\n\x85\u2028\u2029') - followed_by_whitespace = (index+1 >= len(scalar) or - scalar[index+1] in '\0 \t\r\n\x85\u2028\u2029') - - # Let's decide what styles are allowed. - allow_flow_plain = True - allow_block_plain = True - allow_single_quoted = True - allow_double_quoted = True - allow_block = True - - # Leading and trailing whitespaces are bad for plain scalars. - if (leading_space or leading_break - or trailing_space or trailing_break): - allow_flow_plain = allow_block_plain = False - - # We do not permit trailing spaces for block scalars. - if trailing_space: - allow_block = False - - # Spaces at the beginning of a new line are only acceptable for block - # scalars. - if break_space: - allow_flow_plain = allow_block_plain = allow_single_quoted = False - - # Spaces followed by breaks, as well as special character are only - # allowed for double quoted scalars. - if space_break or special_characters: - allow_flow_plain = allow_block_plain = \ - allow_single_quoted = allow_block = False - - # Although the plain scalar writer supports breaks, we never emit - # multiline plain scalars. - if line_breaks: - allow_flow_plain = allow_block_plain = False - - # Flow indicators are forbidden for flow plain scalars. - if flow_indicators: - allow_flow_plain = False - - # Block indicators are forbidden for block plain scalars. - if block_indicators: - allow_block_plain = False - - return ScalarAnalysis(scalar=scalar, - empty=False, multiline=line_breaks, - allow_flow_plain=allow_flow_plain, - allow_block_plain=allow_block_plain, - allow_single_quoted=allow_single_quoted, - allow_double_quoted=allow_double_quoted, - allow_block=allow_block) - - # Writers. - - def flush_stream(self): - if hasattr(self.stream, 'flush'): - self.stream.flush() - - def write_stream_start(self): - # Write BOM if needed. 
- if self.encoding and self.encoding.startswith('utf-16'): - self.stream.write('\uFEFF'.encode(self.encoding)) - - def write_stream_end(self): - self.flush_stream() - - def write_indicator(self, indicator, need_whitespace, - whitespace=False, indention=False): - if self.whitespace or not need_whitespace: - data = indicator - else: - data = ' '+indicator - self.whitespace = whitespace - self.indention = self.indention and indention - self.column += len(data) - self.open_ended = False - if self.encoding: - data = data.encode(self.encoding) - self.stream.write(data) - - def write_indent(self): - indent = self.indent or 0 - if not self.indention or self.column > indent \ - or (self.column == indent and not self.whitespace): - self.write_line_break() - if self.column < indent: - self.whitespace = True - data = ' '*(indent-self.column) - self.column = indent - if self.encoding: - data = data.encode(self.encoding) - self.stream.write(data) - - def write_line_break(self, data=None): - if data is None: - data = self.best_line_break - self.whitespace = True - self.indention = True - self.line += 1 - self.column = 0 - if self.encoding: - data = data.encode(self.encoding) - self.stream.write(data) - - def write_version_directive(self, version_text): - data = '%%YAML %s' % version_text - if self.encoding: - data = data.encode(self.encoding) - self.stream.write(data) - self.write_line_break() - - def write_tag_directive(self, handle_text, prefix_text): - data = '%%TAG %s %s' % (handle_text, prefix_text) - if self.encoding: - data = data.encode(self.encoding) - self.stream.write(data) - self.write_line_break() - - # Scalar streams. - - def write_single_quoted(self, text, split=True): - self.write_indicator('\'', True) - spaces = False - breaks = False - start = end = 0 - while end <= len(text): - ch = None - if end < len(text): - ch = text[end] - if spaces: - if ch is None or ch != ' ': - if start+1 == end and self.column > self.best_width and split \ - and start != 0 and end != len(text): - self.write_indent() - else: - data = text[start:end] - self.column += len(data) - if self.encoding: - data = data.encode(self.encoding) - self.stream.write(data) - start = end - elif breaks: - if ch is None or ch not in '\n\x85\u2028\u2029': - if text[start] == '\n': - self.write_line_break() - for br in text[start:end]: - if br == '\n': - self.write_line_break() - else: - self.write_line_break(br) - self.write_indent() - start = end - else: - if ch is None or ch in ' \n\x85\u2028\u2029' or ch == '\'': - if start < end: - data = text[start:end] - self.column += len(data) - if self.encoding: - data = data.encode(self.encoding) - self.stream.write(data) - start = end - if ch == '\'': - data = '\'\'' - self.column += 2 - if self.encoding: - data = data.encode(self.encoding) - self.stream.write(data) - start = end + 1 - if ch is not None: - spaces = (ch == ' ') - breaks = (ch in '\n\x85\u2028\u2029') - end += 1 - self.write_indicator('\'', False) - - ESCAPE_REPLACEMENTS = { - '\0': '0', - '\x07': 'a', - '\x08': 'b', - '\x09': 't', - '\x0A': 'n', - '\x0B': 'v', - '\x0C': 'f', - '\x0D': 'r', - '\x1B': 'e', - '\"': '\"', - '\\': '\\', - '\x85': 'N', - '\xA0': '_', - '\u2028': 'L', - '\u2029': 'P', - } - - def write_double_quoted(self, text, split=True): - self.write_indicator('"', True) - start = end = 0 - while end <= len(text): - ch = None - if end < len(text): - ch = text[end] - if ch is None or ch in '"\\\x85\u2028\u2029\uFEFF' \ - or not ('\x20' <= ch <= '\x7E' - or (self.allow_unicode - and ('\xA0' <= ch <= '\uD7FF' - 
or '\uE000' <= ch <= '\uFFFD'))): - if start < end: - data = text[start:end] - self.column += len(data) - if self.encoding: - data = data.encode(self.encoding) - self.stream.write(data) - start = end - if ch is not None: - if ch in self.ESCAPE_REPLACEMENTS: - data = '\\'+self.ESCAPE_REPLACEMENTS[ch] - elif ch <= '\xFF': - data = '\\x%02X' % ord(ch) - elif ch <= '\uFFFF': - data = '\\u%04X' % ord(ch) - else: - data = '\\U%08X' % ord(ch) - self.column += len(data) - if self.encoding: - data = data.encode(self.encoding) - self.stream.write(data) - start = end+1 - if 0 < end < len(text)-1 and (ch == ' ' or start >= end) \ - and self.column+(end-start) > self.best_width and split: - data = text[start:end]+'\\' - if start < end: - start = end - self.column += len(data) - if self.encoding: - data = data.encode(self.encoding) - self.stream.write(data) - self.write_indent() - self.whitespace = False - self.indention = False - if text[start] == ' ': - data = '\\' - self.column += len(data) - if self.encoding: - data = data.encode(self.encoding) - self.stream.write(data) - end += 1 - self.write_indicator('"', False) - - def determine_block_hints(self, text): - hints = '' - if text: - if text[0] in ' \n\x85\u2028\u2029': - hints += str(self.best_indent) - if text[-1] not in '\n\x85\u2028\u2029': - hints += '-' - elif len(text) == 1 or text[-2] in '\n\x85\u2028\u2029': - hints += '+' - return hints - - def write_folded(self, text): - hints = self.determine_block_hints(text) - self.write_indicator('>'+hints, True) - if hints[-1:] == '+': - self.open_ended = True - self.write_line_break() - leading_space = True - spaces = False - breaks = True - start = end = 0 - while end <= len(text): - ch = None - if end < len(text): - ch = text[end] - if breaks: - if ch is None or ch not in '\n\x85\u2028\u2029': - if not leading_space and ch is not None and ch != ' ' \ - and text[start] == '\n': - self.write_line_break() - leading_space = (ch == ' ') - for br in text[start:end]: - if br == '\n': - self.write_line_break() - else: - self.write_line_break(br) - if ch is not None: - self.write_indent() - start = end - elif spaces: - if ch != ' ': - if start+1 == end and self.column > self.best_width: - self.write_indent() - else: - data = text[start:end] - self.column += len(data) - if self.encoding: - data = data.encode(self.encoding) - self.stream.write(data) - start = end - else: - if ch is None or ch in ' \n\x85\u2028\u2029': - data = text[start:end] - self.column += len(data) - if self.encoding: - data = data.encode(self.encoding) - self.stream.write(data) - if ch is None: - self.write_line_break() - start = end - if ch is not None: - breaks = (ch in '\n\x85\u2028\u2029') - spaces = (ch == ' ') - end += 1 - - def write_literal(self, text): - hints = self.determine_block_hints(text) - self.write_indicator('|'+hints, True) - if hints[-1:] == '+': - self.open_ended = True - self.write_line_break() - breaks = True - start = end = 0 - while end <= len(text): - ch = None - if end < len(text): - ch = text[end] - if breaks: - if ch is None or ch not in '\n\x85\u2028\u2029': - for br in text[start:end]: - if br == '\n': - self.write_line_break() - else: - self.write_line_break(br) - if ch is not None: - self.write_indent() - start = end - else: - if ch is None or ch in '\n\x85\u2028\u2029': - data = text[start:end] - if self.encoding: - data = data.encode(self.encoding) - self.stream.write(data) - if ch is None: - self.write_line_break() - start = end - if ch is not None: - breaks = (ch in '\n\x85\u2028\u2029') - end += 1 - 
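
write_folded and write_literal above emit the '>' and '|' block styles, with determine_block_hints adding a '-' chomping hint when the text lacks a trailing newline and '+' when it ends in a blank line. A rough sketch of the literal style through the public dumper:

    import yaml

    print(yaml.dump("line one\nline two\n", default_style='|'), end='')
    # |
    #   line one
    #   line two
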
- def write_plain(self, text, split=True): - if self.root_context: - self.open_ended = True - if not text: - return - if not self.whitespace: - data = ' ' - self.column += len(data) - if self.encoding: - data = data.encode(self.encoding) - self.stream.write(data) - self.whitespace = False - self.indention = False - spaces = False - breaks = False - start = end = 0 - while end <= len(text): - ch = None - if end < len(text): - ch = text[end] - if spaces: - if ch != ' ': - if start+1 == end and self.column > self.best_width and split: - self.write_indent() - self.whitespace = False - self.indention = False - else: - data = text[start:end] - self.column += len(data) - if self.encoding: - data = data.encode(self.encoding) - self.stream.write(data) - start = end - elif breaks: - if ch not in '\n\x85\u2028\u2029': - if text[start] == '\n': - self.write_line_break() - for br in text[start:end]: - if br == '\n': - self.write_line_break() - else: - self.write_line_break(br) - self.write_indent() - self.whitespace = False - self.indention = False - start = end - else: - if ch is None or ch in ' \n\x85\u2028\u2029': - data = text[start:end] - self.column += len(data) - if self.encoding: - data = data.encode(self.encoding) - self.stream.write(data) - start = end - if ch is not None: - spaces = (ch == ' ') - breaks = (ch in '\n\x85\u2028\u2029') - end += 1 diff --git a/venv/lib/python3.6/site-packages/yaml/error.py b/venv/lib/python3.6/site-packages/yaml/error.py deleted file mode 100644 index b796b4d..0000000 --- a/venv/lib/python3.6/site-packages/yaml/error.py +++ /dev/null @@ -1,75 +0,0 @@ - -__all__ = ['Mark', 'YAMLError', 'MarkedYAMLError'] - -class Mark: - - def __init__(self, name, index, line, column, buffer, pointer): - self.name = name - self.index = index - self.line = line - self.column = column - self.buffer = buffer - self.pointer = pointer - - def get_snippet(self, indent=4, max_length=75): - if self.buffer is None: - return None - head = '' - start = self.pointer - while start > 0 and self.buffer[start-1] not in '\0\r\n\x85\u2028\u2029': - start -= 1 - if self.pointer-start > max_length/2-1: - head = ' ... ' - start += 5 - break - tail = '' - end = self.pointer - while end < len(self.buffer) and self.buffer[end] not in '\0\r\n\x85\u2028\u2029': - end += 1 - if end-self.pointer > max_length/2-1: - tail = ' ... 
' - end -= 5 - break - snippet = self.buffer[start:end] - return ' '*indent + head + snippet + tail + '\n' \ - + ' '*(indent+self.pointer-start+len(head)) + '^' - - def __str__(self): - snippet = self.get_snippet() - where = " in \"%s\", line %d, column %d" \ - % (self.name, self.line+1, self.column+1) - if snippet is not None: - where += ":\n"+snippet - return where - -class YAMLError(Exception): - pass - -class MarkedYAMLError(YAMLError): - - def __init__(self, context=None, context_mark=None, - problem=None, problem_mark=None, note=None): - self.context = context - self.context_mark = context_mark - self.problem = problem - self.problem_mark = problem_mark - self.note = note - - def __str__(self): - lines = [] - if self.context is not None: - lines.append(self.context) - if self.context_mark is not None \ - and (self.problem is None or self.problem_mark is None - or self.context_mark.name != self.problem_mark.name - or self.context_mark.line != self.problem_mark.line - or self.context_mark.column != self.problem_mark.column): - lines.append(str(self.context_mark)) - if self.problem is not None: - lines.append(self.problem) - if self.problem_mark is not None: - lines.append(str(self.problem_mark)) - if self.note is not None: - lines.append(self.note) - return '\n'.join(lines) - diff --git a/venv/lib/python3.6/site-packages/yaml/events.py b/venv/lib/python3.6/site-packages/yaml/events.py deleted file mode 100644 index f79ad38..0000000 --- a/venv/lib/python3.6/site-packages/yaml/events.py +++ /dev/null @@ -1,86 +0,0 @@ - -# Abstract classes. - -class Event(object): - def __init__(self, start_mark=None, end_mark=None): - self.start_mark = start_mark - self.end_mark = end_mark - def __repr__(self): - attributes = [key for key in ['anchor', 'tag', 'implicit', 'value'] - if hasattr(self, key)] - arguments = ', '.join(['%s=%r' % (key, getattr(self, key)) - for key in attributes]) - return '%s(%s)' % (self.__class__.__name__, arguments) - -class NodeEvent(Event): - def __init__(self, anchor, start_mark=None, end_mark=None): - self.anchor = anchor - self.start_mark = start_mark - self.end_mark = end_mark - -class CollectionStartEvent(NodeEvent): - def __init__(self, anchor, tag, implicit, start_mark=None, end_mark=None, - flow_style=None): - self.anchor = anchor - self.tag = tag - self.implicit = implicit - self.start_mark = start_mark - self.end_mark = end_mark - self.flow_style = flow_style - -class CollectionEndEvent(Event): - pass - -# Implementations. 
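
The event classes in this module are the currency between the parser and the emitter; the package-level yaml.parse() helper exposes the stream directly. A quick sketch (reprs shown are approximate):

    import yaml

    for event in yaml.parse("a: 1\n"):
        print(event)
    # StreamStartEvent()
    # DocumentStartEvent()
    # MappingStartEvent(anchor=None, tag=None, implicit=True)
    # ScalarEvent(anchor=None, tag=None, implicit=(True, False), value='a')
    # ScalarEvent(anchor=None, tag=None, implicit=(True, False), value='1')
    # MappingEndEvent()
    # DocumentEndEvent()
    # StreamEndEvent()
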
- -class StreamStartEvent(Event): - def __init__(self, start_mark=None, end_mark=None, encoding=None): - self.start_mark = start_mark - self.end_mark = end_mark - self.encoding = encoding - -class StreamEndEvent(Event): - pass - -class DocumentStartEvent(Event): - def __init__(self, start_mark=None, end_mark=None, - explicit=None, version=None, tags=None): - self.start_mark = start_mark - self.end_mark = end_mark - self.explicit = explicit - self.version = version - self.tags = tags - -class DocumentEndEvent(Event): - def __init__(self, start_mark=None, end_mark=None, - explicit=None): - self.start_mark = start_mark - self.end_mark = end_mark - self.explicit = explicit - -class AliasEvent(NodeEvent): - pass - -class ScalarEvent(NodeEvent): - def __init__(self, anchor, tag, implicit, value, - start_mark=None, end_mark=None, style=None): - self.anchor = anchor - self.tag = tag - self.implicit = implicit - self.value = value - self.start_mark = start_mark - self.end_mark = end_mark - self.style = style - -class SequenceStartEvent(CollectionStartEvent): - pass - -class SequenceEndEvent(CollectionEndEvent): - pass - -class MappingStartEvent(CollectionStartEvent): - pass - -class MappingEndEvent(CollectionEndEvent): - pass - diff --git a/venv/lib/python3.6/site-packages/yaml/loader.py b/venv/lib/python3.6/site-packages/yaml/loader.py deleted file mode 100644 index 414cb2c..0000000 --- a/venv/lib/python3.6/site-packages/yaml/loader.py +++ /dev/null @@ -1,63 +0,0 @@ - -__all__ = ['BaseLoader', 'FullLoader', 'SafeLoader', 'Loader', 'UnsafeLoader'] - -from .reader import * -from .scanner import * -from .parser import * -from .composer import * -from .constructor import * -from .resolver import * - -class BaseLoader(Reader, Scanner, Parser, Composer, BaseConstructor, BaseResolver): - - def __init__(self, stream): - Reader.__init__(self, stream) - Scanner.__init__(self) - Parser.__init__(self) - Composer.__init__(self) - BaseConstructor.__init__(self) - BaseResolver.__init__(self) - -class FullLoader(Reader, Scanner, Parser, Composer, FullConstructor, Resolver): - - def __init__(self, stream): - Reader.__init__(self, stream) - Scanner.__init__(self) - Parser.__init__(self) - Composer.__init__(self) - FullConstructor.__init__(self) - Resolver.__init__(self) - -class SafeLoader(Reader, Scanner, Parser, Composer, SafeConstructor, Resolver): - - def __init__(self, stream): - Reader.__init__(self, stream) - Scanner.__init__(self) - Parser.__init__(self) - Composer.__init__(self) - SafeConstructor.__init__(self) - Resolver.__init__(self) - -class Loader(Reader, Scanner, Parser, Composer, Constructor, Resolver): - - def __init__(self, stream): - Reader.__init__(self, stream) - Scanner.__init__(self) - Parser.__init__(self) - Composer.__init__(self) - Constructor.__init__(self) - Resolver.__init__(self) - -# UnsafeLoader is the same as Loader (which is and was always unsafe on -# untrusted input). Use of either Loader or UnsafeLoader should be rare, since -# FullLoad should be able to load almost all YAML safely. Loader is left intact -# to ensure backwards compatability. 
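
The loader classes above (and UnsafeLoader just below) each pair a constructor with the reader/scanner/parser/composer pipeline. Since PyYAML 5.1, calling load() without an explicit Loader argument emits a warning, so callers normally pick one; a short sketch:

    import yaml

    doc = "created: 2019-08-19"

    print(yaml.load(doc, Loader=yaml.SafeLoader))
    # {'created': datetime.date(2019, 8, 19)}

    print(yaml.safe_load(doc))   # shorthand for the same SafeLoader call
    # {'created': datetime.date(2019, 8, 19)}

    # FullLoader additionally resolves the python/* tags registered by FullConstructor;
    # Loader and UnsafeLoader trust the document completely, as the comment above notes.
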
-class UnsafeLoader(Reader, Scanner, Parser, Composer, Constructor, Resolver): - - def __init__(self, stream): - Reader.__init__(self, stream) - Scanner.__init__(self) - Parser.__init__(self) - Composer.__init__(self) - Constructor.__init__(self) - Resolver.__init__(self) diff --git a/venv/lib/python3.6/site-packages/yaml/nodes.py b/venv/lib/python3.6/site-packages/yaml/nodes.py deleted file mode 100644 index c4f070c..0000000 --- a/venv/lib/python3.6/site-packages/yaml/nodes.py +++ /dev/null @@ -1,49 +0,0 @@ - -class Node(object): - def __init__(self, tag, value, start_mark, end_mark): - self.tag = tag - self.value = value - self.start_mark = start_mark - self.end_mark = end_mark - def __repr__(self): - value = self.value - #if isinstance(value, list): - # if len(value) == 0: - # value = '' - # elif len(value) == 1: - # value = '<1 item>' - # else: - # value = '<%d items>' % len(value) - #else: - # if len(value) > 75: - # value = repr(value[:70]+u' ... ') - # else: - # value = repr(value) - value = repr(value) - return '%s(tag=%r, value=%s)' % (self.__class__.__name__, self.tag, value) - -class ScalarNode(Node): - id = 'scalar' - def __init__(self, tag, value, - start_mark=None, end_mark=None, style=None): - self.tag = tag - self.value = value - self.start_mark = start_mark - self.end_mark = end_mark - self.style = style - -class CollectionNode(Node): - def __init__(self, tag, value, - start_mark=None, end_mark=None, flow_style=None): - self.tag = tag - self.value = value - self.start_mark = start_mark - self.end_mark = end_mark - self.flow_style = flow_style - -class SequenceNode(CollectionNode): - id = 'sequence' - -class MappingNode(CollectionNode): - id = 'mapping' - diff --git a/venv/lib/python3.6/site-packages/yaml/parser.py b/venv/lib/python3.6/site-packages/yaml/parser.py deleted file mode 100644 index 13a5995..0000000 --- a/venv/lib/python3.6/site-packages/yaml/parser.py +++ /dev/null @@ -1,589 +0,0 @@ - -# The following YAML grammar is LL(1) and is parsed by a recursive descent -# parser. -# -# stream ::= STREAM-START implicit_document? explicit_document* STREAM-END -# implicit_document ::= block_node DOCUMENT-END* -# explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END* -# block_node_or_indentless_sequence ::= -# ALIAS -# | properties (block_content | indentless_block_sequence)? -# | block_content -# | indentless_block_sequence -# block_node ::= ALIAS -# | properties block_content? -# | block_content -# flow_node ::= ALIAS -# | properties flow_content? -# | flow_content -# properties ::= TAG ANCHOR? | ANCHOR TAG? -# block_content ::= block_collection | flow_collection | SCALAR -# flow_content ::= flow_collection | SCALAR -# block_collection ::= block_sequence | block_mapping -# flow_collection ::= flow_sequence | flow_mapping -# block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END -# indentless_sequence ::= (BLOCK-ENTRY block_node?)+ -# block_mapping ::= BLOCK-MAPPING_START -# ((KEY block_node_or_indentless_sequence?)? -# (VALUE block_node_or_indentless_sequence?)?)* -# BLOCK-END -# flow_sequence ::= FLOW-SEQUENCE-START -# (flow_sequence_entry FLOW-ENTRY)* -# flow_sequence_entry? -# FLOW-SEQUENCE-END -# flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? -# flow_mapping ::= FLOW-MAPPING-START -# (flow_mapping_entry FLOW-ENTRY)* -# flow_mapping_entry? -# FLOW-MAPPING-END -# flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? 
-# -# FIRST sets: -# -# stream: { STREAM-START } -# explicit_document: { DIRECTIVE DOCUMENT-START } -# implicit_document: FIRST(block_node) -# block_node: { ALIAS TAG ANCHOR SCALAR BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START } -# flow_node: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START } -# block_content: { BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START SCALAR } -# flow_content: { FLOW-SEQUENCE-START FLOW-MAPPING-START SCALAR } -# block_collection: { BLOCK-SEQUENCE-START BLOCK-MAPPING-START } -# flow_collection: { FLOW-SEQUENCE-START FLOW-MAPPING-START } -# block_sequence: { BLOCK-SEQUENCE-START } -# block_mapping: { BLOCK-MAPPING-START } -# block_node_or_indentless_sequence: { ALIAS ANCHOR TAG SCALAR BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START BLOCK-ENTRY } -# indentless_sequence: { ENTRY } -# flow_collection: { FLOW-SEQUENCE-START FLOW-MAPPING-START } -# flow_sequence: { FLOW-SEQUENCE-START } -# flow_mapping: { FLOW-MAPPING-START } -# flow_sequence_entry: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START KEY } -# flow_mapping_entry: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START KEY } - -__all__ = ['Parser', 'ParserError'] - -from .error import MarkedYAMLError -from .tokens import * -from .events import * -from .scanner import * - -class ParserError(MarkedYAMLError): - pass - -class Parser: - # Since writing a recursive-descendant parser is a straightforward task, we - # do not give many comments here. - - DEFAULT_TAGS = { - '!': '!', - '!!': 'tag:yaml.org,2002:', - } - - def __init__(self): - self.current_event = None - self.yaml_version = None - self.tag_handles = {} - self.states = [] - self.marks = [] - self.state = self.parse_stream_start - - def dispose(self): - # Reset the state attributes (to clear self-references) - self.states = [] - self.state = None - - def check_event(self, *choices): - # Check the type of the next event. - if self.current_event is None: - if self.state: - self.current_event = self.state() - if self.current_event is not None: - if not choices: - return True - for choice in choices: - if isinstance(self.current_event, choice): - return True - return False - - def peek_event(self): - # Get the next event. - if self.current_event is None: - if self.state: - self.current_event = self.state() - return self.current_event - - def get_event(self): - # Get the next event and proceed further. - if self.current_event is None: - if self.state: - self.current_event = self.state() - value = self.current_event - self.current_event = None - return value - - # stream ::= STREAM-START implicit_document? explicit_document* STREAM-END - # implicit_document ::= block_node DOCUMENT-END* - # explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END* - - def parse_stream_start(self): - - # Parse the stream start. - token = self.get_token() - event = StreamStartEvent(token.start_mark, token.end_mark, - encoding=token.encoding) - - # Prepare the next state. - self.state = self.parse_implicit_document_start - - return event - - def parse_implicit_document_start(self): - - # Parse an implicit document. - if not self.check_token(DirectiveToken, DocumentStartToken, - StreamEndToken): - self.tag_handles = self.DEFAULT_TAGS - token = self.peek_token() - start_mark = end_mark = token.start_mark - event = DocumentStartEvent(start_mark, end_mark, - explicit=False) - - # Prepare the next state. 
- self.states.append(self.parse_document_end) - self.state = self.parse_block_node - - return event - - else: - return self.parse_document_start() - - def parse_document_start(self): - - # Parse any extra document end indicators. - while self.check_token(DocumentEndToken): - self.get_token() - - # Parse an explicit document. - if not self.check_token(StreamEndToken): - token = self.peek_token() - start_mark = token.start_mark - version, tags = self.process_directives() - if not self.check_token(DocumentStartToken): - raise ParserError(None, None, - "expected '', but found %r" - % self.peek_token().id, - self.peek_token().start_mark) - token = self.get_token() - end_mark = token.end_mark - event = DocumentStartEvent(start_mark, end_mark, - explicit=True, version=version, tags=tags) - self.states.append(self.parse_document_end) - self.state = self.parse_document_content - else: - # Parse the end of the stream. - token = self.get_token() - event = StreamEndEvent(token.start_mark, token.end_mark) - assert not self.states - assert not self.marks - self.state = None - return event - - def parse_document_end(self): - - # Parse the document end. - token = self.peek_token() - start_mark = end_mark = token.start_mark - explicit = False - if self.check_token(DocumentEndToken): - token = self.get_token() - end_mark = token.end_mark - explicit = True - event = DocumentEndEvent(start_mark, end_mark, - explicit=explicit) - - # Prepare the next state. - self.state = self.parse_document_start - - return event - - def parse_document_content(self): - if self.check_token(DirectiveToken, - DocumentStartToken, DocumentEndToken, StreamEndToken): - event = self.process_empty_scalar(self.peek_token().start_mark) - self.state = self.states.pop() - return event - else: - return self.parse_block_node() - - def process_directives(self): - self.yaml_version = None - self.tag_handles = {} - while self.check_token(DirectiveToken): - token = self.get_token() - if token.name == 'YAML': - if self.yaml_version is not None: - raise ParserError(None, None, - "found duplicate YAML directive", token.start_mark) - major, minor = token.value - if major != 1: - raise ParserError(None, None, - "found incompatible YAML document (version 1.* is required)", - token.start_mark) - self.yaml_version = token.value - elif token.name == 'TAG': - handle, prefix = token.value - if handle in self.tag_handles: - raise ParserError(None, None, - "duplicate tag handle %r" % handle, - token.start_mark) - self.tag_handles[handle] = prefix - if self.tag_handles: - value = self.yaml_version, self.tag_handles.copy() - else: - value = self.yaml_version, None - for key in self.DEFAULT_TAGS: - if key not in self.tag_handles: - self.tag_handles[key] = self.DEFAULT_TAGS[key] - return value - - # block_node_or_indentless_sequence ::= ALIAS - # | properties (block_content | indentless_block_sequence)? - # | block_content - # | indentless_block_sequence - # block_node ::= ALIAS - # | properties block_content? - # | block_content - # flow_node ::= ALIAS - # | properties flow_content? - # | flow_content - # properties ::= TAG ANCHOR? | ANCHOR TAG? 
- # block_content ::= block_collection | flow_collection | SCALAR - # flow_content ::= flow_collection | SCALAR - # block_collection ::= block_sequence | block_mapping - # flow_collection ::= flow_sequence | flow_mapping - - def parse_block_node(self): - return self.parse_node(block=True) - - def parse_flow_node(self): - return self.parse_node() - - def parse_block_node_or_indentless_sequence(self): - return self.parse_node(block=True, indentless_sequence=True) - - def parse_node(self, block=False, indentless_sequence=False): - if self.check_token(AliasToken): - token = self.get_token() - event = AliasEvent(token.value, token.start_mark, token.end_mark) - self.state = self.states.pop() - else: - anchor = None - tag = None - start_mark = end_mark = tag_mark = None - if self.check_token(AnchorToken): - token = self.get_token() - start_mark = token.start_mark - end_mark = token.end_mark - anchor = token.value - if self.check_token(TagToken): - token = self.get_token() - tag_mark = token.start_mark - end_mark = token.end_mark - tag = token.value - elif self.check_token(TagToken): - token = self.get_token() - start_mark = tag_mark = token.start_mark - end_mark = token.end_mark - tag = token.value - if self.check_token(AnchorToken): - token = self.get_token() - end_mark = token.end_mark - anchor = token.value - if tag is not None: - handle, suffix = tag - if handle is not None: - if handle not in self.tag_handles: - raise ParserError("while parsing a node", start_mark, - "found undefined tag handle %r" % handle, - tag_mark) - tag = self.tag_handles[handle]+suffix - else: - tag = suffix - #if tag == '!': - # raise ParserError("while parsing a node", start_mark, - # "found non-specific tag '!'", tag_mark, - # "Please check 'http://pyyaml.org/wiki/YAMLNonSpecificTag' and share your opinion.") - if start_mark is None: - start_mark = end_mark = self.peek_token().start_mark - event = None - implicit = (tag is None or tag == '!') - if indentless_sequence and self.check_token(BlockEntryToken): - end_mark = self.peek_token().end_mark - event = SequenceStartEvent(anchor, tag, implicit, - start_mark, end_mark) - self.state = self.parse_indentless_sequence_entry - else: - if self.check_token(ScalarToken): - token = self.get_token() - end_mark = token.end_mark - if (token.plain and tag is None) or tag == '!': - implicit = (True, False) - elif tag is None: - implicit = (False, True) - else: - implicit = (False, False) - event = ScalarEvent(anchor, tag, implicit, token.value, - start_mark, end_mark, style=token.style) - self.state = self.states.pop() - elif self.check_token(FlowSequenceStartToken): - end_mark = self.peek_token().end_mark - event = SequenceStartEvent(anchor, tag, implicit, - start_mark, end_mark, flow_style=True) - self.state = self.parse_flow_sequence_first_entry - elif self.check_token(FlowMappingStartToken): - end_mark = self.peek_token().end_mark - event = MappingStartEvent(anchor, tag, implicit, - start_mark, end_mark, flow_style=True) - self.state = self.parse_flow_mapping_first_key - elif block and self.check_token(BlockSequenceStartToken): - end_mark = self.peek_token().start_mark - event = SequenceStartEvent(anchor, tag, implicit, - start_mark, end_mark, flow_style=False) - self.state = self.parse_block_sequence_first_entry - elif block and self.check_token(BlockMappingStartToken): - end_mark = self.peek_token().start_mark - event = MappingStartEvent(anchor, tag, implicit, - start_mark, end_mark, flow_style=False) - self.state = self.parse_block_mapping_first_key - elif anchor is not 
None or tag is not None: - # Empty scalars are allowed even if a tag or an anchor is - # specified. - event = ScalarEvent(anchor, tag, (implicit, False), '', - start_mark, end_mark) - self.state = self.states.pop() - else: - if block: - node = 'block' - else: - node = 'flow' - token = self.peek_token() - raise ParserError("while parsing a %s node" % node, start_mark, - "expected the node content, but found %r" % token.id, - token.start_mark) - return event - - # block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END - - def parse_block_sequence_first_entry(self): - token = self.get_token() - self.marks.append(token.start_mark) - return self.parse_block_sequence_entry() - - def parse_block_sequence_entry(self): - if self.check_token(BlockEntryToken): - token = self.get_token() - if not self.check_token(BlockEntryToken, BlockEndToken): - self.states.append(self.parse_block_sequence_entry) - return self.parse_block_node() - else: - self.state = self.parse_block_sequence_entry - return self.process_empty_scalar(token.end_mark) - if not self.check_token(BlockEndToken): - token = self.peek_token() - raise ParserError("while parsing a block collection", self.marks[-1], - "expected , but found %r" % token.id, token.start_mark) - token = self.get_token() - event = SequenceEndEvent(token.start_mark, token.end_mark) - self.state = self.states.pop() - self.marks.pop() - return event - - # indentless_sequence ::= (BLOCK-ENTRY block_node?)+ - - def parse_indentless_sequence_entry(self): - if self.check_token(BlockEntryToken): - token = self.get_token() - if not self.check_token(BlockEntryToken, - KeyToken, ValueToken, BlockEndToken): - self.states.append(self.parse_indentless_sequence_entry) - return self.parse_block_node() - else: - self.state = self.parse_indentless_sequence_entry - return self.process_empty_scalar(token.end_mark) - token = self.peek_token() - event = SequenceEndEvent(token.start_mark, token.start_mark) - self.state = self.states.pop() - return event - - # block_mapping ::= BLOCK-MAPPING_START - # ((KEY block_node_or_indentless_sequence?)? 
- # (VALUE block_node_or_indentless_sequence?)?)* - # BLOCK-END - - def parse_block_mapping_first_key(self): - token = self.get_token() - self.marks.append(token.start_mark) - return self.parse_block_mapping_key() - - def parse_block_mapping_key(self): - if self.check_token(KeyToken): - token = self.get_token() - if not self.check_token(KeyToken, ValueToken, BlockEndToken): - self.states.append(self.parse_block_mapping_value) - return self.parse_block_node_or_indentless_sequence() - else: - self.state = self.parse_block_mapping_value - return self.process_empty_scalar(token.end_mark) - if not self.check_token(BlockEndToken): - token = self.peek_token() - raise ParserError("while parsing a block mapping", self.marks[-1], - "expected , but found %r" % token.id, token.start_mark) - token = self.get_token() - event = MappingEndEvent(token.start_mark, token.end_mark) - self.state = self.states.pop() - self.marks.pop() - return event - - def parse_block_mapping_value(self): - if self.check_token(ValueToken): - token = self.get_token() - if not self.check_token(KeyToken, ValueToken, BlockEndToken): - self.states.append(self.parse_block_mapping_key) - return self.parse_block_node_or_indentless_sequence() - else: - self.state = self.parse_block_mapping_key - return self.process_empty_scalar(token.end_mark) - else: - self.state = self.parse_block_mapping_key - token = self.peek_token() - return self.process_empty_scalar(token.start_mark) - - # flow_sequence ::= FLOW-SEQUENCE-START - # (flow_sequence_entry FLOW-ENTRY)* - # flow_sequence_entry? - # FLOW-SEQUENCE-END - # flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? - # - # Note that while production rules for both flow_sequence_entry and - # flow_mapping_entry are equal, their interpretations are different. - # For `flow_sequence_entry`, the part `KEY flow_node? (VALUE flow_node?)?` - # generate an inline mapping (set syntax). 
- - def parse_flow_sequence_first_entry(self): - token = self.get_token() - self.marks.append(token.start_mark) - return self.parse_flow_sequence_entry(first=True) - - def parse_flow_sequence_entry(self, first=False): - if not self.check_token(FlowSequenceEndToken): - if not first: - if self.check_token(FlowEntryToken): - self.get_token() - else: - token = self.peek_token() - raise ParserError("while parsing a flow sequence", self.marks[-1], - "expected ',' or ']', but got %r" % token.id, token.start_mark) - - if self.check_token(KeyToken): - token = self.peek_token() - event = MappingStartEvent(None, None, True, - token.start_mark, token.end_mark, - flow_style=True) - self.state = self.parse_flow_sequence_entry_mapping_key - return event - elif not self.check_token(FlowSequenceEndToken): - self.states.append(self.parse_flow_sequence_entry) - return self.parse_flow_node() - token = self.get_token() - event = SequenceEndEvent(token.start_mark, token.end_mark) - self.state = self.states.pop() - self.marks.pop() - return event - - def parse_flow_sequence_entry_mapping_key(self): - token = self.get_token() - if not self.check_token(ValueToken, - FlowEntryToken, FlowSequenceEndToken): - self.states.append(self.parse_flow_sequence_entry_mapping_value) - return self.parse_flow_node() - else: - self.state = self.parse_flow_sequence_entry_mapping_value - return self.process_empty_scalar(token.end_mark) - - def parse_flow_sequence_entry_mapping_value(self): - if self.check_token(ValueToken): - token = self.get_token() - if not self.check_token(FlowEntryToken, FlowSequenceEndToken): - self.states.append(self.parse_flow_sequence_entry_mapping_end) - return self.parse_flow_node() - else: - self.state = self.parse_flow_sequence_entry_mapping_end - return self.process_empty_scalar(token.end_mark) - else: - self.state = self.parse_flow_sequence_entry_mapping_end - token = self.peek_token() - return self.process_empty_scalar(token.start_mark) - - def parse_flow_sequence_entry_mapping_end(self): - self.state = self.parse_flow_sequence_entry - token = self.peek_token() - return MappingEndEvent(token.start_mark, token.start_mark) - - # flow_mapping ::= FLOW-MAPPING-START - # (flow_mapping_entry FLOW-ENTRY)* - # flow_mapping_entry? - # FLOW-MAPPING-END - # flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? 
- - def parse_flow_mapping_first_key(self): - token = self.get_token() - self.marks.append(token.start_mark) - return self.parse_flow_mapping_key(first=True) - - def parse_flow_mapping_key(self, first=False): - if not self.check_token(FlowMappingEndToken): - if not first: - if self.check_token(FlowEntryToken): - self.get_token() - else: - token = self.peek_token() - raise ParserError("while parsing a flow mapping", self.marks[-1], - "expected ',' or '}', but got %r" % token.id, token.start_mark) - if self.check_token(KeyToken): - token = self.get_token() - if not self.check_token(ValueToken, - FlowEntryToken, FlowMappingEndToken): - self.states.append(self.parse_flow_mapping_value) - return self.parse_flow_node() - else: - self.state = self.parse_flow_mapping_value - return self.process_empty_scalar(token.end_mark) - elif not self.check_token(FlowMappingEndToken): - self.states.append(self.parse_flow_mapping_empty_value) - return self.parse_flow_node() - token = self.get_token() - event = MappingEndEvent(token.start_mark, token.end_mark) - self.state = self.states.pop() - self.marks.pop() - return event - - def parse_flow_mapping_value(self): - if self.check_token(ValueToken): - token = self.get_token() - if not self.check_token(FlowEntryToken, FlowMappingEndToken): - self.states.append(self.parse_flow_mapping_key) - return self.parse_flow_node() - else: - self.state = self.parse_flow_mapping_key - return self.process_empty_scalar(token.end_mark) - else: - self.state = self.parse_flow_mapping_key - token = self.peek_token() - return self.process_empty_scalar(token.start_mark) - - def parse_flow_mapping_empty_value(self): - self.state = self.parse_flow_mapping_key - return self.process_empty_scalar(self.peek_token().start_mark) - - def process_empty_scalar(self, mark): - return ScalarEvent(None, None, (True, False), '', mark, mark) - diff --git a/venv/lib/python3.6/site-packages/yaml/reader.py b/venv/lib/python3.6/site-packages/yaml/reader.py deleted file mode 100644 index 774b021..0000000 --- a/venv/lib/python3.6/site-packages/yaml/reader.py +++ /dev/null @@ -1,185 +0,0 @@ -# This module contains abstractions for the input stream. You don't have to -# looks further, there are no pretty code. -# -# We define two classes here. -# -# Mark(source, line, column) -# It's just a record and its only use is producing nice error messages. -# Parser does not use it for any other purposes. -# -# Reader(source, data) -# Reader determines the encoding of `data` and converts it to unicode. -# Reader provides the following methods and attributes: -# reader.peek(length=1) - return the next `length` characters -# reader.forward(length=1) - move the current position to `length` characters. -# reader.index - the number of the current character. -# reader.line, stream.column - the line and the column of the current character. 
- -__all__ = ['Reader', 'ReaderError'] - -from .error import YAMLError, Mark - -import codecs, re - -class ReaderError(YAMLError): - - def __init__(self, name, position, character, encoding, reason): - self.name = name - self.character = character - self.position = position - self.encoding = encoding - self.reason = reason - - def __str__(self): - if isinstance(self.character, bytes): - return "'%s' codec can't decode byte #x%02x: %s\n" \ - " in \"%s\", position %d" \ - % (self.encoding, ord(self.character), self.reason, - self.name, self.position) - else: - return "unacceptable character #x%04x: %s\n" \ - " in \"%s\", position %d" \ - % (self.character, self.reason, - self.name, self.position) - -class Reader(object): - # Reader: - # - determines the data encoding and converts it to a unicode string, - # - checks if characters are in allowed range, - # - adds '\0' to the end. - - # Reader accepts - # - a `bytes` object, - # - a `str` object, - # - a file-like object with its `read` method returning `str`, - # - a file-like object with its `read` method returning `unicode`. - - # Yeah, it's ugly and slow. - - def __init__(self, stream): - self.name = None - self.stream = None - self.stream_pointer = 0 - self.eof = True - self.buffer = '' - self.pointer = 0 - self.raw_buffer = None - self.raw_decode = None - self.encoding = None - self.index = 0 - self.line = 0 - self.column = 0 - if isinstance(stream, str): - self.name = "" - self.check_printable(stream) - self.buffer = stream+'\0' - elif isinstance(stream, bytes): - self.name = "" - self.raw_buffer = stream - self.determine_encoding() - else: - self.stream = stream - self.name = getattr(stream, 'name', "") - self.eof = False - self.raw_buffer = None - self.determine_encoding() - - def peek(self, index=0): - try: - return self.buffer[self.pointer+index] - except IndexError: - self.update(index+1) - return self.buffer[self.pointer+index] - - def prefix(self, length=1): - if self.pointer+length >= len(self.buffer): - self.update(length) - return self.buffer[self.pointer:self.pointer+length] - - def forward(self, length=1): - if self.pointer+length+1 >= len(self.buffer): - self.update(length+1) - while length: - ch = self.buffer[self.pointer] - self.pointer += 1 - self.index += 1 - if ch in '\n\x85\u2028\u2029' \ - or (ch == '\r' and self.buffer[self.pointer] != '\n'): - self.line += 1 - self.column = 0 - elif ch != '\uFEFF': - self.column += 1 - length -= 1 - - def get_mark(self): - if self.stream is None: - return Mark(self.name, self.index, self.line, self.column, - self.buffer, self.pointer) - else: - return Mark(self.name, self.index, self.line, self.column, - None, None) - - def determine_encoding(self): - while not self.eof and (self.raw_buffer is None or len(self.raw_buffer) < 2): - self.update_raw() - if isinstance(self.raw_buffer, bytes): - if self.raw_buffer.startswith(codecs.BOM_UTF16_LE): - self.raw_decode = codecs.utf_16_le_decode - self.encoding = 'utf-16-le' - elif self.raw_buffer.startswith(codecs.BOM_UTF16_BE): - self.raw_decode = codecs.utf_16_be_decode - self.encoding = 'utf-16-be' - else: - self.raw_decode = codecs.utf_8_decode - self.encoding = 'utf-8' - self.update(1) - - NON_PRINTABLE = re.compile('[^\x09\x0A\x0D\x20-\x7E\x85\xA0-\uD7FF\uE000-\uFFFD\U00010000-\U0010ffff]') - def check_printable(self, data): - match = self.NON_PRINTABLE.search(data) - if match: - character = match.group() - position = self.index+(len(self.buffer)-self.pointer)+match.start() - raise ReaderError(self.name, position, ord(character), - 
'unicode', "special characters are not allowed") - - def update(self, length): - if self.raw_buffer is None: - return - self.buffer = self.buffer[self.pointer:] - self.pointer = 0 - while len(self.buffer) < length: - if not self.eof: - self.update_raw() - if self.raw_decode is not None: - try: - data, converted = self.raw_decode(self.raw_buffer, - 'strict', self.eof) - except UnicodeDecodeError as exc: - character = self.raw_buffer[exc.start] - if self.stream is not None: - position = self.stream_pointer-len(self.raw_buffer)+exc.start - else: - position = exc.start - raise ReaderError(self.name, position, character, - exc.encoding, exc.reason) - else: - data = self.raw_buffer - converted = len(data) - self.check_printable(data) - self.buffer += data - self.raw_buffer = self.raw_buffer[converted:] - if self.eof: - self.buffer += '\0' - self.raw_buffer = None - break - - def update_raw(self, size=4096): - data = self.stream.read(size) - if self.raw_buffer is None: - self.raw_buffer = data - else: - self.raw_buffer += data - self.stream_pointer += len(data) - if not data: - self.eof = True diff --git a/venv/lib/python3.6/site-packages/yaml/representer.py b/venv/lib/python3.6/site-packages/yaml/representer.py deleted file mode 100644 index dd14401..0000000 --- a/venv/lib/python3.6/site-packages/yaml/representer.py +++ /dev/null @@ -1,389 +0,0 @@ - -__all__ = ['BaseRepresenter', 'SafeRepresenter', 'Representer', - 'RepresenterError'] - -from .error import * -from .nodes import * - -import datetime, sys, copyreg, types, base64, collections - -class RepresenterError(YAMLError): - pass - -class BaseRepresenter: - - yaml_representers = {} - yaml_multi_representers = {} - - def __init__(self, default_style=None, default_flow_style=False, sort_keys=True): - self.default_style = default_style - self.sort_keys = sort_keys - self.default_flow_style = default_flow_style - self.represented_objects = {} - self.object_keeper = [] - self.alias_key = None - - def represent(self, data): - node = self.represent_data(data) - self.serialize(node) - self.represented_objects = {} - self.object_keeper = [] - self.alias_key = None - - def represent_data(self, data): - if self.ignore_aliases(data): - self.alias_key = None - else: - self.alias_key = id(data) - if self.alias_key is not None: - if self.alias_key in self.represented_objects: - node = self.represented_objects[self.alias_key] - #if node is None: - # raise RepresenterError("recursive objects are not allowed: %r" % data) - return node - #self.represented_objects[alias_key] = None - self.object_keeper.append(data) - data_types = type(data).__mro__ - if data_types[0] in self.yaml_representers: - node = self.yaml_representers[data_types[0]](self, data) - else: - for data_type in data_types: - if data_type in self.yaml_multi_representers: - node = self.yaml_multi_representers[data_type](self, data) - break - else: - if None in self.yaml_multi_representers: - node = self.yaml_multi_representers[None](self, data) - elif None in self.yaml_representers: - node = self.yaml_representers[None](self, data) - else: - node = ScalarNode(None, str(data)) - #if alias_key is not None: - # self.represented_objects[alias_key] = node - return node - - @classmethod - def add_representer(cls, data_type, representer): - if not 'yaml_representers' in cls.__dict__: - cls.yaml_representers = cls.yaml_representers.copy() - cls.yaml_representers[data_type] = representer - - @classmethod - def add_multi_representer(cls, data_type, representer): - if not 'yaml_multi_representers' in 
cls.__dict__: - cls.yaml_multi_representers = cls.yaml_multi_representers.copy() - cls.yaml_multi_representers[data_type] = representer - - def represent_scalar(self, tag, value, style=None): - if style is None: - style = self.default_style - node = ScalarNode(tag, value, style=style) - if self.alias_key is not None: - self.represented_objects[self.alias_key] = node - return node - - def represent_sequence(self, tag, sequence, flow_style=None): - value = [] - node = SequenceNode(tag, value, flow_style=flow_style) - if self.alias_key is not None: - self.represented_objects[self.alias_key] = node - best_style = True - for item in sequence: - node_item = self.represent_data(item) - if not (isinstance(node_item, ScalarNode) and not node_item.style): - best_style = False - value.append(node_item) - if flow_style is None: - if self.default_flow_style is not None: - node.flow_style = self.default_flow_style - else: - node.flow_style = best_style - return node - - def represent_mapping(self, tag, mapping, flow_style=None): - value = [] - node = MappingNode(tag, value, flow_style=flow_style) - if self.alias_key is not None: - self.represented_objects[self.alias_key] = node - best_style = True - if hasattr(mapping, 'items'): - mapping = list(mapping.items()) - if self.sort_keys: - try: - mapping = sorted(mapping) - except TypeError: - pass - for item_key, item_value in mapping: - node_key = self.represent_data(item_key) - node_value = self.represent_data(item_value) - if not (isinstance(node_key, ScalarNode) and not node_key.style): - best_style = False - if not (isinstance(node_value, ScalarNode) and not node_value.style): - best_style = False - value.append((node_key, node_value)) - if flow_style is None: - if self.default_flow_style is not None: - node.flow_style = self.default_flow_style - else: - node.flow_style = best_style - return node - - def ignore_aliases(self, data): - return False - -class SafeRepresenter(BaseRepresenter): - - def ignore_aliases(self, data): - if data is None: - return True - if isinstance(data, tuple) and data == (): - return True - if isinstance(data, (str, bytes, bool, int, float)): - return True - - def represent_none(self, data): - return self.represent_scalar('tag:yaml.org,2002:null', 'null') - - def represent_str(self, data): - return self.represent_scalar('tag:yaml.org,2002:str', data) - - def represent_binary(self, data): - if hasattr(base64, 'encodebytes'): - data = base64.encodebytes(data).decode('ascii') - else: - data = base64.encodestring(data).decode('ascii') - return self.represent_scalar('tag:yaml.org,2002:binary', data, style='|') - - def represent_bool(self, data): - if data: - value = 'true' - else: - value = 'false' - return self.represent_scalar('tag:yaml.org,2002:bool', value) - - def represent_int(self, data): - return self.represent_scalar('tag:yaml.org,2002:int', str(data)) - - inf_value = 1e300 - while repr(inf_value) != repr(inf_value*inf_value): - inf_value *= inf_value - - def represent_float(self, data): - if data != data or (data == 0.0 and data == 1.0): - value = '.nan' - elif data == self.inf_value: - value = '.inf' - elif data == -self.inf_value: - value = '-.inf' - else: - value = repr(data).lower() - # Note that in some cases `repr(data)` represents a float number - # without the decimal parts. For instance: - # >>> repr(1e17) - # '1e17' - # Unfortunately, this is not a valid float representation according - # to the definition of the `!!float` tag. We fix this by adding - # '.0' before the 'e' symbol. - if '.' 
not in value and 'e' in value: - value = value.replace('e', '.0e', 1) - return self.represent_scalar('tag:yaml.org,2002:float', value) - - def represent_list(self, data): - #pairs = (len(data) > 0 and isinstance(data, list)) - #if pairs: - # for item in data: - # if not isinstance(item, tuple) or len(item) != 2: - # pairs = False - # break - #if not pairs: - return self.represent_sequence('tag:yaml.org,2002:seq', data) - #value = [] - #for item_key, item_value in data: - # value.append(self.represent_mapping(u'tag:yaml.org,2002:map', - # [(item_key, item_value)])) - #return SequenceNode(u'tag:yaml.org,2002:pairs', value) - - def represent_dict(self, data): - return self.represent_mapping('tag:yaml.org,2002:map', data) - - def represent_set(self, data): - value = {} - for key in data: - value[key] = None - return self.represent_mapping('tag:yaml.org,2002:set', value) - - def represent_date(self, data): - value = data.isoformat() - return self.represent_scalar('tag:yaml.org,2002:timestamp', value) - - def represent_datetime(self, data): - value = data.isoformat(' ') - return self.represent_scalar('tag:yaml.org,2002:timestamp', value) - - def represent_yaml_object(self, tag, data, cls, flow_style=None): - if hasattr(data, '__getstate__'): - state = data.__getstate__() - else: - state = data.__dict__.copy() - return self.represent_mapping(tag, state, flow_style=flow_style) - - def represent_undefined(self, data): - raise RepresenterError("cannot represent an object", data) - -SafeRepresenter.add_representer(type(None), - SafeRepresenter.represent_none) - -SafeRepresenter.add_representer(str, - SafeRepresenter.represent_str) - -SafeRepresenter.add_representer(bytes, - SafeRepresenter.represent_binary) - -SafeRepresenter.add_representer(bool, - SafeRepresenter.represent_bool) - -SafeRepresenter.add_representer(int, - SafeRepresenter.represent_int) - -SafeRepresenter.add_representer(float, - SafeRepresenter.represent_float) - -SafeRepresenter.add_representer(list, - SafeRepresenter.represent_list) - -SafeRepresenter.add_representer(tuple, - SafeRepresenter.represent_list) - -SafeRepresenter.add_representer(dict, - SafeRepresenter.represent_dict) - -SafeRepresenter.add_representer(set, - SafeRepresenter.represent_set) - -SafeRepresenter.add_representer(datetime.date, - SafeRepresenter.represent_date) - -SafeRepresenter.add_representer(datetime.datetime, - SafeRepresenter.represent_datetime) - -SafeRepresenter.add_representer(None, - SafeRepresenter.represent_undefined) - -class Representer(SafeRepresenter): - - def represent_complex(self, data): - if data.imag == 0.0: - data = '%r' % data.real - elif data.real == 0.0: - data = '%rj' % data.imag - elif data.imag > 0: - data = '%r+%rj' % (data.real, data.imag) - else: - data = '%r%rj' % (data.real, data.imag) - return self.represent_scalar('tag:yaml.org,2002:python/complex', data) - - def represent_tuple(self, data): - return self.represent_sequence('tag:yaml.org,2002:python/tuple', data) - - def represent_name(self, data): - name = '%s.%s' % (data.__module__, data.__name__) - return self.represent_scalar('tag:yaml.org,2002:python/name:'+name, '') - - def represent_module(self, data): - return self.represent_scalar( - 'tag:yaml.org,2002:python/module:'+data.__name__, '') - - def represent_object(self, data): - # We use __reduce__ API to save the data. 
data.__reduce__ returns - # a tuple of length 2-5: - # (function, args, state, listitems, dictitems) - - # For reconstructing, we calls function(*args), then set its state, - # listitems, and dictitems if they are not None. - - # A special case is when function.__name__ == '__newobj__'. In this - # case we create the object with args[0].__new__(*args). - - # Another special case is when __reduce__ returns a string - we don't - # support it. - - # We produce a !!python/object, !!python/object/new or - # !!python/object/apply node. - - cls = type(data) - if cls in copyreg.dispatch_table: - reduce = copyreg.dispatch_table[cls](data) - elif hasattr(data, '__reduce_ex__'): - reduce = data.__reduce_ex__(2) - elif hasattr(data, '__reduce__'): - reduce = data.__reduce__() - else: - raise RepresenterError("cannot represent an object", data) - reduce = (list(reduce)+[None]*5)[:5] - function, args, state, listitems, dictitems = reduce - args = list(args) - if state is None: - state = {} - if listitems is not None: - listitems = list(listitems) - if dictitems is not None: - dictitems = dict(dictitems) - if function.__name__ == '__newobj__': - function = args[0] - args = args[1:] - tag = 'tag:yaml.org,2002:python/object/new:' - newobj = True - else: - tag = 'tag:yaml.org,2002:python/object/apply:' - newobj = False - function_name = '%s.%s' % (function.__module__, function.__name__) - if not args and not listitems and not dictitems \ - and isinstance(state, dict) and newobj: - return self.represent_mapping( - 'tag:yaml.org,2002:python/object:'+function_name, state) - if not listitems and not dictitems \ - and isinstance(state, dict) and not state: - return self.represent_sequence(tag+function_name, args) - value = {} - if args: - value['args'] = args - if state or not isinstance(state, dict): - value['state'] = state - if listitems: - value['listitems'] = listitems - if dictitems: - value['dictitems'] = dictitems - return self.represent_mapping(tag+function_name, value) - - def represent_ordered_dict(self, data): - # Provide uniform representation across different Python versions. 
- data_type = type(data) - tag = 'tag:yaml.org,2002:python/object/apply:%s.%s' \ - % (data_type.__module__, data_type.__name__) - items = [[key, value] for key, value in data.items()] - return self.represent_sequence(tag, [items]) - -Representer.add_representer(complex, - Representer.represent_complex) - -Representer.add_representer(tuple, - Representer.represent_tuple) - -Representer.add_representer(type, - Representer.represent_name) - -Representer.add_representer(collections.OrderedDict, - Representer.represent_ordered_dict) - -Representer.add_representer(types.FunctionType, - Representer.represent_name) - -Representer.add_representer(types.BuiltinFunctionType, - Representer.represent_name) - -Representer.add_representer(types.ModuleType, - Representer.represent_module) - -Representer.add_multi_representer(object, - Representer.represent_object) - diff --git a/venv/lib/python3.6/site-packages/yaml/resolver.py b/venv/lib/python3.6/site-packages/yaml/resolver.py deleted file mode 100644 index 02b82e7..0000000 --- a/venv/lib/python3.6/site-packages/yaml/resolver.py +++ /dev/null @@ -1,227 +0,0 @@ - -__all__ = ['BaseResolver', 'Resolver'] - -from .error import * -from .nodes import * - -import re - -class ResolverError(YAMLError): - pass - -class BaseResolver: - - DEFAULT_SCALAR_TAG = 'tag:yaml.org,2002:str' - DEFAULT_SEQUENCE_TAG = 'tag:yaml.org,2002:seq' - DEFAULT_MAPPING_TAG = 'tag:yaml.org,2002:map' - - yaml_implicit_resolvers = {} - yaml_path_resolvers = {} - - def __init__(self): - self.resolver_exact_paths = [] - self.resolver_prefix_paths = [] - - @classmethod - def add_implicit_resolver(cls, tag, regexp, first): - if not 'yaml_implicit_resolvers' in cls.__dict__: - implicit_resolvers = {} - for key in cls.yaml_implicit_resolvers: - implicit_resolvers[key] = cls.yaml_implicit_resolvers[key][:] - cls.yaml_implicit_resolvers = implicit_resolvers - if first is None: - first = [None] - for ch in first: - cls.yaml_implicit_resolvers.setdefault(ch, []).append((tag, regexp)) - - @classmethod - def add_path_resolver(cls, tag, path, kind=None): - # Note: `add_path_resolver` is experimental. The API could be changed. - # `new_path` is a pattern that is matched against the path from the - # root to the node that is being considered. `node_path` elements are - # tuples `(node_check, index_check)`. `node_check` is a node class: - # `ScalarNode`, `SequenceNode`, `MappingNode` or `None`. `None` - # matches any kind of a node. `index_check` could be `None`, a boolean - # value, a string value, or a number. `None` and `False` match against - # any _value_ of sequence and mapping nodes. `True` matches against - # any _key_ of a mapping node. A string `index_check` matches against - # a mapping value that corresponds to a scalar key which content is - # equal to the `index_check` value. An integer `index_check` matches - # against a sequence value with the index equal to `index_check`. 
- if not 'yaml_path_resolvers' in cls.__dict__: - cls.yaml_path_resolvers = cls.yaml_path_resolvers.copy() - new_path = [] - for element in path: - if isinstance(element, (list, tuple)): - if len(element) == 2: - node_check, index_check = element - elif len(element) == 1: - node_check = element[0] - index_check = True - else: - raise ResolverError("Invalid path element: %s" % element) - else: - node_check = None - index_check = element - if node_check is str: - node_check = ScalarNode - elif node_check is list: - node_check = SequenceNode - elif node_check is dict: - node_check = MappingNode - elif node_check not in [ScalarNode, SequenceNode, MappingNode] \ - and not isinstance(node_check, str) \ - and node_check is not None: - raise ResolverError("Invalid node checker: %s" % node_check) - if not isinstance(index_check, (str, int)) \ - and index_check is not None: - raise ResolverError("Invalid index checker: %s" % index_check) - new_path.append((node_check, index_check)) - if kind is str: - kind = ScalarNode - elif kind is list: - kind = SequenceNode - elif kind is dict: - kind = MappingNode - elif kind not in [ScalarNode, SequenceNode, MappingNode] \ - and kind is not None: - raise ResolverError("Invalid node kind: %s" % kind) - cls.yaml_path_resolvers[tuple(new_path), kind] = tag - - def descend_resolver(self, current_node, current_index): - if not self.yaml_path_resolvers: - return - exact_paths = {} - prefix_paths = [] - if current_node: - depth = len(self.resolver_prefix_paths) - for path, kind in self.resolver_prefix_paths[-1]: - if self.check_resolver_prefix(depth, path, kind, - current_node, current_index): - if len(path) > depth: - prefix_paths.append((path, kind)) - else: - exact_paths[kind] = self.yaml_path_resolvers[path, kind] - else: - for path, kind in self.yaml_path_resolvers: - if not path: - exact_paths[kind] = self.yaml_path_resolvers[path, kind] - else: - prefix_paths.append((path, kind)) - self.resolver_exact_paths.append(exact_paths) - self.resolver_prefix_paths.append(prefix_paths) - - def ascend_resolver(self): - if not self.yaml_path_resolvers: - return - self.resolver_exact_paths.pop() - self.resolver_prefix_paths.pop() - - def check_resolver_prefix(self, depth, path, kind, - current_node, current_index): - node_check, index_check = path[depth-1] - if isinstance(node_check, str): - if current_node.tag != node_check: - return - elif node_check is not None: - if not isinstance(current_node, node_check): - return - if index_check is True and current_index is not None: - return - if (index_check is False or index_check is None) \ - and current_index is None: - return - if isinstance(index_check, str): - if not (isinstance(current_index, ScalarNode) - and index_check == current_index.value): - return - elif isinstance(index_check, int) and not isinstance(index_check, bool): - if index_check != current_index: - return - return True - - def resolve(self, kind, value, implicit): - if kind is ScalarNode and implicit[0]: - if value == '': - resolvers = self.yaml_implicit_resolvers.get('', []) - else: - resolvers = self.yaml_implicit_resolvers.get(value[0], []) - resolvers += self.yaml_implicit_resolvers.get(None, []) - for tag, regexp in resolvers: - if regexp.match(value): - return tag - implicit = implicit[1] - if self.yaml_path_resolvers: - exact_paths = self.resolver_exact_paths[-1] - if kind in exact_paths: - return exact_paths[kind] - if None in exact_paths: - return exact_paths[None] - if kind is ScalarNode: - return self.DEFAULT_SCALAR_TAG - elif kind is 
SequenceNode: - return self.DEFAULT_SEQUENCE_TAG - elif kind is MappingNode: - return self.DEFAULT_MAPPING_TAG - -class Resolver(BaseResolver): - pass - -Resolver.add_implicit_resolver( - 'tag:yaml.org,2002:bool', - re.compile(r'''^(?:yes|Yes|YES|no|No|NO - |true|True|TRUE|false|False|FALSE - |on|On|ON|off|Off|OFF)$''', re.X), - list('yYnNtTfFoO')) - -Resolver.add_implicit_resolver( - 'tag:yaml.org,2002:float', - re.compile(r'''^(?:[-+]?(?:[0-9][0-9_]*)\.[0-9_]*(?:[eE][-+][0-9]+)? - |\.[0-9_]+(?:[eE][-+][0-9]+)? - |[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+\.[0-9_]* - |[-+]?\.(?:inf|Inf|INF) - |\.(?:nan|NaN|NAN))$''', re.X), - list('-+0123456789.')) - -Resolver.add_implicit_resolver( - 'tag:yaml.org,2002:int', - re.compile(r'''^(?:[-+]?0b[0-1_]+ - |[-+]?0[0-7_]+ - |[-+]?(?:0|[1-9][0-9_]*) - |[-+]?0x[0-9a-fA-F_]+ - |[-+]?[1-9][0-9_]*(?::[0-5]?[0-9])+)$''', re.X), - list('-+0123456789')) - -Resolver.add_implicit_resolver( - 'tag:yaml.org,2002:merge', - re.compile(r'^(?:<<)$'), - ['<']) - -Resolver.add_implicit_resolver( - 'tag:yaml.org,2002:null', - re.compile(r'''^(?: ~ - |null|Null|NULL - | )$''', re.X), - ['~', 'n', 'N', '']) - -Resolver.add_implicit_resolver( - 'tag:yaml.org,2002:timestamp', - re.compile(r'''^(?:[0-9][0-9][0-9][0-9]-[0-9][0-9]-[0-9][0-9] - |[0-9][0-9][0-9][0-9] -[0-9][0-9]? -[0-9][0-9]? - (?:[Tt]|[ \t]+)[0-9][0-9]? - :[0-9][0-9] :[0-9][0-9] (?:\.[0-9]*)? - (?:[ \t]*(?:Z|[-+][0-9][0-9]?(?::[0-9][0-9])?))?)$''', re.X), - list('0123456789')) - -Resolver.add_implicit_resolver( - 'tag:yaml.org,2002:value', - re.compile(r'^(?:=)$'), - ['=']) - -# The following resolver is only for documentation purposes. It cannot work -# because plain scalars cannot start with '!', '&', or '*'. -Resolver.add_implicit_resolver( - 'tag:yaml.org,2002:yaml', - re.compile(r'^(?:!|&|\*)$'), - list('!&*')) - diff --git a/venv/lib/python3.6/site-packages/yaml/scanner.py b/venv/lib/python3.6/site-packages/yaml/scanner.py deleted file mode 100644 index 775dbcc..0000000 --- a/venv/lib/python3.6/site-packages/yaml/scanner.py +++ /dev/null @@ -1,1435 +0,0 @@ - -# Scanner produces tokens of the following types: -# STREAM-START -# STREAM-END -# DIRECTIVE(name, value) -# DOCUMENT-START -# DOCUMENT-END -# BLOCK-SEQUENCE-START -# BLOCK-MAPPING-START -# BLOCK-END -# FLOW-SEQUENCE-START -# FLOW-MAPPING-START -# FLOW-SEQUENCE-END -# FLOW-MAPPING-END -# BLOCK-ENTRY -# FLOW-ENTRY -# KEY -# VALUE -# ALIAS(value) -# ANCHOR(value) -# TAG(value) -# SCALAR(value, plain, style) -# -# Read comments in the Scanner code for more details. -# - -__all__ = ['Scanner', 'ScannerError'] - -from .error import MarkedYAMLError -from .tokens import * - -class ScannerError(MarkedYAMLError): - pass - -class SimpleKey: - # See below simple keys treatment. - - def __init__(self, token_number, required, index, line, column, mark): - self.token_number = token_number - self.required = required - self.index = index - self.line = line - self.column = column - self.mark = mark - -class Scanner: - - def __init__(self): - """Initialize the scanner.""" - # It is assumed that Scanner and Reader will have a common descendant. - # Reader do the dirty work of checking for BOM and converting the - # input data to Unicode. It also adds NUL to the end. - # - # Reader supports the following methods - # self.peek(i=0) # peek the next i-th character - # self.prefix(l=1) # peek the next l characters - # self.forward(l=1) # read the next l characters and move the pointer. - - # Had we reached the end of the stream? 
- self.done = False - - # The number of unclosed '{' and '['. `flow_level == 0` means block - # context. - self.flow_level = 0 - - # List of processed tokens that are not yet emitted. - self.tokens = [] - - # Add the STREAM-START token. - self.fetch_stream_start() - - # Number of tokens that were emitted through the `get_token` method. - self.tokens_taken = 0 - - # The current indentation level. - self.indent = -1 - - # Past indentation levels. - self.indents = [] - - # Variables related to simple keys treatment. - - # A simple key is a key that is not denoted by the '?' indicator. - # Example of simple keys: - # --- - # block simple key: value - # ? not a simple key: - # : { flow simple key: value } - # We emit the KEY token before all keys, so when we find a potential - # simple key, we try to locate the corresponding ':' indicator. - # Simple keys should be limited to a single line and 1024 characters. - - # Can a simple key start at the current position? A simple key may - # start: - # - at the beginning of the line, not counting indentation spaces - # (in block context), - # - after '{', '[', ',' (in the flow context), - # - after '?', ':', '-' (in the block context). - # In the block context, this flag also signifies if a block collection - # may start at the current position. - self.allow_simple_key = True - - # Keep track of possible simple keys. This is a dictionary. The key - # is `flow_level`; there can be no more that one possible simple key - # for each level. The value is a SimpleKey record: - # (token_number, required, index, line, column, mark) - # A simple key may start with ALIAS, ANCHOR, TAG, SCALAR(flow), - # '[', or '{' tokens. - self.possible_simple_keys = {} - - # Public methods. - - def check_token(self, *choices): - # Check if the next token is one of the given types. - while self.need_more_tokens(): - self.fetch_more_tokens() - if self.tokens: - if not choices: - return True - for choice in choices: - if isinstance(self.tokens[0], choice): - return True - return False - - def peek_token(self): - # Return the next token, but do not delete if from the queue. - # Return None if no more tokens. - while self.need_more_tokens(): - self.fetch_more_tokens() - if self.tokens: - return self.tokens[0] - else: - return None - - def get_token(self): - # Return the next token. - while self.need_more_tokens(): - self.fetch_more_tokens() - if self.tokens: - self.tokens_taken += 1 - return self.tokens.pop(0) - - # Private methods. - - def need_more_tokens(self): - if self.done: - return False - if not self.tokens: - return True - # The current token may be a potential simple key, so we - # need to look further. - self.stale_possible_simple_keys() - if self.next_possible_simple_key() == self.tokens_taken: - return True - - def fetch_more_tokens(self): - - # Eat whitespaces and comments until we reach the next token. - self.scan_to_next_token() - - # Remove obsolete possible simple keys. - self.stale_possible_simple_keys() - - # Compare the current indentation and column. It may add some tokens - # and decrease the current indentation level. - self.unwind_indent(self.column) - - # Peek the next character. - ch = self.peek() - - # Is it the end of stream? - if ch == '\0': - return self.fetch_stream_end() - - # Is it a directive? - if ch == '%' and self.check_directive(): - return self.fetch_directive() - - # Is it the document start? - if ch == '-' and self.check_document_start(): - return self.fetch_document_start() - - # Is it the document end? - if ch == '.' 
and self.check_document_end(): - return self.fetch_document_end() - - # TODO: support for BOM within a stream. - #if ch == '\uFEFF': - # return self.fetch_bom() <-- issue BOMToken - - # Note: the order of the following checks is NOT significant. - - # Is it the flow sequence start indicator? - if ch == '[': - return self.fetch_flow_sequence_start() - - # Is it the flow mapping start indicator? - if ch == '{': - return self.fetch_flow_mapping_start() - - # Is it the flow sequence end indicator? - if ch == ']': - return self.fetch_flow_sequence_end() - - # Is it the flow mapping end indicator? - if ch == '}': - return self.fetch_flow_mapping_end() - - # Is it the flow entry indicator? - if ch == ',': - return self.fetch_flow_entry() - - # Is it the block entry indicator? - if ch == '-' and self.check_block_entry(): - return self.fetch_block_entry() - - # Is it the key indicator? - if ch == '?' and self.check_key(): - return self.fetch_key() - - # Is it the value indicator? - if ch == ':' and self.check_value(): - return self.fetch_value() - - # Is it an alias? - if ch == '*': - return self.fetch_alias() - - # Is it an anchor? - if ch == '&': - return self.fetch_anchor() - - # Is it a tag? - if ch == '!': - return self.fetch_tag() - - # Is it a literal scalar? - if ch == '|' and not self.flow_level: - return self.fetch_literal() - - # Is it a folded scalar? - if ch == '>' and not self.flow_level: - return self.fetch_folded() - - # Is it a single quoted scalar? - if ch == '\'': - return self.fetch_single() - - # Is it a double quoted scalar? - if ch == '\"': - return self.fetch_double() - - # It must be a plain scalar then. - if self.check_plain(): - return self.fetch_plain() - - # No? It's an error. Let's produce a nice error message. - raise ScannerError("while scanning for the next token", None, - "found character %r that cannot start any token" % ch, - self.get_mark()) - - # Simple keys treatment. - - def next_possible_simple_key(self): - # Return the number of the nearest possible simple key. Actually we - # don't need to loop through the whole dictionary. We may replace it - # with the following code: - # if not self.possible_simple_keys: - # return None - # return self.possible_simple_keys[ - # min(self.possible_simple_keys.keys())].token_number - min_token_number = None - for level in self.possible_simple_keys: - key = self.possible_simple_keys[level] - if min_token_number is None or key.token_number < min_token_number: - min_token_number = key.token_number - return min_token_number - - def stale_possible_simple_keys(self): - # Remove entries that are no longer possible simple keys. According to - # the YAML specification, simple keys - # - should be limited to a single line, - # - should be no longer than 1024 characters. - # Disabling this procedure will allow simple keys of any length and - # height (may cause problems if indentation is broken though). - for level in list(self.possible_simple_keys): - key = self.possible_simple_keys[level] - if key.line != self.line \ - or self.index-key.index > 1024: - if key.required: - raise ScannerError("while scanning a simple key", key.mark, - "could not find expected ':'", self.get_mark()) - del self.possible_simple_keys[level] - - def save_possible_simple_key(self): - # The next token may start a simple key. We check if it's possible - # and save its position. This function is called for - # ALIAS, ANCHOR, TAG, SCALAR(flow), '[', and '{'. - - # Check if a simple key is required at the current position. 
- required = not self.flow_level and self.indent == self.column - - # The next token might be a simple key. Let's save it's number and - # position. - if self.allow_simple_key: - self.remove_possible_simple_key() - token_number = self.tokens_taken+len(self.tokens) - key = SimpleKey(token_number, required, - self.index, self.line, self.column, self.get_mark()) - self.possible_simple_keys[self.flow_level] = key - - def remove_possible_simple_key(self): - # Remove the saved possible key position at the current flow level. - if self.flow_level in self.possible_simple_keys: - key = self.possible_simple_keys[self.flow_level] - - if key.required: - raise ScannerError("while scanning a simple key", key.mark, - "could not find expected ':'", self.get_mark()) - - del self.possible_simple_keys[self.flow_level] - - # Indentation functions. - - def unwind_indent(self, column): - - ## In flow context, tokens should respect indentation. - ## Actually the condition should be `self.indent >= column` according to - ## the spec. But this condition will prohibit intuitively correct - ## constructions such as - ## key : { - ## } - #if self.flow_level and self.indent > column: - # raise ScannerError(None, None, - # "invalid intendation or unclosed '[' or '{'", - # self.get_mark()) - - # In the flow context, indentation is ignored. We make the scanner less - # restrictive then specification requires. - if self.flow_level: - return - - # In block context, we may need to issue the BLOCK-END tokens. - while self.indent > column: - mark = self.get_mark() - self.indent = self.indents.pop() - self.tokens.append(BlockEndToken(mark, mark)) - - def add_indent(self, column): - # Check if we need to increase indentation. - if self.indent < column: - self.indents.append(self.indent) - self.indent = column - return True - return False - - # Fetchers. - - def fetch_stream_start(self): - # We always add STREAM-START as the first token and STREAM-END as the - # last token. - - # Read the token. - mark = self.get_mark() - - # Add STREAM-START. - self.tokens.append(StreamStartToken(mark, mark, - encoding=self.encoding)) - - - def fetch_stream_end(self): - - # Set the current intendation to -1. - self.unwind_indent(-1) - - # Reset simple keys. - self.remove_possible_simple_key() - self.allow_simple_key = False - self.possible_simple_keys = {} - - # Read the token. - mark = self.get_mark() - - # Add STREAM-END. - self.tokens.append(StreamEndToken(mark, mark)) - - # The steam is finished. - self.done = True - - def fetch_directive(self): - - # Set the current intendation to -1. - self.unwind_indent(-1) - - # Reset simple keys. - self.remove_possible_simple_key() - self.allow_simple_key = False - - # Scan and add DIRECTIVE. - self.tokens.append(self.scan_directive()) - - def fetch_document_start(self): - self.fetch_document_indicator(DocumentStartToken) - - def fetch_document_end(self): - self.fetch_document_indicator(DocumentEndToken) - - def fetch_document_indicator(self, TokenClass): - - # Set the current intendation to -1. - self.unwind_indent(-1) - - # Reset simple keys. Note that there could not be a block collection - # after '---'. - self.remove_possible_simple_key() - self.allow_simple_key = False - - # Add DOCUMENT-START or DOCUMENT-END. 
- start_mark = self.get_mark() - self.forward(3) - end_mark = self.get_mark() - self.tokens.append(TokenClass(start_mark, end_mark)) - - def fetch_flow_sequence_start(self): - self.fetch_flow_collection_start(FlowSequenceStartToken) - - def fetch_flow_mapping_start(self): - self.fetch_flow_collection_start(FlowMappingStartToken) - - def fetch_flow_collection_start(self, TokenClass): - - # '[' and '{' may start a simple key. - self.save_possible_simple_key() - - # Increase the flow level. - self.flow_level += 1 - - # Simple keys are allowed after '[' and '{'. - self.allow_simple_key = True - - # Add FLOW-SEQUENCE-START or FLOW-MAPPING-START. - start_mark = self.get_mark() - self.forward() - end_mark = self.get_mark() - self.tokens.append(TokenClass(start_mark, end_mark)) - - def fetch_flow_sequence_end(self): - self.fetch_flow_collection_end(FlowSequenceEndToken) - - def fetch_flow_mapping_end(self): - self.fetch_flow_collection_end(FlowMappingEndToken) - - def fetch_flow_collection_end(self, TokenClass): - - # Reset possible simple key on the current level. - self.remove_possible_simple_key() - - # Decrease the flow level. - self.flow_level -= 1 - - # No simple keys after ']' or '}'. - self.allow_simple_key = False - - # Add FLOW-SEQUENCE-END or FLOW-MAPPING-END. - start_mark = self.get_mark() - self.forward() - end_mark = self.get_mark() - self.tokens.append(TokenClass(start_mark, end_mark)) - - def fetch_flow_entry(self): - - # Simple keys are allowed after ','. - self.allow_simple_key = True - - # Reset possible simple key on the current level. - self.remove_possible_simple_key() - - # Add FLOW-ENTRY. - start_mark = self.get_mark() - self.forward() - end_mark = self.get_mark() - self.tokens.append(FlowEntryToken(start_mark, end_mark)) - - def fetch_block_entry(self): - - # Block context needs additional checks. - if not self.flow_level: - - # Are we allowed to start a new entry? - if not self.allow_simple_key: - raise ScannerError(None, None, - "sequence entries are not allowed here", - self.get_mark()) - - # We may need to add BLOCK-SEQUENCE-START. - if self.add_indent(self.column): - mark = self.get_mark() - self.tokens.append(BlockSequenceStartToken(mark, mark)) - - # It's an error for the block entry to occur in the flow context, - # but we let the parser detect this. - else: - pass - - # Simple keys are allowed after '-'. - self.allow_simple_key = True - - # Reset possible simple key on the current level. - self.remove_possible_simple_key() - - # Add BLOCK-ENTRY. - start_mark = self.get_mark() - self.forward() - end_mark = self.get_mark() - self.tokens.append(BlockEntryToken(start_mark, end_mark)) - - def fetch_key(self): - - # Block context needs additional checks. - if not self.flow_level: - - # Are we allowed to start a key (not necessary a simple)? - if not self.allow_simple_key: - raise ScannerError(None, None, - "mapping keys are not allowed here", - self.get_mark()) - - # We may need to add BLOCK-MAPPING-START. - if self.add_indent(self.column): - mark = self.get_mark() - self.tokens.append(BlockMappingStartToken(mark, mark)) - - # Simple keys are allowed after '?' in the block context. - self.allow_simple_key = not self.flow_level - - # Reset possible simple key on the current level. - self.remove_possible_simple_key() - - # Add KEY. - start_mark = self.get_mark() - self.forward() - end_mark = self.get_mark() - self.tokens.append(KeyToken(start_mark, end_mark)) - - def fetch_value(self): - - # Do we determine a simple key? 
- if self.flow_level in self.possible_simple_keys: - - # Add KEY. - key = self.possible_simple_keys[self.flow_level] - del self.possible_simple_keys[self.flow_level] - self.tokens.insert(key.token_number-self.tokens_taken, - KeyToken(key.mark, key.mark)) - - # If this key starts a new block mapping, we need to add - # BLOCK-MAPPING-START. - if not self.flow_level: - if self.add_indent(key.column): - self.tokens.insert(key.token_number-self.tokens_taken, - BlockMappingStartToken(key.mark, key.mark)) - - # There cannot be two simple keys one after another. - self.allow_simple_key = False - - # It must be a part of a complex key. - else: - - # Block context needs additional checks. - # (Do we really need them? They will be caught by the parser - # anyway.) - if not self.flow_level: - - # We are allowed to start a complex value if and only if - # we can start a simple key. - if not self.allow_simple_key: - raise ScannerError(None, None, - "mapping values are not allowed here", - self.get_mark()) - - # If this value starts a new block mapping, we need to add - # BLOCK-MAPPING-START. It will be detected as an error later by - # the parser. - if not self.flow_level: - if self.add_indent(self.column): - mark = self.get_mark() - self.tokens.append(BlockMappingStartToken(mark, mark)) - - # Simple keys are allowed after ':' in the block context. - self.allow_simple_key = not self.flow_level - - # Reset possible simple key on the current level. - self.remove_possible_simple_key() - - # Add VALUE. - start_mark = self.get_mark() - self.forward() - end_mark = self.get_mark() - self.tokens.append(ValueToken(start_mark, end_mark)) - - def fetch_alias(self): - - # ALIAS could be a simple key. - self.save_possible_simple_key() - - # No simple keys after ALIAS. - self.allow_simple_key = False - - # Scan and add ALIAS. - self.tokens.append(self.scan_anchor(AliasToken)) - - def fetch_anchor(self): - - # ANCHOR could start a simple key. - self.save_possible_simple_key() - - # No simple keys after ANCHOR. - self.allow_simple_key = False - - # Scan and add ANCHOR. - self.tokens.append(self.scan_anchor(AnchorToken)) - - def fetch_tag(self): - - # TAG could start a simple key. - self.save_possible_simple_key() - - # No simple keys after TAG. - self.allow_simple_key = False - - # Scan and add TAG. - self.tokens.append(self.scan_tag()) - - def fetch_literal(self): - self.fetch_block_scalar(style='|') - - def fetch_folded(self): - self.fetch_block_scalar(style='>') - - def fetch_block_scalar(self, style): - - # A simple key may follow a block scalar. - self.allow_simple_key = True - - # Reset possible simple key on the current level. - self.remove_possible_simple_key() - - # Scan and add SCALAR. - self.tokens.append(self.scan_block_scalar(style)) - - def fetch_single(self): - self.fetch_flow_scalar(style='\'') - - def fetch_double(self): - self.fetch_flow_scalar(style='"') - - def fetch_flow_scalar(self, style): - - # A flow scalar could be a simple key. - self.save_possible_simple_key() - - # No simple keys after flow scalars. - self.allow_simple_key = False - - # Scan and add SCALAR. - self.tokens.append(self.scan_flow_scalar(style)) - - def fetch_plain(self): - - # A plain scalar could be a simple key. - self.save_possible_simple_key() - - # No simple keys after plain scalars. But note that `scan_plain` will - # change this flag if the scan is finished at the beginning of the - # line. - self.allow_simple_key = False - - # Scan and add SCALAR. May change `allow_simple_key`. 
- self.tokens.append(self.scan_plain()) - - # Checkers. - - def check_directive(self): - - # DIRECTIVE: ^ '%' ... - # The '%' indicator is already checked. - if self.column == 0: - return True - - def check_document_start(self): - - # DOCUMENT-START: ^ '---' (' '|'\n') - if self.column == 0: - if self.prefix(3) == '---' \ - and self.peek(3) in '\0 \t\r\n\x85\u2028\u2029': - return True - - def check_document_end(self): - - # DOCUMENT-END: ^ '...' (' '|'\n') - if self.column == 0: - if self.prefix(3) == '...' \ - and self.peek(3) in '\0 \t\r\n\x85\u2028\u2029': - return True - - def check_block_entry(self): - - # BLOCK-ENTRY: '-' (' '|'\n') - return self.peek(1) in '\0 \t\r\n\x85\u2028\u2029' - - def check_key(self): - - # KEY(flow context): '?' - if self.flow_level: - return True - - # KEY(block context): '?' (' '|'\n') - else: - return self.peek(1) in '\0 \t\r\n\x85\u2028\u2029' - - def check_value(self): - - # VALUE(flow context): ':' - if self.flow_level: - return True - - # VALUE(block context): ':' (' '|'\n') - else: - return self.peek(1) in '\0 \t\r\n\x85\u2028\u2029' - - def check_plain(self): - - # A plain scalar may start with any non-space character except: - # '-', '?', ':', ',', '[', ']', '{', '}', - # '#', '&', '*', '!', '|', '>', '\'', '\"', - # '%', '@', '`'. - # - # It may also start with - # '-', '?', ':' - # if it is followed by a non-space character. - # - # Note that we limit the last rule to the block context (except the - # '-' character) because we want the flow context to be space - # independent. - ch = self.peek() - return ch not in '\0 \t\r\n\x85\u2028\u2029-?:,[]{}#&*!|>\'\"%@`' \ - or (self.peek(1) not in '\0 \t\r\n\x85\u2028\u2029' - and (ch == '-' or (not self.flow_level and ch in '?:'))) - - # Scanners. - - def scan_to_next_token(self): - # We ignore spaces, line breaks and comments. - # If we find a line break in the block context, we set the flag - # `allow_simple_key` on. - # The byte order mark is stripped if it's the first character in the - # stream. We do not yet support BOM inside the stream as the - # specification requires. Any such mark will be considered as a part - # of the document. - # - # TODO: We need to make tab handling rules more sane. A good rule is - # Tabs cannot precede tokens - # BLOCK-SEQUENCE-START, BLOCK-MAPPING-START, BLOCK-END, - # KEY(block), VALUE(block), BLOCK-ENTRY - # So the checking code is - # if : - # self.allow_simple_keys = False - # We also need to add the check for `allow_simple_keys == True` to - # `unwind_indent` before issuing BLOCK-END. - # Scanners for block, flow, and plain scalars need to be modified. - - if self.index == 0 and self.peek() == '\uFEFF': - self.forward() - found = False - while not found: - while self.peek() == ' ': - self.forward() - if self.peek() == '#': - while self.peek() not in '\0\r\n\x85\u2028\u2029': - self.forward() - if self.scan_line_break(): - if not self.flow_level: - self.allow_simple_key = True - else: - found = True - - def scan_directive(self): - # See the specification for details. 
- start_mark = self.get_mark() - self.forward() - name = self.scan_directive_name(start_mark) - value = None - if name == 'YAML': - value = self.scan_yaml_directive_value(start_mark) - end_mark = self.get_mark() - elif name == 'TAG': - value = self.scan_tag_directive_value(start_mark) - end_mark = self.get_mark() - else: - end_mark = self.get_mark() - while self.peek() not in '\0\r\n\x85\u2028\u2029': - self.forward() - self.scan_directive_ignored_line(start_mark) - return DirectiveToken(name, value, start_mark, end_mark) - - def scan_directive_name(self, start_mark): - # See the specification for details. - length = 0 - ch = self.peek(length) - while '0' <= ch <= '9' or 'A' <= ch <= 'Z' or 'a' <= ch <= 'z' \ - or ch in '-_': - length += 1 - ch = self.peek(length) - if not length: - raise ScannerError("while scanning a directive", start_mark, - "expected alphabetic or numeric character, but found %r" - % ch, self.get_mark()) - value = self.prefix(length) - self.forward(length) - ch = self.peek() - if ch not in '\0 \r\n\x85\u2028\u2029': - raise ScannerError("while scanning a directive", start_mark, - "expected alphabetic or numeric character, but found %r" - % ch, self.get_mark()) - return value - - def scan_yaml_directive_value(self, start_mark): - # See the specification for details. - while self.peek() == ' ': - self.forward() - major = self.scan_yaml_directive_number(start_mark) - if self.peek() != '.': - raise ScannerError("while scanning a directive", start_mark, - "expected a digit or '.', but found %r" % self.peek(), - self.get_mark()) - self.forward() - minor = self.scan_yaml_directive_number(start_mark) - if self.peek() not in '\0 \r\n\x85\u2028\u2029': - raise ScannerError("while scanning a directive", start_mark, - "expected a digit or ' ', but found %r" % self.peek(), - self.get_mark()) - return (major, minor) - - def scan_yaml_directive_number(self, start_mark): - # See the specification for details. - ch = self.peek() - if not ('0' <= ch <= '9'): - raise ScannerError("while scanning a directive", start_mark, - "expected a digit, but found %r" % ch, self.get_mark()) - length = 0 - while '0' <= self.peek(length) <= '9': - length += 1 - value = int(self.prefix(length)) - self.forward(length) - return value - - def scan_tag_directive_value(self, start_mark): - # See the specification for details. - while self.peek() == ' ': - self.forward() - handle = self.scan_tag_directive_handle(start_mark) - while self.peek() == ' ': - self.forward() - prefix = self.scan_tag_directive_prefix(start_mark) - return (handle, prefix) - - def scan_tag_directive_handle(self, start_mark): - # See the specification for details. - value = self.scan_tag_handle('directive', start_mark) - ch = self.peek() - if ch != ' ': - raise ScannerError("while scanning a directive", start_mark, - "expected ' ', but found %r" % ch, self.get_mark()) - return value - - def scan_tag_directive_prefix(self, start_mark): - # See the specification for details. - value = self.scan_tag_uri('directive', start_mark) - ch = self.peek() - if ch not in '\0 \r\n\x85\u2028\u2029': - raise ScannerError("while scanning a directive", start_mark, - "expected ' ', but found %r" % ch, self.get_mark()) - return value - - def scan_directive_ignored_line(self, start_mark): - # See the specification for details. 
- while self.peek() == ' ': - self.forward() - if self.peek() == '#': - while self.peek() not in '\0\r\n\x85\u2028\u2029': - self.forward() - ch = self.peek() - if ch not in '\0\r\n\x85\u2028\u2029': - raise ScannerError("while scanning a directive", start_mark, - "expected a comment or a line break, but found %r" - % ch, self.get_mark()) - self.scan_line_break() - - def scan_anchor(self, TokenClass): - # The specification does not restrict characters for anchors and - # aliases. This may lead to problems, for instance, the document: - # [ *alias, value ] - # can be interpreted in two ways, as - # [ "value" ] - # and - # [ *alias , "value" ] - # Therefore we restrict aliases to numbers and ASCII letters. - start_mark = self.get_mark() - indicator = self.peek() - if indicator == '*': - name = 'alias' - else: - name = 'anchor' - self.forward() - length = 0 - ch = self.peek(length) - while '0' <= ch <= '9' or 'A' <= ch <= 'Z' or 'a' <= ch <= 'z' \ - or ch in '-_': - length += 1 - ch = self.peek(length) - if not length: - raise ScannerError("while scanning an %s" % name, start_mark, - "expected alphabetic or numeric character, but found %r" - % ch, self.get_mark()) - value = self.prefix(length) - self.forward(length) - ch = self.peek() - if ch not in '\0 \t\r\n\x85\u2028\u2029?:,]}%@`': - raise ScannerError("while scanning an %s" % name, start_mark, - "expected alphabetic or numeric character, but found %r" - % ch, self.get_mark()) - end_mark = self.get_mark() - return TokenClass(value, start_mark, end_mark) - - def scan_tag(self): - # See the specification for details. - start_mark = self.get_mark() - ch = self.peek(1) - if ch == '<': - handle = None - self.forward(2) - suffix = self.scan_tag_uri('tag', start_mark) - if self.peek() != '>': - raise ScannerError("while parsing a tag", start_mark, - "expected '>', but found %r" % self.peek(), - self.get_mark()) - self.forward() - elif ch in '\0 \t\r\n\x85\u2028\u2029': - handle = None - suffix = '!' - self.forward() - else: - length = 1 - use_handle = False - while ch not in '\0 \r\n\x85\u2028\u2029': - if ch == '!': - use_handle = True - break - length += 1 - ch = self.peek(length) - handle = '!' - if use_handle: - handle = self.scan_tag_handle('tag', start_mark) - else: - handle = '!' - self.forward() - suffix = self.scan_tag_uri('tag', start_mark) - ch = self.peek() - if ch not in '\0 \r\n\x85\u2028\u2029': - raise ScannerError("while scanning a tag", start_mark, - "expected ' ', but found %r" % ch, self.get_mark()) - value = (handle, suffix) - end_mark = self.get_mark() - return TagToken(value, start_mark, end_mark) - - def scan_block_scalar(self, style): - # See the specification for details. - - if style == '>': - folded = True - else: - folded = False - - chunks = [] - start_mark = self.get_mark() - - # Scan the header. - self.forward() - chomping, increment = self.scan_block_scalar_indicators(start_mark) - self.scan_block_scalar_ignored_line(start_mark) - - # Determine the indentation level and go to the first non-empty line. - min_indent = self.indent+1 - if min_indent < 1: - min_indent = 1 - if increment is None: - breaks, max_indent, end_mark = self.scan_block_scalar_indentation() - indent = max(min_indent, max_indent) - else: - indent = min_indent+increment-1 - breaks, end_mark = self.scan_block_scalar_breaks(indent) - line_break = '' - - # Scan the inner part of the block scalar. 
- while self.column == indent and self.peek() != '\0': - chunks.extend(breaks) - leading_non_space = self.peek() not in ' \t' - length = 0 - while self.peek(length) not in '\0\r\n\x85\u2028\u2029': - length += 1 - chunks.append(self.prefix(length)) - self.forward(length) - line_break = self.scan_line_break() - breaks, end_mark = self.scan_block_scalar_breaks(indent) - if self.column == indent and self.peek() != '\0': - - # Unfortunately, folding rules are ambiguous. - # - # This is the folding according to the specification: - - if folded and line_break == '\n' \ - and leading_non_space and self.peek() not in ' \t': - if not breaks: - chunks.append(' ') - else: - chunks.append(line_break) - - # This is Clark Evans's interpretation (also in the spec - # examples): - # - #if folded and line_break == '\n': - # if not breaks: - # if self.peek() not in ' \t': - # chunks.append(' ') - # else: - # chunks.append(line_break) - #else: - # chunks.append(line_break) - else: - break - - # Chomp the tail. - if chomping is not False: - chunks.append(line_break) - if chomping is True: - chunks.extend(breaks) - - # We are done. - return ScalarToken(''.join(chunks), False, start_mark, end_mark, - style) - - def scan_block_scalar_indicators(self, start_mark): - # See the specification for details. - chomping = None - increment = None - ch = self.peek() - if ch in '+-': - if ch == '+': - chomping = True - else: - chomping = False - self.forward() - ch = self.peek() - if ch in '0123456789': - increment = int(ch) - if increment == 0: - raise ScannerError("while scanning a block scalar", start_mark, - "expected indentation indicator in the range 1-9, but found 0", - self.get_mark()) - self.forward() - elif ch in '0123456789': - increment = int(ch) - if increment == 0: - raise ScannerError("while scanning a block scalar", start_mark, - "expected indentation indicator in the range 1-9, but found 0", - self.get_mark()) - self.forward() - ch = self.peek() - if ch in '+-': - if ch == '+': - chomping = True - else: - chomping = False - self.forward() - ch = self.peek() - if ch not in '\0 \r\n\x85\u2028\u2029': - raise ScannerError("while scanning a block scalar", start_mark, - "expected chomping or indentation indicators, but found %r" - % ch, self.get_mark()) - return chomping, increment - - def scan_block_scalar_ignored_line(self, start_mark): - # See the specification for details. - while self.peek() == ' ': - self.forward() - if self.peek() == '#': - while self.peek() not in '\0\r\n\x85\u2028\u2029': - self.forward() - ch = self.peek() - if ch not in '\0\r\n\x85\u2028\u2029': - raise ScannerError("while scanning a block scalar", start_mark, - "expected a comment or a line break, but found %r" % ch, - self.get_mark()) - self.scan_line_break() - - def scan_block_scalar_indentation(self): - # See the specification for details. - chunks = [] - max_indent = 0 - end_mark = self.get_mark() - while self.peek() in ' \r\n\x85\u2028\u2029': - if self.peek() != ' ': - chunks.append(self.scan_line_break()) - end_mark = self.get_mark() - else: - self.forward() - if self.column > max_indent: - max_indent = self.column - return chunks, max_indent, end_mark - - def scan_block_scalar_breaks(self, indent): - # See the specification for details. 
- chunks = [] - end_mark = self.get_mark() - while self.column < indent and self.peek() == ' ': - self.forward() - while self.peek() in '\r\n\x85\u2028\u2029': - chunks.append(self.scan_line_break()) - end_mark = self.get_mark() - while self.column < indent and self.peek() == ' ': - self.forward() - return chunks, end_mark - - def scan_flow_scalar(self, style): - # See the specification for details. - # Note that we loose indentation rules for quoted scalars. Quoted - # scalars don't need to adhere indentation because " and ' clearly - # mark the beginning and the end of them. Therefore we are less - # restrictive then the specification requires. We only need to check - # that document separators are not included in scalars. - if style == '"': - double = True - else: - double = False - chunks = [] - start_mark = self.get_mark() - quote = self.peek() - self.forward() - chunks.extend(self.scan_flow_scalar_non_spaces(double, start_mark)) - while self.peek() != quote: - chunks.extend(self.scan_flow_scalar_spaces(double, start_mark)) - chunks.extend(self.scan_flow_scalar_non_spaces(double, start_mark)) - self.forward() - end_mark = self.get_mark() - return ScalarToken(''.join(chunks), False, start_mark, end_mark, - style) - - ESCAPE_REPLACEMENTS = { - '0': '\0', - 'a': '\x07', - 'b': '\x08', - 't': '\x09', - '\t': '\x09', - 'n': '\x0A', - 'v': '\x0B', - 'f': '\x0C', - 'r': '\x0D', - 'e': '\x1B', - ' ': '\x20', - '\"': '\"', - '\\': '\\', - '/': '/', - 'N': '\x85', - '_': '\xA0', - 'L': '\u2028', - 'P': '\u2029', - } - - ESCAPE_CODES = { - 'x': 2, - 'u': 4, - 'U': 8, - } - - def scan_flow_scalar_non_spaces(self, double, start_mark): - # See the specification for details. - chunks = [] - while True: - length = 0 - while self.peek(length) not in '\'\"\\\0 \t\r\n\x85\u2028\u2029': - length += 1 - if length: - chunks.append(self.prefix(length)) - self.forward(length) - ch = self.peek() - if not double and ch == '\'' and self.peek(1) == '\'': - chunks.append('\'') - self.forward(2) - elif (double and ch == '\'') or (not double and ch in '\"\\'): - chunks.append(ch) - self.forward() - elif double and ch == '\\': - self.forward() - ch = self.peek() - if ch in self.ESCAPE_REPLACEMENTS: - chunks.append(self.ESCAPE_REPLACEMENTS[ch]) - self.forward() - elif ch in self.ESCAPE_CODES: - length = self.ESCAPE_CODES[ch] - self.forward() - for k in range(length): - if self.peek(k) not in '0123456789ABCDEFabcdef': - raise ScannerError("while scanning a double-quoted scalar", start_mark, - "expected escape sequence of %d hexdecimal numbers, but found %r" % - (length, self.peek(k)), self.get_mark()) - code = int(self.prefix(length), 16) - chunks.append(chr(code)) - self.forward(length) - elif ch in '\r\n\x85\u2028\u2029': - self.scan_line_break() - chunks.extend(self.scan_flow_scalar_breaks(double, start_mark)) - else: - raise ScannerError("while scanning a double-quoted scalar", start_mark, - "found unknown escape character %r" % ch, self.get_mark()) - else: - return chunks - - def scan_flow_scalar_spaces(self, double, start_mark): - # See the specification for details. 
- chunks = [] - length = 0 - while self.peek(length) in ' \t': - length += 1 - whitespaces = self.prefix(length) - self.forward(length) - ch = self.peek() - if ch == '\0': - raise ScannerError("while scanning a quoted scalar", start_mark, - "found unexpected end of stream", self.get_mark()) - elif ch in '\r\n\x85\u2028\u2029': - line_break = self.scan_line_break() - breaks = self.scan_flow_scalar_breaks(double, start_mark) - if line_break != '\n': - chunks.append(line_break) - elif not breaks: - chunks.append(' ') - chunks.extend(breaks) - else: - chunks.append(whitespaces) - return chunks - - def scan_flow_scalar_breaks(self, double, start_mark): - # See the specification for details. - chunks = [] - while True: - # Instead of checking indentation, we check for document - # separators. - prefix = self.prefix(3) - if (prefix == '---' or prefix == '...') \ - and self.peek(3) in '\0 \t\r\n\x85\u2028\u2029': - raise ScannerError("while scanning a quoted scalar", start_mark, - "found unexpected document separator", self.get_mark()) - while self.peek() in ' \t': - self.forward() - if self.peek() in '\r\n\x85\u2028\u2029': - chunks.append(self.scan_line_break()) - else: - return chunks - - def scan_plain(self): - # See the specification for details. - # We add an additional restriction for the flow context: - # plain scalars in the flow context cannot contain ',' or '?'. - # We also keep track of the `allow_simple_key` flag here. - # Indentation rules are loosed for the flow context. - chunks = [] - start_mark = self.get_mark() - end_mark = start_mark - indent = self.indent+1 - # We allow zero indentation for scalars, but then we need to check for - # document separators at the beginning of the line. - #if indent == 0: - # indent = 1 - spaces = [] - while True: - length = 0 - if self.peek() == '#': - break - while True: - ch = self.peek(length) - if ch in '\0 \t\r\n\x85\u2028\u2029' \ - or (ch == ':' and - self.peek(length+1) in '\0 \t\r\n\x85\u2028\u2029' - + (u',[]{}' if self.flow_level else u''))\ - or (self.flow_level and ch in ',?[]{}'): - break - length += 1 - if length == 0: - break - self.allow_simple_key = False - chunks.extend(spaces) - chunks.append(self.prefix(length)) - self.forward(length) - end_mark = self.get_mark() - spaces = self.scan_plain_spaces(indent, start_mark) - if not spaces or self.peek() == '#' \ - or (not self.flow_level and self.column < indent): - break - return ScalarToken(''.join(chunks), True, start_mark, end_mark) - - def scan_plain_spaces(self, indent, start_mark): - # See the specification for details. - # The specification is really confusing about tabs in plain scalars. - # We just forbid them completely. Do not use tabs in YAML! 
- chunks = [] - length = 0 - while self.peek(length) in ' ': - length += 1 - whitespaces = self.prefix(length) - self.forward(length) - ch = self.peek() - if ch in '\r\n\x85\u2028\u2029': - line_break = self.scan_line_break() - self.allow_simple_key = True - prefix = self.prefix(3) - if (prefix == '---' or prefix == '...') \ - and self.peek(3) in '\0 \t\r\n\x85\u2028\u2029': - return - breaks = [] - while self.peek() in ' \r\n\x85\u2028\u2029': - if self.peek() == ' ': - self.forward() - else: - breaks.append(self.scan_line_break()) - prefix = self.prefix(3) - if (prefix == '---' or prefix == '...') \ - and self.peek(3) in '\0 \t\r\n\x85\u2028\u2029': - return - if line_break != '\n': - chunks.append(line_break) - elif not breaks: - chunks.append(' ') - chunks.extend(breaks) - elif whitespaces: - chunks.append(whitespaces) - return chunks - - def scan_tag_handle(self, name, start_mark): - # See the specification for details. - # For some strange reasons, the specification does not allow '_' in - # tag handles. I have allowed it anyway. - ch = self.peek() - if ch != '!': - raise ScannerError("while scanning a %s" % name, start_mark, - "expected '!', but found %r" % ch, self.get_mark()) - length = 1 - ch = self.peek(length) - if ch != ' ': - while '0' <= ch <= '9' or 'A' <= ch <= 'Z' or 'a' <= ch <= 'z' \ - or ch in '-_': - length += 1 - ch = self.peek(length) - if ch != '!': - self.forward(length) - raise ScannerError("while scanning a %s" % name, start_mark, - "expected '!', but found %r" % ch, self.get_mark()) - length += 1 - value = self.prefix(length) - self.forward(length) - return value - - def scan_tag_uri(self, name, start_mark): - # See the specification for details. - # Note: we do not check if URI is well-formed. - chunks = [] - length = 0 - ch = self.peek(length) - while '0' <= ch <= '9' or 'A' <= ch <= 'Z' or 'a' <= ch <= 'z' \ - or ch in '-;/?:@&=+$,_.!~*\'()[]%': - if ch == '%': - chunks.append(self.prefix(length)) - self.forward(length) - length = 0 - chunks.append(self.scan_uri_escapes(name, start_mark)) - else: - length += 1 - ch = self.peek(length) - if length: - chunks.append(self.prefix(length)) - self.forward(length) - length = 0 - if not chunks: - raise ScannerError("while parsing a %s" % name, start_mark, - "expected URI, but found %r" % ch, self.get_mark()) - return ''.join(chunks) - - def scan_uri_escapes(self, name, start_mark): - # See the specification for details. 
- codes = [] - mark = self.get_mark() - while self.peek() == '%': - self.forward() - for k in range(2): - if self.peek(k) not in '0123456789ABCDEFabcdef': - raise ScannerError("while scanning a %s" % name, start_mark, - "expected URI escape sequence of 2 hexdecimal numbers, but found %r" - % self.peek(k), self.get_mark()) - codes.append(int(self.prefix(2), 16)) - self.forward(2) - try: - value = bytes(codes).decode('utf-8') - except UnicodeDecodeError as exc: - raise ScannerError("while scanning a %s" % name, start_mark, str(exc), mark) - return value - - def scan_line_break(self): - # Transforms: - # '\r\n' : '\n' - # '\r' : '\n' - # '\n' : '\n' - # '\x85' : '\n' - # '\u2028' : '\u2028' - # '\u2029 : '\u2029' - # default : '' - ch = self.peek() - if ch in '\r\n\x85': - if self.prefix(2) == '\r\n': - self.forward(2) - else: - self.forward() - return '\n' - elif ch in '\u2028\u2029': - self.forward() - return ch - return '' diff --git a/venv/lib/python3.6/site-packages/yaml/serializer.py b/venv/lib/python3.6/site-packages/yaml/serializer.py deleted file mode 100644 index fe911e6..0000000 --- a/venv/lib/python3.6/site-packages/yaml/serializer.py +++ /dev/null @@ -1,111 +0,0 @@ - -__all__ = ['Serializer', 'SerializerError'] - -from .error import YAMLError -from .events import * -from .nodes import * - -class SerializerError(YAMLError): - pass - -class Serializer: - - ANCHOR_TEMPLATE = 'id%03d' - - def __init__(self, encoding=None, - explicit_start=None, explicit_end=None, version=None, tags=None): - self.use_encoding = encoding - self.use_explicit_start = explicit_start - self.use_explicit_end = explicit_end - self.use_version = version - self.use_tags = tags - self.serialized_nodes = {} - self.anchors = {} - self.last_anchor_id = 0 - self.closed = None - - def open(self): - if self.closed is None: - self.emit(StreamStartEvent(encoding=self.use_encoding)) - self.closed = False - elif self.closed: - raise SerializerError("serializer is closed") - else: - raise SerializerError("serializer is already opened") - - def close(self): - if self.closed is None: - raise SerializerError("serializer is not opened") - elif not self.closed: - self.emit(StreamEndEvent()) - self.closed = True - - #def __del__(self): - # self.close() - - def serialize(self, node): - if self.closed is None: - raise SerializerError("serializer is not opened") - elif self.closed: - raise SerializerError("serializer is closed") - self.emit(DocumentStartEvent(explicit=self.use_explicit_start, - version=self.use_version, tags=self.use_tags)) - self.anchor_node(node) - self.serialize_node(node, None, None) - self.emit(DocumentEndEvent(explicit=self.use_explicit_end)) - self.serialized_nodes = {} - self.anchors = {} - self.last_anchor_id = 0 - - def anchor_node(self, node): - if node in self.anchors: - if self.anchors[node] is None: - self.anchors[node] = self.generate_anchor(node) - else: - self.anchors[node] = None - if isinstance(node, SequenceNode): - for item in node.value: - self.anchor_node(item) - elif isinstance(node, MappingNode): - for key, value in node.value: - self.anchor_node(key) - self.anchor_node(value) - - def generate_anchor(self, node): - self.last_anchor_id += 1 - return self.ANCHOR_TEMPLATE % self.last_anchor_id - - def serialize_node(self, node, parent, index): - alias = self.anchors[node] - if node in self.serialized_nodes: - self.emit(AliasEvent(alias)) - else: - self.serialized_nodes[node] = True - self.descend_resolver(parent, index) - if isinstance(node, ScalarNode): - detected_tag = 
self.resolve(ScalarNode, node.value, (True, False)) - default_tag = self.resolve(ScalarNode, node.value, (False, True)) - implicit = (node.tag == detected_tag), (node.tag == default_tag) - self.emit(ScalarEvent(alias, node.tag, implicit, node.value, - style=node.style)) - elif isinstance(node, SequenceNode): - implicit = (node.tag - == self.resolve(SequenceNode, node.value, True)) - self.emit(SequenceStartEvent(alias, node.tag, implicit, - flow_style=node.flow_style)) - index = 0 - for item in node.value: - self.serialize_node(item, node, index) - index += 1 - self.emit(SequenceEndEvent()) - elif isinstance(node, MappingNode): - implicit = (node.tag - == self.resolve(MappingNode, node.value, True)) - self.emit(MappingStartEvent(alias, node.tag, implicit, - flow_style=node.flow_style)) - for key, value in node.value: - self.serialize_node(key, node, None) - self.serialize_node(value, node, key) - self.emit(MappingEndEvent()) - self.ascend_resolver() - diff --git a/venv/lib/python3.6/site-packages/yaml/tokens.py b/venv/lib/python3.6/site-packages/yaml/tokens.py deleted file mode 100644 index 4d0b48a..0000000 --- a/venv/lib/python3.6/site-packages/yaml/tokens.py +++ /dev/null @@ -1,104 +0,0 @@ - -class Token(object): - def __init__(self, start_mark, end_mark): - self.start_mark = start_mark - self.end_mark = end_mark - def __repr__(self): - attributes = [key for key in self.__dict__ - if not key.endswith('_mark')] - attributes.sort() - arguments = ', '.join(['%s=%r' % (key, getattr(self, key)) - for key in attributes]) - return '%s(%s)' % (self.__class__.__name__, arguments) - -#class BOMToken(Token): -# id = '' - -class DirectiveToken(Token): - id = '' - def __init__(self, name, value, start_mark, end_mark): - self.name = name - self.value = value - self.start_mark = start_mark - self.end_mark = end_mark - -class DocumentStartToken(Token): - id = '' - -class DocumentEndToken(Token): - id = '' - -class StreamStartToken(Token): - id = '' - def __init__(self, start_mark=None, end_mark=None, - encoding=None): - self.start_mark = start_mark - self.end_mark = end_mark - self.encoding = encoding - -class StreamEndToken(Token): - id = '' - -class BlockSequenceStartToken(Token): - id = '' - -class BlockMappingStartToken(Token): - id = '' - -class BlockEndToken(Token): - id = '' - -class FlowSequenceStartToken(Token): - id = '[' - -class FlowMappingStartToken(Token): - id = '{' - -class FlowSequenceEndToken(Token): - id = ']' - -class FlowMappingEndToken(Token): - id = '}' - -class KeyToken(Token): - id = '?' 
- -class ValueToken(Token): - id = ':' - -class BlockEntryToken(Token): - id = '-' - -class FlowEntryToken(Token): - id = ',' - -class AliasToken(Token): - id = '' - def __init__(self, value, start_mark, end_mark): - self.value = value - self.start_mark = start_mark - self.end_mark = end_mark - -class AnchorToken(Token): - id = '' - def __init__(self, value, start_mark, end_mark): - self.value = value - self.start_mark = start_mark - self.end_mark = end_mark - -class TagToken(Token): - id = '' - def __init__(self, value, start_mark, end_mark): - self.value = value - self.start_mark = start_mark - self.end_mark = end_mark - -class ScalarToken(Token): - id = '' - def __init__(self, value, plain, start_mark, end_mark, style=None): - self.value = value - self.plain = plain - self.start_mark = start_mark - self.end_mark = end_mark - self.style = style - diff --git a/venv/pip-selfcheck.json b/venv/pip-selfcheck.json deleted file mode 100644 index b454b3a..0000000 --- a/venv/pip-selfcheck.json +++ /dev/null @@ -1 +0,0 @@ -{"last_check":"2019-08-19T20:23:40Z","pypi_version":"19.2.2"} \ No newline at end of file diff --git a/venv/pyvenv.cfg b/venv/pyvenv.cfg deleted file mode 100644 index 10d86de..0000000 --- a/venv/pyvenv.cfg +++ /dev/null @@ -1,3 +0,0 @@ -home = /usr/local/Cellar/python/3.6.1/Frameworks/Python.framework/Versions/3.6/bin -include-system-site-packages = false -version = 3.6.1
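
The hunks above complete the removal of the committed virtual environment: the vendored PyYAML scanner, serializer, and token classes under venv/lib/python3.6/site-packages/yaml/, plus the venv's pip self-check cache and pyvenv.cfg. Equivalent behaviour stays available through PyYAML's public API once the environment is recreated locally and the dependency is installed from PyPI. A minimal sketch, assuming an installed PyYAML; yaml.scan(), yaml.safe_load() and yaml.dump() are the library's documented entry points, and the sample document is purely illustrative:

    import yaml

    document = "key: {nested: [1, 2, 3]}"

    # The Scanner deleted above is what produces this token stream
    # (StreamStartToken, BlockMappingStartToken, KeyToken, ScalarToken, ...).
    for token in yaml.scan(document):
        print(token)

    # Typical usage goes through the high-level API instead; dump() drives the
    # Serializer/Emitter pipeline that the deleted serializer.py implemented.
    data = yaml.safe_load(document)    # {'key': {'nested': [1, 2, 3]}}
    print(yaml.dump(data, default_flow_style=False))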