-
-
Save mouadessalim/294d964a7ff52f82683c0e511c5958cb to your computer and use it in GitHub Desktop.
<?xml version="1.0" encoding="UTF-8"?> | |
<project version="4"> | |
<component name="Encoding" addBOMForNewFiles="with NO BOM" /> | |
</project> |
<?xml version="1.0" encoding="UTF-8"?> | |
<project version="4"> | |
<component name="ProjectRootManager" version="2" project-jdk-name="Python 3.6 (MonApp)" project-jdk-type="Python SDK" /> | |
</project> |
<?xml version="1.0" encoding="UTF-8"?> | |
<project version="4"> | |
<component name="ProjectModuleManager"> | |
<modules> | |
<module fileurl="file://$PROJECT_DIR$/.idea/MonApp.iml" filepath="$PROJECT_DIR$/.idea/MonApp.iml" /> | |
</modules> | |
</component> | |
</project> |
<?xml version="1.0" encoding="UTF-8"?> | |
<module type="PYTHON_MODULE" version="4"> | |
<component name="NewModuleRootManager"> | |
<content url="file://$MODULE_DIR$"> | |
<excludeFolder url="file://$MODULE_DIR$/venv" /> | |
</content> | |
<orderEntry type="inheritedJdk" /> | |
<orderEntry type="sourceFolder" forTests="false" /> | |
</component> | |
<component name="TestRunnerService"> | |
<option name="PROJECT_TEST_RUNNER" value="Unittests" /> | |
</component> | |
</module> |
# Minimal Tkinter demo: create one empty top-level window and run its event
# loop until the user closes the window.
#
# Uses an explicit namespaced import instead of `from tkinter import *`:
# the wildcard form pollutes the module namespace with ~190 names and can
# shadow builtins.
import tkinter as tk

window1 = tk.Tk()
window1.mainloop()
./setuptools-39.1.0-py3.6.egg | |
./pip-10.0.1-py3.6.egg |
[console_scripts] | |
pip = pip._internal:main | |
pip3 = pip._internal:main | |
pip3.6 = pip._internal:main | |
Metadata-Version: 2.1 | |
Name: pip | |
Version: 10.0.1 | |
Summary: The PyPA recommended tool for installing Python packages. | |
Home-page: https://pip.pypa.io/ | |
Author: The pip developers | |
Author-email: [email protected] | |
License: MIT | |
Description: pip | |
=== | |
The `PyPA recommended`_ tool for installing Python packages. | |
.. image:: https://img.shields.io/pypi/v/pip.svg | |
:target: https://pypi.org/project/pip/ | |
.. image:: https://img.shields.io/travis/pypa/pip/master.svg | |
:target: http://travis-ci.org/pypa/pip | |
.. image:: https://img.shields.io/appveyor/ci/pypa/pip.svg | |
:target: https://ci.appveyor.com/project/pypa/pip/history | |
.. image:: https://readthedocs.org/projects/pip/badge/?version=latest | |
:target: https://pip.pypa.io/en/latest | |
* `Installation`_ | |
* `Documentation`_ | |
* `Changelog`_ | |
* `GitHub Page`_ | |
* `Issue Tracking`_ | |
* `User mailing list`_ | |
* `Dev mailing list`_ | |
* User IRC: #pypa on Freenode. | |
* Dev IRC: #pypa-dev on Freenode. | |
Code of Conduct | |
--------------- | |
Everyone interacting in the pip project's codebases, issue trackers, chat | |
rooms and mailing lists is expected to follow the `PyPA Code of Conduct`_. | |
.. _PyPA recommended: https://packaging.python.org/en/latest/current/ | |
.. _Installation: https://pip.pypa.io/en/stable/installing.html | |
.. _Documentation: https://pip.pypa.io/en/stable/ | |
.. _Changelog: https://pip.pypa.io/en/stable/news.html | |
.. _GitHub Page: https://github.com/pypa/pip | |
.. _Issue Tracking: https://github.com/pypa/pip/issues | |
.. _User mailing list: http://groups.google.com/group/python-virtualenv | |
.. _Dev mailing list: http://groups.google.com/group/pypa-dev | |
.. _PyPA Code of Conduct: https://www.pypa.io/en/latest/code-of-conduct/ | |
Keywords: easy_install distutils setuptools egg virtualenv | |
Platform: UNKNOWN | |
Classifier: Development Status :: 5 - Production/Stable | |
Classifier: Intended Audience :: Developers | |
Classifier: License :: OSI Approved :: MIT License | |
Classifier: Topic :: Software Development :: Build Tools | |
Classifier: Programming Language :: Python | |
Classifier: Programming Language :: Python :: 2 | |
Classifier: Programming Language :: Python :: 2.7 | |
Classifier: Programming Language :: Python :: 3 | |
Classifier: Programming Language :: Python :: 3.3 | |
Classifier: Programming Language :: Python :: 3.4 | |
Classifier: Programming Language :: Python :: 3.5 | |
Classifier: Programming Language :: Python :: 3.6 | |
Classifier: Programming Language :: Python :: Implementation :: CPython | |
Classifier: Programming Language :: Python :: Implementation :: PyPy | |
Requires-Python: >=2.7,!=3.0.*,!=3.1.*,!=3.2.* | |
Provides-Extra: testing |
[testing] | |
pytest | |
mock | |
pretend | |
scripttest>=1.3 | |
virtualenv>=1.10 | |
freezegun |
AUTHORS.txt | |
LICENSE.txt | |
MANIFEST.in | |
NEWS.rst | |
README.rst | |
pyproject.toml | |
setup.cfg | |
setup.py | |
docs/Makefile | |
docs/__init__.py | |
docs/conf.py | |
docs/configuration.rst | |
docs/cookbook.rst | |
docs/development.rst | |
docs/docutils.conf | |
docs/index.rst | |
docs/installing.rst | |
docs/logic.rst | |
docs/make.bat | |
docs/news.rst | |
docs/pipext.py | |
docs/quickstart.rst | |
docs/usage.rst | |
docs/user_guide.rst | |
docs/man/pip.rst | |
docs/man/commands/check.rst | |
docs/man/commands/config.rst | |
docs/man/commands/download.rst | |
docs/man/commands/freeze.rst | |
docs/man/commands/hash.rst | |
docs/man/commands/help.rst | |
docs/man/commands/install.rst | |
docs/man/commands/list.rst | |
docs/man/commands/search.rst | |
docs/man/commands/show.rst | |
docs/man/commands/uninstall.rst | |
docs/man/commands/wheel.rst | |
docs/reference/index.rst | |
docs/reference/pip.rst | |
docs/reference/pip_check.rst | |
docs/reference/pip_config.rst | |
docs/reference/pip_download.rst | |
docs/reference/pip_freeze.rst | |
docs/reference/pip_hash.rst | |
docs/reference/pip_install.rst | |
docs/reference/pip_list.rst | |
docs/reference/pip_search.rst | |
docs/reference/pip_show.rst | |
docs/reference/pip_uninstall.rst | |
docs/reference/pip_wheel.rst | |
src/pip/__init__.py | |
src/pip/__main__.py | |
src/pip.egg-info/PKG-INFO | |
src/pip.egg-info/SOURCES.txt | |
src/pip.egg-info/dependency_links.txt | |
src/pip.egg-info/entry_points.txt | |
src/pip.egg-info/not-zip-safe | |
src/pip.egg-info/requires.txt | |
src/pip.egg-info/top_level.txt | |
src/pip/_internal/__init__.py | |
src/pip/_internal/basecommand.py | |
src/pip/_internal/baseparser.py | |
src/pip/_internal/build_env.py | |
src/pip/_internal/cache.py | |
src/pip/_internal/cmdoptions.py | |
src/pip/_internal/compat.py | |
src/pip/_internal/configuration.py | |
src/pip/_internal/download.py | |
src/pip/_internal/exceptions.py | |
src/pip/_internal/index.py | |
src/pip/_internal/locations.py | |
src/pip/_internal/pep425tags.py | |
src/pip/_internal/resolve.py | |
src/pip/_internal/status_codes.py | |
src/pip/_internal/wheel.py | |
src/pip/_internal/commands/__init__.py | |
src/pip/_internal/commands/check.py | |
src/pip/_internal/commands/completion.py | |
src/pip/_internal/commands/configuration.py | |
src/pip/_internal/commands/download.py | |
src/pip/_internal/commands/freeze.py | |
src/pip/_internal/commands/hash.py | |
src/pip/_internal/commands/help.py | |
src/pip/_internal/commands/install.py | |
src/pip/_internal/commands/list.py | |
src/pip/_internal/commands/search.py | |
src/pip/_internal/commands/show.py | |
src/pip/_internal/commands/uninstall.py | |
src/pip/_internal/commands/wheel.py | |
src/pip/_internal/models/__init__.py | |
src/pip/_internal/models/index.py | |
src/pip/_internal/operations/__init__.py | |
src/pip/_internal/operations/check.py | |
src/pip/_internal/operations/freeze.py | |
src/pip/_internal/operations/prepare.py | |
src/pip/_internal/req/__init__.py | |
src/pip/_internal/req/req_file.py | |
src/pip/_internal/req/req_install.py | |
src/pip/_internal/req/req_set.py | |
src/pip/_internal/req/req_uninstall.py | |
src/pip/_internal/utils/__init__.py | |
src/pip/_internal/utils/appdirs.py | |
src/pip/_internal/utils/deprecation.py | |
src/pip/_internal/utils/encoding.py | |
src/pip/_internal/utils/filesystem.py | |
src/pip/_internal/utils/glibc.py | |
src/pip/_internal/utils/hashes.py | |
src/pip/_internal/utils/logging.py | |
src/pip/_internal/utils/misc.py | |
src/pip/_internal/utils/outdated.py | |
src/pip/_internal/utils/packaging.py | |
src/pip/_internal/utils/setuptools_build.py | |
src/pip/_internal/utils/temp_dir.py | |
src/pip/_internal/utils/typing.py | |
src/pip/_internal/utils/ui.py | |
src/pip/_internal/vcs/__init__.py | |
src/pip/_internal/vcs/bazaar.py | |
src/pip/_internal/vcs/git.py | |
src/pip/_internal/vcs/mercurial.py | |
src/pip/_internal/vcs/subversion.py | |
src/pip/_vendor/README.rst | |
src/pip/_vendor/__init__.py | |
src/pip/_vendor/appdirs.py | |
src/pip/_vendor/distro.py | |
src/pip/_vendor/ipaddress.py | |
src/pip/_vendor/pyparsing.py | |
src/pip/_vendor/retrying.py | |
src/pip/_vendor/six.py | |
src/pip/_vendor/vendor.txt | |
src/pip/_vendor/cachecontrol/__init__.py | |
src/pip/_vendor/cachecontrol/_cmd.py | |
src/pip/_vendor/cachecontrol/adapter.py | |
src/pip/_vendor/cachecontrol/cache.py | |
src/pip/_vendor/cachecontrol/compat.py | |
src/pip/_vendor/cachecontrol/controller.py | |
src/pip/_vendor/cachecontrol/filewrapper.py | |
src/pip/_vendor/cachecontrol/heuristics.py | |
src/pip/_vendor/cachecontrol/serialize.py | |
src/pip/_vendor/cachecontrol/wrapper.py | |
src/pip/_vendor/cachecontrol/caches/__init__.py | |
src/pip/_vendor/cachecontrol/caches/file_cache.py | |
src/pip/_vendor/cachecontrol/caches/redis_cache.py | |
src/pip/_vendor/certifi/__init__.py | |
src/pip/_vendor/certifi/__main__.py | |
src/pip/_vendor/certifi/cacert.pem | |
src/pip/_vendor/certifi/core.py | |
src/pip/_vendor/chardet/__init__.py | |
src/pip/_vendor/chardet/big5freq.py | |
src/pip/_vendor/chardet/big5prober.py | |
src/pip/_vendor/chardet/chardistribution.py | |
src/pip/_vendor/chardet/charsetgroupprober.py | |
src/pip/_vendor/chardet/charsetprober.py | |
src/pip/_vendor/chardet/codingstatemachine.py | |
src/pip/_vendor/chardet/compat.py | |
src/pip/_vendor/chardet/cp949prober.py | |
src/pip/_vendor/chardet/enums.py | |
src/pip/_vendor/chardet/escprober.py | |
src/pip/_vendor/chardet/escsm.py | |
src/pip/_vendor/chardet/eucjpprober.py | |
src/pip/_vendor/chardet/euckrfreq.py | |
src/pip/_vendor/chardet/euckrprober.py | |
src/pip/_vendor/chardet/euctwfreq.py | |
src/pip/_vendor/chardet/euctwprober.py | |
src/pip/_vendor/chardet/gb2312freq.py | |
src/pip/_vendor/chardet/gb2312prober.py | |
src/pip/_vendor/chardet/hebrewprober.py | |
src/pip/_vendor/chardet/jisfreq.py | |
src/pip/_vendor/chardet/jpcntx.py | |
src/pip/_vendor/chardet/langbulgarianmodel.py | |
src/pip/_vendor/chardet/langcyrillicmodel.py | |
src/pip/_vendor/chardet/langgreekmodel.py | |
src/pip/_vendor/chardet/langhebrewmodel.py | |
src/pip/_vendor/chardet/langhungarianmodel.py | |
src/pip/_vendor/chardet/langthaimodel.py | |
src/pip/_vendor/chardet/langturkishmodel.py | |
src/pip/_vendor/chardet/latin1prober.py | |
src/pip/_vendor/chardet/mbcharsetprober.py | |
src/pip/_vendor/chardet/mbcsgroupprober.py | |
src/pip/_vendor/chardet/mbcssm.py | |
src/pip/_vendor/chardet/sbcharsetprober.py | |
src/pip/_vendor/chardet/sbcsgroupprober.py | |
src/pip/_vendor/chardet/sjisprober.py | |
src/pip/_vendor/chardet/universaldetector.py | |
src/pip/_vendor/chardet/utf8prober.py | |
src/pip/_vendor/chardet/version.py | |
src/pip/_vendor/chardet/cli/__init__.py | |
src/pip/_vendor/chardet/cli/chardetect.py | |
src/pip/_vendor/colorama/__init__.py | |
src/pip/_vendor/colorama/ansi.py | |
src/pip/_vendor/colorama/ansitowin32.py | |
src/pip/_vendor/colorama/initialise.py | |
src/pip/_vendor/colorama/win32.py | |
src/pip/_vendor/colorama/winterm.py | |
src/pip/_vendor/distlib/__init__.py | |
src/pip/_vendor/distlib/compat.py | |
src/pip/_vendor/distlib/database.py | |
src/pip/_vendor/distlib/index.py | |
src/pip/_vendor/distlib/locators.py | |
src/pip/_vendor/distlib/manifest.py | |
src/pip/_vendor/distlib/markers.py | |
src/pip/_vendor/distlib/metadata.py | |
src/pip/_vendor/distlib/resources.py | |
src/pip/_vendor/distlib/scripts.py | |
src/pip/_vendor/distlib/t32.exe | |
src/pip/_vendor/distlib/t64.exe | |
src/pip/_vendor/distlib/util.py | |
src/pip/_vendor/distlib/version.py | |
src/pip/_vendor/distlib/w32.exe | |
src/pip/_vendor/distlib/w64.exe | |
src/pip/_vendor/distlib/wheel.py | |
src/pip/_vendor/distlib/_backport/__init__.py | |
src/pip/_vendor/distlib/_backport/misc.py | |
src/pip/_vendor/distlib/_backport/shutil.py | |
src/pip/_vendor/distlib/_backport/sysconfig.cfg | |
src/pip/_vendor/distlib/_backport/sysconfig.py | |
src/pip/_vendor/distlib/_backport/tarfile.py | |
src/pip/_vendor/html5lib/__init__.py | |
src/pip/_vendor/html5lib/_ihatexml.py | |
src/pip/_vendor/html5lib/_inputstream.py | |
src/pip/_vendor/html5lib/_tokenizer.py | |
src/pip/_vendor/html5lib/_utils.py | |
src/pip/_vendor/html5lib/constants.py | |
src/pip/_vendor/html5lib/html5parser.py | |
src/pip/_vendor/html5lib/serializer.py | |
src/pip/_vendor/html5lib/_trie/__init__.py | |
src/pip/_vendor/html5lib/_trie/_base.py | |
src/pip/_vendor/html5lib/_trie/datrie.py | |
src/pip/_vendor/html5lib/_trie/py.py | |
src/pip/_vendor/html5lib/filters/__init__.py | |
src/pip/_vendor/html5lib/filters/alphabeticalattributes.py | |
src/pip/_vendor/html5lib/filters/base.py | |
src/pip/_vendor/html5lib/filters/inject_meta_charset.py | |
src/pip/_vendor/html5lib/filters/lint.py | |
src/pip/_vendor/html5lib/filters/optionaltags.py | |
src/pip/_vendor/html5lib/filters/sanitizer.py | |
src/pip/_vendor/html5lib/filters/whitespace.py | |
src/pip/_vendor/html5lib/treeadapters/__init__.py | |
src/pip/_vendor/html5lib/treeadapters/genshi.py | |
src/pip/_vendor/html5lib/treeadapters/sax.py | |
src/pip/_vendor/html5lib/treebuilders/__init__.py | |
src/pip/_vendor/html5lib/treebuilders/base.py | |
src/pip/_vendor/html5lib/treebuilders/dom.py | |
src/pip/_vendor/html5lib/treebuilders/etree.py | |
src/pip/_vendor/html5lib/treebuilders/etree_lxml.py | |
src/pip/_vendor/html5lib/treewalkers/__init__.py | |
src/pip/_vendor/html5lib/treewalkers/base.py | |
src/pip/_vendor/html5lib/treewalkers/dom.py | |
src/pip/_vendor/html5lib/treewalkers/etree.py | |
src/pip/_vendor/html5lib/treewalkers/etree_lxml.py | |
src/pip/_vendor/html5lib/treewalkers/genshi.py | |
src/pip/_vendor/idna/__init__.py | |
src/pip/_vendor/idna/codec.py | |
src/pip/_vendor/idna/compat.py | |
src/pip/_vendor/idna/core.py | |
src/pip/_vendor/idna/idnadata.py | |
src/pip/_vendor/idna/intranges.py | |
src/pip/_vendor/idna/package_data.py | |
src/pip/_vendor/idna/uts46data.py | |
src/pip/_vendor/lockfile/__init__.py | |
src/pip/_vendor/lockfile/linklockfile.py | |
src/pip/_vendor/lockfile/mkdirlockfile.py | |
src/pip/_vendor/lockfile/pidlockfile.py | |
src/pip/_vendor/lockfile/sqlitelockfile.py | |
src/pip/_vendor/lockfile/symlinklockfile.py | |
src/pip/_vendor/msgpack/__init__.py | |
src/pip/_vendor/msgpack/_version.py | |
src/pip/_vendor/msgpack/exceptions.py | |
src/pip/_vendor/msgpack/fallback.py | |
src/pip/_vendor/packaging/__about__.py | |
src/pip/_vendor/packaging/__init__.py | |
src/pip/_vendor/packaging/_compat.py | |
src/pip/_vendor/packaging/_structures.py | |
src/pip/_vendor/packaging/markers.py | |
src/pip/_vendor/packaging/requirements.py | |
src/pip/_vendor/packaging/specifiers.py | |
src/pip/_vendor/packaging/utils.py | |
src/pip/_vendor/packaging/version.py | |
src/pip/_vendor/pkg_resources/__init__.py | |
src/pip/_vendor/pkg_resources/py31compat.py | |
src/pip/_vendor/progress/__init__.py | |
src/pip/_vendor/progress/bar.py | |
src/pip/_vendor/progress/counter.py | |
src/pip/_vendor/progress/helpers.py | |
src/pip/_vendor/progress/spinner.py | |
src/pip/_vendor/pytoml/__init__.py | |
src/pip/_vendor/pytoml/core.py | |
src/pip/_vendor/pytoml/parser.py | |
src/pip/_vendor/pytoml/writer.py | |
src/pip/_vendor/requests/__init__.py | |
src/pip/_vendor/requests/__version__.py | |
src/pip/_vendor/requests/_internal_utils.py | |
src/pip/_vendor/requests/adapters.py | |
src/pip/_vendor/requests/api.py | |
src/pip/_vendor/requests/auth.py | |
src/pip/_vendor/requests/certs.py | |
src/pip/_vendor/requests/compat.py | |
src/pip/_vendor/requests/cookies.py | |
src/pip/_vendor/requests/exceptions.py | |
src/pip/_vendor/requests/help.py | |
src/pip/_vendor/requests/hooks.py | |
src/pip/_vendor/requests/models.py | |
src/pip/_vendor/requests/packages.py | |
src/pip/_vendor/requests/sessions.py | |
src/pip/_vendor/requests/status_codes.py | |
src/pip/_vendor/requests/structures.py | |
src/pip/_vendor/requests/utils.py | |
src/pip/_vendor/urllib3/__init__.py | |
src/pip/_vendor/urllib3/_collections.py | |
src/pip/_vendor/urllib3/connection.py | |
src/pip/_vendor/urllib3/connectionpool.py | |
src/pip/_vendor/urllib3/exceptions.py | |
src/pip/_vendor/urllib3/fields.py | |
src/pip/_vendor/urllib3/filepost.py | |
src/pip/_vendor/urllib3/poolmanager.py | |
src/pip/_vendor/urllib3/request.py | |
src/pip/_vendor/urllib3/response.py | |
src/pip/_vendor/urllib3/contrib/__init__.py | |
src/pip/_vendor/urllib3/contrib/appengine.py | |
src/pip/_vendor/urllib3/contrib/ntlmpool.py | |
src/pip/_vendor/urllib3/contrib/pyopenssl.py | |
src/pip/_vendor/urllib3/contrib/securetransport.py | |
src/pip/_vendor/urllib3/contrib/socks.py | |
src/pip/_vendor/urllib3/contrib/_securetransport/__init__.py | |
src/pip/_vendor/urllib3/contrib/_securetransport/bindings.py | |
src/pip/_vendor/urllib3/contrib/_securetransport/low_level.py | |
src/pip/_vendor/urllib3/packages/__init__.py | |
src/pip/_vendor/urllib3/packages/ordered_dict.py | |
src/pip/_vendor/urllib3/packages/six.py | |
src/pip/_vendor/urllib3/packages/backports/__init__.py | |
src/pip/_vendor/urllib3/packages/backports/makefile.py | |
src/pip/_vendor/urllib3/packages/ssl_match_hostname/__init__.py | |
src/pip/_vendor/urllib3/packages/ssl_match_hostname/_implementation.py | |
src/pip/_vendor/urllib3/util/__init__.py | |
src/pip/_vendor/urllib3/util/connection.py | |
src/pip/_vendor/urllib3/util/request.py | |
src/pip/_vendor/urllib3/util/response.py | |
src/pip/_vendor/urllib3/util/retry.py | |
src/pip/_vendor/urllib3/util/selectors.py | |
src/pip/_vendor/urllib3/util/ssl_.py | |
src/pip/_vendor/urllib3/util/timeout.py | |
src/pip/_vendor/urllib3/util/url.py | |
src/pip/_vendor/urllib3/util/wait.py | |
src/pip/_vendor/webencodings/__init__.py | |
src/pip/_vendor/webencodings/labels.py | |
src/pip/_vendor/webencodings/mklabels.py | |
src/pip/_vendor/webencodings/tests.py | |
src/pip/_vendor/webencodings/x_user_defined.py |
pip |
__version__ = "10.0.1" |
"""Allow pip to be executed as ``python -m pip`` or directly from a wheel."""
from __future__ import absolute_import

import os
import sys

# If we are running from a wheel, add the wheel to sys.path.
# This allows the usage: python pip-*.whl/pip install pip-*.whl
if __package__ == '':
    # __file__ is pip-*.whl/pip/__main__.py
    # first dirname call strips off '/__main__.py', second strips off '/pip'.
    # The resulting path is the wheel file itself; adding it to sys.path lets
    # Python import pip from inside the zip archive.
    path = os.path.dirname(os.path.dirname(__file__))
    sys.path.insert(0, path)

from pip._internal import main as _main  # noqa

if __name__ == '__main__':
    # Propagate pip's exit status to the shell.
    sys.exit(_main())
#!/usr/bin/env python | |
from __future__ import absolute_import | |
import locale | |
import logging | |
import os | |
import optparse | |
import warnings | |
import sys | |
# 2016-06-17 [email protected]: urllib3 1.14 added optional support for socks, | |
# but if invoked (i.e. imported), it will issue a warning to stderr if socks | |
# isn't available. requests unconditionally imports urllib3's socks contrib | |
# module, triggering this warning. The warning breaks DEP-8 tests (because of | |
# the stderr output) and is just plain annoying in normal usage. I don't want | |
# to add socks as yet another dependency for pip, nor do I want to allow-stderr | |
# in the DEP-8 tests, so just suppress the warning. pdb tells me this has to | |
# be done before the import of pip.vcs. | |
from pip._vendor.urllib3.exceptions import DependencyWarning | |
warnings.filterwarnings("ignore", category=DependencyWarning) # noqa | |
# We want to inject the use of SecureTransport as early as possible so that any | |
# references or sessions or what have you are ensured to have it, however we | |
# only want to do this in the case that we're running on macOS and the linked | |
# OpenSSL is too old to handle TLSv1.2 | |
try: | |
import ssl | |
except ImportError: | |
pass | |
else: | |
# Checks for OpenSSL 1.0.1 on MacOS | |
if sys.platform == "darwin" and ssl.OPENSSL_VERSION_NUMBER < 0x1000100f: | |
try: | |
from pip._vendor.urllib3.contrib import securetransport | |
except (ImportError, OSError): | |
pass | |
else: | |
securetransport.inject_into_urllib3() | |
from pip import __version__ | |
from pip._internal import cmdoptions | |
from pip._internal.exceptions import CommandError, PipError | |
from pip._internal.utils.misc import get_installed_distributions, get_prog | |
from pip._internal.utils import deprecation | |
from pip._internal.vcs import git, mercurial, subversion, bazaar # noqa | |
from pip._internal.baseparser import ( | |
ConfigOptionParser, UpdatingDefaultsHelpFormatter, | |
) | |
from pip._internal.commands import get_summaries, get_similar_commands | |
from pip._internal.commands import commands_dict | |
from pip._vendor.urllib3.exceptions import InsecureRequestWarning | |
logger = logging.getLogger(__name__) | |
# Hide the InsecureRequestWarning from urllib3 | |
warnings.filterwarnings("ignore", category=InsecureRequestWarning) | |
def autocomplete():
    """Command and option completion for the main option parser (and options)
    and its subcommands (and options).

    Enable by sourcing one of the completion shell scripts (bash, zsh or fish).

    Reads the shell-completion protocol from the environment
    (``COMP_WORDS``/``COMP_CWORD``), prints candidate completions to stdout,
    and always terminates the process via ``sys.exit(1)`` so that no normal
    pip command runs in completion mode.
    """
    # Don't complete if user hasn't sourced bash_completion file.
    if 'PIP_AUTO_COMPLETE' not in os.environ:
        return
    # COMP_WORDS holds the whole command line; drop the leading "pip" word.
    cwords = os.environ['COMP_WORDS'].split()[1:]
    cword = int(os.environ['COMP_CWORD'])
    try:
        # The word currently being completed (may be empty at end of line).
        current = cwords[cword - 1]
    except IndexError:
        current = ''
    subcommands = [cmd for cmd, summary in get_summaries()]
    options = []
    # subcommand: first typed word that matches a known subcommand, if any.
    try:
        subcommand_name = [w for w in cwords if w in subcommands][0]
    except IndexError:
        subcommand_name = None

    parser = create_main_parser()
    # subcommand options
    if subcommand_name:
        # special case: 'help' subcommand has no options
        if subcommand_name == 'help':
            sys.exit(1)
        # special case: list locally installed dists for show and uninstall
        should_list_installed = (
            subcommand_name in ['show', 'uninstall'] and
            not current.startswith('-')
        )
        if should_list_installed:
            installed = []
            lc = current.lower()
            for dist in get_installed_distributions(local_only=True):
                # Skip dists already named earlier on the command line.
                if dist.key.startswith(lc) and dist.key not in cwords[1:]:
                    installed.append(dist.key)
            # if there are no dists installed, fall back to option completion
            if installed:
                for dist in installed:
                    print(dist)
                sys.exit(1)

        subcommand = commands_dict[subcommand_name]()

        for opt in subcommand.parser.option_list_all:
            if opt.help != optparse.SUPPRESS_HELP:
                for opt_str in opt._long_opts + opt._short_opts:
                    # Remember nargs so we know whether the option takes a value.
                    options.append((opt_str, opt.nargs))

        # filter out previously specified options from available options
        prev_opts = [x.split('=')[0] for x in cwords[1:cword - 1]]
        options = [(x, v) for (x, v) in options if x not in prev_opts]
        # filter options by current input
        options = [(k, v) for k, v in options if k.startswith(current)]
        for option in options:
            opt_label = option[0]
            # append '=' to options which require args
            if option[1] and option[0][:2] == "--":
                opt_label += '='
            print(opt_label)
    else:
        # show main parser options only when necessary
        if current.startswith('-') or current.startswith('--'):
            opts = [i.option_list for i in parser.option_groups]
            opts.append(parser.option_list)
            opts = (o for it in opts for o in it)
            for opt in opts:
                if opt.help != optparse.SUPPRESS_HELP:
                    subcommands += opt._long_opts + opt._short_opts

        print(' '.join([x for x in subcommands if x.startswith(current)]))
    sys.exit(1)
def create_main_parser():
    """Build and return the top-level ConfigOptionParser for pip.

    The parser carries the general option group, a version banner, and a
    description that lists every available subcommand.
    """
    parser_kw = {
        'usage': '\n%prog <command> [options]',
        'add_help_option': False,
        'formatter': UpdatingDefaultsHelpFormatter(),
        'name': 'global',
        'prog': get_prog(),
    }

    parser = ConfigOptionParser(**parser_kw)
    # Stop option parsing at the first positional argument so that a
    # subcommand's options are left for the subcommand's own parser.
    parser.disable_interspersed_args()

    pip_pkg_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
    # Use sys.version_info rather than slicing sys.version: `sys.version[:3]`
    # truncates "3.10.x" to "3.1", misreporting the interpreter version on
    # Python >= 3.10.
    parser.version = 'pip %s from %s (python %s)' % (
        __version__, pip_pkg_dir, '%d.%d' % sys.version_info[:2],
    )

    # add the general options
    gen_opts = cmdoptions.make_option_group(cmdoptions.general_group, parser)
    parser.add_option_group(gen_opts)

    parser.main = True  # so the help formatter knows

    # create command listing for description
    command_summaries = get_summaries()
    description = [''] + ['%-27s %s' % (i, j) for i, j in command_summaries]
    parser.description = '\n'.join(description)

    return parser
def parseopts(args):
    """Split ``args`` into a subcommand name and the remaining arguments.

    Handles ``--version`` and bare ``pip``/``pip help`` by printing and
    exiting; raises CommandError for an unknown subcommand.
    """
    parser = create_main_parser()

    # The parser has interspersed args disabled, so parse_args() splits the
    # initial args into the general options that come before the subcommand
    # and everything else.
    # For example:
    #   args:            ['--timeout=5', 'install', '--user', 'INITools']
    #   general_options: ['--timeout==5']
    #   args_else:       ['install', '--user', 'INITools']
    general_options, args_else = parser.parse_args(args)

    # --version: emit the banner and stop.
    if general_options.version:
        sys.stdout.write(parser.version)
        sys.stdout.write(os.linesep)
        sys.exit()

    # Bare `pip` or `pip help`: show the top-level help and stop.
    if not args_else or args_else == ['help']:
        parser.print_help()
        sys.exit()

    # The subcommand is the first non-option argument.
    cmd_name = args_else[0]
    if cmd_name not in commands_dict:
        message = 'unknown command "%s"' % cmd_name
        guess = get_similar_commands(cmd_name)
        if guess:
            message += ' - maybe you meant "%s"' % guess
        raise CommandError(message)

    # All original args minus the first occurrence of the subcommand name.
    cmd_args = args[:]
    cmd_args.remove(cmd_name)
    return cmd_name, cmd_args
def check_isolated(args):
    """Return True when the ``--isolated`` flag appears in ``args``."""
    return "--isolated" in args
def main(args=None):
    """Primary console entry point for pip.

    :param args: argv-style argument list; defaults to ``sys.argv[1:]``.
    :return: the subcommand's integer exit status (from ``command.main``).
    """
    if args is None:
        args = sys.argv[1:]

    # Configure our deprecation warnings to be sent through loggers
    deprecation.install_warning_logger()

    # No-op unless PIP_AUTO_COMPLETE is set in the environment; in completion
    # mode this prints candidates and exits the process.
    autocomplete()

    try:
        cmd_name, cmd_args = parseopts(args)
    except PipError as exc:
        sys.stderr.write("ERROR: %s" % exc)
        sys.stderr.write(os.linesep)
        sys.exit(1)

    # Needed for locale.getpreferredencoding(False) to work
    # in pip._internal.utils.encoding.auto_decode
    try:
        locale.setlocale(locale.LC_ALL, '')
    except locale.Error as e:
        # setlocale can apparently crash if locale are uninitialized
        logger.debug("Ignoring error %s when setting locale", e)

    # Instantiate the subcommand, propagating --isolated, and run it.
    command = commands_dict[cmd_name](isolated=check_isolated(cmd_args))
    return command.main(cmd_args)
"""Base Command class, and related routines""" | |
from __future__ import absolute_import | |
import logging | |
import logging.config | |
import optparse | |
import os | |
import sys | |
import warnings | |
from pip._internal import cmdoptions | |
from pip._internal.baseparser import ( | |
ConfigOptionParser, UpdatingDefaultsHelpFormatter, | |
) | |
from pip._internal.compat import WINDOWS | |
from pip._internal.download import PipSession | |
from pip._internal.exceptions import ( | |
BadCommand, CommandError, InstallationError, PreviousBuildDirError, | |
UninstallationError, | |
) | |
from pip._internal.index import PackageFinder | |
from pip._internal.locations import running_under_virtualenv | |
from pip._internal.req.req_file import parse_requirements | |
from pip._internal.req.req_install import InstallRequirement | |
from pip._internal.status_codes import ( | |
ERROR, PREVIOUS_BUILD_DIR_ERROR, SUCCESS, UNKNOWN_ERROR, | |
VIRTUALENV_NOT_FOUND, | |
) | |
from pip._internal.utils import deprecation | |
from pip._internal.utils.logging import IndentingFormatter | |
from pip._internal.utils.misc import get_prog, normalize_path | |
from pip._internal.utils.outdated import pip_version_check | |
from pip._internal.utils.typing import MYPY_CHECK_RUNNING | |
if MYPY_CHECK_RUNNING: | |
from typing import Optional | |
__all__ = ['Command'] | |
logger = logging.getLogger(__name__) | |
class Command(object): | |
name = None # type: Optional[str] | |
usage = None # type: Optional[str] | |
hidden = False # type: bool | |
ignore_require_venv = False # type: bool | |
log_streams = ("ext://sys.stdout", "ext://sys.stderr") | |
def __init__(self, isolated=False): | |
parser_kw = { | |
'usage': self.usage, | |
'prog': '%s %s' % (get_prog(), self.name), | |
'formatter': UpdatingDefaultsHelpFormatter(), | |
'add_help_option': False, | |
'name': self.name, | |
'description': self.__doc__, | |
'isolated': isolated, | |
} | |
self.parser = ConfigOptionParser(**parser_kw) | |
# Commands should add options to this option group | |
optgroup_name = '%s Options' % self.name.capitalize() | |
self.cmd_opts = optparse.OptionGroup(self.parser, optgroup_name) | |
# Add the general options | |
gen_opts = cmdoptions.make_option_group( | |
cmdoptions.general_group, | |
self.parser, | |
) | |
self.parser.add_option_group(gen_opts) | |
def _build_session(self, options, retries=None, timeout=None): | |
session = PipSession( | |
cache=( | |
normalize_path(os.path.join(options.cache_dir, "http")) | |
if options.cache_dir else None | |
), | |
retries=retries if retries is not None else options.retries, | |
insecure_hosts=options.trusted_hosts, | |
) | |
# Handle custom ca-bundles from the user | |
if options.cert: | |
session.verify = options.cert | |
# Handle SSL client certificate | |
if options.client_cert: | |
session.cert = options.client_cert | |
# Handle timeouts | |
if options.timeout or timeout: | |
session.timeout = ( | |
timeout if timeout is not None else options.timeout | |
) | |
# Handle configured proxies | |
if options.proxy: | |
session.proxies = { | |
"http": options.proxy, | |
"https": options.proxy, | |
} | |
# Determine if we can prompt the user for authentication or not | |
session.auth.prompting = not options.no_input | |
return session | |
    def parse_args(self, args):
        """Parse ``args`` with this command's option parser.

        Factored out as its own method for testability; returns the
        ``(options, args)`` pair produced by the parser.
        """
        return self.parser.parse_args(args)
def main(self, args): | |
options, args = self.parse_args(args) | |
# Set verbosity so that it can be used elsewhere. | |
self.verbosity = options.verbose - options.quiet | |
if self.verbosity >= 1: | |
level = "DEBUG" | |
elif self.verbosity == -1: | |
level = "WARNING" | |
elif self.verbosity == -2: | |
level = "ERROR" | |
elif self.verbosity <= -3: | |
level = "CRITICAL" | |
else: | |
level = "INFO" | |
# The root logger should match the "console" level *unless* we | |
# specified "--log" to send debug logs to a file. | |
root_level = level | |
if options.log: | |
root_level = "DEBUG" | |
logger_class = "pip._internal.utils.logging.ColorizedStreamHandler" | |
handler_class = "pip._internal.utils.logging.BetterRotatingFileHandler" | |
logging.config.dictConfig({ | |
"version": 1, | |
"disable_existing_loggers": False, | |
"filters": { | |
"exclude_warnings": { | |
"()": "pip._internal.utils.logging.MaxLevelFilter", | |
"level": logging.WARNING, | |
}, | |
}, | |
"formatters": { | |
"indent": { | |
"()": IndentingFormatter, | |
"format": "%(message)s", | |
}, | |
}, | |
"handlers": { | |
"console": { | |
"level": level, | |
"class": logger_class, | |
"no_color": options.no_color, | |
"stream": self.log_streams[0], | |
"filters": ["exclude_warnings"], | |
"formatter": "indent", | |
}, | |
"console_errors": { | |
"level": "WARNING", | |
"class": logger_class, | |
"no_color": options.no_color, | |
"stream": self.log_streams[1], | |
"formatter": "indent", | |
}, | |
"user_log": { | |
"level": "DEBUG", | |
"class": handler_class, | |
"filename": options.log or "/dev/null", | |
"delay": True, | |
"formatter": "indent", | |
}, | |
}, | |
"root": { | |
"level": root_level, | |
"handlers": list(filter(None, [ | |
"console", | |
"console_errors", | |
"user_log" if options.log else None, | |
])), | |
}, | |
# Disable any logging besides WARNING unless we have DEBUG level | |
# logging enabled. These use both pip._vendor and the bare names | |
# for the case where someone unbundles our libraries. | |
"loggers": { | |
name: { | |
"level": ( | |
"WARNING" if level in ["INFO", "ERROR"] else "DEBUG" | |
) | |
} for name in [ | |
"pip._vendor", "distlib", "requests", "urllib3" | |
] | |
}, | |
}) | |
if sys.version_info[:2] == (3, 3): | |
warnings.warn( | |
"Python 3.3 supported has been deprecated and support for it " | |
"will be dropped in the future. Please upgrade your Python.", | |
deprecation.RemovedInPip11Warning, | |
) | |
# TODO: try to get these passing down from the command? | |
# without resorting to os.environ to hold these. | |
if options.no_input: | |
os.environ['PIP_NO_INPUT'] = '1' | |
if options.exists_action: | |
os.environ['PIP_EXISTS_ACTION'] = ' '.join(options.exists_action) | |
if options.require_venv and not self.ignore_require_venv: | |
# If a venv is required check if it can really be found | |
if not running_under_virtualenv(): | |
logger.critical( | |
'Could not find an activated virtualenv (required).' | |
) | |
sys.exit(VIRTUALENV_NOT_FOUND) | |
original_root_handlers = set(logging.root.handlers) | |
try: | |
status = self.run(options, args) | |
# FIXME: all commands should return an exit status | |
# and when it is done, isinstance is not needed anymore | |
if isinstance(status, int): | |
return status | |
except PreviousBuildDirError as exc: | |
logger.critical(str(exc)) | |
logger.debug('Exception information:', exc_info=True) | |
return PREVIOUS_BUILD_DIR_ERROR | |
except (InstallationError, UninstallationError, BadCommand) as exc: | |
logger.critical(str(exc)) | |
logger.debug('Exception information:', exc_info=True) | |
return ERROR | |
except CommandError as exc: | |
logger.critical('ERROR: %s', exc) | |
logger.debug('Exception information:', exc_info=True) | |
return ERROR | |
except KeyboardInterrupt: | |
logger.critical('Operation cancelled by user') | |
logger.debug('Exception information:', exc_info=True) | |
return ERROR | |
except: | |
logger.critical('Exception:', exc_info=True) | |
return UNKNOWN_ERROR | |
finally: | |
# Check if we're using the latest version of pip available | |
if (not options.disable_pip_version_check and not | |
getattr(options, "no_index", False)): | |
with self._build_session( | |
options, | |
retries=0, | |
timeout=min(5, options.timeout)) as session: | |
pip_version_check(session, options) | |
# Avoid leaking loggers | |
for handler in set(logging.root.handlers) - original_root_handlers: | |
# this method benefit from the Logger class internal lock | |
logging.root.removeHandler(handler) | |
return SUCCESS | |
class RequirementCommand(Command):
    """Base class for commands that operate on a set of requirements
    (e.g. install, download, wheel)."""

    @staticmethod
    def populate_requirement_set(requirement_set, args, options, finder,
                                 session, name, wheel_cache):
        """
        Marshal cmd line args into a requirement set.

        :param requirement_set: the RequirementSet to populate (mutated).
        :param args: positional requirement specifiers from the command line.
        :param options: parsed command options (constraints, requirements,
            editables, isolated_mode, require_hashes, find_links are read).
        :param finder: package finder passed through to requirement parsing.
        :param session: network session for fetching requirement files.
        :param name: the invoking command's name, used in error messages.
        :param wheel_cache: wheel cache handed to each requirement.
        :raises CommandError: when no requirement source was given at all, or
            when pip itself would be modified in-place on Windows.
        """
        # NOTE: As a side-effect, options.require_hashes and
        #       requirement_set.require_hashes may be updated

        # 1) constraints files (-c)
        for filename in options.constraints:
            for req_to_add in parse_requirements(
                    filename,
                    constraint=True, finder=finder, options=options,
                    session=session, wheel_cache=wheel_cache):
                # is_direct marks requirements given by the user (vs deps).
                req_to_add.is_direct = True
                requirement_set.add_requirement(req_to_add)

        # 2) positional requirement specifiers
        for req in args:
            req_to_add = InstallRequirement.from_line(
                req, None, isolated=options.isolated_mode,
                wheel_cache=wheel_cache
            )
            req_to_add.is_direct = True
            requirement_set.add_requirement(req_to_add)

        # 3) editables (-e)
        for req in options.editables:
            req_to_add = InstallRequirement.from_editable(
                req,
                isolated=options.isolated_mode,
                wheel_cache=wheel_cache
            )
            req_to_add.is_direct = True
            requirement_set.add_requirement(req_to_add)

        # 4) requirements files (-r)
        for filename in options.requirements:
            for req_to_add in parse_requirements(
                    filename,
                    finder=finder, options=options, session=session,
                    wheel_cache=wheel_cache):
                req_to_add.is_direct = True
                requirement_set.add_requirement(req_to_add)
        # If --require-hashes was a line in a requirements file, tell
        # RequirementSet about it:
        requirement_set.require_hashes = options.require_hashes

        if not (args or options.editables or options.requirements):
            opts = {'name': name}
            if options.find_links:
                raise CommandError(
                    'You must give at least one requirement to %(name)s '
                    '(maybe you meant "pip %(name)s %(links)s"?)' %
                    dict(opts, links=' '.join(options.find_links)))
            else:
                raise CommandError(
                    'You must give at least one requirement to %(name)s '
                    '(see "pip help %(name)s")' % opts)

        # On Windows, any operation modifying pip should be run as:
        #     python -m pip ...
        # See https://github.com/pypa/pip/issues/1299 for more discussion
        should_show_use_python_msg = (
            WINDOWS and
            requirement_set.has_requirement("pip") and
            os.path.basename(sys.argv[0]).startswith("pip")
        )

        if should_show_use_python_msg:
            new_command = [
                sys.executable, "-m", "pip"
            ] + sys.argv[1:]
            raise CommandError(
                'To modify pip, please run the following command:\n{}'
                .format(" ".join(new_command))
            )

    def _build_package_finder(self, options, session,
                              platform=None, python_versions=None,
                              abi=None, implementation=None):
        """
        Create a package finder appropriate to this requirement command.

        :param options: parsed command options (index URLs, find-links,
            format control, trusted hosts, pre-release flag are read).
        :param session: network session used by the finder.
        :param platform: optional target platform override.
        :param python_versions: optional target Python versions override.
        :param abi: optional target ABI override.
        :param implementation: optional target implementation override.
        """
        index_urls = [options.index_url] + options.extra_index_urls
        if options.no_index:
            # --no-index wins over any configured index URLs.
            logger.debug('Ignoring indexes: %s', ','.join(index_urls))
            index_urls = []

        return PackageFinder(
            find_links=options.find_links,
            format_control=options.format_control,
            index_urls=index_urls,
            trusted_hosts=options.trusted_hosts,
            allow_all_prereleases=options.pre,
            process_dependency_links=options.process_dependency_links,
            session=session,
            platform=platform,
            versions=python_versions,
            abi=abi,
            implementation=implementation,
        )
"""Base option parser setup""" | |
from __future__ import absolute_import | |
import logging | |
import optparse | |
import sys | |
import textwrap | |
from distutils.util import strtobool | |
from pip._vendor.six import string_types | |
from pip._internal.compat import get_terminal_size | |
from pip._internal.configuration import Configuration, ConfigurationError | |
logger = logging.getLogger(__name__) | |
class PrettyHelpFormatter(optparse.IndentedHelpFormatter):
    """Help formatter producing tighter, less noisy optparse output."""

    def __init__(self, *args, **kwargs):
        # help position must be aligned with __init__.parseopts.description
        kwargs.update(
            max_help_position=30,
            indent_increment=1,
            width=get_terminal_size()[0] - 2,
        )
        optparse.IndentedHelpFormatter.__init__(self, *args, **kwargs)

    def format_option_strings(self, option):
        return self._format_option_strings(option, ' <%s>', ', ')

    def _format_option_strings(self, option, mvarfmt=' <%s>', optsep=', '):
        """
        Return a comma-separated list of option strings and metavars.

        :param option: tuple of (short opt, long opt), e.g: ('-f', '--format')
        :param mvarfmt: metavar format string - evaluated as mvarfmt % metavar
        :param optsep: separator
        """
        pieces = []
        if option._short_opts:
            pieces.append(option._short_opts[0])
        if option._long_opts:
            pieces.append(option._long_opts[0])
        if len(pieces) > 1:
            pieces.insert(1, optsep)

        if option.takes_value():
            metavar = option.metavar or option.dest.lower()
            pieces.append(mvarfmt % metavar.lower())

        return ''.join(pieces)

    def format_heading(self, heading):
        # The generic "Options" heading carries no information; drop it.
        return '' if heading == 'Options' else heading + ':\n'

    def format_usage(self, usage):
        """
        Ensure there is only one newline between usage and the first heading
        if there is no description.
        """
        return '\nUsage: %s\n' % self.indent_lines(
            textwrap.dedent(usage), "  ")

    def format_description(self, description):
        # leave full control over description to us
        if not description:
            return ''
        # The main parser lists commands; subcommands show a description.
        label = 'Commands' if hasattr(self.parser, 'main') else 'Description'
        # some doc strings have initial newlines, some don't
        body = description.lstrip('\n')
        # some doc strings have final newlines and spaces, some don't
        body = body.rstrip()
        # dedent, then reindent
        body = self.indent_lines(textwrap.dedent(body), "  ")
        return '%s:\n%s\n' % (label, body)

    def format_epilog(self, epilog):
        # leave full control over epilog to us
        return epilog if epilog else ''

    def indent_lines(self, text, indent):
        return "\n".join(indent + line for line in text.split('\n'))
class UpdatingDefaultsHelpFormatter(PrettyHelpFormatter):
    """Custom help formatter for use in ConfigOptionParser.

    Refreshes the parser's defaults (from configuration files and the
    environment) right before optparse expands them, so ``%default``
    markers in help text show the effective values.
    """

    def expand_default(self, option):
        parser = self.parser
        if parser is not None:
            parser._update_defaults(parser.defaults)
        return optparse.IndentedHelpFormatter.expand_default(self, option)
class CustomOptionParser(optparse.OptionParser):
    """OptionParser extended with group insertion and a flattened option
    listing."""

    def insert_option_group(self, idx, *args, **kwargs):
        """Insert an OptionGroup at a given position."""
        # add_option_group always appends; relocate the new group to ``idx``.
        group = self.add_option_group(*args, **kwargs)
        self.option_groups.remove(group)
        self.option_groups.insert(idx, group)
        return group

    @property
    def option_list_all(self):
        """Get a list of all options, including those in option groups."""
        options = list(self.option_list)
        for group in self.option_groups:
            options.extend(group.option_list)
        return options
class ConfigOptionParser(CustomOptionParser):
    """Custom option parser which updates its defaults by checking the
    configuration files and environmental variables"""

    def __init__(self, *args, **kwargs):
        # The command name scopes which config sections apply (see
        # _get_ordered_configuration_items).
        self.name = kwargs.pop('name')

        isolated = kwargs.pop("isolated", False)
        self.config = Configuration(isolated)

        assert self.name
        optparse.OptionParser.__init__(self, *args, **kwargs)

    def check_default(self, option, key, val):
        """Validate a configured value for ``option``; exit with status 3
        when it does not pass the option's type check."""
        try:
            return option.check_value(key, val)
        except optparse.OptionValueError as exc:
            print("An error occurred during configuration: %s" % exc)
            sys.exit(3)

    def _get_ordered_configuration_items(self):
        """Yield (key, value) configuration pairs, lowest precedence first."""
        # Configuration gives keys in an unordered manner. Order them.
        override_order = ["global", self.name, ":env:"]

        # Pool the options into different groups
        section_items = {name: [] for name in override_order}
        for section_key, val in self.config.items():
            # ignore empty values
            if not val:
                logger.debug(
                    "Ignoring configuration key '%s' as its value is empty.",
                    section_key
                )
                continue

            section, key = section_key.split(".", 1)
            if section in override_order:
                section_items[section].append((key, val))

        # Yield each group in their override order
        for section in override_order:
            for key, val in section_items[section]:
                yield key, val

    def _update_defaults(self, defaults):
        """Updates the given defaults with values from the config files and
        the environ. Does a little special handling for certain types of
        options (lists)."""

        # Accumulate complex default state.
        self.values = optparse.Values(self.defaults)
        late_eval = set()
        # Then set the options with those values
        for key, val in self._get_ordered_configuration_items():
            # '--' because configuration supports only long names
            option = self.get_option('--' + key)

            # Ignore options not present in this parser. E.g. non-globals put
            # in [global] by users that want them to apply to all applicable
            # commands.
            if option is None:
                continue

            if option.action in ('store_true', 'store_false', 'count'):
                val = strtobool(val)
            elif option.action == 'append':
                val = val.split()
                val = [self.check_default(option, key, v) for v in val]
            elif option.action == 'callback':
                # Callbacks write into self.values; the result is copied to
                # ``defaults`` only after all configuration is applied.
                late_eval.add(option.dest)
                opt_str = option.get_opt_string()
                val = option.convert_value(opt_str, val)
                # From take_action
                args = option.callback_args or ()
                kwargs = option.callback_kwargs or {}
                option.callback(option, opt_str, val, self, *args, **kwargs)
            else:
                val = self.check_default(option, key, val)

            defaults[option.dest] = val

        for key in late_eval:
            defaults[key] = getattr(self.values, key)
        self.values = None
        return defaults

    def get_default_values(self):
        """Overriding to make updating the defaults after instantiation of
        the option parser possible, _update_defaults() does the dirty work."""
        if not self.process_default_values:
            # Old, pre-Optik 1.5 behaviour.
            return optparse.Values(self.defaults)

        # Load the configuration, or error out in case of an error
        try:
            self.config.load()
        except ConfigurationError as err:
            self.exit(2, err.args[0])

        defaults = self._update_defaults(self.defaults.copy())  # ours
        for option in self._get_all_options():
            default = defaults.get(option.dest)
            if isinstance(default, string_types):
                opt_str = option.get_opt_string()
                defaults[option.dest] = option.check_value(opt_str, default)
        return optparse.Values(defaults)

    def error(self, msg):
        # Print usage to stderr and exit 2, matching optparse convention.
        self.print_usage(sys.stderr)
        self.exit(2, "%s\n" % msg)
"""Build Environment used for isolation during sdist building | |
""" | |
import os | |
from distutils.sysconfig import get_python_lib | |
from sysconfig import get_paths | |
from pip._internal.utils.temp_dir import TempDirectory | |
class BuildEnvironment(object):
    """Creates and manages an isolated environment to install build deps
    """

    def __init__(self, no_clean):
        # Backing temp directory; actually created in __enter__.
        self._temp_dir = TempDirectory(kind="build-env")
        # When True, the temp directory is kept after __exit__.
        self._no_clean = no_clean

    @property
    def path(self):
        # Root of the isolated environment.
        return self._temp_dir.path

    def __enter__(self):
        """Create the environment and point PATH/PYTHONPATH at it.

        Saves the previous PATH, PYTHONPATH and PYTHONNOUSERSITE values so
        __exit__ can restore them. Returns the environment root path.
        """
        self._temp_dir.create()

        self.save_path = os.environ.get('PATH', None)
        self.save_pythonpath = os.environ.get('PYTHONPATH', None)
        self.save_nousersite = os.environ.get('PYTHONNOUSERSITE', None)

        install_scheme = 'nt' if (os.name == 'nt') else 'posix_prefix'
        install_dirs = get_paths(install_scheme, vars={
            'base': self.path,
            'platbase': self.path,
        })

        # Prepend the env's scripts dir so its installed tools win on PATH.
        scripts = install_dirs['scripts']
        if self.save_path:
            os.environ['PATH'] = scripts + os.pathsep + self.save_path
        else:
            os.environ['PATH'] = scripts + os.pathsep + os.defpath

        # Note: prefer distutils' sysconfig to get the
        # library paths so PyPy is correctly supported.
        purelib = get_python_lib(plat_specific=0, prefix=self.path)
        platlib = get_python_lib(plat_specific=1, prefix=self.path)
        if purelib == platlib:
            lib_dirs = purelib
        else:
            lib_dirs = purelib + os.pathsep + platlib
        if self.save_pythonpath:
            os.environ['PYTHONPATH'] = lib_dirs + os.pathsep + \
                self.save_pythonpath
        else:
            os.environ['PYTHONPATH'] = lib_dirs

        # Keep per-user site-packages out of the isolated build.
        os.environ['PYTHONNOUSERSITE'] = '1'

        return self.path

    def __exit__(self, exc_type, exc_val, exc_tb):
        """Remove the environment (unless no_clean) and restore the saved
        environment variables."""
        if not self._no_clean:
            self._temp_dir.cleanup()

        def restore_var(varname, old_value):
            # A saved value of None means the variable was originally unset.
            if old_value is None:
                os.environ.pop(varname, None)
            else:
                os.environ[varname] = old_value

        restore_var('PATH', self.save_path)
        restore_var('PYTHONPATH', self.save_pythonpath)
        restore_var('PYTHONNOUSERSITE', self.save_nousersite)

    def cleanup(self):
        # Explicit cleanup for callers not using the context-manager form.
        self._temp_dir.cleanup()
class NoOpBuildEnvironment(BuildEnvironment):
    """A no-op drop-in replacement for BuildEnvironment
    """

    def __init__(self, no_clean):
        # Intentionally ignores no_clean: there is nothing to clean up.
        pass

    def __enter__(self):
        # Unlike BuildEnvironment, returns None rather than a path.
        pass

    def __exit__(self, exc_type, exc_val, exc_tb):
        pass

    def cleanup(self):
        pass
"""Cache Management | |
""" | |
import errno | |
import hashlib | |
import logging | |
import os | |
from pip._vendor.packaging.utils import canonicalize_name | |
from pip._internal import index | |
from pip._internal.compat import expanduser | |
from pip._internal.download import path_to_url | |
from pip._internal.utils.temp_dir import TempDirectory | |
from pip._internal.wheel import InvalidWheelFilename, Wheel | |
logger = logging.getLogger(__name__) | |
class Cache(object):
    """An abstract class - provides cache directories for data from links

    :param cache_dir: The root of the cache.
    :param format_control: A pip.index.FormatControl object to limit
        binaries being read from the cache.
    :param allowed_formats: which formats of files the cache should store.
        ('binary' and 'source' are the only allowed values)
    """

    def __init__(self, cache_dir, format_control, allowed_formats):
        super(Cache, self).__init__()
        self.cache_dir = expanduser(cache_dir) if cache_dir else None
        self.format_control = format_control
        self.allowed_formats = allowed_formats

        _valid_formats = {"source", "binary"}
        assert self.allowed_formats.union(_valid_formats) == _valid_formats

    def _get_cache_path_parts(self, link):
        """Get parts of part that must be os.path.joined with cache_dir
        """
        # Build the cache key from the fragment-free URL, keeping only a
        # hash pin if one is present (other fragment items are irrelevant).
        key_parts = [link.url_without_fragment]
        if link.hash_name is not None and link.hash is not None:
            key_parts.append("%s=%s" % (link.hash_name, link.hash))
        key_url = "#".join(key_parts)

        # sha224 has security properties similar to sha256 but a shorter
        # digest, which is plenty for a cache key.
        hashed = hashlib.sha224(key_url.encode()).hexdigest()

        # Nest directories two characters at a time so no single directory
        # accumulates an unbounded number of entries on any filesystem.
        return [hashed[:2], hashed[2:4], hashed[4:6], hashed[6:]]

    def _get_candidates(self, link, package_name):
        # Without a cache dir, a package name, or a link there is nothing
        # to look up.
        if not (self.cache_dir and package_name and link):
            return []

        canonical_name = canonicalize_name(package_name)
        formats = index.fmt_ctl_formats(
            self.format_control, canonical_name
        )
        if not self.allowed_formats.intersection(formats):
            return []

        root = self.get_path_for_link(link)
        try:
            return os.listdir(root)
        except OSError as err:
            # A missing (or non-directory) cache path simply means no hits.
            if err.errno in (errno.ENOENT, errno.ENOTDIR):
                return []
            raise

    def get_path_for_link(self, link):
        """Return a directory to store cached items in for link.
        """
        raise NotImplementedError()

    def get(self, link, package_name):
        """Returns a link to a cached item if it exists, otherwise returns the
        passed link.
        """
        raise NotImplementedError()

    def _link_for_candidate(self, link, candidate):
        # Turn a cached filename back into a Link under this link's cache dir.
        root = self.get_path_for_link(link)
        path = os.path.join(root, candidate)

        return index.Link(path_to_url(path))

    def cleanup(self):
        pass
class SimpleWheelCache(Cache):
    """A cache of wheels for future installs.
    """

    def __init__(self, cache_dir, format_control):
        super(SimpleWheelCache, self).__init__(
            cache_dir, format_control, {"binary"}
        )

    def get_path_for_link(self, link):
        """Return a directory to store cached wheels for link

        Because there are M wheels for any one sdist, we provide a directory
        to cache them in, and then consult that directory when looking up
        cache hits.

        We only insert things into the cache if they have plausible version
        numbers, so that we don't contaminate the cache with things that were
        not unique. E.g. ./package might have dozens of installs done for it
        and build a version of 0.0...and if we built and cached a wheel, we'd
        end up using the same wheel even if the source has been edited.

        :param link: The link of the sdist for which this will cache wheels.
        """
        parts = self._get_cache_path_parts(link)

        # Store wheels within the root cache_dir
        return os.path.join(self.cache_dir, "wheels", *parts)

    def get(self, link, package_name):
        # Track the best supported wheel as (support_index_min, name);
        # smaller tuples sort first, i.e. are better supported.
        best = None
        for wheel_name in self._get_candidates(link, package_name):
            try:
                wheel = Wheel(wheel_name)
            except InvalidWheelFilename:
                continue
            if not wheel.supported():
                # Built for a different python/arch/etc
                continue
            entry = (wheel.support_index_min(), wheel_name)
            if best is None or entry < best:
                best = entry

        # On a cache miss, hand the original link back unchanged.
        if best is None:
            return link
        return self._link_for_candidate(link, best[1])
class EphemWheelCache(SimpleWheelCache):
    """A SimpleWheelCache that creates its own temporary cache directory
    """

    def __init__(self, format_control):
        # The temp dir is created immediately so the cache is usable at once.
        self._temp_dir = TempDirectory(kind="ephem-wheel-cache")
        self._temp_dir.create()

        super(EphemWheelCache, self).__init__(
            self._temp_dir.path, format_control
        )

    def cleanup(self):
        # Remove the temporary directory backing this cache.
        self._temp_dir.cleanup()
class WheelCache(Cache):
    """Wraps EphemWheelCache and SimpleWheelCache into a single Cache

    Lookups consult the persistent wheel cache first and degrade gracefully
    to the per-invocation ephemeral cache on a miss.
    """

    def __init__(self, cache_dir, format_control):
        super(WheelCache, self).__init__(
            cache_dir, format_control, {'binary'}
        )
        self._wheel_cache = SimpleWheelCache(cache_dir, format_control)
        self._ephem_cache = EphemWheelCache(format_control)

    def get_path_for_link(self, link):
        return self._wheel_cache.get_path_for_link(link)

    def get_ephem_path_for_link(self, link):
        return self._ephem_cache.get_path_for_link(link)

    def get(self, link, package_name):
        # The persistent cache returns the very same link object on a miss.
        cached = self._wheel_cache.get(link, package_name)
        if cached is not link:
            return cached
        return self._ephem_cache.get(link, package_name)

    def cleanup(self):
        self._wheel_cache.cleanup()
        self._ephem_cache.cleanup()
""" | |
shared options and groups | |
The principle here is to define options once, but *not* instantiate them | |
globally. One reason being that options with action='append' can carry state | |
between parses. pip parses general options twice internally, and shouldn't | |
pass on state. To be consistent, all options will follow this design. | |
""" | |
from __future__ import absolute_import | |
import warnings | |
from functools import partial | |
from optparse import SUPPRESS_HELP, Option, OptionGroup | |
from pip._internal.index import ( | |
FormatControl, fmt_ctl_handle_mutual_exclude, fmt_ctl_no_binary, | |
) | |
from pip._internal.locations import USER_CACHE_DIR, src_prefix | |
from pip._internal.models import PyPI | |
from pip._internal.utils.hashes import STRONG_HASHES | |
from pip._internal.utils.typing import MYPY_CHECK_RUNNING | |
from pip._internal.utils.ui import BAR_TYPES | |
if MYPY_CHECK_RUNNING: | |
from typing import Any | |
def make_option_group(group, parser):
    """
    Return an OptionGroup object
    group  -- assumed to be dict with 'name' and 'options' keys
    parser -- an optparse Parser
    """
    option_group = OptionGroup(parser, group['name'])
    # Each entry is a zero-arg factory producing a fresh Option instance.
    for make_option in group['options']:
        option_group.add_option(make_option())
    return option_group
def check_install_build_global(options, check_options=None):
    """Disable wheels if per-setup.py call options are set.

    :param options: The OptionParser options to update.
    :param check_options: The options to check, if not supplied defaults to
        options.
    """
    if check_options is None:
        check_options = options

    names = ["build_options", "global_options", "install_options"]
    # Any per-setup.py option forces building from source.
    if any(getattr(check_options, name, None) for name in names):
        control = options.format_control
        fmt_ctl_no_binary(control)
        warnings.warn(
            'Disabling all use of wheels due to the use of --build-options '
            '/ --global-options / --install-options.', stacklevel=2,
        )
###########
# options #
###########

# NOTE: each option below is a zero-arg factory (partial or def) rather than
# a module-level Option instance; see the module docstring for why.

help_ = partial(
    Option,
    '-h', '--help',
    dest='help',
    action='help',
    help='Show help.',
)  # type: Any

# Ignore environment variables and per-user configuration files.
isolated_mode = partial(
    Option,
    "--isolated",
    dest="isolated_mode",
    action="store_true",
    default=False,
    help=(
        "Run pip in an isolated mode, ignoring environment variables and user "
        "configuration."
    ),
)

require_virtualenv = partial(
    Option,
    # Run only if inside a virtualenv, bail if not.
    '--require-virtualenv', '--require-venv',
    dest='require_venv',
    action='store_true',
    default=False,
    help=SUPPRESS_HELP  # hidden from --help output
)  # type: Any

# Repeatable; net verbosity is verbose - quiet (see Command.main).
verbose = partial(
    Option,
    '-v', '--verbose',
    dest='verbose',
    action='count',
    default=0,
    help='Give more output. Option is additive, and can be used up to 3 times.'
)

no_color = partial(
    Option,
    '--no-color',
    dest='no_color',
    action='store_true',
    default=False,
    help="Suppress colored output",
)

version = partial(
    Option,
    '-V', '--version',
    dest='version',
    action='store_true',
    help='Show version and exit.',
)  # type: Any

quiet = partial(
    Option,
    '-q', '--quiet',
    dest='quiet',
    action='count',
    default=0,
    help=(
        'Give less output. Option is additive, and can be used up to 3'
        ' times (corresponding to WARNING, ERROR, and CRITICAL logging'
        ' levels).'
    ),
)  # type: Any

# Progress display style; valid choices come from BAR_TYPES.
progress_bar = partial(
    Option,
    '--progress-bar',
    dest='progress_bar',
    type='choice',
    choices=list(BAR_TYPES.keys()),
    default='on',
    help=(
        'Specify type of progress to be displayed [' +
        '|'.join(BAR_TYPES.keys()) + '] (default: %default)'
    ),
)  # type: Any

log = partial(
    Option,
    "--log", "--log-file", "--local-log",
    dest="log",
    metavar="path",
    help="Path to a verbose appending log."
)  # type: Any

no_input = partial(
    Option,
    # Don't ask for input
    '--no-input',
    dest='no_input',
    action='store_true',
    default=False,
    help=SUPPRESS_HELP  # hidden from --help output
)  # type: Any

proxy = partial(
    Option,
    '--proxy',
    dest='proxy',
    type='str',
    default='',
    help="Specify a proxy in the form [user:passwd@]proxy.server:port."
)  # type: Any

retries = partial(
    Option,
    '--retries',
    dest='retries',
    type='int',
    default=5,
    help="Maximum number of retries each connection should attempt "
         "(default %default times).",
)  # type: Any

timeout = partial(
    Option,
    '--timeout', '--default-timeout',
    metavar='sec',
    dest='timeout',
    type='float',
    default=15,
    help='Set the socket timeout (default %default seconds).',
)  # type: Any

skip_requirements_regex = partial(
    Option,
    # A regex to be used to skip requirements
    '--skip-requirements-regex',
    dest='skip_requirements_regex',
    type='str',
    default='',
    help=SUPPRESS_HELP,  # hidden from --help output
)  # type: Any
def exists_action():
    """Build the ``--exists-action`` option: default action to take when a
    target path already exists."""
    return Option(
        '--exists-action',
        dest='exists_action',
        metavar='action',
        action='append',
        type='choice',
        choices=['s', 'i', 'w', 'b', 'a'],
        default=[],
        help="Default action when a path already exists: "
             "(s)witch, (i)gnore, (w)ipe, (b)ackup, (a)bort).",
    )
# Alternate CA bundle used to verify HTTPS connections.
cert = partial(
    Option,
    '--cert',
    dest='cert',
    type='str',
    metavar='path',
    help="Path to alternate CA bundle.",
)  # type: Any

client_cert = partial(
    Option,
    '--client-cert',
    dest='client_cert',
    type='str',
    default=None,
    metavar='path',
    help="Path to SSL client certificate, a single file containing the "
         "private key and the certificate in PEM format.",
)  # type: Any

# Primary package index; defaults to PyPI's simple API URL.
index_url = partial(
    Option,
    '-i', '--index-url', '--pypi-url',
    dest='index_url',
    metavar='URL',
    default=PyPI.simple_url,
    help="Base URL of Python Package Index (default %default). "
         "This should point to a repository compliant with PEP 503 "
         "(the simple repository API) or a local directory laid out "
         "in the same format.",
)  # type: Any
def extra_index_url():
    """Build the repeatable ``--extra-index-url`` option: additional package
    indexes searched alongside --index-url."""
    return Option(
        '--extra-index-url',
        action='append',
        dest='extra_index_urls',
        metavar='URL',
        default=[],
        help="Extra URLs of package indexes to use in addition to "
             "--index-url. Should follow the same rules as "
             "--index-url.",
    )
# Disable all index lookups; only --find-links sources are consulted.
no_index = partial(
    Option,
    '--no-index',
    dest='no_index',
    action='store_true',
    default=False,
    help='Ignore package index (only looking at --find-links URLs instead).',
)  # type: Any
def find_links():
    """Build the repeatable ``-f/--find-links`` option: extra URLs or local
    paths scanned for distribution archives."""
    return Option(
        '-f', '--find-links',
        action='append',
        dest='find_links',
        metavar='url',
        default=[],
        help="If a url or path to an html file, then parse for links to "
             "archives. If a local path or file:// url that's a directory, "
             "then look for archives in the directory listing.",
    )
def trusted_host():
    """Build the repeatable ``--trusted-host`` option: hosts exempted from
    HTTPS validity requirements."""
    return Option(
        "--trusted-host",
        action="append",
        dest="trusted_hosts",
        metavar="HOSTNAME",
        default=[],
        help="Mark this host as trusted, even though it does not have valid "
             "or any HTTPS.",
    )
# Remove after 1.5
# Legacy opt-in for setuptools dependency_links processing.
process_dependency_links = partial(
    Option,
    "--process-dependency-links",
    dest="process_dependency_links",
    action="store_true",
    default=False,
    help="Enable the processing of dependency links.",
)  # type: Any
def constraints():
    """Build the repeatable ``-c/--constraint`` option: files whose pins
    constrain (but do not install) requirements."""
    return Option(
        '-c', '--constraint',
        action='append',
        dest='constraints',
        metavar='file',
        default=[],
        help='Constrain versions using the given constraints file. '
             'This option can be used multiple times.'
    )
def requirements(): | |
return Option( | |
'-r', '--requirement', | |
dest='requirements', | |
action='append', | |
default=[], | |
metavar='file', | |
help='Install from the given requirements file. ' | |
'This option can be used multiple times.' | |
) | |
def editable(): | |
return Option( | |
'-e', '--editable', | |
dest='editables', | |
action='append', | |
default=[], | |
metavar='path/url', | |
help=('Install a project in editable mode (i.e. setuptools ' | |
'"develop mode") from a local project path or a VCS url.'), | |
) | |
# --src: where editable (VCS / local) projects get checked out.  The default,
# src_prefix, is computed elsewhere from the active environment.
src = partial(
    Option,
    '--src', '--source', '--source-dir', '--source-directory',
    dest='src_dir',
    metavar='dir',
    default=src_prefix,
    help='Directory to check out editable projects into. '
         'The default in a virtualenv is "<venv path>/src". '
         'The default for global installs is "<current dir>/src".'
)  # type: Any
def _get_format_control(values, option): | |
"""Get a format_control object.""" | |
return getattr(values, option.dest) | |
def _handle_no_binary(option, opt_str, value, parser):
    # optparse callback for --no-binary: merge the new value into the shared
    # FormatControl, removing any conflicting entries from only_binary.
    existing = getattr(parser.values, option.dest)
    fmt_ctl_handle_mutual_exclude(
        value, existing.no_binary, existing.only_binary,
    )


def _handle_only_binary(option, opt_str, value, parser):
    # Mirror of _handle_no_binary with the two target sets swapped.
    existing = getattr(parser.values, option.dest)
    fmt_ctl_handle_mutual_exclude(
        value, existing.only_binary, existing.no_binary,
    )
def no_binary():
    """Build the ``--no-binary`` option.

    Shares dest ``format_control`` with --only-binary; values are folded
    into a FormatControl via the _handle_no_binary callback.
    """
    return Option(
        "--no-binary", dest="format_control", action="callback",
        callback=_handle_no_binary, type="str",
        default=FormatControl(set(), set()),
        help="Do not use binary packages. Can be supplied multiple times, and "
             "each time adds to the existing value. Accepts either :all: to "
             "disable all binary packages, :none: to empty the set, or one or "
             "more package names with commas between them. Note that some "
             "packages are tricky to compile and may fail to install when "
             "this option is used on them.",
    )


def only_binary():
    """Build the ``--only-binary`` option (counterpart of --no-binary)."""
    return Option(
        "--only-binary", dest="format_control", action="callback",
        callback=_handle_only_binary, type="str",
        default=FormatControl(set(), set()),
        help="Do not use source packages. Can be supplied multiple times, and "
             "each time adds to the existing value. Accepts either :all: to "
             "disable all source packages, :none: to empty the set, or one or "
             "more package names with commas between them. Packages without "
             "binary distributions will fail to install when this option is "
             "used on them.",
    )
# --cache-dir: where downloaded/built artifacts are cached (defaults to the
# per-user cache directory).
cache_dir = partial(
    Option,
    "--cache-dir",
    dest="cache_dir",
    default=USER_CACHE_DIR,
    metavar="dir",
    help="Store the cache data in <dir>."
)

# --no-cache-dir: shares dest with --cache-dir; store_false turns the cache
# off entirely (cache_dir becomes falsy).
no_cache = partial(
    Option,
    "--no-cache-dir",
    dest="cache_dir",
    action="store_false",
    help="Disable the cache.",
)
# --no-deps: install/download only the requested packages, skipping their
# dependency closure.
no_deps = partial(
    Option,
    '--no-deps', '--no-dependencies',
    dest='ignore_dependencies',
    action='store_true',
    default=False,
    help="Don't install package dependencies.",
)  # type: Any

# -b/--build: explicit unpack-and-build directory.  When given, build
# directories are intentionally left behind on failure (see help text).
build_dir = partial(
    Option,
    '-b', '--build', '--build-dir', '--build-directory',
    dest='build_dir',
    metavar='dir',
    help='Directory to unpack packages into and build in. Note that '
         'an initial build still takes place in a temporary directory. '
         'The location of temporary directories can be controlled by setting '
         'the TMPDIR environment variable (TEMP on Windows) appropriately. '
         'When passed, build directories are not cleaned in case of failures.'
)  # type: Any

# --ignore-requires-python: skip the Requires-Python metadata check.
ignore_requires_python = partial(
    Option,
    '--ignore-requires-python',
    dest='ignore_requires_python',
    action='store_true',
    help='Ignore the Requires-Python information.'
)  # type: Any

# --no-build-isolation: store_false on build_isolation (isolation is on by
# default); PEP 518 build deps must then be pre-installed.
no_build_isolation = partial(
    Option,
    '--no-build-isolation',
    dest='build_isolation',
    action='store_false',
    default=True,
    help='Disable isolation when building a modern source distribution. '
         'Build dependencies specified by PEP 518 must be already installed '
         'if this option is used.'
)  # type: Any

# --install-option: raw extra arguments forwarded to `setup.py install`.
install_options = partial(
    Option,
    '--install-option',
    dest='install_options',
    action='append',
    metavar='options',
    help="Extra arguments to be supplied to the setup.py install "
         "command (use like --install-option=\"--install-scripts=/usr/local/"
         "bin\"). Use multiple --install-option options to pass multiple "
         "options to setup.py install. If you are using an option with a "
         "directory path, be sure to use absolute path.",
)  # type: Any

# --global-option: options placed before the subcommand on the setup.py call.
global_options = partial(
    Option,
    '--global-option',
    dest='global_options',
    action='append',
    metavar='options',
    help="Extra global options to be supplied to the setup.py "
         "call before the install command.",
)  # type: Any
# --no-clean: keep build directories around after the command finishes.
# Fixed: the help text previously ended with a stray closing parenthesis
# ("directories).").
no_clean = partial(
    Option,
    '--no-clean',
    action='store_true',
    default=False,
    help="Don't clean up build directories."
)  # type: Any
# --pre: allow pre-release/dev versions to be selected by the finder.
pre = partial(
    Option,
    '--pre',
    action='store_true',
    default=False,
    help="Include pre-release and development versions. By default, "
         "pip only finds stable versions.",
)  # type: Any

# --disable-pip-version-check: suppress the periodic "new pip available"
# check against PyPI.
disable_pip_version_check = partial(
    Option,
    "--disable-pip-version-check",
    dest="disable_pip_version_check",
    action="store_true",
    default=False,
    help="Don't periodically check PyPI to determine whether a new version "
         "of pip is available for download. Implied with --no-index.",
)  # type: Any

# Deprecated no-op kept only so old command lines do not break; hidden from
# --help output via SUPPRESS_HELP.  Remove later.
always_unzip = partial(
    Option,
    '-Z', '--always-unzip',
    dest='always_unzip',
    action='store_true',
    help=SUPPRESS_HELP,
)  # type: Any
def _merge_hash(option, opt_str, value, parser):
    """Given a value spelled "algo:digest", append the digest to a list
    pointed to in a dict by the algo name."""
    # Lazily create the dict so the option's default can stay None.
    if not parser.values.hashes:
        parser.values.hashes = {}
    try:
        # Split only on the first colon: digests never contain ':' but this
        # keeps the error path for values with no colon at all.
        algo, digest = value.split(':', 1)
    except ValueError:
        # parser.error() exits, so execution does not continue past here.
        parser.error('Arguments to %s must be a hash name '
                     'followed by a value, like --hash=sha256:abcde...' %
                     opt_str)
    if algo not in STRONG_HASHES:
        parser.error('Allowed hash algorithms for %s are %s.' %
                     (opt_str, ', '.join(STRONG_HASHES)))
    parser.values.hashes.setdefault(algo, []).append(digest)


# --hash: repeatable "algo:digest" pins collected by _merge_hash.
hash = partial(
    Option,
    '--hash',
    # Hash values eventually end up in InstallRequirement.hashes due to
    # __dict__ copying in process_line().
    dest='hashes',
    action='callback',
    callback=_merge_hash,
    type='string',
    help="Verify that the package's archive matches this "
         'hash before installing. Example: --hash=sha256:abcdef...',
)  # type: Any

# --require-hashes: force hash-checking mode for every requirement.
require_hashes = partial(
    Option,
    '--require-hashes',
    dest='require_hashes',
    action='store_true',
    default=False,
    help='Require a hash to check each requirement against, for '
         'repeatable installs. This option is implied when any package in a '
         'requirements file has a --hash option.',
)  # type: Any
##########
# groups #
##########

# Option-group templates consumed by make_option_group(): each maps a group
# title to the list of option factories (functions or partials) to invoke.
general_group = {
    'name': 'General Options',
    'options': [
        help_,
        isolated_mode,
        require_virtualenv,
        verbose,
        version,
        quiet,
        log,
        no_input,
        proxy,
        retries,
        timeout,
        skip_requirements_regex,
        exists_action,
        trusted_host,
        cert,
        client_cert,
        cache_dir,
        no_cache,
        disable_pip_version_check,
        no_color,
    ]
}

index_group = {
    'name': 'Package Index Options',
    'options': [
        index_url,
        extra_index_url,
        no_index,
        find_links,
        process_dependency_links,
    ]
}
""" | |
Package containing all pip commands | |
""" | |
from __future__ import absolute_import | |
from pip._internal.commands.completion import CompletionCommand | |
from pip._internal.commands.configuration import ConfigurationCommand | |
from pip._internal.commands.download import DownloadCommand | |
from pip._internal.commands.freeze import FreezeCommand | |
from pip._internal.commands.hash import HashCommand | |
from pip._internal.commands.help import HelpCommand | |
from pip._internal.commands.list import ListCommand | |
from pip._internal.commands.check import CheckCommand | |
from pip._internal.commands.search import SearchCommand | |
from pip._internal.commands.show import ShowCommand | |
from pip._internal.commands.install import InstallCommand | |
from pip._internal.commands.uninstall import UninstallCommand | |
from pip._internal.commands.wheel import WheelCommand | |
from pip._internal.utils.typing import MYPY_CHECK_RUNNING | |
if MYPY_CHECK_RUNNING: | |
from typing import List, Type | |
from pip._internal.basecommand import Command | |
# Display order for `pip help` / command listings; keep user-facing commands
# first and meta commands (hash, completion, help) last.
commands_order = [
    InstallCommand,
    DownloadCommand,
    UninstallCommand,
    FreezeCommand,
    ListCommand,
    ShowCommand,
    CheckCommand,
    ConfigurationCommand,
    SearchCommand,
    WheelCommand,
    HashCommand,
    CompletionCommand,
    HelpCommand,
]  # type: List[Type[Command]]

# Lookup table keyed by each command's `name` attribute, used for dispatch.
commands_dict = {c.name: c for c in commands_order}
def get_summaries(ordered=True):
    """Yields sorted (command name, command summary) tuples."""
    if ordered:
        items = _sort_commands(commands_dict, commands_order)
    else:
        items = commands_dict.items()
    for cmd_name, cmd_class in items:
        yield (cmd_name, cmd_class.summary)
def get_similar_commands(name):
    """Command name auto-correct.

    Returns the closest known command name, or False when nothing is close.
    """
    from difflib import get_close_matches
    matches = get_close_matches(name.lower(), commands_dict.keys())
    return matches[0] if matches else False
def _sort_commands(cmddict, order): | |
def keyfn(key): | |
try: | |
return order.index(key[1]) | |
except ValueError: | |
# unordered items should come last | |
return 0xff | |
return sorted(cmddict.items(), key=keyfn) |
import logging | |
from pip._internal.basecommand import Command | |
from pip._internal.operations.check import ( | |
check_package_set, create_package_set_from_installed, | |
) | |
from pip._internal.utils.misc import get_installed_distributions | |
logger = logging.getLogger(__name__) | |
class CheckCommand(Command):
    """Verify installed packages have compatible dependencies."""
    name = 'check'
    usage = """
      %prog [options]"""
    summary = 'Verify installed packages have compatible dependencies.'

    def run(self, options, args):
        """Report missing and conflicting dependencies.

        Returns 1 when any problem is found; otherwise logs a success
        message (and implicitly returns None).
        """
        package_set = create_package_set_from_installed()
        missing, conflicting = check_package_set(package_set)

        # Each entry in `missing` maps a project to its unsatisfied
        # requirements; dependency[0] is the required project name.
        for project_name in missing:
            version = package_set[project_name].version
            for dependency in missing[project_name]:
                logger.info(
                    "%s %s requires %s, which is not installed.",
                    project_name, version, dependency[0],
                )

        for project_name in conflicting:
            version = package_set[project_name].version
            for dep_name, dep_version, req in conflicting[project_name]:
                logger.info(
                    "%s %s has requirement %s, but you have %s %s.",
                    project_name, version, req, dep_name, dep_version,
                )

        if missing or conflicting:
            return 1
        else:
            logger.info("No broken requirements found.")
from __future__ import absolute_import | |
import sys | |
import textwrap | |
from pip._internal.basecommand import Command | |
from pip._internal.utils.misc import get_prog | |
# Frames a shell-specific completion script between start/end marker comments
# so users can later find and remove the generated section from rc files.
BASE_COMPLETION = """
# pip %(shell)s completion start%(script)s# pip %(shell)s completion end
"""

# Per-shell completion snippets; each is %-formatted with the program name
# (prog) before being wrapped in BASE_COMPLETION.
COMPLETION_SCRIPTS = {
    'bash': """
        _pip_completion()
        {
            COMPREPLY=( $( COMP_WORDS="${COMP_WORDS[*]}" \\
                           COMP_CWORD=$COMP_CWORD \\
                           PIP_AUTO_COMPLETE=1 $1 ) )
        }
        complete -o default -F _pip_completion %(prog)s
    """,
    'zsh': """
        function _pip_completion {
          local words cword
          read -Ac words
          read -cn cword
          reply=( $( COMP_WORDS="$words[*]" \\
                     COMP_CWORD=$(( cword-1 )) \\
                     PIP_AUTO_COMPLETE=1 $words[1] ) )
        }
        compctl -K _pip_completion %(prog)s
    """,
    'fish': """
        function __fish_complete_pip
            set -lx COMP_WORDS (commandline -o) ""
            set -lx COMP_CWORD ( \\
                math (contains -i -- (commandline -t) $COMP_WORDS)-1 \\
            )
            set -lx PIP_AUTO_COMPLETE 1
            string split \\ -- (eval $COMP_WORDS[1])
        end
        complete -fa "(__fish_complete_pip)" -c %(prog)s
    """,
}
class CompletionCommand(Command):
    """A helper command to be used for command completion."""
    name = 'completion'
    summary = 'A helper command used for command completion.'
    # Completion generation is harmless outside a virtualenv.
    ignore_require_venv = True

    def __init__(self, *args, **kw):
        """Register the mutually-exclusive shell selector flags.

        All three flags share dest='shell' via store_const, so the last one
        given wins.
        """
        super(CompletionCommand, self).__init__(*args, **kw)

        cmd_opts = self.cmd_opts

        cmd_opts.add_option(
            '--bash', '-b',
            action='store_const',
            const='bash',
            dest='shell',
            help='Emit completion code for bash')
        cmd_opts.add_option(
            '--zsh', '-z',
            action='store_const',
            const='zsh',
            dest='shell',
            help='Emit completion code for zsh')
        cmd_opts.add_option(
            '--fish', '-f',
            action='store_const',
            const='fish',
            dest='shell',
            help='Emit completion code for fish')

        self.parser.insert_option_group(0, cmd_opts)

    def run(self, options, args):
        """Prints the completion code of the given shell"""
        shells = COMPLETION_SCRIPTS.keys()
        shell_options = ['--' + shell for shell in sorted(shells)]
        if options.shell in shells:
            script = textwrap.dedent(
                COMPLETION_SCRIPTS.get(options.shell, '') % {
                    'prog': get_prog(),
                }
            )
            print(BASE_COMPLETION % {'script': script, 'shell': options.shell})
        else:
            # No (or unknown) shell selected: list the valid flags on stderr.
            sys.stderr.write(
                'ERROR: You must pass %s\n' % ' or '.join(shell_options)
            )
import logging | |
import os | |
import subprocess | |
from pip._internal.basecommand import Command | |
from pip._internal.configuration import Configuration, kinds | |
from pip._internal.exceptions import PipError | |
from pip._internal.locations import venv_config_file | |
from pip._internal.status_codes import ERROR, SUCCESS | |
from pip._internal.utils.misc import get_prog | |
logger = logging.getLogger(__name__) | |
class ConfigurationCommand(Command):
    """Manage local and global configuration.

    Subcommands:

    list: List the active configuration (or from the file specified)
    edit: Edit the configuration file in an editor
    get: Get the value associated with name
    set: Set the name=value
    unset: Unset the value associated with name

    If none of --user, --global and --venv are passed, a virtual
    environment configuration file is used if one is active and the file
    exists. Otherwise, all modifications happen on the to the user file by
    default.
    """

    name = 'config'
    usage = """
        %prog [<file-option>] list
        %prog [<file-option>] [--editor <editor-path>] edit

        %prog [<file-option>] get name
        %prog [<file-option>] set name value
        %prog [<file-option>] unset name
    """
    summary = "Manage local and global configuration."

    def __init__(self, *args, **kwargs):
        """Register --editor and the three mutually-exclusive file scopes."""
        super(ConfigurationCommand, self).__init__(*args, **kwargs)

        # Set lazily in run() once the target scope is known.
        self.configuration = None

        self.cmd_opts.add_option(
            '--editor',
            dest='editor',
            action='store',
            default=None,
            help=(
                'Editor to use to edit the file. Uses VISUAL or EDITOR '
                'environment variables if not provided.'
            )
        )

        self.cmd_opts.add_option(
            '--global',
            dest='global_file',
            action='store_true',
            default=False,
            help='Use the system-wide configuration file only'
        )

        self.cmd_opts.add_option(
            '--user',
            dest='user_file',
            action='store_true',
            default=False,
            help='Use the user configuration file only'
        )

        self.cmd_opts.add_option(
            '--venv',
            dest='venv_file',
            action='store_true',
            default=False,
            help='Use the virtualenv configuration file only'
        )

        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options, args):
        """Dispatch to the subcommand handler; returns SUCCESS or ERROR."""
        handlers = {
            "list": self.list_values,
            "edit": self.open_in_editor,
            "get": self.get_name,
            "set": self.set_name_value,
            "unset": self.unset_name
        }

        # Determine action
        if not args or args[0] not in handlers:
            logger.error("Need an action ({}) to perform.".format(
                ", ".join(sorted(handlers)))
            )
            return ERROR

        action = args[0]

        # Determine which configuration files are to be loaded
        #    Depends on whether the command is modifying.
        try:
            load_only = self._determine_file(
                options, need_value=(action in ["get", "set", "unset", "edit"])
            )
        except PipError as e:
            logger.error(e.args[0])
            return ERROR

        # Load a new configuration
        self.configuration = Configuration(
            isolated=options.isolated_mode, load_only=load_only
        )
        self.configuration.load()

        # Error handling happens here, not in the action-handlers.
        try:
            handlers[action](options, args[1:])
        except PipError as e:
            logger.error(e.args[0])
            return ERROR

        return SUCCESS

    def _determine_file(self, options, need_value):
        """Resolve which config scope (kinds.*) to operate on.

        Raises PipError when more than one of --user/--global/--venv is set.
        Returns None (load everything) for non-modifying actions with no
        explicit scope.
        """
        file_options = {
            kinds.USER: options.user_file,
            kinds.GLOBAL: options.global_file,
            kinds.VENV: options.venv_file
        }

        if sum(file_options.values()) == 0:
            if not need_value:
                return None
            # Default to user, unless there's a virtualenv file.
            elif os.path.exists(venv_config_file):
                return kinds.VENV
            else:
                return kinds.USER
        elif sum(file_options.values()) == 1:
            # There's probably a better expression for this.
            return [key for key in file_options if file_options[key]][0]

        raise PipError(
            "Need exactly one file to operate upon "
            "(--user, --venv, --global) to perform."
        )

    def list_values(self, options, args):
        """Handler for `config list`: print every key=value pair."""
        self._get_n_args(args, "list", n=0)

        for key, value in sorted(self.configuration.items()):
            logger.info("%s=%r", key, value)

    def get_name(self, options, args):
        """Handler for `config get name`."""
        key = self._get_n_args(args, "get [name]", n=1)
        value = self.configuration.get_value(key)

        logger.info("%s", value)

    def set_name_value(self, options, args):
        """Handler for `config set name value`; persists the change."""
        key, value = self._get_n_args(args, "set [name] [value]", n=2)
        self.configuration.set_value(key, value)

        self._save_configuration()

    def unset_name(self, options, args):
        """Handler for `config unset name`; persists the change."""
        key = self._get_n_args(args, "unset [name]", n=1)
        self.configuration.unset_value(key)

        self._save_configuration()

    def open_in_editor(self, options, args):
        """Handler for `config edit`: launch an editor on the config file."""
        editor = self._determine_editor(options)

        fname = self.configuration.get_file_to_edit()
        if fname is None:
            raise PipError("Could not determine appropriate file.")

        try:
            subprocess.check_call([editor, fname])
        except subprocess.CalledProcessError as e:
            raise PipError(
                "Editor Subprocess exited with exit code {}"
                .format(e.returncode)
            )

    def _get_n_args(self, args, example, n):
        """Helper to make sure the command got the right number of arguments
        """
        if len(args) != n:
            msg = (
                'Got unexpected number of arguments, expected {}. '
                '(example: "{} config {}")'
            ).format(n, get_prog(), example)
            raise PipError(msg)

        if n == 1:
            return args[0]
        else:
            return args

    def _save_configuration(self):
        # We successfully ran a modifying command. Need to save the
        # configuration.
        try:
            self.configuration.save()
        except Exception:
            logger.error(
                "Unable to save configuration. Please report this as a bug.",
                exc_info=1
            )
            raise PipError("Internal Error.")

    def _determine_editor(self, options):
        """Pick the editor: --editor flag, then $VISUAL, then $EDITOR."""
        if options.editor is not None:
            return options.editor
        elif "VISUAL" in os.environ:
            return os.environ["VISUAL"]
        elif "EDITOR" in os.environ:
            return os.environ["EDITOR"]
        else:
            raise PipError("Could not determine editor to use.")
from __future__ import absolute_import | |
import logging | |
import os | |
from pip._internal import cmdoptions | |
from pip._internal.basecommand import RequirementCommand | |
from pip._internal.exceptions import CommandError | |
from pip._internal.index import FormatControl | |
from pip._internal.operations.prepare import RequirementPreparer | |
from pip._internal.req import RequirementSet | |
from pip._internal.resolve import Resolver | |
from pip._internal.utils.filesystem import check_path_owner | |
from pip._internal.utils.misc import ensure_dir, normalize_path | |
from pip._internal.utils.temp_dir import TempDirectory | |
logger = logging.getLogger(__name__) | |
class DownloadCommand(RequirementCommand):
    """
    Download packages from:

    - PyPI (and other indexes) using requirement specifiers.
    - VCS project urls.
    - Local project directories.
    - Local or remote source archives.

    pip also supports downloading from "requirements files", which provide
    an easy way to specify a whole environment to be downloaded.
    """
    name = 'download'

    usage = """
      %prog [options] <requirement specifier> [package-index-options] ...
      %prog [options] -r <requirements file> [package-index-options] ...
      %prog [options] <vcs project url> ...
      %prog [options] <local project path> ...
      %prog [options] <archive url/path> ..."""

    summary = 'Download packages.'

    def __init__(self, *args, **kw):
        """Register download-specific options plus the shared index group."""
        super(DownloadCommand, self).__init__(*args, **kw)

        cmd_opts = self.cmd_opts

        # Shared option factories from cmdoptions.
        cmd_opts.add_option(cmdoptions.constraints())
        cmd_opts.add_option(cmdoptions.requirements())
        cmd_opts.add_option(cmdoptions.build_dir())
        cmd_opts.add_option(cmdoptions.no_deps())
        cmd_opts.add_option(cmdoptions.global_options())
        cmd_opts.add_option(cmdoptions.no_binary())
        cmd_opts.add_option(cmdoptions.only_binary())
        cmd_opts.add_option(cmdoptions.src())
        cmd_opts.add_option(cmdoptions.pre())
        cmd_opts.add_option(cmdoptions.no_clean())
        cmd_opts.add_option(cmdoptions.require_hashes())
        cmd_opts.add_option(cmdoptions.progress_bar())
        cmd_opts.add_option(cmdoptions.no_build_isolation())

        cmd_opts.add_option(
            '-d', '--dest', '--destination-dir', '--destination-directory',
            dest='download_dir',
            metavar='dir',
            default=os.curdir,
            help=("Download packages into <dir>."),
        )

        # The next four options restrict which wheels are acceptable; they
        # are validated together in run() (see dist_restriction_set).
        cmd_opts.add_option(
            '--platform',
            dest='platform',
            metavar='platform',
            default=None,
            help=("Only download wheels compatible with <platform>. "
                  "Defaults to the platform of the running system."),
        )

        cmd_opts.add_option(
            '--python-version',
            dest='python_version',
            metavar='python_version',
            default=None,
            help=("Only download wheels compatible with Python "
                  "interpreter version <version>. If not specified, then the "
                  "current system interpreter minor version is used. A major "
                  "version (e.g. '2') can be specified to match all "
                  "minor revs of that major version. A minor version "
                  "(e.g. '34') can also be specified."),
        )

        cmd_opts.add_option(
            '--implementation',
            dest='implementation',
            metavar='implementation',
            default=None,
            help=("Only download wheels compatible with Python "
                  "implementation <implementation>, e.g. 'pp', 'jy', 'cp', "
                  " or 'ip'. If not specified, then the current "
                  "interpreter implementation is used. Use 'py' to force "
                  "implementation-agnostic wheels."),
        )

        cmd_opts.add_option(
            '--abi',
            dest='abi',
            metavar='abi',
            default=None,
            help=("Only download wheels compatible with Python "
                  "abi <abi>, e.g. 'pypy_41'. If not specified, then the "
                  "current interpreter abi tag is used. Generally "
                  "you will need to specify --implementation, "
                  "--platform, and --python-version when using "
                  "this option."),
        )

        index_opts = cmdoptions.make_option_group(
            cmdoptions.index_group,
            self.parser,
        )

        self.parser.insert_option_group(0, index_opts)
        self.parser.insert_option_group(0, cmd_opts)

    def run(self, options, args):
        """Resolve the requirements and download them into download_dir.

        Returns the populated RequirementSet.
        """
        options.ignore_installed = True
        # editable doesn't really make sense for `pip download`, but the bowels
        # of the RequirementSet code require that property.
        options.editables = []

        if options.python_version:
            python_versions = [options.python_version]
        else:
            python_versions = None

        # Any of these flags narrows acceptable dists to binary wheels only,
        # which is incompatible with resolving sdist dependencies.
        dist_restriction_set = any([
            options.python_version,
            options.platform,
            options.abi,
            options.implementation,
        ])
        binary_only = FormatControl(set(), {':all:'})
        no_sdist_dependencies = (
            options.format_control != binary_only and
            not options.ignore_dependencies
        )
        if dist_restriction_set and no_sdist_dependencies:
            raise CommandError(
                "When restricting platform and interpreter constraints using "
                "--python-version, --platform, --abi, or --implementation, "
                "either --no-deps must be set, or --only-binary=:all: must be "
                "set and --no-binary must not be set (or must be set to "
                ":none:)."
            )

        options.src_dir = os.path.abspath(options.src_dir)
        options.download_dir = normalize_path(options.download_dir)

        ensure_dir(options.download_dir)

        with self._build_session(options) as session:
            finder = self._build_package_finder(
                options=options,
                session=session,
                platform=options.platform,
                python_versions=python_versions,
                abi=options.abi,
                implementation=options.implementation,
            )
            build_delete = (not (options.no_clean or options.build_dir))
            # Caching needs a writable directory owned by the current user;
            # otherwise disable it with a warning rather than failing.
            if options.cache_dir and not check_path_owner(options.cache_dir):
                logger.warning(
                    "The directory '%s' or its parent directory is not owned "
                    "by the current user and caching wheels has been "
                    "disabled. check the permissions and owner of that "
                    "directory. If executing pip with sudo, you may want "
                    "sudo's -H flag.",
                    options.cache_dir,
                )
                options.cache_dir = None

            with TempDirectory(
                options.build_dir, delete=build_delete, kind="download"
            ) as directory:

                requirement_set = RequirementSet(
                    require_hashes=options.require_hashes,
                )
                self.populate_requirement_set(
                    requirement_set,
                    args,
                    options,
                    finder,
                    session,
                    self.name,
                    None
                )

                preparer = RequirementPreparer(
                    build_dir=directory.path,
                    src_dir=options.src_dir,
                    download_dir=options.download_dir,
                    wheel_download_dir=None,
                    progress_bar=options.progress_bar,
                    build_isolation=options.build_isolation,
                )

                resolver = Resolver(
                    preparer=preparer,
                    finder=finder,
                    session=session,
                    wheel_cache=None,
                    use_user_site=False,
                    upgrade_strategy="to-satisfy-only",
                    force_reinstall=False,
                    ignore_dependencies=options.ignore_dependencies,
                    ignore_requires_python=False,
                    ignore_installed=True,
                    isolated=options.isolated_mode,
                )
                resolver.resolve(requirement_set)

                downloaded = ' '.join([
                    req.name for req in requirement_set.successfully_downloaded
                ])
                if downloaded:
                    logger.info('Successfully downloaded %s', downloaded)

                # Clean up
                if not options.no_clean:
                    requirement_set.cleanup_files()

        return requirement_set
from __future__ import absolute_import | |
import sys | |
from pip._internal import index | |
from pip._internal.basecommand import Command | |
from pip._internal.cache import WheelCache | |
from pip._internal.compat import stdlib_pkgs | |
from pip._internal.operations.freeze import freeze | |
DEV_PKGS = {'pip', 'setuptools', 'distribute', 'wheel'} | |
class FreezeCommand(Command):
    """
    Output installed packages in requirements format.

    packages are listed in a case-insensitive sorted order.
    """
    name = 'freeze'
    usage = """
      %prog [options]"""
    summary = 'Output installed packages in requirements format.'
    # Send log output to stderr so stdout carries only the requirements.
    log_streams = ("ext://sys.stderr", "ext://sys.stderr")

    def __init__(self, *args, **kw):
        """Register freeze-specific options."""
        super(FreezeCommand, self).__init__(*args, **kw)

        self.cmd_opts.add_option(
            '-r', '--requirement',
            dest='requirements',
            action='append',
            default=[],
            metavar='file',
            help="Use the order in the given requirements file and its "
                 "comments when generating output. This option can be "
                 "used multiple times.")
        self.cmd_opts.add_option(
            '-f', '--find-links',
            dest='find_links',
            action='append',
            default=[],
            metavar='URL',
            help='URL for finding packages, which will be added to the '
                 'output.')
        self.cmd_opts.add_option(
            '-l', '--local',
            dest='local',
            action='store_true',
            default=False,
            help='If in a virtualenv that has global access, do not output '
                 'globally-installed packages.')
        self.cmd_opts.add_option(
            '--user',
            dest='user',
            action='store_true',
            default=False,
            help='Only output packages installed in user-site.')
        self.cmd_opts.add_option(
            '--all',
            dest='freeze_all',
            action='store_true',
            help='Do not skip these packages in the output:'
                 ' %s' % ', '.join(DEV_PKGS))
        self.cmd_opts.add_option(
            '--exclude-editable',
            dest='exclude_editable',
            action='store_true',
            help='Exclude editable package from output.')

        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options, args):
        """Write one requirements line per installed package to stdout."""
        # freeze() never needs format control; pass an empty one.
        format_control = index.FormatControl(set(), set())
        wheel_cache = WheelCache(options.cache_dir, format_control)
        # Always skip stdlib-bundled distributions; skip the packaging tools
        # themselves unless --all was given.
        skip = set(stdlib_pkgs)
        if not options.freeze_all:
            skip.update(DEV_PKGS)

        freeze_kwargs = dict(
            requirement=options.requirements,
            find_links=options.find_links,
            local_only=options.local,
            user_only=options.user,
            skip_regex=options.skip_requirements_regex,
            isolated=options.isolated_mode,
            wheel_cache=wheel_cache,
            skip=skip,
            exclude_editable=options.exclude_editable,
        )

        try:
            for line in freeze(**freeze_kwargs):
                sys.stdout.write(line + '\n')
        finally:
            # Always release the wheel cache's temp resources.
            wheel_cache.cleanup()
from __future__ import absolute_import | |
import hashlib | |
import logging | |
import sys | |
from pip._internal.basecommand import Command | |
from pip._internal.status_codes import ERROR | |
from pip._internal.utils.hashes import FAVORITE_HASH, STRONG_HASHES | |
from pip._internal.utils.misc import read_chunks | |
logger = logging.getLogger(__name__) | |
class HashCommand(Command):
    """
    Compute a hash of a local package archive.

    These can be used with --hash in a requirements file to do repeatable
    installs.
    """
    name = 'hash'
    usage = '%prog [options] <file> ...'
    summary = 'Compute hashes of package archives.'
    # Hashing a local file is safe outside a virtualenv.
    ignore_require_venv = True

    def __init__(self, *args, **kw):
        """Register -a/--algorithm, restricted to the strong hash set."""
        super(HashCommand, self).__init__(*args, **kw)
        self.cmd_opts.add_option(
            '-a', '--algorithm',
            dest='algorithm',
            choices=STRONG_HASHES,
            action='store',
            default=FAVORITE_HASH,
            help='The hash algorithm to use: one of %s' %
                 ', '.join(STRONG_HASHES))
        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options, args):
        """Print `--hash=algo:digest` for each file argument.

        Returns ERROR when called with no files.
        """
        if not args:
            self.parser.print_usage(sys.stderr)
            return ERROR

        algorithm = options.algorithm
        for path in args:
            logger.info('%s:\n--hash=%s:%s',
                        path, algorithm, _hash_of_file(path, algorithm))
def _hash_of_file(path, algorithm): | |
"""Return the hash digest of a file.""" | |
with open(path, 'rb') as archive: | |
hash = hashlib.new(algorithm) | |
for chunk in read_chunks(archive): | |
hash.update(chunk) | |
return hash.hexdigest() |
from __future__ import absolute_import | |
from pip._internal.basecommand import SUCCESS, Command | |
from pip._internal.exceptions import CommandError | |
class HelpCommand(Command):
    """Show help for commands"""
    name = 'help'
    usage = """
      %prog <command>"""
    summary = 'Show help for commands.'
    ignore_require_venv = True

    def run(self, options, args):
        """Print the named command's help; suggest near-matches on typos."""
        # Imported here to avoid a circular import at module load time.
        from pip._internal.commands import commands_dict, get_similar_commands

        try:
            # 'pip help' with no args is handled by pip.__init__.parseopt()
            cmd_name = args[0]  # the command we need help for
        except IndexError:
            return SUCCESS

        if cmd_name not in commands_dict:
            guess = get_similar_commands(cmd_name)

            msg = ['unknown command "%s"' % cmd_name]
            if guess:
                msg.append('maybe you meant "%s"' % guess)

            raise CommandError(' - '.join(msg))

        command = commands_dict[cmd_name]()
        command.parser.print_help()

        return SUCCESS
from __future__ import absolute_import | |
import errno | |
import logging | |
import operator | |
import os | |
import shutil | |
from optparse import SUPPRESS_HELP | |
from pip._internal import cmdoptions | |
from pip._internal.basecommand import RequirementCommand | |
from pip._internal.cache import WheelCache | |
from pip._internal.exceptions import ( | |
CommandError, InstallationError, PreviousBuildDirError, | |
) | |
from pip._internal.locations import distutils_scheme, virtualenv_no_global | |
from pip._internal.operations.check import check_install_conflicts | |
from pip._internal.operations.prepare import RequirementPreparer | |
from pip._internal.req import RequirementSet, install_given_reqs | |
from pip._internal.resolve import Resolver | |
from pip._internal.status_codes import ERROR | |
from pip._internal.utils.filesystem import check_path_owner | |
from pip._internal.utils.misc import ensure_dir, get_installed_version | |
from pip._internal.utils.temp_dir import TempDirectory | |
from pip._internal.wheel import WheelBuilder | |
# "wheel" is an optional dependency: when it is absent, the install
# command skips building wheels before installing and falls back to
# installing from sdists/VCS directly (see the ``if wheel and
# options.cache_dir`` check in InstallCommand.run).
try:
    import wheel
except ImportError:
    wheel = None

logger = logging.getLogger(__name__)
class InstallCommand(RequirementCommand):
    """
    Install packages from:
    - PyPI (and other indexes) using requirement specifiers.
    - VCS project urls.
    - Local project directories.
    - Local or remote source archives.
    pip also supports installing from "requirements files", which provide
    an easy way to specify a whole environment to be installed.
    """
    name = 'install'
    usage = """
      %prog [options] <requirement specifier> [package-index-options] ...
      %prog [options] -r <requirements file> [package-index-options] ...
      %prog [options] [-e] <vcs project url> ...
      %prog [options] [-e] <local project path> ...
      %prog [options] <archive url/path> ..."""
    summary = 'Install packages.'

    def __init__(self, *args, **kw):
        # Register install-specific options first, then the shared
        # package-index option group (both inserted at position 0, so
        # cmd_opts ends up listed before index_opts in --help).
        super(InstallCommand, self).__init__(*args, **kw)

        cmd_opts = self.cmd_opts

        cmd_opts.add_option(cmdoptions.requirements())
        cmd_opts.add_option(cmdoptions.constraints())
        cmd_opts.add_option(cmdoptions.no_deps())
        cmd_opts.add_option(cmdoptions.pre())
        cmd_opts.add_option(cmdoptions.editable())
        cmd_opts.add_option(
            '-t', '--target',
            dest='target_dir',
            metavar='dir',
            default=None,
            help='Install packages into <dir>. '
                 'By default this will not replace existing files/folders in '
                 '<dir>. Use --upgrade to replace existing packages in <dir> '
                 'with new versions.'
        )
        cmd_opts.add_option(
            '--user',
            dest='use_user_site',
            action='store_true',
            help="Install to the Python user install directory for your "
                 "platform. Typically ~/.local/, or %APPDATA%\\Python on "
                 "Windows. (See the Python documentation for site.USER_BASE "
                 "for full details.)")
        cmd_opts.add_option(
            '--no-user',
            dest='use_user_site',
            action='store_false',
            help=SUPPRESS_HELP)
        cmd_opts.add_option(
            '--root',
            dest='root_path',
            metavar='dir',
            default=None,
            help="Install everything relative to this alternate root "
                 "directory.")
        cmd_opts.add_option(
            '--prefix',
            dest='prefix_path',
            metavar='dir',
            default=None,
            help="Installation prefix where lib, bin and other top-level "
                 "folders are placed")
        cmd_opts.add_option(cmdoptions.build_dir())
        cmd_opts.add_option(cmdoptions.src())
        cmd_opts.add_option(
            '-U', '--upgrade',
            dest='upgrade',
            action='store_true',
            help='Upgrade all specified packages to the newest available '
                 'version. The handling of dependencies depends on the '
                 'upgrade-strategy used.'
        )
        cmd_opts.add_option(
            '--upgrade-strategy',
            dest='upgrade_strategy',
            default='only-if-needed',
            choices=['only-if-needed', 'eager'],
            help='Determines how dependency upgrading should be handled '
                 '[default: %default]. '
                 '"eager" - dependencies are upgraded regardless of '
                 'whether the currently installed version satisfies the '
                 'requirements of the upgraded package(s). '
                 '"only-if-needed" - are upgraded only when they do not '
                 'satisfy the requirements of the upgraded package(s).'
        )
        cmd_opts.add_option(
            '--force-reinstall',
            dest='force_reinstall',
            action='store_true',
            help='Reinstall all packages even if they are already '
                 'up-to-date.')
        cmd_opts.add_option(
            '-I', '--ignore-installed',
            dest='ignore_installed',
            action='store_true',
            help='Ignore the installed packages (reinstalling instead).')
        cmd_opts.add_option(cmdoptions.ignore_requires_python())
        cmd_opts.add_option(cmdoptions.no_build_isolation())
        cmd_opts.add_option(cmdoptions.install_options())
        cmd_opts.add_option(cmdoptions.global_options())
        cmd_opts.add_option(
            "--compile",
            action="store_true",
            dest="compile",
            default=True,
            help="Compile Python source files to bytecode",
        )
        cmd_opts.add_option(
            "--no-compile",
            action="store_false",
            dest="compile",
            help="Do not compile Python source files to bytecode",
        )
        cmd_opts.add_option(
            "--no-warn-script-location",
            action="store_false",
            dest="warn_script_location",
            default=True,
            help="Do not warn when installing scripts outside PATH",
        )
        cmd_opts.add_option(
            "--no-warn-conflicts",
            action="store_false",
            dest="warn_about_conflicts",
            default=True,
            help="Do not warn about broken dependencies",
        )
        cmd_opts.add_option(cmdoptions.no_binary())
        cmd_opts.add_option(cmdoptions.only_binary())
        cmd_opts.add_option(cmdoptions.no_clean())
        cmd_opts.add_option(cmdoptions.require_hashes())
        cmd_opts.add_option(cmdoptions.progress_bar())

        index_opts = cmdoptions.make_option_group(
            cmdoptions.index_group,
            self.parser,
        )

        self.parser.insert_option_group(0, index_opts)
        self.parser.insert_option_group(0, cmd_opts)

    def run(self, options, args):
        """Resolve, optionally wheel-build, and install the requested
        requirements; returns the RequirementSet (or ERROR on
        EnvironmentError)."""
        cmdoptions.check_install_build_global(options)

        upgrade_strategy = "to-satisfy-only"
        if options.upgrade:
            upgrade_strategy = options.upgrade_strategy

        if options.build_dir:
            options.build_dir = os.path.abspath(options.build_dir)

        options.src_dir = os.path.abspath(options.src_dir)
        install_options = options.install_options or []
        if options.use_user_site:
            if options.prefix_path:
                raise CommandError(
                    "Can not combine '--user' and '--prefix' as they imply "
                    "different installation locations"
                )
            if virtualenv_no_global():
                raise InstallationError(
                    "Can not perform a '--user' install. User site-packages "
                    "are not visible in this virtualenv."
                )
            install_options.append('--user')
            install_options.append('--prefix=')

        # --target installs into a temp staging dir first; contents are
        # moved into the real target dir afterwards (_handle_target_dir).
        target_temp_dir = TempDirectory(kind="target")
        if options.target_dir:
            options.ignore_installed = True
            options.target_dir = os.path.abspath(options.target_dir)
            if (os.path.exists(options.target_dir) and not
                    os.path.isdir(options.target_dir)):
                raise CommandError(
                    "Target path exists but is not a directory, will not "
                    "continue."
                )

            # Create a target directory for using with the target option
            target_temp_dir.create()
            install_options.append('--home=' + target_temp_dir.path)

        global_options = options.global_options or []

        with self._build_session(options) as session:
            finder = self._build_package_finder(options, session)
            build_delete = (not (options.no_clean or options.build_dir))
            wheel_cache = WheelCache(options.cache_dir, options.format_control)

            if options.cache_dir and not check_path_owner(options.cache_dir):
                logger.warning(
                    "The directory '%s' or its parent directory is not owned "
                    "by the current user and caching wheels has been "
                    "disabled. check the permissions and owner of that "
                    "directory. If executing pip with sudo, you may want "
                    "sudo's -H flag.",
                    options.cache_dir,
                )
                options.cache_dir = None

            with TempDirectory(
                options.build_dir, delete=build_delete, kind="install"
            ) as directory:
                requirement_set = RequirementSet(
                    require_hashes=options.require_hashes,
                )

                try:
                    self.populate_requirement_set(
                        requirement_set, args, options, finder, session,
                        self.name, wheel_cache
                    )
                    preparer = RequirementPreparer(
                        build_dir=directory.path,
                        src_dir=options.src_dir,
                        download_dir=None,
                        wheel_download_dir=None,
                        progress_bar=options.progress_bar,
                        build_isolation=options.build_isolation,
                    )

                    resolver = Resolver(
                        preparer=preparer,
                        finder=finder,
                        session=session,
                        wheel_cache=wheel_cache,
                        use_user_site=options.use_user_site,
                        upgrade_strategy=upgrade_strategy,
                        force_reinstall=options.force_reinstall,
                        ignore_dependencies=options.ignore_dependencies,
                        ignore_requires_python=options.ignore_requires_python,
                        ignore_installed=options.ignore_installed,
                        isolated=options.isolated_mode,
                    )
                    resolver.resolve(requirement_set)

                    # If caching is disabled or wheel is not installed don't
                    # try to build wheels.
                    if wheel and options.cache_dir:
                        # build wheels before install.
                        wb = WheelBuilder(
                            finder, preparer, wheel_cache,
                            build_options=[], global_options=[],
                        )
                        # Ignore the result: a failed wheel will be
                        # installed from the sdist/vcs whatever.
                        wb.build(
                            requirement_set.requirements.values(),
                            session=session, autobuilding=True
                        )

                    to_install = resolver.get_installation_order(
                        requirement_set
                    )

                    # Consistency Checking of the package set we're installing.
                    should_warn_about_conflicts = (
                        not options.ignore_dependencies and
                        options.warn_about_conflicts
                    )
                    if should_warn_about_conflicts:
                        self._warn_about_conflicts(to_install)

                    # Don't warn about script install locations if
                    # --target has been specified
                    warn_script_location = options.warn_script_location
                    if options.target_dir:
                        warn_script_location = False

                    installed = install_given_reqs(
                        to_install,
                        install_options,
                        global_options,
                        root=options.root_path,
                        home=target_temp_dir.path,
                        prefix=options.prefix_path,
                        pycompile=options.compile,
                        warn_script_location=warn_script_location,
                        use_user_site=options.use_user_site,
                    )

                    possible_lib_locations = get_lib_location_guesses(
                        user=options.use_user_site,
                        home=target_temp_dir.path,
                        root=options.root_path,
                        prefix=options.prefix_path,
                        isolated=options.isolated_mode,
                    )
                    # Build the "Successfully installed pkg-1.0 ..." summary;
                    # version lookup failures are deliberately best-effort.
                    reqs = sorted(installed, key=operator.attrgetter('name'))
                    items = []
                    for req in reqs:
                        item = req.name
                        try:
                            installed_version = get_installed_version(
                                req.name, possible_lib_locations
                            )
                            if installed_version:
                                item += '-' + installed_version
                        except Exception:
                            pass
                        items.append(item)
                    installed = ' '.join(items)
                    if installed:
                        logger.info('Successfully installed %s', installed)
                except EnvironmentError as error:
                    show_traceback = (self.verbosity >= 1)

                    message = create_env_error_message(
                        error, show_traceback, options.use_user_site,
                    )
                    logger.error(message, exc_info=show_traceback)

                    return ERROR
                except PreviousBuildDirError:
                    # Keep the build dir so the user can inspect/resume it.
                    options.no_clean = True
                    raise
                finally:
                    # Clean up
                    if not options.no_clean:
                        requirement_set.cleanup_files()
                        wheel_cache.cleanup()

        if options.target_dir:
            self._handle_target_dir(
                options.target_dir, target_temp_dir, options.upgrade
            )
        return requirement_set

    def _handle_target_dir(self, target_dir, target_temp_dir, upgrade):
        """Move the contents installed under the --target staging temp dir
        into the user-requested target directory."""
        ensure_dir(target_dir)

        # Checking both purelib and platlib directories for installed
        # packages to be moved to target directory
        lib_dir_list = []

        with target_temp_dir:
            # Checking both purelib and platlib directories for installed
            # packages to be moved to target directory
            scheme = distutils_scheme('', home=target_temp_dir.path)
            purelib_dir = scheme['purelib']
            platlib_dir = scheme['platlib']
            data_dir = scheme['data']

            if os.path.exists(purelib_dir):
                lib_dir_list.append(purelib_dir)
            if os.path.exists(platlib_dir) and platlib_dir != purelib_dir:
                lib_dir_list.append(platlib_dir)
            if os.path.exists(data_dir):
                lib_dir_list.append(data_dir)

            for lib_dir in lib_dir_list:
                for item in os.listdir(lib_dir):
                    if lib_dir == data_dir:
                        # Skip data entries that shadow a lib dir entry.
                        ddir = os.path.join(data_dir, item)
                        if any(s.startswith(ddir) for s in lib_dir_list[:-1]):
                            continue
                    target_item_dir = os.path.join(target_dir, item)
                    if os.path.exists(target_item_dir):
                        if not upgrade:
                            logger.warning(
                                'Target directory %s already exists. Specify '
                                '--upgrade to force replacement.',
                                target_item_dir
                            )
                            continue
                        if os.path.islink(target_item_dir):
                            logger.warning(
                                'Target directory %s already exists and is '
                                'a link. Pip will not automatically replace '
                                'links, please remove if replacement is '
                                'desired.',
                                target_item_dir
                            )
                            continue
                        if os.path.isdir(target_item_dir):
                            shutil.rmtree(target_item_dir)
                        else:
                            os.remove(target_item_dir)

                    shutil.move(
                        os.path.join(lib_dir, item),
                        target_item_dir
                    )

    def _warn_about_conflicts(self, to_install):
        """Log a critical line for each missing or version-conflicting
        dependency among the packages about to be installed."""
        package_set, _dep_info = check_install_conflicts(to_install)
        missing, conflicting = _dep_info

        # NOTE: There is some duplication here from pip check
        for project_name in missing:
            version = package_set[project_name][0]
            for dependency in missing[project_name]:
                logger.critical(
                    "%s %s requires %s, which is not installed.",
                    project_name, version, dependency[1],
                )

        for project_name in conflicting:
            version = package_set[project_name][0]
            for dep_name, dep_version, req in conflicting[project_name]:
                logger.critical(
                    "%s %s has requirement %s, but you'll have %s %s which is "
                    "incompatible.",
                    project_name, version, req, dep_name, dep_version,
                )
def get_lib_location_guesses(*args, **kwargs):
    """Return likely library install dirs (purelib and platlib) for the
    distutils scheme built from the given arguments."""
    scheme = distutils_scheme('', *args, **kwargs)
    return [scheme[key] for key in ('purelib', 'platlib')]
def create_env_error_message(error, show_traceback, using_user_site):
    """Format an error message for an EnvironmentError

    It may occur anytime during the execution of the install command.
    """
    parts = ["Could not install packages due to an EnvironmentError"]

    # Mention the error itself only when no traceback will be shown.
    if show_traceback:
        parts.append(".")
    else:
        parts.append(": ")
        parts.append(str(error))

    # Split the error indication from the helper message (if any).
    parts[-1] += "\n"

    # Suggest useful actions to the user:
    # (1) using user site-packages or (2) verifying the permissions
    if error.errno == errno.EACCES:
        if using_user_site:
            parts.append("Check the permissions")
        else:
            parts.append("Consider using the `--user` option or "
                         "check the permissions")
        parts.append(".\n")

    return "".join(parts).strip() + "\n"
from __future__ import absolute_import | |
import json | |
import logging | |
import warnings | |
from pip._vendor import six | |
from pip._vendor.six.moves import zip_longest | |
from pip._internal.basecommand import Command | |
from pip._internal.cmdoptions import index_group, make_option_group | |
from pip._internal.exceptions import CommandError | |
from pip._internal.index import PackageFinder | |
from pip._internal.utils.deprecation import RemovedInPip11Warning | |
from pip._internal.utils.misc import ( | |
dist_is_editable, get_installed_distributions, | |
) | |
from pip._internal.utils.packaging import get_installer | |
logger = logging.getLogger(__name__) | |
class ListCommand(Command):
    """
    List installed packages, including editables.
    Packages are listed in a case-insensitive sorted order.
    """
    name = 'list'
    usage = """
      %prog [options]"""
    summary = 'List installed packages.'

    def __init__(self, *args, **kw):
        super(ListCommand, self).__init__(*args, **kw)

        cmd_opts = self.cmd_opts

        cmd_opts.add_option(
            '-o', '--outdated',
            action='store_true',
            default=False,
            help='List outdated packages')
        cmd_opts.add_option(
            '-u', '--uptodate',
            action='store_true',
            default=False,
            help='List uptodate packages')
        cmd_opts.add_option(
            '-e', '--editable',
            action='store_true',
            default=False,
            help='List editable projects.')
        cmd_opts.add_option(
            '-l', '--local',
            action='store_true',
            default=False,
            help=('If in a virtualenv that has global access, do not list '
                  'globally-installed packages.'),
        )
        self.cmd_opts.add_option(
            '--user',
            dest='user',
            action='store_true',
            default=False,
            help='Only output packages installed in user-site.')
        cmd_opts.add_option(
            '--pre',
            action='store_true',
            default=False,
            help=("Include pre-release and development versions. By default, "
                  "pip only finds stable versions."),
        )
        cmd_opts.add_option(
            '--format',
            action='store',
            dest='list_format',
            default="columns",
            choices=('legacy', 'columns', 'freeze', 'json'),
            help="Select the output format among: columns (default), freeze, "
                 "json, or legacy.",
        )
        cmd_opts.add_option(
            '--not-required',
            action='store_true',
            dest='not_required',
            help="List packages that are not dependencies of "
                 "installed packages.",
        )
        cmd_opts.add_option(
            '--exclude-editable',
            action='store_false',
            dest='include_editable',
            help='Exclude editable package from output.',
        )
        cmd_opts.add_option(
            '--include-editable',
            action='store_true',
            dest='include_editable',
            help='Include editable package from output.',
            default=True,
        )
        index_opts = make_option_group(index_group, self.parser)

        self.parser.insert_option_group(0, index_opts)
        self.parser.insert_option_group(0, cmd_opts)

    def _build_package_finder(self, options, index_urls, session):
        """
        Create a package finder appropriate to this list command.
        """
        return PackageFinder(
            find_links=options.find_links,
            index_urls=index_urls,
            allow_all_prereleases=options.pre,
            trusted_hosts=options.trusted_hosts,
            process_dependency_links=options.process_dependency_links,
            session=session,
        )

    def run(self, options, args):
        """Gather installed distributions, apply the requested filters,
        then print them in the selected format."""
        if options.list_format == "legacy":
            warnings.warn(
                "The legacy format has been deprecated and will be removed "
                "in the future.",
                RemovedInPip11Warning,
            )

        if options.outdated and options.uptodate:
            raise CommandError(
                "Options --outdated and --uptodate cannot be combined.")

        packages = get_installed_distributions(
            local_only=options.local,
            user_only=options.user,
            editables_only=options.editable,
            include_editables=options.include_editable,
        )

        if options.outdated:
            packages = self.get_outdated(packages, options)
        elif options.uptodate:
            packages = self.get_uptodate(packages, options)

        if options.not_required:
            packages = self.get_not_required(packages, options)

        self.output_package_listing(packages, options)

    def get_outdated(self, packages, options):
        # Keep only dists whose index version is newer than the installed one.
        return [
            dist for dist in self.iter_packages_latest_infos(packages, options)
            if dist.latest_version > dist.parsed_version
        ]

    def get_uptodate(self, packages, options):
        # Keep only dists whose installed version matches the index version.
        return [
            dist for dist in self.iter_packages_latest_infos(packages, options)
            if dist.latest_version == dist.parsed_version
        ]

    def get_not_required(self, packages, options):
        # A package is "not required" when no other installed package
        # declares it as a dependency.
        dep_keys = set()
        for dist in packages:
            dep_keys.update(requirement.key for requirement in dist.requires())
        return {pkg for pkg in packages if pkg.key not in dep_keys}

    def iter_packages_latest_infos(self, packages, options):
        """Yield each dist annotated with its latest index version and
        file type (wheel/sdist); dists with no candidates are skipped."""
        index_urls = [options.index_url] + options.extra_index_urls
        if options.no_index:
            logger.debug('Ignoring indexes: %s', ','.join(index_urls))
            index_urls = []

        dependency_links = []
        for dist in packages:
            if dist.has_metadata('dependency_links.txt'):
                dependency_links.extend(
                    dist.get_metadata_lines('dependency_links.txt'),
                )

        with self._build_session(options) as session:
            finder = self._build_package_finder(options, index_urls, session)
            finder.add_dependency_links(dependency_links)

            for dist in packages:
                typ = 'unknown'
                all_candidates = finder.find_all_candidates(dist.key)
                if not options.pre:
                    # Remove prereleases
                    all_candidates = [candidate for candidate in all_candidates
                                      if not candidate.version.is_prerelease]

                if not all_candidates:
                    continue
                best_candidate = max(all_candidates,
                                     key=finder._candidate_sort_key)
                remote_version = best_candidate.version
                if best_candidate.location.is_wheel:
                    typ = 'wheel'
                else:
                    typ = 'sdist'
                # This is dirty but makes the rest of the code much cleaner
                dist.latest_version = remote_version
                dist.latest_filetype = typ
                yield dist

    def output_legacy(self, dist, options):
        # One-line legacy rendering; detail grows with verbosity/editability.
        if options.verbose >= 1:
            return '%s (%s, %s, %s)' % (
                dist.project_name,
                dist.version,
                dist.location,
                get_installer(dist),
            )
        elif dist_is_editable(dist):
            return '%s (%s, %s)' % (
                dist.project_name,
                dist.version,
                dist.location,
            )
        else:
            return '%s (%s)' % (dist.project_name, dist.version)

    def output_legacy_latest(self, dist, options):
        # Legacy rendering plus the latest-version annotation (--outdated).
        return '%s - Latest: %s [%s]' % (
            self.output_legacy(dist, options),
            dist.latest_version,
            dist.latest_filetype,
        )

    def output_package_listing(self, packages, options):
        """Sort packages case-insensitively and dispatch to the renderer
        selected by --format."""
        packages = sorted(
            packages,
            key=lambda dist: dist.project_name.lower(),
        )
        if options.list_format == 'columns' and packages:
            data, header = format_for_columns(packages, options)
            self.output_package_listing_columns(data, header)
        elif options.list_format == 'freeze':
            for dist in packages:
                if options.verbose >= 1:
                    logger.info("%s==%s (%s)", dist.project_name,
                                dist.version, dist.location)
                else:
                    logger.info("%s==%s", dist.project_name, dist.version)
        elif options.list_format == 'json':
            logger.info(format_for_json(packages, options))
        elif options.list_format == "legacy":
            for dist in packages:
                if options.outdated:
                    logger.info(self.output_legacy_latest(dist, options))
                else:
                    logger.info(self.output_legacy(dist, options))

    def output_package_listing_columns(self, data, header):
        # insert the header first: we need to know the size of column names
        if len(data) > 0:
            data.insert(0, header)

        pkg_strings, sizes = tabulate(data)

        # Create and add a separator.
        if len(data) > 0:
            pkg_strings.insert(1, " ".join(map(lambda x: '-' * x, sizes)))

        for val in pkg_strings:
            logger.info(val)
def tabulate(vals):
    # From pfmoore on GitHub:
    # https://github.com/pypa/pip/issues/3651#issuecomment-216932564
    assert len(vals) > 0

    # Compute the widest cell of each column. Rows may be ragged, so pad
    # with zip_longest; a missing cell contributes len(str(None)).
    sizes = [0] * max(len(row) for row in vals)
    for row in vals:
        sizes = [max(width, len(str(cell)))
                 for width, cell in zip_longest(sizes, row)]

    # Render each row left-justified to the column widths; missing cells
    # render as the empty string.
    result = []
    for row in vals:
        cells = [str(cell).ljust(width) if cell is not None else ''
                 for width, cell in zip_longest(sizes, row)]
        result.append(" ".join(cells))

    return result, sizes
def format_for_columns(pkgs, options):
    """
    Convert the package data into something usable
    by output_package_listing_columns.
    """
    outdated = options.outdated
    verbose = options.verbose >= 1

    # Header columns depend on the listing mode and verbosity.
    if outdated:
        header = ["Package", "Version", "Latest", "Type"]
    else:
        header = ["Package", "Version"]
    if verbose or any(dist_is_editable(d) for d in pkgs):
        header.append("Location")
    if verbose:
        header.append("Installer")

    rows = []
    for dist in pkgs:
        # if we're working on the 'outdated' list, separate out the
        # latest_version and type
        row = [dist.project_name, dist.version]
        if outdated:
            row.extend([dist.latest_version, dist.latest_filetype])
        if verbose or dist_is_editable(dist):
            row.append(dist.location)
        if verbose:
            row.append(get_installer(dist))
        rows.append(row)

    return rows, header
def format_for_json(packages, options):
    """Serialize the package listing to a JSON string, adding location/
    installer under --verbose and latest-version fields under --outdated."""
    def serialize(dist):
        info = {
            'name': dist.project_name,
            'version': six.text_type(dist.version),
        }
        if options.verbose >= 1:
            info['location'] = dist.location
            info['installer'] = get_installer(dist)
        if options.outdated:
            info['latest_version'] = six.text_type(dist.latest_version)
            info['latest_filetype'] = dist.latest_filetype
        return info

    return json.dumps([serialize(dist) for dist in packages])
from __future__ import absolute_import | |
import logging | |
import sys | |
import textwrap | |
from collections import OrderedDict | |
from pip._vendor import pkg_resources | |
from pip._vendor.packaging.version import parse as parse_version | |
# NOTE: XMLRPC Client is not annotated in typeshed as on 2017-07-17, which is | |
# why we ignore the type on this import | |
from pip._vendor.six.moves import xmlrpc_client # type: ignore | |
from pip._internal.basecommand import SUCCESS, Command | |
from pip._internal.compat import get_terminal_size | |
from pip._internal.download import PipXmlrpcTransport | |
from pip._internal.exceptions import CommandError | |
from pip._internal.models import PyPI | |
from pip._internal.status_codes import NO_MATCHES_FOUND | |
from pip._internal.utils.logging import indent_log | |
logger = logging.getLogger(__name__) | |
class SearchCommand(Command):
    """Search for PyPI packages whose name or summary contains <query>."""
    name = 'search'
    usage = """
      %prog [options] <query>"""
    summary = 'Search PyPI for packages.'
    ignore_require_venv = True

    def __init__(self, *args, **kw):
        super(SearchCommand, self).__init__(*args, **kw)
        self.cmd_opts.add_option(
            '-i', '--index',
            dest='index',
            metavar='URL',
            default=PyPI.pypi_url,
            help='Base URL of Python Package Index (default %default)')
        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options, args):
        if not args:
            raise CommandError('Missing required argument (search query).')

        raw_hits = self.search(args, options)

        # Only wrap output to the terminal width when attached to a tty.
        terminal_width = None
        if sys.stdout.isatty():
            terminal_width = get_terminal_size()[0]

        print_results(transform_hits(raw_hits), terminal_width=terminal_width)
        if raw_hits:
            return SUCCESS
        return NO_MATCHES_FOUND

    def search(self, query, options):
        """Query the index's XML-RPC search endpoint, matching the query
        against both name and summary."""
        index_url = options.index
        with self._build_session(options) as session:
            transport = PipXmlrpcTransport(index_url, session)
            pypi = xmlrpc_client.ServerProxy(index_url, transport)
            return pypi.search({'name': query, 'summary': query}, 'or')
def transform_hits(hits):
    """
    The list from pypi is really a list of versions. We want a list of
    packages with the list of versions stored inline. This converts the
    list from pypi into one we can use.

    :param hits: iterable of dicts with 'name', 'summary' and 'version' keys.
    :return: list of dicts with 'name', 'summary' and a 'versions' list,
        in first-seen order.
    """
    packages = OrderedDict()
    for hit in hits:
        name = hit['name']
        summary = hit['summary']
        version = hit['version']

        # Membership test directly on the dict instead of packages.keys().
        if name not in packages:
            packages[name] = {
                'name': name,
                'summary': summary,
                'versions': [version],
            }
        else:
            packages[name]['versions'].append(version)

            # if this is the highest version, replace summary and score
            if version == highest_version(packages[name]['versions']):
                packages[name]['summary'] = summary

    return list(packages.values())
def print_results(hits, name_column_width=None, terminal_width=None):
    """Print search hits as 'name (version) - summary' lines, annotating
    packages that are already installed with INSTALLED/LATEST versions."""
    if not hits:
        return
    if name_column_width is None:
        # Width of the widest "name (version)" cell, plus padding.
        name_column_width = max([
            len(hit['name']) + len(highest_version(hit.get('versions', ['-'])))
            for hit in hits
        ]) + 4

    installed_packages = [p.project_name for p in pkg_resources.working_set]
    for hit in hits:
        name = hit['name']
        summary = hit['summary'] or ''
        latest = highest_version(hit.get('versions', ['-']))
        if terminal_width is not None:
            target_width = terminal_width - name_column_width - 5
            if target_width > 10:
                # wrap and indent summary to fit terminal
                summary = textwrap.wrap(summary, target_width)
                summary = ('\n' + ' ' * (name_column_width + 3)).join(summary)

        line = '%-*s - %s' % (name_column_width,
                              '%s (%s)' % (name, latest), summary)
        try:
            logger.info(line)
            if name in installed_packages:
                dist = pkg_resources.get_distribution(name)
                with indent_log():
                    if dist.version == latest:
                        logger.info('INSTALLED: %s (latest)', dist.version)
                    else:
                        logger.info('INSTALLED: %s', dist.version)
                        logger.info('LATEST: %s', latest)
        except UnicodeEncodeError:
            # Best-effort output: skip lines the console encoding
            # cannot represent rather than aborting the whole listing.
            pass
def highest_version(versions):
    # Pick the maximum using PEP 440 ordering (parse_version), not
    # lexicographic string order — e.g. '0.10' sorts above '0.9'.
    return max(versions, key=parse_version)
from __future__ import absolute_import | |
import logging | |
import os | |
from email.parser import FeedParser # type: ignore | |
from pip._vendor import pkg_resources | |
from pip._vendor.packaging.utils import canonicalize_name | |
from pip._internal.basecommand import Command | |
from pip._internal.status_codes import ERROR, SUCCESS | |
logger = logging.getLogger(__name__) | |
class ShowCommand(Command):
    """Show information about one or more installed packages."""
    name = 'show'
    usage = """
      %prog [options] <package> ..."""
    summary = 'Show information about installed packages.'
    ignore_require_venv = True

    def __init__(self, *args, **kw):
        super(ShowCommand, self).__init__(*args, **kw)
        self.cmd_opts.add_option(
            '-f', '--files',
            dest='files',
            action='store_true',
            default=False,
            help='Show the full list of installed files for each package.')
        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options, args):
        if not args:
            logger.warning('ERROR: Please provide a package name or names.')
            return ERROR

        # print_results reports whether every queried package was found.
        results = search_packages_info(args)
        found_all = print_results(results, list_files=options.files,
                                  verbose=options.verbose)
        if not found_all:
            return ERROR
        return SUCCESS
def search_packages_info(query):
    """
    Gather details from installed distributions. Print distribution name,
    version, location, and installed files. Installed files requires a
    pip generated 'installed-files.txt' in the distributions '.egg-info'
    directory.

    :param query: iterable of package names (any case/spelling; names are
        canonicalized before lookup).
    :yields: one dict per queried package that is actually installed, with
        keys such as 'name', 'version', 'location', 'requires', and the
        metadata fields parsed below.
    """
    # Map canonical name -> distribution for everything in the working set.
    installed = {}
    for p in pkg_resources.working_set:
        installed[canonicalize_name(p.project_name)] = p
    query_names = [canonicalize_name(name) for name in query]
    for dist in [installed[pkg] for pkg in query_names if pkg in installed]:
        package = {
            'name': dist.project_name,
            'version': dist.version,
            'location': dist.location,
            'requires': [dep.project_name for dep in dist.requires()],
        }
        file_list = None
        metadata = None
        if isinstance(dist, pkg_resources.DistInfoDistribution):
            # RECORDs should be part of .dist-info metadatas
            if dist.has_metadata('RECORD'):
                lines = dist.get_metadata_lines('RECORD')
                # Each RECORD row is "path,hash,size"; only the path matters.
                paths = [line.split(',')[0] for line in lines]
                paths = [os.path.join(dist.location, p) for p in paths]
                file_list = [os.path.relpath(p, dist.location) for p in paths]
            if dist.has_metadata('METADATA'):
                metadata = dist.get_metadata('METADATA')
        else:
            # Otherwise use pip's log for .egg-info's
            if dist.has_metadata('installed-files.txt'):
                paths = dist.get_metadata_lines('installed-files.txt')
                paths = [os.path.join(dist.egg_info, p) for p in paths]
                file_list = [os.path.relpath(p, dist.location) for p in paths]
            if dist.has_metadata('PKG-INFO'):
                metadata = dist.get_metadata('PKG-INFO')
        if dist.has_metadata('entry_points.txt'):
            entry_points = dist.get_metadata_lines('entry_points.txt')
            package['entry_points'] = entry_points
        if dist.has_metadata('INSTALLER'):
            for line in dist.get_metadata_lines('INSTALLER'):
                if line.strip():
                    package['installer'] = line.strip()
                    break
        # BUGFIX: a distribution with neither METADATA nor PKG-INFO used to
        # crash below on FeedParser.feed(None) and metadata.splitlines();
        # treat missing metadata as empty text instead.
        if metadata is None:
            metadata = ''
        # @todo: Should pkg_resources.Distribution have a
        # `get_pkg_info` method?
        feed_parser = FeedParser()
        feed_parser.feed(metadata)
        pkg_info_dict = feed_parser.close()
        for key in ('metadata-version', 'summary',
                    'home-page', 'author', 'author-email', 'license'):
            package[key] = pkg_info_dict.get(key)
        # It looks like FeedParser cannot deal with repeated headers
        classifiers = []
        for line in metadata.splitlines():
            if line.startswith('Classifier: '):
                classifiers.append(line[len('Classifier: '):])
        package['classifiers'] = classifiers
        if file_list:
            package['files'] = sorted(file_list)
        yield package
def print_results(distributions, list_files=False, verbose=False):
    """Log the details of each distribution mapping in *distributions*.

    Returns True when at least one distribution was printed.
    """
    printed_any = False
    for index, dist in enumerate(distributions):
        printed_any = True
        if index > 0:
            logger.info("---")

        name = dist.get('name', '')
        # Every installed package whose requirements mention this one.
        required_by = [
            pkg.project_name for pkg in pkg_resources.working_set
            if name in [required.name for required in pkg.requires()]
        ]

        logger.info("Name: %s", name)
        # The simple one-line header fields, in display order.
        for label, key in (
            ("Version", 'version'),
            ("Summary", 'summary'),
            ("Home-page", 'home-page'),
            ("Author", 'author'),
            ("Author-email", 'author-email'),
            ("License", 'license'),
            ("Location", 'location'),
        ):
            logger.info("%s: %s", label, dist.get(key, ''))
        logger.info("Requires: %s", ', '.join(dist.get('requires', [])))
        logger.info("Required-by: %s", ', '.join(required_by))

        if verbose:
            logger.info("Metadata-Version: %s",
                        dist.get('metadata-version', ''))
            logger.info("Installer: %s", dist.get('installer', ''))
            logger.info("Classifiers:")
            for classifier in dist.get('classifiers', []):
                logger.info("  %s", classifier)
            logger.info("Entry-points:")
            for entry in dist.get('entry_points', []):
                logger.info("  %s", entry.strip())
        if list_files:
            logger.info("Files:")
            for line in dist.get('files', []):
                logger.info("  %s", line.strip())
            if "files" not in dist:
                logger.info("Cannot locate installed-files.txt")
    return printed_any
from __future__ import absolute_import | |
from pip._vendor.packaging.utils import canonicalize_name | |
from pip._internal.basecommand import Command | |
from pip._internal.exceptions import InstallationError | |
from pip._internal.req import InstallRequirement, parse_requirements | |
class UninstallCommand(Command):
    """
    Uninstall packages.
    pip is able to uninstall most installed packages. Known exceptions are:
    - Pure distutils packages installed with ``python setup.py install``, which
      leave behind no metadata to determine what files were installed.
    - Script wrappers installed by ``python setup.py develop``.
    """
    name = 'uninstall'
    usage = """
      %prog [options] <package> ...
      %prog [options] -r <requirements file> ..."""
    summary = 'Uninstall packages.'

    def __init__(self, *args, **kw):
        super(UninstallCommand, self).__init__(*args, **kw)
        self.cmd_opts.add_option(
            '-r', '--requirement',
            dest='requirements',
            action='append',
            default=[],
            metavar='file',
            help='Uninstall all the packages listed in the given requirements '
                 'file. This option can be used multiple times.',
        )
        self.cmd_opts.add_option(
            '-y', '--yes',
            dest='yes',
            action='store_true',
            help="Don't ask for confirmation of uninstall deletions.")
        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options, args):
        with self._build_session(options) as session:
            # Keyed by canonical name so duplicates collapse to one entry.
            targets = {}

            # Requirements named directly on the command line.
            for name in args:
                req = InstallRequirement.from_line(
                    name, isolated=options.isolated_mode,
                )
                if req.name:
                    targets[canonicalize_name(req.name)] = req

            # Requirements pulled in via -r/--requirement files.
            for filename in options.requirements:
                parsed = parse_requirements(
                    filename,
                    options=options,
                    session=session)
                for req in parsed:
                    if req.name:
                        targets[canonicalize_name(req.name)] = req

            if not targets:
                raise InstallationError(
                    'You must give at least one requirement to %(name)s (see '
                    '"pip help %(name)s")' % dict(name=self.name)
                )

            for req in targets.values():
                pathset = req.uninstall(
                    auto_confirm=options.yes, verbose=self.verbosity > 0,
                )
                # uninstall() may return None (nothing to remove).
                if pathset:
                    pathset.commit()
# -*- coding: utf-8 -*- | |
from __future__ import absolute_import | |
import logging | |
import os | |
from pip._internal import cmdoptions | |
from pip._internal.basecommand import RequirementCommand | |
from pip._internal.cache import WheelCache | |
from pip._internal.exceptions import CommandError, PreviousBuildDirError | |
from pip._internal.operations.prepare import RequirementPreparer | |
from pip._internal.req import RequirementSet | |
from pip._internal.resolve import Resolver | |
from pip._internal.utils.temp_dir import TempDirectory | |
from pip._internal.wheel import WheelBuilder | |
logger = logging.getLogger(__name__) | |
class WheelCommand(RequirementCommand):
    """
    Build Wheel archives for your requirements and dependencies.
    Wheel is a built-package format, and offers the advantage of not
    recompiling your software during every install. For more details, see the
    wheel docs: https://wheel.readthedocs.io/en/latest/
    Requirements: setuptools>=0.8, and wheel.
    'pip wheel' uses the bdist_wheel setuptools extension from the wheel
    package to build individual wheels.
    """
    name = 'wheel'
    usage = """
      %prog [options] <requirement specifier> ...
      %prog [options] -r <requirements file> ...
      %prog [options] [-e] <vcs project url> ...
      %prog [options] [-e] <local project path> ...
      %prog [options] <archive url/path> ..."""
    summary = 'Build wheels from your requirements.'

    def __init__(self, *args, **kw):
        # Register command-specific options; the insertion order of option
        # groups below determines --help ordering, so keep it as-is.
        super(WheelCommand, self).__init__(*args, **kw)
        cmd_opts = self.cmd_opts
        cmd_opts.add_option(
            '-w', '--wheel-dir',
            dest='wheel_dir',
            metavar='dir',
            default=os.curdir,
            help=("Build wheels into <dir>, where the default is the "
                  "current working directory."),
        )
        cmd_opts.add_option(cmdoptions.no_binary())
        cmd_opts.add_option(cmdoptions.only_binary())
        cmd_opts.add_option(
            '--build-option',
            dest='build_options',
            metavar='options',
            action='append',
            help="Extra arguments to be supplied to 'setup.py bdist_wheel'.",
        )
        # Shared options reused from cmdoptions (same behavior as install).
        cmd_opts.add_option(cmdoptions.no_build_isolation())
        cmd_opts.add_option(cmdoptions.constraints())
        cmd_opts.add_option(cmdoptions.editable())
        cmd_opts.add_option(cmdoptions.requirements())
        cmd_opts.add_option(cmdoptions.src())
        cmd_opts.add_option(cmdoptions.ignore_requires_python())
        cmd_opts.add_option(cmdoptions.no_deps())
        cmd_opts.add_option(cmdoptions.build_dir())
        cmd_opts.add_option(cmdoptions.progress_bar())
        cmd_opts.add_option(
            '--global-option',
            dest='global_options',
            action='append',
            metavar='options',
            help="Extra global options to be supplied to the setup.py "
                 "call before the 'bdist_wheel' command.")
        cmd_opts.add_option(
            '--pre',
            action='store_true',
            default=False,
            help=("Include pre-release and development versions. By default, "
                  "pip only finds stable versions."),
        )
        cmd_opts.add_option(cmdoptions.no_clean())
        cmd_opts.add_option(cmdoptions.require_hashes())
        index_opts = cmdoptions.make_option_group(
            cmdoptions.index_group,
            self.parser,
        )
        self.parser.insert_option_group(0, index_opts)
        self.parser.insert_option_group(0, cmd_opts)

    def run(self, options, args):
        # Rejects --build-option/--global-option combined with options that
        # would make them unsafe (raises CommandError).
        cmdoptions.check_install_build_global(options)
        index_urls = [options.index_url] + options.extra_index_urls
        if options.no_index:
            logger.debug('Ignoring indexes: %s', ','.join(index_urls))
            index_urls = []
        if options.build_dir:
            options.build_dir = os.path.abspath(options.build_dir)
        options.src_dir = os.path.abspath(options.src_dir)
        with self._build_session(options) as session:
            finder = self._build_package_finder(options, session)
            # Only auto-delete the build dir when it is a temp dir we own
            # (neither --no-clean nor an explicit --build-dir was given).
            build_delete = (not (options.no_clean or options.build_dir))
            wheel_cache = WheelCache(options.cache_dir, options.format_control)
            with TempDirectory(
                options.build_dir, delete=build_delete, kind="wheel"
            ) as directory:
                requirement_set = RequirementSet(
                    require_hashes=options.require_hashes,
                )
                try:
                    self.populate_requirement_set(
                        requirement_set, args, options, finder, session,
                        self.name, wheel_cache
                    )
                    # Built wheels land directly in options.wheel_dir.
                    preparer = RequirementPreparer(
                        build_dir=directory.path,
                        src_dir=options.src_dir,
                        download_dir=None,
                        wheel_download_dir=options.wheel_dir,
                        progress_bar=options.progress_bar,
                        build_isolation=options.build_isolation,
                    )
                    # ignore_installed=True: wheels are built even for
                    # requirements already satisfied in the environment.
                    resolver = Resolver(
                        preparer=preparer,
                        finder=finder,
                        session=session,
                        wheel_cache=wheel_cache,
                        use_user_site=False,
                        upgrade_strategy="to-satisfy-only",
                        force_reinstall=False,
                        ignore_dependencies=options.ignore_dependencies,
                        ignore_requires_python=options.ignore_requires_python,
                        ignore_installed=True,
                        isolated=options.isolated_mode,
                    )
                    resolver.resolve(requirement_set)
                    # build wheels
                    wb = WheelBuilder(
                        finder, preparer, wheel_cache,
                        build_options=options.build_options or [],
                        global_options=options.global_options or [],
                        no_clean=options.no_clean,
                    )
                    wheels_built_successfully = wb.build(
                        requirement_set.requirements.values(), session=session,
                    )
                    if not wheels_built_successfully:
                        raise CommandError(
                            "Failed to build one or more wheels"
                        )
                except PreviousBuildDirError:
                    # Keep the pre-existing build dir for inspection.
                    options.no_clean = True
                    raise
                finally:
                    if not options.no_clean:
                        requirement_set.cleanup_files()
                    wheel_cache.cleanup()
"""Stuff that differs in different Python versions and platform | |
distributions.""" | |
from __future__ import absolute_import, division | |
import codecs | |
import locale | |
import logging | |
import os | |
import shutil | |
import sys | |
from pip._vendor.six import text_type | |
try: | |
import ipaddress | |
except ImportError: | |
try: | |
from pip._vendor import ipaddress # type: ignore | |
except ImportError: | |
import ipaddr as ipaddress # type: ignore | |
ipaddress.ip_address = ipaddress.IPAddress | |
ipaddress.ip_network = ipaddress.IPNetwork | |
__all__ = [ | |
"ipaddress", "uses_pycache", "console_to_str", "native_str", | |
"get_path_uid", "stdlib_pkgs", "WINDOWS", "samefile", "get_terminal_size", | |
] | |
logger = logging.getLogger(__name__) | |
# Feature-detect byte-compiled cache support (PEP 3147 __pycache__).
# uses_pycache / cache_from_source are exported for callers that need to
# locate or remove compiled artifacts.
if sys.version_info >= (3, 4):
    uses_pycache = True
    from importlib.util import cache_from_source
else:
    import imp
    try:
        # Present on CPython 2.7/3.2-3.3; may be missing on alternative
        # implementations, hence the AttributeError guard.
        cache_from_source = imp.cache_from_source  # type: ignore
    except AttributeError:
        # does not use __pycache__
        cache_from_source = None
    uses_pycache = cache_from_source is not None
# Name of the codec error handler used when decoding undecodable subprocess
# output: each bad byte becomes a literal \xNN escape.
if sys.version_info >= (3, 5):
    # Python 3.5+ supports "backslashreplace" for decoding as well as
    # encoding, so the built-in handler can be used directly.
    backslashreplace_decode = "backslashreplace"
else:
    # In version 3.4 and older, backslashreplace exists
    # but does not support use for decoding.
    # We implement our own replace handler for this
    # situation, so that we can consistently use
    # backslash replacement for all versions.
    def backslashreplace_decode_fn(err):
        # err is a UnicodeDecodeError; take the offending byte span.
        raw_bytes = (err.object[i] for i in range(err.start, err.end))
        if sys.version_info[0] == 2:
            # Python 2 gave us characters - convert to numeric bytes
            raw_bytes = (ord(b) for b in raw_bytes)
        # Return (replacement text, position to resume decoding at).
        return u"".join(u"\\x%x" % c for c in raw_bytes), err.end
    codecs.register_error(
        "backslashreplace_decode",
        backslashreplace_decode_fn,
    )
    backslashreplace_decode = "backslashreplace_decode"
def console_to_str(data):
    """Decode subprocess output *data* (bytes) into a printable str.

    The bytes are assumed to use the locale's preferred encoding; when they
    do not decode cleanly the user is warned and bad bytes are replaced
    with backslash escapes. The result is also round-tripped through the
    stderr encoding so it can be printed without raising.
    """
    # Preferred encoding for the locale, falling back to UTF-8 when the
    # locale reports nothing useful or plain ASCII.
    encoding = locale.getpreferredencoding()
    if not encoding or codecs.lookup(encoding).name == "ascii":
        encoding = "utf-8"

    try:
        text = data.decode(encoding)
    except UnicodeDecodeError:
        logger.warning(
            "Subprocess output does not appear to be encoded as %s",
            encoding,
        )
        # backslashreplace_decode is this module's decode-capable variant
        # of the "backslashreplace" error handler.
        text = data.decode(encoding, errors=backslashreplace_decode)

    # Make the result safely printable: encode to the output encoding with
    # replacement of unencodable characters, then decode again. We use
    # stderr's encoding because it is less likely to be redirected; the
    # double getattr guards against sys.__stderr__ being absent or lacking
    # an encoding attribute, in which case this step is skipped entirely.
    stderr = getattr(sys, "__stderr__", None)
    output_encoding = getattr(stderr, "encoding", None)
    if output_encoding:
        text = text.encode(output_encoding, errors="backslashreplace")
        text = text.decode(output_encoding)
    return text
if sys.version_info >= (3,):
    def native_str(s, replace=False):
        """Coerce *s* to the native str type (text on Python 3)."""
        if not isinstance(s, bytes):
            return s
        errors = 'replace' if replace else 'strict'
        return s.decode('utf-8', errors)
else:
    def native_str(s, replace=False):
        """Coerce *s* to the native str type (bytes on Python 2)."""
        # *replace* is ignored here -- encoding text to UTF-8 cannot fail.
        if isinstance(s, text_type):
            return s.encode('utf-8')
        return s
def get_path_uid(path):
    """
    Return path's uid.

    Does not follow symlinks:
        https://github.com/pypa/pip/pull/935#discussion_r5307003
    Placed this function in compat due to differences on AIX and
    Jython, that should eventually go away.

    :raises OSError: When path is a symlink or can't be read.
    """
    if hasattr(os, 'O_NOFOLLOW'):
        fd = os.open(path, os.O_RDONLY | os.O_NOFOLLOW)
        try:
            # BUGFIX: close the descriptor even if fstat raises; the
            # original leaked fd on an fstat failure.
            file_uid = os.fstat(fd).st_uid
        finally:
            os.close(fd)
    else:  # AIX and Jython
        # WARNING: time of check vulnerability, but best we can do w/o NOFOLLOW
        if not os.path.islink(path):
            # older versions of Jython don't have `os.fstat`
            file_uid = os.stat(path).st_uid
        else:
            # raise OSError for parity with os.O_NOFOLLOW above
            raise OSError(
                "%s is a symlink; Will not return uid for symlinks" % path
            )
    return file_uid
def expanduser(path):
    """Expand ~ and ~user constructions.

    Includes a workaround for http://bugs.python.org/issue14768
    """
    result = os.path.expanduser(path)
    # When HOME is '/', '~/x' can expand to '//x' on affected Pythons;
    # collapse the doubled leading slash (see the bug linked above).
    if result.startswith('//') and path.startswith('~/'):
        result = result[1:]
    return result
# packages in the stdlib that may have installation metadata, but should not be
# considered 'installed'. this theoretically could be determined based on
# dist.location (py27:`sysconfig.get_paths()['stdlib']`,
# py26:sysconfig.get_config_vars('LIBDEST')), but fear platform variation may
# make this ineffective, so hard-coding
stdlib_pkgs = {"python", "wsgiref", "argparse"}
# windows detection, covers cpython and ironpython
# (IronPython reports sys.platform == 'cli'; os.name == 'nt' confirms Windows)
WINDOWS = (sys.platform.startswith("win") or
           (sys.platform == 'cli' and os.name == 'nt'))
def samefile(file1, file2):
    """Provide an alternative for os.path.samefile on Windows/Python2"""
    if not hasattr(os.path, 'samefile'):
        # Fall back to comparing normalized absolute paths.
        first = os.path.normcase(os.path.abspath(file1))
        second = os.path.normcase(os.path.abspath(file2))
        return first == second
    return os.path.samefile(file1, file2)
if hasattr(shutil, 'get_terminal_size'):
    def get_terminal_size():
        """
        Returns a tuple (x, y) representing the width(x) and the height(y)
        in characters of the terminal window.
        """
        return tuple(shutil.get_terminal_size())
else:
    def get_terminal_size():
        """
        Returns a tuple (x, y) representing the width(x) and the height(y)
        in characters of the terminal window.
        """
        def ioctl_GWINSZ(fd):
            # Query the kernel for the window size of the tty on *fd*;
            # returns (rows, cols) or None when unavailable.
            try:
                import fcntl
                import termios
                import struct
                cr = struct.unpack_from(
                    'hh',
                    fcntl.ioctl(fd, termios.TIOCGWINSZ, '12345678')
                )
            # BUGFIX: bare `except:` also swallowed KeyboardInterrupt and
            # SystemExit; catch Exception instead.
            except Exception:
                return None
            if cr == (0, 0):
                return None
            return cr
        # Try stdin, stdout, stderr in turn.
        cr = ioctl_GWINSZ(0) or ioctl_GWINSZ(1) or ioctl_GWINSZ(2)
        if not cr:
            # Fall back to the controlling terminal; close the fd even if
            # the ioctl path errors (the original could leak it).
            try:
                fd = os.open(os.ctermid(), os.O_RDONLY)
                try:
                    cr = ioctl_GWINSZ(fd)
                finally:
                    os.close(fd)
            except Exception:
                pass
        if not cr:
            # Last resort: environment, then the classic 80x25 default.
            cr = (os.environ.get('LINES', 25), os.environ.get('COLUMNS', 80))
        return int(cr[1]), int(cr[0])
"""Configuration management setup | |
Some terminology: | |
- name | |
As written in config files. | |
- value | |
Value associated with a name | |
- key | |
Name combined with it's section (section.name) | |
- variant | |
A single word describing where the configuration key-value pair came from | |
""" | |
import locale | |
import logging | |
import os | |
from pip._vendor import six | |
from pip._vendor.six.moves import configparser | |
from pip._internal.exceptions import ConfigurationError | |
from pip._internal.locations import ( | |
legacy_config_file, new_config_file, running_under_virtualenv, | |
site_config_files, venv_config_file, | |
) | |
from pip._internal.utils.misc import ensure_dir, enum | |
from pip._internal.utils.typing import MYPY_CHECK_RUNNING | |
if MYPY_CHECK_RUNNING: | |
from typing import Any, Dict, Iterable, List, NewType, Optional, Tuple | |
RawConfigParser = configparser.RawConfigParser # Shorthand | |
Kind = NewType("Kind", str) | |
logger = logging.getLogger(__name__) | |
# NOTE: Maybe use the optionx attribute to normalize keynames. | |
def _normalize_name(name): | |
# type: (str) -> str | |
"""Make a name consistent regardless of source (environment or file) | |
""" | |
name = name.lower().replace('_', '-') | |
if name.startswith('--'): | |
name = name[2:] # only prefer long opts | |
return name | |
def _disassemble_key(name): | |
# type: (str) -> List[str] | |
return name.split(".", 1) | |
# The kinds of configurations there are.
# NOTE: declaration order here carries no precedence meaning; override
# priority is defined separately by Configuration._override_order.
kinds = enum(
    USER="user", # User Specific
    GLOBAL="global", # System Wide
    VENV="venv", # Virtual Environment Specific
    ENV="env", # from PIP_CONFIG_FILE
    ENV_VAR="env-var", # from Environment Variables
)
class Configuration(object):
    """Handles management of configuration.

    Provides an interface to accessing and managing configuration files.
    This class provides an API that takes "section.key-name" style
    keys and stores the value associated with it as "key-name" under the
    section "section".
    This allows for a clean interface wherein both the section and the
    key-name are preserved in an easy to manage form in the configuration files
    and the data stored is also nice.
    """
    def __init__(self, isolated, load_only=None):
        # type: (bool, Kind) -> None
        super(Configuration, self).__init__()
        # Only these variants correspond to files a user may edit.
        _valid_load_only = [kinds.USER, kinds.GLOBAL, kinds.VENV, None]
        if load_only not in _valid_load_only:
            raise ConfigurationError(
                "Got invalid value for load_only - should be one of {}".format(
                    ", ".join(map(repr, _valid_load_only[:-1]))
                )
            )
        self.isolated = isolated # type: bool
        self.load_only = load_only # type: Optional[Kind]
        # The order here determines the override order.
        self._override_order = [
            kinds.GLOBAL, kinds.USER, kinds.VENV, kinds.ENV, kinds.ENV_VAR
        ]
        # PIP_VERSION / PIP_HELP are CLI switches, never configuration.
        self._ignore_env_names = ["version", "help"]
        # Because we keep track of where we got the data from
        self._parsers = {
            variant: [] for variant in self._override_order
        } # type: Dict[Kind, List[Tuple[str, RawConfigParser]]]
        self._config = {
            variant: {} for variant in self._override_order
        } # type: Dict[Kind, Dict[str, Any]]
        self._modified_parsers = [] # type: List[Tuple[str, RawConfigParser]]
    def load(self):
        # type: () -> None
        """Loads configuration from configuration files and environment
        """
        self._load_config_files()
        if not self.isolated:
            self._load_environment_vars()
    def get_file_to_edit(self):
        # type: () -> Optional[str]
        """Returns the file with highest priority in configuration
        """
        assert self.load_only is not None, \
            "Need to be specified a file to be editing"
        try:
            return self._get_parser_to_modify()[0]
        except IndexError:
            return None
    def items(self):
        # type: () -> Iterable[Tuple[str, Any]]
        """Returns key-value pairs like dict.items() representing the loaded
        configuration
        """
        return self._dictionary.items()
    def get_value(self, key):
        # type: (str) -> Any
        """Get a value from the configuration.
        """
        try:
            return self._dictionary[key]
        except KeyError:
            raise ConfigurationError("No such key - {}".format(key))
    def set_value(self, key, value):
        # type: (str, Any) -> None
        """Modify a value in the configuration.
        """
        self._ensure_have_load_only()
        fname, parser = self._get_parser_to_modify()
        if parser is not None:
            section, name = _disassemble_key(key)
            # Modify the parser and the configuration
            if not parser.has_section(section):
                parser.add_section(section)
            parser.set(section, name, value)
        # The in-memory view is updated even when there is no parser.
        self._config[self.load_only][key] = value
        self._mark_as_modified(fname, parser)
    def unset_value(self, key):
        # type: (str) -> None
        """Unset a value in the configuration.
        """
        self._ensure_have_load_only()
        if key not in self._config[self.load_only]:
            raise ConfigurationError("No such key - {}".format(key))
        fname, parser = self._get_parser_to_modify()
        if parser is not None:
            section, name = _disassemble_key(key)
            # Remove the key in the parser
            modified_something = False
            if parser.has_section(section):
                # Returns whether the option was removed or not
                modified_something = parser.remove_option(section, name)
            if modified_something:
                # name removed from parser, section may now be empty
                section_iter = iter(parser.items(section))
                try:
                    val = six.next(section_iter)
                except StopIteration:
                    val = None
                if val is None:
                    parser.remove_section(section)
                self._mark_as_modified(fname, parser)
            else:
                # The key was tracked in memory but missing from the parser:
                # internal state is inconsistent.
                raise ConfigurationError(
                    "Fatal Internal error [id=1]. Please report as a bug."
                )
        del self._config[self.load_only][key]
    def save(self):
        # type: () -> None
        """Save the current in-memory state.
        """
        self._ensure_have_load_only()
        for fname, parser in self._modified_parsers:
            logger.info("Writing to %s", fname)
            # Ensure directory exists.
            ensure_dir(os.path.dirname(fname))
            with open(fname, "w") as f:
                parser.write(f) # type: ignore
    #
    # Private routines
    #
    def _ensure_have_load_only(self):
        # type: () -> None
        if self.load_only is None:
            raise ConfigurationError("Needed a specific file to be modifying.")
        logger.debug("Will be working with %s variant only", self.load_only)
    @property
    def _dictionary(self):
        # type: () -> Dict[str, Any]
        """A dictionary representing the loaded configuration.
        """
        # NOTE: Dictionaries are not populated if not loaded. So, conditionals
        # are not needed here.
        retval = {}
        # Later variants in _override_order win on key collisions.
        for variant in self._override_order:
            retval.update(self._config[variant])
        return retval
    def _load_config_files(self):
        # type: () -> None
        """Loads configuration from configuration files
        """
        config_files = dict(self._iter_config_files())
        if config_files[kinds.ENV][0:1] == [os.devnull]:
            logger.debug(
                "Skipping loading configuration files due to "
                "environment's PIP_CONFIG_FILE being os.devnull"
            )
            return
        for variant, files in config_files.items():
            for fname in files:
                # If there's specific variant set in `load_only`, load only
                # that variant, not the others.
                if self.load_only is not None and variant != self.load_only:
                    logger.debug(
                        "Skipping file '%s' (variant: %s)", fname, variant
                    )
                    continue
                parser = self._load_file(variant, fname)
                # Keeping track of the parsers used
                self._parsers[variant].append((fname, parser))
    def _load_file(self, variant, fname):
        # type: (Kind, str) -> RawConfigParser
        logger.debug("For variant '%s', will try loading '%s'", variant, fname)
        parser = self._construct_parser(fname)
        for section in parser.sections():
            items = parser.items(section)
            self._config[variant].update(self._normalized_keys(section, items))
        return parser
    def _construct_parser(self, fname):
        # type: (str) -> RawConfigParser
        parser = configparser.RawConfigParser()
        # If there is no such file, don't bother reading it but create the
        # parser anyway, to hold the data.
        # Doing this is useful when modifying and saving files, where we don't
        # need to construct a parser.
        if os.path.exists(fname):
            try:
                parser.read(fname)
            except UnicodeDecodeError:
                raise ConfigurationError((
                    "ERROR: "
                    "Configuration file contains invalid %s characters.\n"
                    "Please fix your configuration, located at %s\n"
                ) % (locale.getpreferredencoding(False), fname))
        return parser
    def _load_environment_vars(self):
        # type: () -> None
        """Loads configuration from environment variables
        """
        # Environment variables are filed under the pseudo-section ":env:".
        self._config[kinds.ENV_VAR].update(
            self._normalized_keys(":env:", self._get_environ_vars())
        )
    def _normalized_keys(self, section, items):
        # type: (str, Iterable[Tuple[str, Any]]) -> Dict[str, Any]
        """Normalizes items to construct a dictionary with normalized keys.
        This routine is where the names become keys and are made the same
        regardless of source - configuration files or environment.
        """
        normalized = {}
        for name, val in items:
            key = section + "." + _normalize_name(name)
            normalized[key] = val
        return normalized
    def _get_environ_vars(self):
        # type: () -> Iterable[Tuple[str, str]]
        """Returns a generator with all environmental vars with prefix PIP_"""
        for key, val in os.environ.items():
            should_be_yielded = (
                key.startswith("PIP_") and
                key[4:].lower() not in self._ignore_env_names
            )
            if should_be_yielded:
                # Yield the name without the PIP_ prefix, lower-cased.
                yield key[4:].lower(), val
    # XXX: This is patched in the tests.
    def _iter_config_files(self):
        # type: () -> Iterable[Tuple[Kind, List[str]]]
        """Yields variant and configuration files associated with it.
        This should be treated like items of a dictionary.
        """
        # SMELL: Move the conditions out of this function
        # environment variables have the lowest priority
        config_file = os.environ.get('PIP_CONFIG_FILE', None)
        if config_file is not None:
            yield kinds.ENV, [config_file]
        else:
            yield kinds.ENV, []
        # at the base we have any global configuration
        yield kinds.GLOBAL, list(site_config_files)
        # per-user configuration next
        should_load_user_config = not self.isolated and not (
            config_file and os.path.exists(config_file)
        )
        if should_load_user_config:
            # The legacy config file is overridden by the new config file
            yield kinds.USER, [legacy_config_file, new_config_file]
        # finally virtualenv configuration first trumping others
        if running_under_virtualenv():
            yield kinds.VENV, [venv_config_file]
    def _get_parser_to_modify(self):
        # type: () -> Tuple[str, RawConfigParser]
        # Determine which parser to modify
        parsers = self._parsers[self.load_only]
        if not parsers:
            # This should not happen if everything works correctly.
            raise ConfigurationError(
                "Fatal Internal error [id=2]. Please report as a bug."
            )
        # Use the highest priority parser.
        return parsers[-1]
    # XXX: This is patched in the tests.
    def _mark_as_modified(self, fname, parser):
        # type: (str, RawConfigParser) -> None
        file_parser_tuple = (fname, parser)
        # Deduplicate: a parser only needs to be written back once.
        if file_parser_tuple not in self._modified_parsers:
            self._modified_parsers.append(file_parser_tuple)
from __future__ import absolute_import | |
import cgi | |
import email.utils | |
import getpass | |
import json | |
import logging | |
import mimetypes | |
import os | |
import platform | |
import re | |
import shutil | |
import sys | |
from pip._vendor import requests, six, urllib3 | |
from pip._vendor.cachecontrol import CacheControlAdapter | |
from pip._vendor.cachecontrol.caches import FileCache | |
from pip._vendor.lockfile import LockError | |
from pip._vendor.requests.adapters import BaseAdapter, HTTPAdapter | |
from pip._vendor.requests.auth import AuthBase, HTTPBasicAuth | |
from pip._vendor.requests.models import CONTENT_CHUNK_SIZE, Response | |
from pip._vendor.requests.structures import CaseInsensitiveDict | |
from pip._vendor.requests.utils import get_netrc_auth | |
# NOTE: XMLRPC Client is not annotated in typeshed as on 2017-07-17, which is | |
# why we ignore the type on this import | |
from pip._vendor.six.moves import xmlrpc_client # type: ignore | |
from pip._vendor.six.moves.urllib import parse as urllib_parse | |
from pip._vendor.six.moves.urllib import request as urllib_request | |
from pip._vendor.six.moves.urllib.parse import unquote as urllib_unquote | |
from pip._vendor.urllib3.util import IS_PYOPENSSL | |
import pip | |
from pip._internal.compat import WINDOWS | |
from pip._internal.exceptions import HashMismatch, InstallationError | |
from pip._internal.locations import write_delete_marker_file | |
from pip._internal.models import PyPI | |
from pip._internal.utils.encoding import auto_decode | |
from pip._internal.utils.filesystem import check_path_owner | |
from pip._internal.utils.glibc import libc_ver | |
from pip._internal.utils.logging import indent_log | |
from pip._internal.utils.misc import ( | |
ARCHIVE_EXTENSIONS, ask_path_exists, backup_dir, call_subprocess, consume, | |
display_path, format_size, get_installed_version, rmtree, splitext, | |
unpack_file, | |
) | |
from pip._internal.utils.setuptools_build import SETUPTOOLS_SHIM | |
from pip._internal.utils.temp_dir import TempDirectory | |
from pip._internal.utils.ui import DownloadProgressProvider | |
from pip._internal.vcs import vcs | |
try: | |
import ssl # noqa | |
except ImportError: | |
ssl = None | |
HAS_TLS = (ssl is not None) or IS_PYOPENSSL | |
__all__ = ['get_file_content', | |
'is_url', 'url_to_path', 'path_to_url', | |
'is_archive_file', 'unpack_vcs_link', | |
'unpack_file_url', 'is_vcs_url', 'is_file_url', | |
'unpack_http_url', 'unpack_url'] | |
logger = logging.getLogger(__name__) | |
def user_agent():
    """
    Build the User-Agent string pip sends with its HTTP requests.

    The string has the form ``pip/<version> <json>`` where the JSON blob
    records the Python implementation and version, OS/distro details, CPU
    architecture, OpenSSL version (when TLS is available) and the installed
    setuptools version (when detectable).
    """
    impl_name = platform.python_implementation()
    data = {
        "installer": {"name": "pip", "version": pip.__version__},
        "python": platform.python_version(),
        "implementation": {"name": impl_name},
    }

    # Record an implementation-specific version string.
    if impl_name == 'PyPy':
        # Drop the release-level/serial fields for final releases.
        if sys.pypy_version_info.releaselevel == 'final':
            pypy_version_info = sys.pypy_version_info[:3]
        else:
            pypy_version_info = sys.pypy_version_info
        data["implementation"]["version"] = ".".join(
            str(part) for part in pypy_version_info
        )
    elif impl_name in ('CPython', 'Jython', 'IronPython'):
        # For Jython/IronPython this is a complete guess.
        data["implementation"]["version"] = platform.python_version()

    if sys.platform.startswith("linux"):
        from pip._vendor import distro
        distro_infos = {
            key: value
            for key, value in zip(["name", "version", "id"],
                                  distro.linux_distribution())
            if value
        }
        libc = {
            key: value
            for key, value in zip(["lib", "version"], libc_ver())
            if value
        }
        if libc:
            distro_infos["libc"] = libc
        if distro_infos:
            data["distro"] = distro_infos

    if sys.platform.startswith("darwin") and platform.mac_ver()[0]:
        data["distro"] = {"name": "macOS", "version": platform.mac_ver()[0]}

    if platform.system():
        data.setdefault("system", {})["name"] = platform.system()
    if platform.release():
        data.setdefault("system", {})["release"] = platform.release()
    if platform.machine():
        data["cpu"] = platform.machine()

    if HAS_TLS:
        data["openssl_version"] = ssl.OPENSSL_VERSION

    setuptools_version = get_installed_version("setuptools")
    if setuptools_version is not None:
        data["setuptools_version"] = setuptools_version

    return "{data[installer][name]}/{data[installer][version]} {json}".format(
        data=data,
        json=json.dumps(data, separators=(",", ":"), sort_keys=True),
    )
class MultiDomainBasicAuth(AuthBase):
    """Requests auth helper that tracks basic-auth credentials per netloc.

    Credentials are taken, in order of preference, from previously stored
    values, from credentials embedded in the request URL, or from netrc.
    On a 401 response the user is (optionally) prompted and the request is
    retried once with the newly supplied credentials.
    """

    def __init__(self, prompting=True):
        # prompting: when False, handle_401 never asks the user and simply
        # passes the 401 response through.
        self.prompting = prompting
        # Maps netloc (without userinfo) -> (username, password).
        self.passwords = {}

    def __call__(self, req):
        parsed = urllib_parse.urlparse(req.url)

        # Get the netloc without any embedded credentials
        netloc = parsed.netloc.rsplit("@", 1)[-1]

        # Set the url of the request to the url without any credentials
        req.url = urllib_parse.urlunparse(parsed[:1] + (netloc,) + parsed[2:])

        # Use any stored credentials that we have for this netloc
        username, password = self.passwords.get(netloc, (None, None))

        # Extract credentials embedded in the url if we have none stored
        if username is None:
            username, password = self.parse_credentials(parsed.netloc)

        # Get creds from netrc if we still don't have them
        if username is None and password is None:
            netrc_auth = get_netrc_auth(req.url)
            username, password = netrc_auth if netrc_auth else (None, None)

        if username or password:
            # Store the username and password
            self.passwords[netloc] = (username, password)

            # Send the basic auth with this request
            req = HTTPBasicAuth(username or "", password or "")(req)

        # Attach a hook to handle 401 responses
        req.register_hook("response", self.handle_401)

        return req

    def handle_401(self, resp, **kwargs):
        # We only care about 401 responses, anything else we want to just
        # pass through the actual response
        if resp.status_code != 401:
            return resp

        # We are not able to prompt the user so simply return the response
        if not self.prompting:
            return resp

        parsed = urllib_parse.urlparse(resp.url)

        # Prompt the user for a new username and password
        username = six.moves.input("User for %s: " % parsed.netloc)
        password = getpass.getpass("Password: ")

        # Store the new username and password to use for future requests
        if username or password:
            self.passwords[parsed.netloc] = (username, password)

        # Consume content and release the original connection to allow our new
        # request to reuse the same one.
        resp.content
        resp.raw.release_conn()

        # Add our new username and password to the request
        req = HTTPBasicAuth(username or "", password or "")(resp.request)

        # Send our new request
        new_resp = resp.connection.send(req, **kwargs)
        new_resp.history.append(resp)

        return new_resp

    def parse_credentials(self, netloc):
        """Extract ``user:pass@`` userinfo from ``netloc``.

        Returns an unquoted ``(username, password)`` tuple; each element is
        None when absent.
        """
        if "@" in netloc:
            userinfo = netloc.rsplit("@", 1)[0]
            if ":" in userinfo:
                user, pwd = userinfo.split(":", 1)
                return (urllib_unquote(user), urllib_unquote(pwd))
            return urllib_unquote(userinfo), None
        return None, None
class LocalFSAdapter(BaseAdapter):
    """Transport adapter that serves ``file://`` URLs straight from disk."""

    def send(self, request, stream=None, timeout=None, verify=None, cert=None,
             proxies=None):
        """Build a Response object for the local file named by the URL."""
        pathname = url_to_path(request.url)

        resp = Response()
        resp.url = request.url

        try:
            stats = os.stat(pathname)
        except OSError as exc:
            # A missing or unreadable file becomes a 404 whose raw body is
            # the original exception.
            resp.status_code = 404
            resp.raw = exc
        else:
            resp.status_code = 200
            modified = email.utils.formatdate(stats.st_mtime, usegmt=True)
            content_type = mimetypes.guess_type(pathname)[0] or "text/plain"
            resp.headers = CaseInsensitiveDict({
                "Content-Type": content_type,
                "Content-Length": stats.st_size,
                "Last-Modified": modified,
            })
            # Closing the response closes the underlying file handle.
            resp.raw = open(pathname, "rb")
            resp.close = resp.raw.close

        return resp

    def close(self):
        """No persistent resources to release for local file access."""
        pass
class SafeFileCache(FileCache):
    """
    A file based cache which is safe to use even when the target directory may
    not be accessible or writable.

    Locking and filesystem errors during ``get``/``set``/``delete`` are
    silently swallowed so that a broken cache never breaks the request
    itself; when the cache directory is not owned by the current user the
    cache is disabled entirely.
    """

    # Errors that indicate the cache is unusable; operations raising one of
    # these silently degrade to no-ops.
    _SUPPRESSED_ERRORS = (LockError, OSError, IOError)

    def __init__(self, *args, **kwargs):
        super(SafeFileCache, self).__init__(*args, **kwargs)

        # Check to ensure that the directory containing our cache directory
        # is owned by the user current executing pip. If it does not exist
        # we will check the parent directory until we find one that does exist.
        # If it is not owned by the user executing pip then we will disable
        # the cache and log a warning.
        if not check_path_owner(self.directory):
            logger.warning(
                "The directory '%s' or its parent directory is not owned by "
                "the current user and the cache has been disabled. Please "
                "check the permissions and owner of that directory. If "
                "executing pip with sudo, you may want sudo's -H flag.",
                self.directory,
            )

            # Set our directory to None to disable the Cache
            self.directory = None

    def _suppressed(self, method, *args, **kwargs):
        """Invoke ``method`` unless the cache is disabled, swallowing errors.

        Returns the method's result, or None when the cache is disabled or
        the operation failed with a locking/filesystem error. Factored out
        of get/set/delete, which previously triplicated this logic.
        """
        # If we don't have a directory, then the cache should be a no-op.
        if self.directory is None:
            return None
        try:
            return method(*args, **kwargs)
        except self._SUPPRESSED_ERRORS:
            # We intentionally silence this error; if we can't access the
            # cache then we can just skip caching and process the request as
            # if caching wasn't enabled.
            return None

    def get(self, *args, **kwargs):
        """Fetch a cached entry; returns None on any cache failure."""
        return self._suppressed(super(SafeFileCache, self).get,
                                *args, **kwargs)

    def set(self, *args, **kwargs):
        """Store a cache entry, ignoring any cache failure."""
        return self._suppressed(super(SafeFileCache, self).set,
                                *args, **kwargs)

    def delete(self, *args, **kwargs):
        """Remove a cache entry, ignoring any cache failure."""
        return self._suppressed(super(SafeFileCache, self).delete,
                                *args, **kwargs)
class InsecureHTTPAdapter(HTTPAdapter):
    """HTTPAdapter variant that never verifies TLS certificates.

    Mounted for plain http:// URLs and for https:// hosts the user has
    explicitly marked as insecure (see PipSession).
    """
    def cert_verify(self, conn, url, verify, cert):
        # Force the connection to skip certificate checks regardless of the
        # verify/cert options the session passes in.
        conn.cert_reqs = 'CERT_NONE'
        conn.ca_certs = None
class PipSession(requests.Session):
    """A requests.Session configured for pip's HTTP traffic.

    Adds pip's User-Agent header, multi-domain basic authentication,
    retry behaviour, optional response caching for securely fetched
    origins, ``file://`` URL support, and non-validating adapters for
    hosts the user declared insecure.
    """

    # Default timeout (seconds) applied to every request unless the caller
    # passes an explicit one; None means no timeout.
    timeout = None

    def __init__(self, *args, **kwargs):
        # :param retries: total retry count for transient failures.
        # :param cache: directory for the HTTP cache, or None to disable.
        # :param insecure_hosts: https:// hosts to fetch without TLS checks.
        retries = kwargs.pop("retries", 0)
        cache = kwargs.pop("cache", None)
        insecure_hosts = kwargs.pop("insecure_hosts", [])

        super(PipSession, self).__init__(*args, **kwargs)

        # Attach our User Agent to the request
        self.headers["User-Agent"] = user_agent()

        # Attach our Authentication handler to the session
        self.auth = MultiDomainBasicAuth()

        # Create our urllib3.Retry instance which will allow us to customize
        # how we handle retries.
        retries = urllib3.Retry(
            # Set the total number of retries that a particular request can
            # have.
            total=retries,

            # A 503 error from PyPI typically means that the Fastly -> Origin
            # connection got interrupted in some way. A 503 error in general
            # is typically considered a transient error so we'll go ahead and
            # retry it.
            # A 500 may indicate transient error in Amazon S3
            # A 520 or 527 - may indicate transient error in CloudFlare
            status_forcelist=[500, 503, 520, 527],

            # Add a small amount of back off between failed requests in
            # order to prevent hammering the service.
            backoff_factor=0.25,
        )

        # We want to _only_ cache responses on securely fetched origins. We do
        # this because we can't validate the response of an insecurely fetched
        # origin, and we don't want someone to be able to poison the cache and
        # require manual eviction from the cache to fix it.
        if cache:
            secure_adapter = CacheControlAdapter(
                cache=SafeFileCache(cache, use_dir_lock=True),
                max_retries=retries,
            )
        else:
            secure_adapter = HTTPAdapter(max_retries=retries)

        # Our Insecure HTTPAdapter disables HTTPS validation. It does not
        # support caching (see above) so we'll use it for all http:// URLs as
        # well as any https:// host that we've marked as ignoring TLS errors
        # for.
        insecure_adapter = InsecureHTTPAdapter(max_retries=retries)

        self.mount("https://", secure_adapter)
        self.mount("http://", insecure_adapter)

        # Enable file:// urls
        self.mount("file://", LocalFSAdapter())

        # We want to use a non-validating adapter for any requests which are
        # deemed insecure.
        for host in insecure_hosts:
            self.mount("https://{}/".format(host), insecure_adapter)

    def request(self, method, url, *args, **kwargs):
        # Allow setting a default timeout on a session
        kwargs.setdefault("timeout", self.timeout)

        # Dispatch the actual request
        return super(PipSession, self).request(method, url, *args, **kwargs)
def get_file_content(url, comes_from=None, session=None):
    """Gets the content of a file; it may be a filename, file: URL, or
    http: URL.  Returns (location, content).  Content is unicode.

    :param url: File path or url.
    :param comes_from: Origin description of requirements.
    :param session: Instance of pip.download.PipSession.
    :raises TypeError: if no session is provided.
    :raises InstallationError: for local-file references from remote
        requirements files, or when the file cannot be opened.
    """
    if session is None:
        raise TypeError(
            "get_file_content() missing 1 required keyword argument: 'session'"
        )

    match = _scheme_re.search(url)
    if match:
        scheme = match.group(1).lower()
        # A remote requirements file must not pull in local files.
        if (scheme == 'file' and comes_from and
                comes_from.startswith('http')):
            raise InstallationError(
                'Requirements file %s references URL %s, which is local'
                % (comes_from, url))
        if scheme == 'file':
            # Strip the scheme and normalise separators to forward slashes.
            path = url.split(':', 1)[1]
            path = path.replace('\\', '/')
            # Handle legacy "file:///c|/path" drive-letter notation where a
            # pipe stands in for the drive colon.
            match = _url_slash_drive_re.match(path)
            if match:
                path = match.group(1) + ':' + path.split('|', 1)[1]
            path = urllib_parse.unquote(path)
            # Collapse any run of leading slashes down to a single one.
            if path.startswith('/'):
                path = '/' + path.lstrip('/')
            url = path
        else:
            # FIXME: catch some errors
            resp = session.get(url)
            resp.raise_for_status()
            return resp.url, resp.text
    # Plain filename (or a file: URL reduced to a path above).
    try:
        with open(url, 'rb') as f:
            content = auto_decode(f.read())
    except IOError as exc:
        raise InstallationError(
            'Could not open requirements file: %s' % str(exc)
        )
    return url, content
# Matches an explicit http/https/file scheme prefix (case-insensitive).
_scheme_re = re.compile(r'^(http|https|file):', re.I)
# Matches legacy drive-letter notation ("/c|/path") where a pipe stands in
# for the colon after the drive letter (case-insensitive).
_url_slash_drive_re = re.compile(r'/*([a-z])\|', re.I)
def is_url(name):
    """Return True when ``name`` starts with a recognised URL scheme."""
    if ':' not in name:
        return False
    scheme = name.partition(':')[0].lower()
    known_schemes = ['http', 'https', 'file', 'ftp'] + vcs.all_schemes
    return scheme in known_schemes
def url_to_path(url):
    """
    Convert a file: URL to a local filesystem path.
    """
    assert url.startswith('file:'), (
        "You can only turn file: urls into filenames (not %r)" % url)

    _, netloc, path, _, _ = urllib_parse.urlsplit(url)

    # A non-empty netloc indicates a UNC path; re-attach the share prefix.
    if netloc:
        netloc = '\\\\' + netloc

    return urllib_request.url2pathname(netloc + path)
def path_to_url(path):
    """
    Convert a local path to a file: URL.  The path is made absolute first,
    and its components are percent-quoted.
    """
    absolute = os.path.normpath(os.path.abspath(path))
    return urllib_parse.urljoin('file:', urllib_request.pathname2url(absolute))
def is_archive_file(name):
    """Return True when ``name`` carries a known archive file extension."""
    extension = splitext(name)[1].lower()
    return extension in ARCHIVE_EXTENSIONS
def unpack_vcs_link(link, location):
    # Check out / export the VCS requirement behind ``link`` into
    # ``location`` using whichever registered backend claims its scheme.
    vcs_backend = _get_used_vcs_backend(link)
    vcs_backend.unpack(location)
def _get_used_vcs_backend(link):
    """Return a VCS backend instance for ``link``, or None if no registered
    backend handles the link's scheme."""
    for backend in vcs.backends:
        if link.scheme in backend.schemes:
            return backend(link.url)
    return None
def is_vcs_url(link):
    # True when some registered VCS backend handles the link's scheme.
    return bool(_get_used_vcs_backend(link))
def is_file_url(link):
    # True for links using the file: scheme (case-insensitive).
    return link.url.lower().startswith('file:')
def is_dir_url(link):
    """Return whether a file:// Link points to a directory.

    ``link`` must not have any other scheme but file://. Call is_file_url()
    first.
    """
    # The URL fragment (e.g. #egg=...) is dropped before path conversion.
    link_path = url_to_path(link.url_without_fragment)
    return os.path.isdir(link_path)
def _progress_indicator(iterable, *args, **kwargs):
    # No-op progress indicator used when progress display is disabled; the
    # extra arguments are accepted so it can be called the same way as
    # DownloadProgressProvider's iterator (see _download_url).
    return iterable
def _download_url(resp, link, content_file, hashes, progress_bar):
    """Stream ``resp``'s body into ``content_file``, optionally verifying it.

    :param resp: the (possibly cached) HTTP response to read from.
    :param link: the Link being downloaded, used for logging decisions.
    :param content_file: open binary file object to write chunks into.
    :param hashes: a Hashes object; when truthy, the streamed chunks are
        checked against it (HashMismatch is raised on failure).
    :param progress_bar: progress-bar style name forwarded to
        DownloadProgressProvider.
    """
    try:
        total_length = int(resp.headers['content-length'])
    except (ValueError, KeyError, TypeError):
        total_length = 0

    # Show progress only for non-cached responses at INFO verbosity or
    # higher, and only when the download is large (>40kB) or of unknown
    # length.
    cached_resp = getattr(resp, "from_cache", False)
    if logger.getEffectiveLevel() > logging.INFO:
        show_progress = False
    elif cached_resp:
        show_progress = False
    elif total_length > (40 * 1000):
        show_progress = True
    elif not total_length:
        show_progress = True
    else:
        show_progress = False

    show_url = link.show_url

    def resp_read(chunk_size):
        # Yield raw body chunks from the response.
        try:
            # Special case for urllib3.
            for chunk in resp.raw.stream(
                    chunk_size,
                    # We use decode_content=False here because we don't
                    # want urllib3 to mess with the raw bytes we get
                    # from the server. If we decompress inside of
                    # urllib3 then we cannot verify the checksum
                    # because the checksum will be of the compressed
                    # file. This breakage will only occur if the
                    # server adds a Content-Encoding header, which
                    # depends on how the server was configured:
                    # - Some servers will notice that the file isn't a
                    #   compressible file and will leave the file alone
                    #   and with an empty Content-Encoding
                    # - Some servers will notice that the file is
                    #   already compressed and will leave the file
                    #   alone and will add a Content-Encoding: gzip
                    #   header
                    # - Some servers won't notice anything at all and
                    #   will take a file that's already been compressed
                    #   and compress it again and set the
                    #   Content-Encoding: gzip header
                    #
                    # By setting this not to decode automatically we
                    # hope to eliminate problems with the second case.
                    decode_content=False):
                yield chunk
        except AttributeError:
            # Standard file-like object.
            while True:
                chunk = resp.raw.read(chunk_size)
                if not chunk:
                    break
                yield chunk

    def written_chunks(chunks):
        # Write each chunk to content_file as it streams through, so hash
        # checking and writing happen in a single pass.
        for chunk in chunks:
            content_file.write(chunk)
            yield chunk

    progress_indicator = _progress_indicator

    # Hide the long PyPI URL behind the link's short display form.
    if link.netloc == PyPI.netloc:
        url = show_url
    else:
        url = link.url_without_fragment

    if show_progress:  # We don't show progress on cached responses
        progress_indicator = DownloadProgressProvider(progress_bar,
                                                      max=total_length)
        if total_length:
            logger.info("Downloading %s (%s)", url, format_size(total_length))
        else:
            logger.info("Downloading %s", url)
    elif cached_resp:
        logger.info("Using cached %s", url)
    else:
        logger.info("Downloading %s", url)

    logger.debug('Downloading from URL %s', link)

    downloaded_chunks = written_chunks(
        progress_indicator(
            resp_read(CONTENT_CHUNK_SIZE),
            CONTENT_CHUNK_SIZE
        )
    )
    # Consuming the generator drives the actual download; hash checking
    # happens inline when hashes were requested.
    if hashes:
        hashes.check_against_chunks(downloaded_chunks)
    else:
        consume(downloaded_chunks)
def _copy_file(filename, location, link):
    """Copy ``filename`` into ``location`` under the link's filename.

    If the destination already exists, prompt the user to (i)gnore the
    existing file, (w)ipe it, (b)ack it up, or (a)bort the whole run.

    :param filename: path of the freshly downloaded file.
    :param location: destination directory.
    :param link: the Link the file came from; supplies the target filename.
    """
    copy = True
    download_location = os.path.join(location, link.filename)
    if os.path.exists(download_location):
        # Fix: the prompt previously read "(a)abort" (doubled 'a').
        response = ask_path_exists(
            'The file %s exists. (i)gnore, (w)ipe, (b)ackup, (a)bort' %
            display_path(download_location), ('i', 'w', 'b', 'a'))
        if response == 'i':
            copy = False
        elif response == 'w':
            logger.warning('Deleting %s', display_path(download_location))
            os.remove(download_location)
        elif response == 'b':
            dest_file = backup_dir(download_location)
            logger.warning(
                'Backing up %s to %s',
                display_path(download_location),
                display_path(dest_file),
            )
            shutil.move(download_location, dest_file)
        elif response == 'a':
            # The user chose to abort the entire operation.
            sys.exit(-1)
    if copy:
        shutil.copy(filename, download_location)
        logger.info('Saved %s', display_path(download_location))
def unpack_http_url(link, location, download_dir=None,
                    session=None, hashes=None, progress_bar="on"):
    """Download (or reuse a previously downloaded copy of) an http(s) link
    and unpack the archive into ``location``.

    :param link: the Link to fetch.
    :param location: build directory the archive is unpacked into.
    :param download_dir: optional directory checked for an existing valid
        download and used to keep a copy of the archive.
    :param session: required PipSession used for the download.
    :param hashes: optional Hashes object the file must match.
    :param progress_bar: progress-bar style forwarded to the downloader.
    :raises TypeError: if no session is provided.
    """
    if session is None:
        raise TypeError(
            "unpack_http_url() missing 1 required keyword argument: 'session'"
        )

    with TempDirectory(kind="unpack") as temp_dir:
        # If a download dir is specified, is the file already downloaded there?
        already_downloaded_path = None
        if download_dir:
            already_downloaded_path = _check_download_dir(link,
                                                          download_dir,
                                                          hashes)

        if already_downloaded_path:
            from_path = already_downloaded_path
            content_type = mimetypes.guess_type(from_path)[0]
        else:
            # let's download to a tmp dir
            from_path, content_type = _download_http_url(link,
                                                         session,
                                                         temp_dir.path,
                                                         hashes,
                                                         progress_bar)

        # unpack the archive to the build dir location. even when only
        # downloading archives, they have to be unpacked to parse dependencies
        unpack_file(from_path, location, content_type, link)

        # a download dir is specified; let's copy the archive there
        if download_dir and not already_downloaded_path:
            _copy_file(from_path, download_dir, link)

        # Remove the temporary copy (a pre-existing download is kept).
        if not already_downloaded_path:
            os.unlink(from_path)
def unpack_file_url(link, location, download_dir=None, hashes=None):
    """Unpack link into location.

    If download_dir is provided and link points to a file, make a copy
    of the link file inside download_dir.

    :param link: a file:// Link (directory or archive file).
    :param location: destination directory for the unpacked contents.
    :param download_dir: optional directory to keep a copy of an archive in.
    :param hashes: optional Hashes object the file must match.
    """
    link_path = url_to_path(link.url_without_fragment)

    # If it's a url to a local directory
    if is_dir_url(link):
        if os.path.isdir(location):
            rmtree(location)
        shutil.copytree(link_path, location, symlinks=True)
        if download_dir:
            logger.info('Link is a directory, ignoring download_dir')
        return

    # If --require-hashes is off, `hashes` is either empty, the
    # link's embedded hash, or MissingHashes; it is required to
    # match. If --require-hashes is on, we are satisfied by any
    # hash in `hashes` matching: a URL-based or an option-based
    # one; no internet-sourced hash will be in `hashes`.
    if hashes:
        hashes.check_against_path(link_path)

    # If a download dir is specified, is the file already there and valid?
    already_downloaded_path = None
    if download_dir:
        already_downloaded_path = _check_download_dir(link,
                                                      download_dir,
                                                      hashes)

    if already_downloaded_path:
        from_path = already_downloaded_path
    else:
        from_path = link_path

    content_type = mimetypes.guess_type(from_path)[0]

    # unpack the archive to the build dir location. even when only downloading
    # archives, they have to be unpacked to parse dependencies
    unpack_file(from_path, location, content_type, link)

    # a download dir is specified and not already downloaded
    if download_dir and not already_downloaded_path:
        _copy_file(from_path, download_dir, link)
def _copy_dist_from_dir(link_path, location):
    """Copy distribution files in `link_path` to `location`.

    Invoked when user requests to install a local directory. E.g.:

        pip install .
        pip install ~/dev/git-repos/python-prompt-toolkit
    """

    # Note: despite the comment below mentioning copytree, this function
    # builds an sdist via setup.py and unpacks it into `location`.
    # See https://github.com/pypa/pip/issues/2195

    if os.path.isdir(location):
        rmtree(location)

    # build an sdist
    setup_py = 'setup.py'
    sdist_args = [sys.executable]
    sdist_args.append('-c')
    # SETUPTOOLS_SHIM wraps setup.py so it runs under setuptools even if it
    # only imports distutils.
    sdist_args.append(SETUPTOOLS_SHIM % setup_py)
    sdist_args.append('sdist')
    sdist_args += ['--dist-dir', location]
    logger.info('Running setup.py sdist for %s', link_path)

    with indent_log():
        call_subprocess(sdist_args, cwd=link_path, show_stdout=False)

    # unpack sdist into `location`; the sdist is the only entry written to
    # the freshly-emptied dist dir.
    sdist = os.path.join(location, os.listdir(location)[0])
    logger.info('Unpacking sdist %s into %s', sdist, location)
    unpack_file(sdist, location, content_type=None, link=None)
class PipXmlrpcTransport(xmlrpc_client.Transport):
    """Provide a `xmlrpclib.Transport` implementation via a `PipSession`
    object.
    """

    def __init__(self, index_url, session, use_datetime=False):
        xmlrpc_client.Transport.__init__(self, use_datetime)
        index_parts = urllib_parse.urlparse(index_url)
        # Only the scheme of the index URL is kept; host and handler are
        # supplied per-request by the xmlrpc machinery.
        self._scheme = index_parts.scheme
        self._session = session

    def request(self, host, handler, request_body, verbose=False):
        """POST the XML-RPC payload through the PipSession and parse the
        streamed response body.

        :raises requests.HTTPError: re-raised after logging, for non-2xx
            responses.
        """
        parts = (self._scheme, host, handler, None, None, None)
        url = urllib_parse.urlunparse(parts)
        try:
            headers = {'Content-Type': 'text/xml'}
            response = self._session.post(url, data=request_body,
                                          headers=headers, stream=True)
            response.raise_for_status()
            self.verbose = verbose
            return self.parse_response(response.raw)
        except requests.HTTPError as exc:
            logger.critical(
                "HTTP error %s while getting %s",
                exc.response.status_code, url,
            )
            raise
def unpack_url(link, location, download_dir=None,
               only_download=False, session=None, hashes=None,
               progress_bar="on"):
    """Unpack link.
       If link is a VCS link:
         if only_download, export into download_dir and ignore location
          else unpack into location
       for other types of link:
         - unpack into location
         - if download_dir, copy the file into download_dir
         - if only_download, mark location for deletion

    :param link: the Link to unpack.
    :param location: directory to unpack into.
    :param download_dir: optional directory to keep a copy of archives in.
    :param only_download: when True, mark ``location`` for later deletion.
    :param session: PipSession for http(s) links; a default one is created
        if needed.
    :param hashes: A Hashes object, one of whose embedded hashes must match,
        or HashMismatch will be raised. If the Hashes is empty, no matches are
        required, and unhashable types of requirements (like VCS ones, which
        would ordinarily raise HashUnsupported) are allowed.
    :param progress_bar: progress-bar style forwarded to the downloader.
    """
    # non-editable vcs urls
    if is_vcs_url(link):
        unpack_vcs_link(link, location)

    # file urls
    elif is_file_url(link):
        unpack_file_url(link, location, download_dir, hashes=hashes)

    # http urls
    else:
        if session is None:
            session = PipSession()

        unpack_http_url(
            link,
            location,
            download_dir,
            session,
            hashes=hashes,
            progress_bar=progress_bar
        )
    if only_download:
        write_delete_marker_file(location)
def _download_http_url(link, session, temp_dir, hashes, progress_bar):
    """Download link url into temp_dir using provided session.

    :param link: the Link to download (any fragment is stripped first).
    :param session: PipSession used for the request.
    :param temp_dir: directory the file is written into.
    :param hashes: optional Hashes object the download must match.
    :param progress_bar: progress-bar style forwarded to _download_url.
    :returns: ``(file_path, content_type)`` for the downloaded file.
    :raises requests.HTTPError: re-raised after logging for non-2xx
        responses.
    """
    target_url = link.url.split('#', 1)[0]
    try:
        resp = session.get(
            target_url,
            # We use Accept-Encoding: identity here because requests
            # defaults to accepting compressed responses. This breaks in
            # a variety of ways depending on how the server is configured.
            # - Some servers will notice that the file isn't a compressible
            #   file and will leave the file alone and with an empty
            #   Content-Encoding
            # - Some servers will notice that the file is already
            #   compressed and will leave the file alone and will add a
            #   Content-Encoding: gzip header
            # - Some servers won't notice anything at all and will take
            #   a file that's already been compressed and compress it again
            #   and set the Content-Encoding: gzip header
            # By setting this to request only the identity encoding We're
            # hoping to eliminate the third case. Hopefully there does not
            # exist a server which when given a file will notice it is
            # already compressed and that you're not asking for a
            # compressed file and will then decompress it before sending
            # because if that's the case I don't think it'll ever be
            # possible to make this work.
            headers={"Accept-Encoding": "identity"},
            stream=True,
        )
        resp.raise_for_status()
    except requests.HTTPError as exc:
        logger.critical(
            "HTTP error %s while getting %s", exc.response.status_code, link,
        )
        raise

    content_type = resp.headers.get('content-type', '')
    filename = link.filename  # fallback
    # Have a look at the Content-Disposition header for a better guess
    content_disposition = resp.headers.get('content-disposition')
    if content_disposition:
        # The media type itself is unused; discard it instead of binding it
        # to a local named ``type`` that shadows the builtin.
        _, params = cgi.parse_header(content_disposition)
        # We use ``or`` here because we don't want to use an "empty" value
        # from the filename param.
        filename = params.get('filename') or filename
    # Fill in a missing extension from the Content-Type, or failing that,
    # from the final (possibly redirected) URL.
    ext = splitext(filename)[1]
    if not ext:
        ext = mimetypes.guess_extension(content_type)
        if ext:
            filename += ext
    if not ext and link.url != resp.url:
        ext = os.path.splitext(resp.url)[1]
        if ext:
            filename += ext
    file_path = os.path.join(temp_dir, filename)
    with open(file_path, 'wb') as content_file:
        _download_url(resp, link, content_file, hashes, progress_bar)
    return file_path, content_type
def _check_download_dir(link, download_dir, hashes): | |
""" Check download_dir for previously downloaded file with correct hash | |
If a correct file is found return its path else None | |
""" | |
download_path = os.path.join(download_dir, link.filename) | |
if os.path.exists(download_path): | |
# If already downloaded, does its hash match? | |
logger.info('File was already downloaded %s', download_path) | |
if hashes: | |
try: | |
hashes.check_against_path(download_path) | |
except HashMismatch: | |
logger.warning( | |
'Previously-downloaded file %s has bad hash. ' | |
'Re-downloading.', | |
download_path | |
) | |
os.unlink(download_path) | |
return None | |
return download_path | |
return None |
"""Exceptions used throughout package""" | |
from __future__ import absolute_import | |
from itertools import chain, groupby, repeat | |
from pip._vendor.six import iteritems | |
class PipError(Exception):
    """Base class for all exceptions raised by pip."""
class ConfigurationError(PipError):
    """Raised for general configuration problems."""
class InstallationError(PipError):
    """Raised for general errors occurring during installation."""
class UninstallationError(PipError):
    """Raised for general errors occurring during uninstallation."""
class DistributionNotFound(InstallationError):
    """Raised when no distribution can be found to satisfy a requirement."""
class RequirementsFileParseError(InstallationError):
    """Raised when a general error occurs parsing a requirements file line."""
class BestVersionAlreadyInstalled(PipError):
    """Raised when the most up-to-date version of a package is already
    installed."""
class BadCommand(PipError):
    """Raised when virtualenv or a needed external command is not found."""
class CommandError(PipError):
    """Raised when there is an error in command-line arguments."""
class PreviousBuildDirError(PipError):
    """Raised when there's a previous conflicting build directory."""
class InvalidWheelFilename(InstallationError):
    """Raised for a wheel file whose name does not follow the wheel
    filename convention."""
class UnsupportedWheel(InstallationError):
    """Raised for a wheel that is not supported on this environment."""
class HashErrors(InstallationError):
    """Multiple HashError instances rolled into one for reporting."""

    def __init__(self):
        # The individual HashError instances collected so far.
        self.errors = []

    def append(self, error):
        """Add a single HashError to the collection."""
        self.errors.append(error)

    def __str__(self):
        """Render all collected errors, grouped under each class's heading.

        Errors are sorted by their ``order`` attribute so report sections
        appear in a deterministic, severity-based order.
        """
        lines = []
        self.errors.sort(key=lambda e: e.order)
        for cls, errors_of_cls in groupby(self.errors, lambda e: e.__class__):
            lines.append(cls.head)
            lines.extend(e.body() for e in errors_of_cls)
        # Previously this returned None (implicitly) when no errors had
        # been collected, making str() raise TypeError; return the joined
        # lines unconditionally ('' when empty).
        return '\n'.join(lines)

    def __nonzero__(self):
        # Python 2 truthiness: non-empty iff at least one error collected.
        return bool(self.errors)

    def __bool__(self):
        # Python 3 truthiness delegates to the Python 2 hook.
        return self.__nonzero__()
class HashError(InstallationError):
    """
    A failure to verify a package against known-good hashes

    :cvar order: An int sorting hash exception classes by difficulty of
        recovery (lower being harder), so the user doesn't bother fretting
        about unpinned packages when he has deeper issues, like VCS
        dependencies, to deal with. Also keeps error reports in a
        deterministic order.
    :cvar head: A section heading for display above potentially many
        exceptions of this kind
    :ivar req: The InstallRequirement that triggered this error. This is
        pasted on after the exception is instantiated, because it's not
        typically available earlier.
    """
    # Assigned externally after instantiation (see :ivar req: above).
    req = None
    # Subclasses override with their report-section heading.
    head = ''

    def body(self):
        """Return a summary of me for display under the heading.

        This default implementation simply prints a description of the
        triggering requirement.

        :param req: The InstallRequirement that provoked this error, with
            populate_link() having already been called
        """
        return '    %s' % self._requirement_name()

    def __str__(self):
        # Heading followed by the per-requirement body.
        return '%s\n%s' % (self.head, self.body())

    def _requirement_name(self):
        """Return a description of the requirement that triggered me.

        This default implementation returns long description of the req, with
        line numbers
        """
        return str(self.req) if self.req else 'unknown package'
class VcsHashUnsupported(HashError):
    """A hash was provided for a version-control-system-based requirement, but
    we don't have a method for hashing those."""
    # Sorts first among hash-error sections (hardest to recover from).
    order = 0
    head = ("Can't verify hashes for these requirements because we don't "
            "have a way to hash version control repositories:")
class DirectoryUrlHashUnsupported(HashError):
    """A hash was provided for a file:// requirement that points to a
    directory, and we don't have a method for hashing directories."""
    order = 1
    head = ("Can't verify hashes for these file:// requirements because they "
            "point to directories:")
class HashMissing(HashError):
    """A hash was needed for a requirement but is absent."""

    order = 2
    head = ('Hashes are required in --require-hashes mode, but they are '
            'missing from some requirements. Here is a list of those '
            'requirements along with the hashes their downloaded archives '
            'actually had. Add lines like these to your requirements files to '
            'prevent tampering. (If you did not enable --require-hashes '
            'manually, note that it turns on automatically when any package '
            'has a hash.)')

    def __init__(self, gotten_hash):
        """
        :param gotten_hash: The hash of the (possibly malicious) archive we
            just downloaded
        """
        self.gotten_hash = gotten_hash

    def body(self):
        """Return the requirement plus a ready-to-paste --hash option showing
        the digest the downloaded archive actually had."""
        # Dodge circular import.
        from pip._internal.utils.hashes import FAVORITE_HASH

        package = None
        if self.req:
            # In the case of URL-based requirements, display the original URL
            # seen in the requirements file rather than the package name,
            # so the output can be directly copied into the requirements file.
            package = (self.req.original_link if self.req.original_link
                       # In case someone feeds something downright stupid
                       # to InstallRequirement's constructor.
                       else getattr(self.req, 'req', None))
        return ' %s --hash=%s:%s' % (package or 'unknown package',
                                     FAVORITE_HASH,
                                     self.gotten_hash)
class HashUnpinned(HashError):
    """A requirement had a hash specified but was not pinned to a specific
    version."""

    order = 3
    head = ('In --require-hashes mode, all requirements must have their '
            'versions pinned with ==. These do not:')
class HashMismatch(HashError):
    """
    Distribution file hash values don't match.

    :ivar package_name: The name of the package that triggered the hash
        mismatch. Feel free to write to this after the exception is raised to
        improve its error message.
    """
    order = 4
    head = ('THESE PACKAGES DO NOT MATCH THE HASHES FROM THE REQUIREMENTS '
            'FILE. If you have updated the package versions, please update '
            'the hashes. Otherwise, examine the package contents carefully; '
            'someone may have tampered with them.')

    def __init__(self, allowed, gots):
        """
        :param allowed: A dict of algorithm names pointing to lists of allowed
            hex digests
        :param gots: A dict of algorithm names pointing to hashes we
            actually got from the files under suspicion
        """
        self.allowed = allowed
        self.gots = gots

    def body(self):
        """Return the requirement name followed by an expected-vs-got hash
        comparison table."""
        return ' %s:\n%s' % (self._requirement_name(),
                             self._hash_comparison())

    def _hash_comparison(self):
        """
        Return a comparison of actual and expected hash values.

        Example::

            Expected sha256 abcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcde
            or 123451234512345123451234512345123451234512345
            Got bcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdef
        """
        def hash_then_or(hash_name):
            # For now, all the decent hashes have 6-char names, so we can get
            # away with hard-coding space literals.
            return chain([hash_name], repeat(' or'))

        lines = []
        for hash_name, expecteds in iteritems(self.allowed):
            # `prefix` yields the algorithm name first, then ' or' forever,
            # so multiple allowed digests line up under one heading.
            prefix = hash_then_or(hash_name)
            lines.extend((' Expected %s %s' % (next(prefix), e))
                         for e in expecteds)
            lines.append(' Got %s\n' %
                         self.gots[hash_name].hexdigest())
            # NOTE: the original also reassigned `prefix = ' or'` here; that
            # value was never read (the loop re-creates `prefix` each
            # iteration), so the dead store has been removed.
        return '\n'.join(lines)
class UnsupportedPythonVersion(InstallationError):
    """Unsupported python version according to Requires-Python package
    metadata: the running interpreter is rejected by a distribution's
    declared Requires-Python specifier."""
"""Routines related to PyPI, indexes""" | |
from __future__ import absolute_import | |
import cgi | |
import itertools | |
import logging | |
import mimetypes | |
import os | |
import posixpath | |
import re | |
import sys | |
import warnings | |
from collections import namedtuple | |
from pip._vendor import html5lib, requests, six | |
from pip._vendor.distlib.compat import unescape | |
from pip._vendor.packaging import specifiers | |
from pip._vendor.packaging.utils import canonicalize_name | |
from pip._vendor.packaging.version import parse as parse_version | |
from pip._vendor.requests.exceptions import SSLError | |
from pip._vendor.six.moves.urllib import parse as urllib_parse | |
from pip._vendor.six.moves.urllib import request as urllib_request | |
from pip._internal.compat import ipaddress | |
from pip._internal.download import HAS_TLS, is_url, path_to_url, url_to_path | |
from pip._internal.exceptions import ( | |
BestVersionAlreadyInstalled, DistributionNotFound, InvalidWheelFilename, | |
UnsupportedWheel, | |
) | |
from pip._internal.models import PyPI | |
from pip._internal.pep425tags import get_supported | |
from pip._internal.utils.deprecation import RemovedInPip11Warning | |
from pip._internal.utils.logging import indent_log | |
from pip._internal.utils.misc import ( | |
ARCHIVE_EXTENSIONS, SUPPORTED_EXTENSIONS, cached_property, normalize_path, | |
splitext, | |
) | |
from pip._internal.utils.packaging import check_requires_python | |
from pip._internal.wheel import Wheel, wheel_ext | |
# Public API of this module.
__all__ = ['FormatControl', 'fmt_ctl_handle_mutual_exclude', 'PackageFinder']

# Origins that may be used without triggering the "not a trusted or secure
# host" warning. Each entry is (protocol, hostname, port); "*" is a
# wildcard, and a None port means "no port may be specified at all".
SECURE_ORIGINS = [
    # protocol, hostname, port
    # Taken from Chrome's list of secure origins (See: http://bit.ly/1qrySKC)
    ("https", "*", "*"),
    ("*", "localhost", "*"),
    ("*", "127.0.0.0/8", "*"),
    ("*", "::1/128", "*"),
    ("file", "*", None),
    # ssh is always secure.
    ("ssh", "*", "*"),
]

logger = logging.getLogger(__name__)
class InstallationCandidate(object):
    """A single installable option for a project: its name, parsed version,
    and the location it can be obtained from.  Candidates hash and compare
    by the (project, version, location) triple."""

    def __init__(self, project, version, location):
        self.project = project
        self.version = parse_version(version)
        self.location = location
        # Tuple backing __hash__ and every rich comparison below.
        self._key = (self.project, self.version, self.location)

    def __repr__(self):
        return "<InstallationCandidate({!r}, {!r}, {!r})>".format(
            self.project, self.version, self.location,
        )

    def __hash__(self):
        return hash(self._key)

    def __lt__(self, other):
        return self._compare(other, lambda lhs, rhs: lhs < rhs)

    def __le__(self, other):
        return self._compare(other, lambda lhs, rhs: lhs <= rhs)

    def __eq__(self, other):
        return self._compare(other, lambda lhs, rhs: lhs == rhs)

    def __ge__(self, other):
        return self._compare(other, lambda lhs, rhs: lhs >= rhs)

    def __gt__(self, other):
        return self._compare(other, lambda lhs, rhs: lhs > rhs)

    def __ne__(self, other):
        return self._compare(other, lambda lhs, rhs: lhs != rhs)

    def _compare(self, other, method):
        # Only candidates are comparable; let Python's fallback machinery
        # handle everything else.
        if isinstance(other, InstallationCandidate):
            return method(self._key, other._key)
        return NotImplemented
class PackageFinder(object):
    """This finds packages.

    This is meant to match easy_install's technique for looking for
    packages, by reading pages and looking for appropriate links.

    Candidates are gathered from index URLs, find-links locations, and
    (deprecated) dependency links; see find_all_candidates().
    """
def __init__(self, find_links, index_urls, allow_all_prereleases=False,
             trusted_hosts=None, process_dependency_links=False,
             session=None, format_control=None, platform=None,
             versions=None, abi=None, implementation=None):
    """Create a PackageFinder.

    :param find_links: Extra locations (paths or URLs) to search;
        '~'-prefixed entries are expanded when the expansion exists.
    :param index_urls: Base URLs of the package indexes to consult.
    :param allow_all_prereleases: Whether pre-release versions are
        acceptable for every project.
    :param trusted_hosts: Hosts exempted from the insecure-origin warning
        even when reached without HTTPS.
    :param process_dependency_links: Whether add_dependency_links() records
        links (deprecated feature).
    :param session: The session used for all HTTP requests; required.
    :param format_control: A FormatControl object or None. Used to control
        the selection of source packages / binary packages when consulting
        the index and links.
    :param platform: A string or None. If None, searches for packages
        that are supported by the current system. Otherwise, will find
        packages that can be built on the platform passed in. These
        packages will only be downloaded for distribution: they will
        not be built locally.
    :param versions: A list of strings or None. This is passed directly
        to pep425tags.py in the get_supported() method.
    :param abi: A string or None. This is passed directly
        to pep425tags.py in the get_supported() method.
    :param implementation: A string or None. This is passed directly
        to pep425tags.py in the get_supported() method.
    :raises TypeError: when `session` is not supplied.
    """
    if session is None:
        raise TypeError(
            "PackageFinder() missing 1 required keyword argument: "
            "'session'"
        )

    # Build find_links. If an argument starts with ~, it may be
    # a local file relative to a home directory. So try normalizing
    # it and if it exists, use the normalized version.
    # This is deliberately conservative - it might be fine just to
    # blindly normalize anything starting with a ~...
    self.find_links = []
    for link in find_links:
        if link.startswith('~'):
            new_link = normalize_path(link)
            if os.path.exists(new_link):
                link = new_link
        self.find_links.append(link)

    self.index_urls = index_urls
    self.dependency_links = []

    # These are boring links that have already been logged somehow:
    self.logged_links = set()

    self.format_control = format_control or FormatControl(set(), set())

    # Domains that we won't emit warnings for when not using HTTPS
    self.secure_origins = [
        ("*", host, "*")
        for host in (trusted_hosts if trusted_hosts else [])
    ]

    # Do we want to allow _all_ pre-releases?
    self.allow_all_prereleases = allow_all_prereleases

    # Do we process dependency links?
    self.process_dependency_links = process_dependency_links

    # The Session we'll use to make requests
    self.session = session

    # The valid tags to check potential found wheel candidates against
    self.valid_tags = get_supported(
        versions=versions,
        platform=platform,
        abi=abi,
        impl=implementation,
    )

    # If we don't have TLS enabled, then WARN if anyplace we're looking
    # relies on TLS.
    if not HAS_TLS:
        for link in itertools.chain(self.index_urls, self.find_links):
            parsed = urllib_parse.urlparse(link)
            if parsed.scheme == "https":
                logger.warning(
                    "pip is configured with locations that require "
                    "TLS/SSL, however the ssl module in Python is not "
                    "available."
                )
                break
def get_formatted_locations(self): | |
lines = [] | |
if self.index_urls and self.index_urls != [PyPI.simple_url]: | |
lines.append( | |
"Looking in indexes: {}".format(", ".join(self.index_urls)) | |
) | |
if self.find_links: | |
lines.append( | |
"Looking in links: {}".format(", ".join(self.find_links)) | |
) | |
return "\n".join(lines) | |
def add_dependency_links(self, links): | |
# # FIXME: this shouldn't be global list this, it should only | |
# # apply to requirements of the package that specifies the | |
# # dependency_links value | |
# # FIXME: also, we should track comes_from (i.e., use Link) | |
if self.process_dependency_links: | |
warnings.warn( | |
"Dependency Links processing has been deprecated and will be " | |
"removed in a future release.", | |
RemovedInPip11Warning, | |
) | |
self.dependency_links.extend(links) | |
@staticmethod
def _sort_locations(locations, expand_dir=False):
    """
    Sort locations into "files" (archives) and "urls", and return
    a pair of lists (files,urls).

    Local paths and file: URLs are inspected on disk; directories are
    expanded into their entries only when `expand_dir` is true.  Both
    lists contain URL strings.
    """
    files = []
    urls = []

    # puts the url for the given file path into the appropriate list
    def sort_path(path):
        url = path_to_url(path)
        # HTML files are index pages (urls); everything else is an archive.
        if mimetypes.guess_type(url, strict=False)[0] == 'text/html':
            urls.append(url)
        else:
            files.append(url)

    for url in locations:
        is_local_path = os.path.exists(url)
        is_file_url = url.startswith('file:')

        if is_local_path or is_file_url:
            if is_local_path:
                path = url
            else:
                path = url_to_path(url)
            if os.path.isdir(path):
                if expand_dir:
                    path = os.path.realpath(path)
                    for item in os.listdir(path):
                        sort_path(os.path.join(path, item))
                elif is_file_url:
                    # A file: URL to a directory is treated as an index page.
                    urls.append(url)
            elif os.path.isfile(path):
                sort_path(path)
            else:
                logger.warning(
                    "Url '%s' is ignored: it is neither a file "
                    "nor a directory.", url,
                )
        elif is_url(url):
            # Only add url with clear scheme
            urls.append(url)
        else:
            logger.warning(
                "Url '%s' is ignored. It is either a non-existing "
                "path or lacks a specific scheme.", url,
            )

    return files, urls
def _candidate_sort_key(self, candidate):
    """
    Function used to generate link sort key for link tuples.
    The greater the return value, the more preferred it is.
    If not finding wheels, then sorted by version only.
    If finding wheels, then the sort order is by version, then:
      1. existing installs
      2. wheels ordered via Wheel.support_index_min(self.valid_tags)
      3. source archives
    Note: it was considered to embed this logic into the Link
          comparison operators, but then different sdist links
          with the same version, would have to be considered equal

    :raises UnsupportedWheel: when the candidate is a wheel that does not
        support any of self.valid_tags (such wheels have no sort position).
    """
    support_num = len(self.valid_tags)
    build_tag = tuple()
    if candidate.location.is_wheel:
        # can raise InvalidWheelFilename
        wheel = Wheel(candidate.location.filename)
        if not wheel.supported(self.valid_tags):
            raise UnsupportedWheel(
                "%s is not a supported wheel for this platform. It "
                "can't be sorted." % wheel.filename
            )
        # Smaller support index = better match for this interpreter, so
        # negate it to make "better" sort greater.
        pri = -(wheel.support_index_min(self.valid_tags))
        if wheel.build_tag is not None:
            # PEP 427 build tags start with a number; split it off so the
            # numeric part compares numerically.
            match = re.match(r'^(\d+)(.*)$', wheel.build_tag)
            build_tag_groups = match.groups()
            build_tag = (int(build_tag_groups[0]), build_tag_groups[1])
    else:  # sdist
        # Sdists rank below every wheel of the same version.
        pri = -(support_num)
    return (candidate.version, build_tag, pri)
def _validate_secure_origin(self, logger, location):
    """Return True when `location` is a secure/trusted origin, else warn
    and return False (the location will then be skipped)."""
    # Determine if this url used a secure transport mechanism
    parsed = urllib_parse.urlparse(str(location))
    origin = (parsed.scheme, parsed.hostname, parsed.port)

    # The protocol to use to see if the protocol matches.
    # Don't count the repository type as part of the protocol: in
    # cases such as "git+ssh", only use "ssh". (I.e., Only verify against
    # the last scheme.)
    protocol = origin[0].rsplit('+', 1)[-1]

    # Determine if our origin is a secure origin by looking through our
    # hardcoded list of secure origins, as well as any additional ones
    # configured on this PackageFinder instance.
    for secure_origin in (SECURE_ORIGINS + self.secure_origins):
        if protocol != secure_origin[0] and secure_origin[0] != "*":
            continue

        try:
            # We need to do this decode dance to ensure that we have a
            # unicode object, even on Python 2.x.
            addr = ipaddress.ip_address(
                origin[1]
                if (
                    isinstance(origin[1], six.text_type) or
                    origin[1] is None
                )
                else origin[1].decode("utf8")
            )
            network = ipaddress.ip_network(
                secure_origin[1]
                if isinstance(secure_origin[1], six.text_type)
                else secure_origin[1].decode("utf8")
            )
        except ValueError:
            # We don't have both a valid address or a valid network, so
            # we'll check this origin against hostnames.
            if (origin[1] and
                    origin[1].lower() != secure_origin[1].lower() and
                    secure_origin[1] != "*"):
                continue
        else:
            # We have a valid address and network, so see if the address
            # is contained within the network.
            if addr not in network:
                continue

        # Check to see if the port matches
        if (origin[2] != secure_origin[2] and
                secure_origin[2] != "*" and
                secure_origin[2] is not None):
            continue

        # If we've gotten here, then this origin matches the current
        # secure origin and we should return True
        return True

    # If we've gotten to this point, then the origin isn't secure and we
    # will not accept it as a valid location to search. We will however
    # log a warning that we are ignoring it.
    logger.warning(
        "The repository located at %s is not a trusted or secure host and "
        "is being ignored. If this repository is available via HTTPS we "
        "recommend you use HTTPS instead, otherwise you may silence "
        "this warning and allow it anyway with '--trusted-host %s'.",
        parsed.hostname,
        parsed.hostname,
    )
    return False
def _get_index_urls_locations(self, project_name):
    """Return one candidate page URL per configured index for
    `project_name`, with the name canonicalized/quoted and a trailing
    slash guaranteed."""
    project_path = urllib_parse.quote(canonicalize_name(project_name))

    def mkurl_pypi_url(url):
        loc = posixpath.join(url, project_path)
        # For maximum compatibility with easy_install, ensure the path
        # ends in a trailing slash. Although this isn't in the spec
        # (and PyPI can handle it without the slash) some other index
        # implementations might break if they relied on easy_install's
        # behavior.
        if loc.endswith('/'):
            return loc
        return loc + '/'

    return [mkurl_pypi_url(index) for index in self.index_urls]
def find_all_candidates(self, project_name):
    """Find all available InstallationCandidate for project_name

    This checks index_urls, find_links and dependency_links.
    All versions found are returned as an InstallationCandidate list.

    See _link_package_versions for details on which files are accepted
    """
    index_locations = self._get_index_urls_locations(project_name)
    index_file_loc, index_url_loc = self._sort_locations(index_locations)
    fl_file_loc, fl_url_loc = self._sort_locations(
        self.find_links, expand_dir=True,
    )
    dep_file_loc, dep_url_loc = self._sort_locations(self.dependency_links)

    file_locations = (Link(url) for url in itertools.chain(
        index_file_loc, fl_file_loc, dep_file_loc,
    ))

    # We trust every url that the user has given us whether it was given
    # via --index-url or --find-links
    # We explicitly do not trust links that came from dependency_links
    # We want to filter out any thing which does not have a secure origin.
    url_locations = [
        link for link in itertools.chain(
            (Link(url) for url in index_url_loc),
            (Link(url) for url in fl_url_loc),
            (Link(url) for url in dep_url_loc),
        )
        if self._validate_secure_origin(logger, link)
    ]

    logger.debug('%d location(s) to search for versions of %s:',
                 len(url_locations), project_name)

    for location in url_locations:
        logger.debug('* %s', location)

    canonical_name = canonicalize_name(project_name)
    formats = fmt_ctl_formats(self.format_control, canonical_name)
    search = Search(project_name, canonical_name, formats)
    find_links_versions = self._package_versions(
        # We trust every directly linked archive in find_links
        (Link(url, '-f') for url in self.find_links),
        search
    )

    # Fetch each remote page and collect every acceptable version link.
    page_versions = []
    for page in self._get_pages(url_locations, project_name):
        logger.debug('Analyzing links from page %s', page.url)
        with indent_log():
            page_versions.extend(
                self._package_versions(page.links, search)
            )

    dependency_versions = self._package_versions(
        (Link(url) for url in self.dependency_links), search
    )
    if dependency_versions:
        logger.debug(
            'dependency_links found: %s',
            ', '.join([
                version.location.url for version in dependency_versions
            ])
        )

    file_versions = self._package_versions(file_locations, search)
    if file_versions:
        file_versions.sort(reverse=True)
        logger.debug(
            'Local files found: %s',
            ', '.join([
                url_to_path(candidate.location.url)
                for candidate in file_versions
            ])
        )

    # This is an intentional priority ordering
    return (
        file_versions + find_links_versions + page_versions +
        dependency_versions
    )
def find_requirement(self, req, upgrade):
    """Try to find a Link matching req

    Expects req, an InstallRequirement and upgrade, a boolean
    Returns a Link if found,
    Raises DistributionNotFound or BestVersionAlreadyInstalled otherwise.
    Returns None when an already-installed version satisfies the
    requirement and no upgrade was requested.
    """
    all_candidates = self.find_all_candidates(req.name)

    # Filter out anything which doesn't match our specifier
    compatible_versions = set(
        req.specifier.filter(
            # We turn the version object into a str here because otherwise
            # when we're debundled but setuptools isn't, Python will see
            # packaging.version.Version and
            # pkg_resources._vendor.packaging.version.Version as different
            # types. This way we'll use a str as a common data interchange
            # format. If we stop using the pkg_resources provided specifier
            # and start using our own, we can drop the cast to str().
            [str(c.version) for c in all_candidates],
            prereleases=(
                self.allow_all_prereleases
                if self.allow_all_prereleases else None
            ),
        )
    )
    applicable_candidates = [
        # Again, converting to str to deal with debundling.
        c for c in all_candidates if str(c.version) in compatible_versions
    ]

    if applicable_candidates:
        best_candidate = max(applicable_candidates,
                             key=self._candidate_sort_key)
    else:
        best_candidate = None

    if req.satisfied_by is not None:
        installed_version = parse_version(req.satisfied_by.version)
    else:
        installed_version = None

    # Nothing installed, nothing found: hard failure.
    if installed_version is None and best_candidate is None:
        logger.critical(
            'Could not find a version that satisfies the requirement %s '
            '(from versions: %s)',
            req,
            ', '.join(
                sorted(
                    {str(c.version) for c in all_candidates},
                    key=parse_version,
                )
            )
        )
        raise DistributionNotFound(
            'No matching distribution found for %s' % req
        )

    best_installed = False
    if installed_version and (
            best_candidate is None or
            best_candidate.version <= installed_version):
        best_installed = True

    if not upgrade and installed_version is not None:
        # Without --upgrade, any satisfying installed version wins.
        if best_installed:
            logger.debug(
                'Existing installed version (%s) is most up-to-date and '
                'satisfies requirement',
                installed_version,
            )
        else:
            logger.debug(
                'Existing installed version (%s) satisfies requirement '
                '(most up-to-date version is %s)',
                installed_version,
                best_candidate.version,
            )
        return None

    if best_installed:
        # We have an existing version, and its the best version
        logger.debug(
            'Installed version (%s) is most up-to-date (past versions: '
            '%s)',
            installed_version,
            ', '.join(sorted(compatible_versions, key=parse_version)) or
            "none",
        )
        raise BestVersionAlreadyInstalled

    logger.debug(
        'Using version %s (newest of versions: %s)',
        best_candidate.version,
        ', '.join(sorted(compatible_versions, key=parse_version))
    )
    return best_candidate.location
def _get_pages(self, locations, project_name): | |
""" | |
Yields (page, page_url) from the given locations, skipping | |
locations that have errors. | |
""" | |
seen = set() | |
for location in locations: | |
if location in seen: | |
continue | |
seen.add(location) | |
page = self._get_page(location) | |
if page is None: | |
continue | |
yield page | |
# Matches a trailing "-pyX" or "-pyX.Y" tag on a version string
# (e.g. "1.0-py2.7"); used to strip and check it in _link_package_versions.
_py_version_re = re.compile(r'-py([123]\.?[0-9]?)$')
def _sort_links(self, links): | |
""" | |
Returns elements of links in order, non-egg links first, egg links | |
second, while eliminating duplicates | |
""" | |
eggs, no_eggs = [], [] | |
seen = set() | |
for link in links: | |
if link not in seen: | |
seen.add(link) | |
if link.egg_fragment: | |
eggs.append(link) | |
else: | |
no_eggs.append(link) | |
return no_eggs + eggs | |
def _package_versions(self, links, search): | |
result = [] | |
for link in self._sort_links(links): | |
v = self._link_package_versions(link, search) | |
if v is not None: | |
result.append(v) | |
return result | |
def _log_skipped_link(self, link, reason): | |
if link not in self.logged_links: | |
logger.debug('Skipping link %s; %s', link, reason) | |
self.logged_links.add(link) | |
def _link_package_versions(self, link, search):
    """Return an InstallationCandidate for `link`, or None when the link
    is rejected (wrong extension, wrong project, wrong Python, wrong
    format, or incompatible Requires-Python metadata).

    Every rejection is logged once via _log_skipped_link.
    """
    version = None
    if link.egg_fragment:
        egg_info = link.egg_fragment
        ext = link.ext
    else:
        egg_info, ext = link.splitext()
        if not ext:
            self._log_skipped_link(link, 'not a file')
            return
        if ext not in SUPPORTED_EXTENSIONS:
            self._log_skipped_link(
                link, 'unsupported archive format: %s' % ext,
            )
            return
        if "binary" not in search.formats and ext == wheel_ext:
            self._log_skipped_link(
                link, 'No binaries permitted for %s' % search.supplied,
            )
            return
        if "macosx10" in link.path and ext == '.zip':
            self._log_skipped_link(link, 'macosx10 one')
            return
        if ext == wheel_ext:
            try:
                wheel = Wheel(link.filename)
            except InvalidWheelFilename:
                self._log_skipped_link(link, 'invalid wheel filename')
                return
            if canonicalize_name(wheel.name) != search.canonical:
                self._log_skipped_link(
                    link, 'wrong project name (not %s)' % search.supplied)
                return

            if not wheel.supported(self.valid_tags):
                self._log_skipped_link(
                    link, 'it is not compatible with this Python')
                return

            version = wheel.version

    # This should be up by the search.ok_binary check, but see issue 2700.
    if "source" not in search.formats and ext != wheel_ext:
        self._log_skipped_link(
            link, 'No sources permitted for %s' % search.supplied,
        )
        return

    if not version:
        version = egg_info_matches(egg_info, search.supplied, link)
    if version is None:
        self._log_skipped_link(
            link, 'wrong project name (not %s)' % search.supplied)
        return

    match = self._py_version_re.search(version)
    if match:
        version = version[:match.start()]
        py_version = match.group(1)
        # Compare against "major.minor" built from version_info rather than
        # sys.version[:3], which truncates incorrectly for Python >= 3.10
        # (e.g. "3.10" would become "3.1").
        if py_version != '%s.%s' % sys.version_info[:2]:
            self._log_skipped_link(
                link, 'Python version is incorrect')
            return
    try:
        support_this_python = check_requires_python(link.requires_python)
    except specifiers.InvalidSpecifier:
        logger.debug("Package %s has an invalid Requires-Python entry: %s",
                     link.filename, link.requires_python)
        support_this_python = True

    if not support_this_python:
        # NOTE: a space was missing between "python" and "version" in the
        # original implicit string concatenation; fixed here.
        logger.debug("The package %s is incompatible with the python "
                     "version in use. Acceptable python versions are: %s",
                     link, link.requires_python)
        return
    logger.debug('Found link %s, version: %s', link, version)

    return InstallationCandidate(search.supplied, version, link)
def _get_page(self, link):
    # Delegates to HTMLPage.get_page, which returns None on fetch failure
    # or when the target is not an HTML page.
    return HTMLPage.get_page(link, session=self.session)
def egg_info_matches(
        egg_info, search_name, link,
        _egg_info_re=re.compile(r'([a-z0-9_.]+)-([a-z0-9_.!+-]+)', re.I)):
    """Pull the version part out of an egg-info style string.

    :param egg_info: The string to parse. E.g. foo-2.1
    :param search_name: The name of the package this belongs to. None to
        infer the name. Note that this cannot unambiguously parse strings
        like foo-2-2 which might be foo, 2-2 or foo-2, 2.
    :param link: The link the string came from, for logging on failure.
    :return: The version substring, or None when it cannot be determined.
    """
    match = _egg_info_re.search(egg_info)
    if match is None:
        logger.debug('Could not parse version from link: %s', link)
        return None

    matched = match.group(0)
    if search_name is None:
        # No name to anchor on: take everything from the first dash onward.
        return matched[matched.index('-'):]

    # Normalize like pkg_resources' "safe" name so that underscores compare
    # equal to dashes; name and version must be separated by a dash.
    normalized = matched.lower().replace('_', '-')
    prefix = search_name.lower() + "-"
    if normalized.startswith(prefix):
        return matched[len(prefix):]
    return None
class HTMLPage(object):
    """Represents one fetched index page, along with its URL; the content
    is parsed with html5lib and exposed via `links` / `base_url`."""
def __init__(self, content, url, headers=None):
    """Parse `content` (raw bytes/text of the page fetched from `url`),
    honoring any charset declared in the Content-Type header."""
    # Determine if we have any encoding information in our headers
    encoding = None
    if headers and "Content-Type" in headers:
        content_type, params = cgi.parse_header(headers["Content-Type"])
        if "charset" in params:
            encoding = params['charset']
    self.content = content
    self.parsed = html5lib.parse(
        self.content,
        transport_encoding=encoding,
        # Unprefixed element names keep the .//a / .//base lookups simple.
        namespaceHTMLElements=False,
    )
    self.url = url
    self.headers = headers
def __str__(self):
    # A page displays as its URL.
    return self.url
@classmethod
def get_page(cls, link, skip_archives=True, session=None):
    """Fetch `link` and return an HTMLPage for it, or None.

    None is returned for VCS-scheme URLs, non-HTML responses, and any
    request failure (HTTP error, SSL problem, connection error, timeout);
    failures are logged, not raised.

    :param skip_archives: When True, URLs whose filename looks like an
        archive are probed with a HEAD request first and skipped unless
        the server reports text/html.
    :raises TypeError: when `session` is not supplied.
    """
    if session is None:
        raise TypeError(
            "get_page() missing 1 required keyword argument: 'session'"
        )

    url = link.url
    # The fragment is irrelevant for fetching.
    url = url.split('#', 1)[0]

    # Check for VCS schemes that do not support lookup as web pages.
    from pip._internal.vcs import VcsSupport
    for scheme in VcsSupport.schemes:
        if url.lower().startswith(scheme) and url[len(scheme)] in '+:':
            logger.debug('Cannot look at %s URL %s', scheme, link)
            return None

    try:
        if skip_archives:
            filename = link.filename
            for bad_ext in ARCHIVE_EXTENSIONS:
                if filename.endswith(bad_ext):
                    content_type = cls._get_content_type(
                        url, session=session,
                    )
                    if content_type.lower().startswith('text/html'):
                        break
                    else:
                        logger.debug(
                            'Skipping page %s because of Content-Type: %s',
                            link,
                            content_type,
                        )
                        return

        logger.debug('Getting page %s', url)

        # Tack index.html onto file:// URLs that point to directories
        (scheme, netloc, path, params, query, fragment) = \
            urllib_parse.urlparse(url)
        if (scheme == 'file' and
                os.path.isdir(urllib_request.url2pathname(path))):
            # add trailing slash if not present so urljoin doesn't trim
            # final segment
            if not url.endswith('/'):
                url += '/'
            url = urllib_parse.urljoin(url, 'index.html')
            logger.debug(' file: URL is directory, getting %s', url)

        resp = session.get(
            url,
            headers={
                "Accept": "text/html",
                "Cache-Control": "max-age=600",
            },
        )
        resp.raise_for_status()

        # The check for archives above only works if the url ends with
        # something that looks like an archive. However that is not a
        # requirement of an url. Unless we issue a HEAD request on every
        # url we cannot know ahead of time for sure if something is HTML
        # or not. However we can check after we've downloaded it.
        content_type = resp.headers.get('Content-Type', 'unknown')
        if not content_type.lower().startswith("text/html"):
            logger.debug(
                'Skipping page %s because of Content-Type: %s',
                link,
                content_type,
            )
            return

        inst = cls(resp.content, resp.url, resp.headers)
    except requests.HTTPError as exc:
        cls._handle_fail(link, exc, url)
    except SSLError as exc:
        reason = "There was a problem confirming the ssl certificate: "
        reason += str(exc)
        # SSL problems are surfaced at info level rather than debug.
        cls._handle_fail(link, reason, url, meth=logger.info)
    except requests.ConnectionError as exc:
        cls._handle_fail(link, "connection error: %s" % exc, url)
    except requests.Timeout:
        cls._handle_fail(link, "timed out", url)
    else:
        return inst
@staticmethod | |
def _handle_fail(link, reason, url, meth=None): | |
if meth is None: | |
meth = logger.debug | |
meth("Could not fetch URL %s: %s - skipping", link, reason) | |
@staticmethod | |
def _get_content_type(url, session): | |
"""Get the Content-Type of the given url, using a HEAD request""" | |
scheme, netloc, path, query, fragment = urllib_parse.urlsplit(url) | |
if scheme not in {'http', 'https'}: | |
# FIXME: some warning or something? | |
# assertion error? | |
return '' | |
resp = session.head(url, allow_redirects=True) | |
resp.raise_for_status() | |
return resp.headers.get("Content-Type", "") | |
@cached_property | |
def base_url(self): | |
bases = [ | |
x for x in self.parsed.findall(".//base") | |
if x.get("href") is not None | |
] | |
if bases and bases[0].get("href"): | |
return bases[0].get("href") | |
else: | |
return self.url | |
@property
def links(self):
    """Yields all links in the page as Link objects, resolved against the
    page's base URL and carrying any data-requires-python metadata."""
    for anchor in self.parsed.findall(".//a"):
        if anchor.get("href"):
            href = anchor.get("href")
            # Resolve relative hrefs and percent-encode illegal characters.
            url = self.clean_link(
                urllib_parse.urljoin(self.base_url, href)
            )
            # PEP 503: HTML-unescape the data-requires-python attribute.
            pyrequire = anchor.get('data-requires-python')
            pyrequire = unescape(pyrequire) if pyrequire else None
            yield Link(url, self, requires_python=pyrequire)
# Any character not legal in a URL; clean_link() percent-encodes these.
_clean_re = re.compile(r'[^a-z0-9$&+,/:;=?@.#%_\\|-]', re.I)
def clean_link(self, url): | |
"""Makes sure a link is fully encoded. That is, if a ' ' shows up in | |
the link, it will be rewritten to %20 (while not over-quoting | |
% or other characters).""" | |
return self._clean_re.sub( | |
lambda match: '%%%2x' % ord(match.group(0)), url) | |
class Link(object):

    def __init__(self, url, comes_from=None, requires_python=None):
        """
        Object representing a parsed link from https://pypi.org/simple/*

        url:
            url of the resource pointed to (href of the link)
        comes_from:
            instance of HTMLPage where the link was found, or string.
        requires_python:
            String containing the `Requires-Python` metadata field, specified
            in PEP 345. This may be specified by a data-requires-python
            attribute in the HTML link tag, as described in PEP 503.
        """
        # url can be a UNC windows share
        if url.startswith('\\\\'):
            url = path_to_url(url)

        self.url = url
        self.comes_from = comes_from
        # Empty strings are normalised to None.
        self.requires_python = requires_python or None

    def __str__(self):
        rp = ''
        if self.requires_python:
            rp = ' (requires-python:%s)' % self.requires_python
        if not self.comes_from:
            return str(self.url)
        return '%s (from %s)%s' % (self.url, self.comes_from, rp)

    def __repr__(self):
        return '<Link %s>' % self

    # Links are compared and ordered purely by their URL string.

    def __eq__(self, other):
        if isinstance(other, Link):
            return self.url == other.url
        return NotImplemented

    def __ne__(self, other):
        if isinstance(other, Link):
            return self.url != other.url
        return NotImplemented

    def __lt__(self, other):
        if isinstance(other, Link):
            return self.url < other.url
        return NotImplemented

    def __le__(self, other):
        if isinstance(other, Link):
            return self.url <= other.url
        return NotImplemented

    def __gt__(self, other):
        if isinstance(other, Link):
            return self.url > other.url
        return NotImplemented

    def __ge__(self, other):
        if isinstance(other, Link):
            return self.url >= other.url
        return NotImplemented

    def __hash__(self):
        return hash(self.url)

    @property
    def filename(self):
        """Last path segment (or the netloc if the path is empty), unquoted."""
        parts = urllib_parse.urlsplit(self.url)
        name = posixpath.basename(parts.path.rstrip('/')) or parts.netloc
        name = urllib_parse.unquote(name)
        assert name, ('URL %r produced no filename' % self.url)
        return name

    @property
    def scheme(self):
        return urllib_parse.urlsplit(self.url).scheme

    @property
    def netloc(self):
        return urllib_parse.urlsplit(self.url).netloc

    @property
    def path(self):
        return urllib_parse.unquote(urllib_parse.urlsplit(self.url).path)

    def splitext(self):
        stem = posixpath.basename(self.path.rstrip('/'))
        return splitext(stem)

    @property
    def ext(self):
        return self.splitext()[1]

    @property
    def url_without_fragment(self):
        parts = urllib_parse.urlsplit(self.url)
        return urllib_parse.urlunsplit(
            (parts.scheme, parts.netloc, parts.path, parts.query, None))

    _egg_fragment_re = re.compile(r'[#&]egg=([^&]*)')

    @property
    def egg_fragment(self):
        match = self._egg_fragment_re.search(self.url)
        return match.group(1) if match else None

    _subdirectory_fragment_re = re.compile(r'[#&]subdirectory=([^&]*)')

    @property
    def subdirectory_fragment(self):
        match = self._subdirectory_fragment_re.search(self.url)
        return match.group(1) if match else None

    _hash_re = re.compile(
        r'(sha1|sha224|sha384|sha256|sha512|md5)=([a-f0-9]+)'
    )

    @property
    def hash(self):
        """Hex digest embedded in the URL fragment, if any."""
        match = self._hash_re.search(self.url)
        return match.group(2) if match else None

    @property
    def hash_name(self):
        """Hash algorithm name embedded in the URL fragment, if any."""
        match = self._hash_re.search(self.url)
        return match.group(1) if match else None

    @property
    def show_url(self):
        without_fragment = self.url.split('#', 1)[0]
        without_query = without_fragment.split('?', 1)[0]
        return posixpath.basename(without_query)

    @property
    def is_wheel(self):
        return self.ext == wheel_ext

    @property
    def is_artifact(self):
        """
        Determines if this points to an actual artifact (e.g. a tarball) or if
        it points to an "abstract" thing like a path or a VCS location.
        """
        from pip._internal.vcs import vcs

        return self.scheme not in vcs.all_schemes
# Pair of sets backing the --no-binary / --only-binary command-line options.
FormatControl = namedtuple('FormatControl', 'no_binary only_binary')
"""This object has two fields, no_binary and only_binary.
If a field is falsy, it isn't set. If it is {':all:'}, it should match all
packages except those listed in the other field. Only one field can be set
to {':all:'} at a time. The rest of the time exact package name matches
are listed, with any given package only showing up in one field at a time.
"""
def fmt_ctl_handle_mutual_exclude(value, target, other):
    """Add the comma-separated names in *value* to *target*, keeping the
    *target* and *other* sets mutually exclusive.

    ':all:' wipes both sets and marks *target* as match-everything;
    ':none:' resets *target*.
    """
    names = value.split(',')
    while ':all:' in names:
        other.clear()
        target.clear()
        target.add(':all:')
        # Drop everything up to and including the first ':all:'.
        names = names[names.index(':all:') + 1:]
        if ':none:' not in names:
            # Without a none, we want to discard everything as :all: covers it
            return
    for name in names:
        if name == ':none:':
            target.clear()
        else:
            canonical = canonicalize_name(name)
            other.discard(canonical)
            target.add(canonical)
def fmt_ctl_formats(fmt_ctl, canonical_name):
    """Return the frozenset of formats ('binary'/'source') allowed for a
    package, given a FormatControl and the package's canonical name."""
    allowed = {"binary", "source"}
    # An exact package match takes precedence over a ':all:' wildcard.
    if canonical_name in fmt_ctl.only_binary:
        allowed.discard('source')
    elif canonical_name in fmt_ctl.no_binary:
        allowed.discard('binary')
    elif ':all:' in fmt_ctl.only_binary:
        allowed.discard('source')
    elif ':all:' in fmt_ctl.no_binary:
        allowed.discard('binary')
    return frozenset(allowed)
def fmt_ctl_no_binary(fmt_ctl):
    """Mark *fmt_ctl* as "no binaries for any package" (--no-binary :all:)."""
    fmt_ctl_handle_mutual_exclude(
        value=':all:',
        target=fmt_ctl.no_binary,
        other=fmt_ctl.only_binary,
    )
# Immutable record of one user-requested package lookup.
Search = namedtuple('Search', 'supplied canonical formats')
"""Capture key aspects of a search.
:attribute supplied: The user supplied package.
:attribute canonical: The canonical package name.
:attribute formats: The formats allowed for this package. Should be a set
with 'binary' or 'source' or both in it.
"""
"""Locations where we look for configs, install stuff, etc""" | |
from __future__ import absolute_import | |
import os | |
import os.path | |
import platform | |
import site | |
import sys | |
import sysconfig | |
from distutils import sysconfig as distutils_sysconfig | |
from distutils.command.install import SCHEME_KEYS, install # type: ignore | |
from pip._internal.compat import WINDOWS, expanduser | |
from pip._internal.utils import appdirs | |
# Application Directories
USER_CACHE_DIR = appdirs.user_cache_dir("pip")

# Marker file dropped into pip-managed build directories; its presence tells
# a later pip run that the directory was created by pip and may be deleted.
DELETE_MARKER_MESSAGE = '''\
This file is placed here by pip to indicate the source was put
here by pip.
Once this package is successfully installed this source code will be
deleted (unless you remove this file).
'''
PIP_DELETE_MARKER_FILENAME = 'pip-delete-this-directory.txt'
def write_delete_marker_file(directory):
    """
    Write the pip delete marker file into this directory.
    """
    marker_path = os.path.join(directory, PIP_DELETE_MARKER_FILENAME)
    with open(marker_path, 'w') as marker_fp:
        marker_fp.write(DELETE_MARKER_MESSAGE)
def running_under_virtualenv():
    """
    Return True if we're running inside a virtualenv, False otherwise.
    """
    # Classic virtualenv rewrites sys.prefix and keeps the original
    # interpreter prefix in sys.real_prefix.
    if hasattr(sys, 'real_prefix'):
        return True
    # PEP 405 venvs leave sys.prefix different from sys.base_prefix.
    return sys.prefix != getattr(sys, "base_prefix", sys.prefix)
def virtualenv_no_global():
    """
    Return True if in a venv and no system site packages.

    :return: bool.  (The original implementation fell off the end of the
        function and returned None on the negative path; an explicit bool is
        returned now — truthiness, and hence caller behavior, is unchanged.)
    """
    # this mirrors the logic in virtualenv.py for locating the
    # no-global-site-packages.txt file
    site_mod_dir = os.path.dirname(os.path.abspath(site.__file__))
    no_global_file = os.path.join(site_mod_dir, 'no-global-site-packages.txt')
    return running_under_virtualenv() and os.path.isfile(no_global_file)
# Where editable ("pip install -e") checkouts are placed: <sys.prefix>/src
# inside a virtualenv, otherwise ./src under the current working directory.
if running_under_virtualenv():
    src_prefix = os.path.join(sys.prefix, 'src')
else:
    # FIXME: keep src in cwd for now (it is not a temporary folder)
    try:
        src_prefix = os.path.join(os.getcwd(), 'src')
    except OSError:
        # In case the current working directory has been renamed or deleted
        sys.exit(
            "The folder you are executing pip from can no longer be found."
        )

# under macOS + virtualenv sys.prefix is not properly resolved
# it is something like /path/to/python/bin/..
# Note: using realpath due to tmp dirs on OSX being symlinks
src_prefix = os.path.abspath(src_prefix)

# FIXME doesn't account for venv linked to global site-packages
site_packages = sysconfig.get_path("purelib")
# This is because of a bug in PyPy's sysconfig module, see
# https://bitbucket.org/pypy/pypy/issues/2506/sysconfig-returns-incorrect-paths
# for more information.
if platform.python_implementation().lower() == "pypy":
    site_packages = distutils_sysconfig.get_python_lib()
try:
    # Use getusersitepackages if this is present, as it ensures that the
    # value is initialised properly.
    user_site = site.getusersitepackages()
except AttributeError:
    user_site = site.USER_SITE
user_dir = expanduser('~')
# Per-platform script directories and configuration file locations.
if WINDOWS:
    bin_py = os.path.join(sys.prefix, 'Scripts')
    bin_user = os.path.join(user_site, 'Scripts')
    # buildout uses 'bin' on Windows too?
    if not os.path.exists(bin_py):
        bin_py = os.path.join(sys.prefix, 'bin')
        bin_user = os.path.join(user_site, 'bin')
    config_basename = 'pip.ini'
    legacy_storage_dir = os.path.join(user_dir, 'pip')
    legacy_config_file = os.path.join(
        legacy_storage_dir,
        config_basename,
    )
else:
    bin_py = os.path.join(sys.prefix, 'bin')
    bin_user = os.path.join(user_site, 'bin')
    config_basename = 'pip.conf'
    legacy_storage_dir = os.path.join(user_dir, '.pip')
    legacy_config_file = os.path.join(
        legacy_storage_dir,
        config_basename,
    )

# Forcing to use /usr/local/bin for standard macOS framework installs
# Also log to ~/Library/Logs/ for use with the Console.app log viewer
if sys.platform[:6] == 'darwin' and sys.prefix[:16] == '/System/Library/':
    bin_py = '/usr/local/bin'

site_config_files = [
    os.path.join(path, config_basename)
    for path in appdirs.site_config_dirs('pip')
]

venv_config_file = os.path.join(sys.prefix, config_basename)
new_config_file = os.path.join(appdirs.user_config_dir("pip"), config_basename)
def distutils_scheme(dist_name, user=False, home=None, root=None,
                     isolated=False, prefix=None):
    """
    Return a distutils install scheme.

    :param dist_name: project name, used for the per-project headers dir.
    :param user: install into the per-user site (mutually exclusive with
        ``prefix``).
    :param home: distutils ``--home``-style installation base.
    :param root: fake root prepended to the headers path.
    :param isolated: if true, ignore the user's distutils config files.
    :param prefix: alternate installation prefix.
    :return: dict mapping each of distutils' SCHEME_KEYS to a path.
    """
    from distutils.dist import Distribution

    scheme = {}

    if isolated:
        extra_dist_args = {"script_args": ["--no-user-cfg"]}
    else:
        extra_dist_args = {}
    dist_args = {'name': dist_name}
    dist_args.update(extra_dist_args)

    d = Distribution(dist_args)
    # Reads setup.cfg / distutils config files; these can change the scheme.
    d.parse_config_files()
    i = d.get_command_obj('install', create=True)
    # NOTE: setting user or home has the side-effect of creating the home dir
    # or user base for installations during finalize_options()
    # ideally, we'd prefer a scheme class that has no side-effects.
    assert not (user and prefix), "user={} prefix={}".format(user, prefix)
    i.user = user or i.user
    if user:
        i.prefix = ""
    i.prefix = prefix or i.prefix
    i.home = home or i.home
    i.root = root or i.root
    i.finalize_options()
    for key in SCHEME_KEYS:
        scheme[key] = getattr(i, 'install_' + key)

    # install_lib specified in setup.cfg should install *everything*
    # into there (i.e. it takes precedence over both purelib and
    # platlib). Note, i.install_lib is *always* set after
    # finalize_options(); we only want to override here if the user
    # has explicitly requested it hence going back to the config
    if 'install_lib' in d.get_option_dict('install'):
        scheme.update(dict(purelib=i.install_lib, platlib=i.install_lib))

    if running_under_virtualenv():
        # BUGFIX: use sys.version_info rather than sys.version[:3], which
        # truncates incorrectly on Python >= 3.10 ("3.10.x" -> "3.1").
        scheme['headers'] = os.path.join(
            sys.prefix,
            'include',
            'site',
            'python%d.%d' % sys.version_info[:2],
            dist_name,
        )

        if root is not None:
            path_no_drive = os.path.splitdrive(
                os.path.abspath(scheme["headers"]))[1]
            scheme["headers"] = os.path.join(
                root,
                path_no_drive[1:],
            )

    return scheme
from pip._internal.models.index import Index, PyPI | |
__all__ = ["Index", "PyPI"] |
from pip._vendor.six.moves.urllib import parse as urllib_parse | |
class Index(object):
    """A package index, identified by its root URL."""

    def __init__(self, url):
        self.url = url
        self.netloc = urllib_parse.urlsplit(url).netloc
        # Pre-computed, commonly used endpoints of the index.
        self.simple_url = self.url_to_path('simple')
        self.pypi_url = self.url_to_path('pypi')

    def url_to_path(self, path):
        """Join *path* onto the index root URL."""
        return urllib_parse.urljoin(self.url, path)


PyPI = Index('https://pypi.org/')
"""Validation of dependencies of packages | |
""" | |
from collections import namedtuple | |
from pip._vendor.packaging.utils import canonicalize_name | |
from pip._internal.operations.prepare import make_abstract_dist | |
from pip._internal.utils.misc import get_installed_distributions | |
from pip._internal.utils.typing import MYPY_CHECK_RUNNING | |
if MYPY_CHECK_RUNNING: | |
from pip._internal.req.req_install import InstallRequirement | |
from typing import Any, Dict, Iterator, Set, Tuple, List | |
# Shorthands | |
PackageSet = Dict[str, 'PackageDetails'] | |
Missing = Tuple[str, Any] | |
Conflicting = Tuple[str, str, Any] | |
MissingDict = Dict[str, List[Missing]] | |
ConflictingDict = Dict[str, List[Conflicting]] | |
CheckResult = Tuple[MissingDict, ConflictingDict] | |
# (version, requires): a distribution's version string plus the list of
# requirements returned by dist.requires().
PackageDetails = namedtuple('PackageDetails', ['version', 'requires'])
def create_package_set_from_installed(**kwargs):
    # type: (**Any) -> PackageSet
    """Converts a list of distributions into a PackageSet.
    """
    if not kwargs:
        # Default to using all packages installed on the system
        kwargs = {"local_only": False, "skip": ()}
    package_set = {}
    for dist in get_installed_distributions(**kwargs):
        name = canonicalize_name(dist.project_name)
        package_set[name] = PackageDetails(dist.version, dist.requires())
    return package_set
def check_package_set(package_set):
    # type: (PackageSet) -> CheckResult
    """Check if a package set is consistent.

    :param package_set: mapping of canonical package name -> PackageDetails.
    :return: (missing, conflicting) dicts keyed by the dependent package's
        name.  ``missing`` values are lists of (dep_name, requirement);
        ``conflicting`` values are lists of (dep_name, installed_version,
        requirement).
    """
    missing = dict()
    conflicting = dict()

    for package_name in package_set:
        # Info about dependencies of package_name
        missing_deps = set()  # type: Set[Missing]
        conflicting_deps = set()  # type: Set[Conflicting]

        for req in package_set[package_name].requires:
            name = canonicalize_name(req.project_name)  # type: str

            # Check if it's missing
            if name not in package_set:
                missed = True
                if req.marker is not None:
                    # A marker that evaluates False means the dependency is
                    # not needed in this environment, so it isn't "missing".
                    missed = req.marker.evaluate()
                if missed:
                    missing_deps.add((name, req))
                continue

            # Check if there's a conflict
            version = package_set[name].version  # type: str
            if not req.specifier.contains(version, prereleases=True):
                conflicting_deps.add((name, version, req))

        # Sort on the string form for deterministic output; the tuples
        # contain requirement objects, which are not orderable themselves.
        # (The original defined a `str_key` helper inside the loop on every
        # iteration; the builtin `str` is the idiomatic equivalent.)
        if missing_deps:
            missing[package_name] = sorted(missing_deps, key=str)
        if conflicting_deps:
            conflicting[package_name] = sorted(conflicting_deps, key=str)

    return missing, conflicting
def check_install_conflicts(to_install):
    # type: (List[InstallRequirement]) -> Tuple[PackageSet, CheckResult]
    """For checking if the dependency graph would be consistent after \
    installing given requirements
    """
    # Start from the current state
    package_set = create_package_set_from_installed()
    # Apply the pending installs to the snapshot, then re-validate it.
    _simulate_installation_of(to_install, package_set)
    return package_set, check_package_set(package_set)
# NOTE from @pradyunsg
# This required a minor update in dependency link handling logic over at
# operations.prepare.IsSDist.dist() to get it working
def _simulate_installation_of(to_install, state):
    # type: (List[InstallRequirement], PackageSet) -> None
    """Computes the version of packages after installing to_install.
    """
    # Modify it as installing requirement_set would (assuming no errors)
    for inst_req in to_install:
        abstract_dist = make_abstract_dist(inst_req)
        dist = abstract_dist.dist(finder=None)
        state[canonicalize_name(dist.key)] = PackageDetails(
            dist.version, dist.requires(),
        )
from __future__ import absolute_import | |
import collections | |
import logging | |
import os | |
import re | |
import warnings | |
from pip._vendor import pkg_resources, six | |
from pip._vendor.packaging.utils import canonicalize_name | |
from pip._vendor.pkg_resources import RequirementParseError | |
from pip._internal.exceptions import InstallationError | |
from pip._internal.req import InstallRequirement | |
from pip._internal.req.req_file import COMMENT_RE | |
from pip._internal.utils.deprecation import RemovedInPip11Warning | |
from pip._internal.utils.misc import ( | |
dist_is_editable, get_installed_distributions, | |
) | |
logger = logging.getLogger(__name__) | |
def freeze(
        requirement=None,
        find_links=None, local_only=None, user_only=None, skip_regex=None,
        isolated=False,
        wheel_cache=None,
        exclude_editable=False,
        skip=()):
    """Generate the lines of a requirements file for the current environment.

    Yields ``-f`` lines for find-links, passed-through option/comment lines
    from the given requirement files, and one frozen requirement line per
    installed distribution.

    :param requirement: optional list of requirement-file paths; when given,
        output mirrors the order and options of those files, followed by a
        section for anything installed but not mentioned in them.
    :param find_links: list of find-links URLs to emit as ``-f`` lines.
    :param local_only: forwarded to get_installed_distributions.
    :param user_only: forwarded to get_installed_distributions.
    :param skip_regex: requirement-file lines matching this regex are passed
        through unprocessed.
    :param isolated: build InstallRequirements in isolated mode.
    :param wheel_cache: wheel cache handed to the InstallRequirements.
    :param exclude_editable: drop editable (``-e``) installs from the output.
    :param skip: iterable of canonical names to omit from the trailing
        "added by pip freeze" section.
    """
    find_links = find_links or []
    skip_match = None

    if skip_regex:
        skip_match = re.compile(skip_regex).search

    dependency_links = []

    # Collect dependency links from installed metadata and from any
    # find-links URL that pins a specific egg.
    for dist in pkg_resources.working_set:
        if dist.has_metadata('dependency_links.txt'):
            dependency_links.extend(
                dist.get_metadata_lines('dependency_links.txt')
            )
    for link in find_links:
        if '#egg=' in link:
            dependency_links.append(link)
    for link in find_links:
        yield '-f %s' % link
    installations = {}
    for dist in get_installed_distributions(local_only=local_only,
                                            skip=(),
                                            user_only=user_only):
        try:
            req = FrozenRequirement.from_dist(
                dist,
                dependency_links
            )
        except RequirementParseError:
            # Unparsable metadata: report and skip instead of aborting.
            logger.warning(
                "Could not parse requirement: %s",
                dist.project_name
            )
            continue
        if exclude_editable and req.editable:
            continue
        installations[req.name] = req

    if requirement:
        # the options that don't get turned into an InstallRequirement
        # should only be emitted once, even if the same option is in multiple
        # requirements files, so we need to keep track of what has been emitted
        # so that we don't emit it again if it's seen again
        emitted_options = set()
        # keep track of which files a requirement is in so that we can
        # give an accurate warning if a requirement appears multiple times.
        req_files = collections.defaultdict(list)
        for req_file_path in requirement:
            with open(req_file_path) as req_file:
                for line in req_file:
                    if (not line.strip() or
                            line.strip().startswith('#') or
                            (skip_match and skip_match(line)) or
                            line.startswith((
                                '-r', '--requirement',
                                '-Z', '--always-unzip',
                                '-f', '--find-links',
                                '-i', '--index-url',
                                '--pre',
                                '--trusted-host',
                                '--process-dependency-links',
                                '--extra-index-url'))):
                        # Blank / comment / option line: pass through once.
                        line = line.rstrip()
                        if line not in emitted_options:
                            emitted_options.add(line)
                            yield line
                        continue

                    if line.startswith('-e') or line.startswith('--editable'):
                        if line.startswith('-e'):
                            line = line[2:].strip()
                        else:
                            line = line[len('--editable'):].strip().lstrip('=')
                        line_req = InstallRequirement.from_editable(
                            line,
                            isolated=isolated,
                            wheel_cache=wheel_cache,
                        )
                    else:
                        line_req = InstallRequirement.from_line(
                            COMMENT_RE.sub('', line).strip(),
                            isolated=isolated,
                            wheel_cache=wheel_cache,
                        )

                    if not line_req.name:
                        logger.info(
                            "Skipping line in requirement file [%s] because "
                            "it's not clear what it would install: %s",
                            req_file_path, line.strip(),
                        )
                        logger.info(
                            " (add #egg=PackageName to the URL to avoid"
                            " this warning)"
                        )
                    elif line_req.name not in installations:
                        # either it's not installed, or it is installed
                        # but has been processed already
                        if not req_files[line_req.name]:
                            logger.warning(
                                "Requirement file [%s] contains %s, but that "
                                "package is not installed",
                                req_file_path,
                                COMMENT_RE.sub('', line).strip(),
                            )
                        else:
                            req_files[line_req.name].append(req_file_path)
                    else:
                        yield str(installations[line_req.name]).rstrip()
                        del installations[line_req.name]
                        req_files[line_req.name].append(req_file_path)

        # Warn about requirements that were included multiple times (in a
        # single requirements file or in different requirements files).
        for name, files in six.iteritems(req_files):
            if len(files) > 1:
                logger.warning("Requirement %s included multiple times [%s]",
                               name, ', '.join(sorted(set(files))))

        yield(
            '## The following requirements were added by '
            'pip freeze:'
        )
    for installation in sorted(
            installations.values(), key=lambda x: x.name.lower()):
        if canonicalize_name(installation.name) not in skip:
            yield str(installation).rstrip()
class FrozenRequirement(object):
    """One line of ``pip freeze`` output: a name, a requirement string (or
    Requirement object), an editable flag, and optional comment lines
    emitted before the requirement."""

    def __init__(self, name, req, editable, comments=()):
        self.name = name
        self.req = req
        self.editable = editable
        self.comments = comments

    # Patterns recognising svn-derived versions: "-rNNNN" revisions and
    # "-YYYYMMDD" date stamps appended to the version string.
    _rev_re = re.compile(r'-r(\d+)$')
    _date_re = re.compile(r'-(20\d\d\d\d\d\d)$')

    @classmethod
    def from_dist(cls, dist, dependency_links):
        """Build a FrozenRequirement from an installed distribution.

        Editable VCS checkouts are rendered as ``-e <url>`` requirements;
        everything else falls back to ``name==version`` from the dist's
        own metadata.
        """
        location = os.path.normcase(os.path.abspath(dist.location))
        comments = []
        from pip._internal.vcs import vcs, get_src_requirement
        if dist_is_editable(dist) and vcs.get_backend_name(location):
            editable = True
            try:
                req = get_src_requirement(dist, location)
            except InstallationError as exc:
                logger.warning(
                    "Error when trying to get requirement for VCS system %s, "
                    "falling back to uneditable format", exc
                )
                req = None
            if req is None:
                logger.warning(
                    'Could not determine repository location of %s', location
                )
                comments.append(
                    '## !! Could not determine repository location'
                )
                req = dist.as_requirement()
                editable = False
        else:
            editable = False
            req = dist.as_requirement()
            specs = req.specs
            assert len(specs) == 1 and specs[0][0] in ["==", "==="], \
                'Expected 1 spec with == or ===; specs = %r; dist = %r' % \
                (specs, dist)
            version = specs[0][1]
            ver_match = cls._rev_re.search(version)
            date_match = cls._date_re.search(version)
            # A version embedding an svn revision or date stamp suggests the
            # package came from svn; try to reconstruct an editable URL from
            # the dependency links.
            if ver_match or date_match:
                svn_backend = vcs.get_backend('svn')
                if svn_backend:
                    svn_location = svn_backend().get_location(
                        dist,
                        dependency_links,
                    )
                if not svn_location:
                    logger.warning(
                        'Warning: cannot find svn location for %s', req,
                    )
                    comments.append(
                        '## FIXME: could not find svn URL in dependency_links '
                        'for this package:'
                    )
                else:
                    warnings.warn(
                        "SVN editable detection based on dependency links "
                        "will be dropped in the future.",
                        RemovedInPip11Warning,
                    )
                    comments.append(
                        '# Installing as editable to satisfy requirement %s:' %
                        req
                    )
                    if ver_match:
                        rev = ver_match.group(1)
                    else:
                        rev = '{%s}' % date_match.group(1)
                    editable = True
                    req = '%s@%s#egg=%s' % (
                        svn_location,
                        rev,
                        cls.egg_name(dist)
                    )
        return cls(dist.project_name, req, editable, comments)

    @staticmethod
    def egg_name(dist):
        # Strip a trailing "-pyX.Y" tag from the egg name, if present.
        name = dist.egg_name()
        match = re.search(r'-py\d\.\d$', name)
        if match:
            name = name[:match.start()]
        return name

    def __str__(self):
        req = self.req
        if self.editable:
            req = '-e %s' % req
        return '\n'.join(list(self.comments) + [str(req)]) + '\n'
"""Prepares a distribution for installation | |
""" | |
import itertools | |
import logging | |
import os | |
import sys | |
from copy import copy | |
from pip._vendor import pkg_resources, requests | |
from pip._internal.build_env import NoOpBuildEnvironment | |
from pip._internal.compat import expanduser | |
from pip._internal.download import ( | |
is_dir_url, is_file_url, is_vcs_url, unpack_url, url_to_path, | |
) | |
from pip._internal.exceptions import ( | |
DirectoryUrlHashUnsupported, HashUnpinned, InstallationError, | |
PreviousBuildDirError, VcsHashUnsupported, | |
) | |
from pip._internal.index import FormatControl | |
from pip._internal.req.req_install import InstallRequirement | |
from pip._internal.utils.hashes import MissingHashes | |
from pip._internal.utils.logging import indent_log | |
from pip._internal.utils.misc import ( | |
call_subprocess, display_path, normalize_path, | |
) | |
from pip._internal.utils.ui import open_spinner | |
from pip._internal.vcs import vcs | |
logger = logging.getLogger(__name__) | |
def make_abstract_dist(req):
    """Factory to make an abstract dist object.
    Preconditions: Either an editable req with a source_dir, or satisfied_by or
    a wheel link, or a non-editable req with a source_dir.
    :return: A concrete DistAbstraction.
    """
    # Only a non-editable requirement pointing at a wheel goes down the
    # wheel path; everything else is treated as a source distribution.
    if not req.editable and req.link and req.link.is_wheel:
        return IsWheel(req)
    return IsSDist(req)
def _install_build_reqs(finder, prefix, build_requirements):
    """Install *build_requirements* into *prefix* via a pip subprocess."""
    # NOTE: What follows is not a very good thing.
    # Eventually, this should move into the BuildEnvironment class and
    # that should handle all the isolation and sub-process invocation.
    finder = copy(finder)
    # Restrict the build-requirement search to binaries only.
    finder.format_control = FormatControl(set(), set([":all:"]))
    urls = []
    for requirement in build_requirements:
        found = finder.find_requirement(
            InstallRequirement.from_line(requirement), upgrade=False,
        )
        urls.append(found.url)
    args = [
        sys.executable, '-m', 'pip', 'install', '--ignore-installed',
        '--no-user', '--prefix', prefix,
    ] + list(urls)

    with open_spinner("Installing build dependencies") as spinner:
        call_subprocess(args, show_stdout=False, spinner=spinner)
class DistAbstraction(object):
    """Abstracts out the wheel vs non-wheel Resolver.resolve() logic.
    The requirements for anything installable are as follows:
     - we must be able to determine the requirement name
       (or we can't correctly handle the non-upgrade case).
     - we must be able to generate a list of run-time dependencies
       without installing any additional packages (or we would
       have to either burn time by doing temporary isolated installs
       or alternatively violate pips 'don't start installing unless
       all requirements are available' rule - neither of which are
       desirable).
     - for packages with setup requirements, we must also be able
       to determine their requirements without installing additional
       packages (for the same reason as run-time dependencies)
     - we must be able to create a Distribution object exposing the
       above metadata.
    """

    def __init__(self, req):
        # The InstallRequirement this abstraction wraps.
        self.req = req

    def dist(self, finder):
        """Return a setuptools Dist object."""
        raise NotImplementedError(self.dist)

    def prep_for_dist(self, finder):
        """Ensure that we can get a Dist for this requirement."""
        # BUG FIX: the original raised NotImplementedError(self.dist),
        # misreporting which method the subclass failed to implement.
        raise NotImplementedError(self.prep_for_dist)
class IsWheel(DistAbstraction):
    """DistAbstraction for requirements that point at a built wheel."""

    def dist(self, finder):
        distributions = list(pkg_resources.find_distributions(
            self.req.source_dir))
        return distributions[0]

    def prep_for_dist(self, finder, build_isolation):
        # A wheel needs no build step before installation.
        # FIXME:https://github.com/pypa/pip/issues/1112
        pass
class IsSDist(DistAbstraction):
    """DistAbstraction for source distributions and editable requirements."""

    def dist(self, finder):
        dist = self.req.get_dist()
        # FIXME: shouldn't be globally added.
        if finder and dist.has_metadata('dependency_links.txt'):
            finder.add_dependency_links(
                dist.get_metadata_lines('dependency_links.txt')
            )
        return dist

    def prep_for_dist(self, finder, build_isolation):
        # Before calling "setup.py egg_info", we need to set-up the build
        # environment.
        build_requirements, isolate = self.req.get_pep_518_info()
        should_isolate = build_isolation and isolate

        # pip 10 can only build with setuptools+wheel; warn when the
        # project's pyproject.toml does not declare them.
        minimum_requirements = ('setuptools', 'wheel')
        missing_requirements = set(minimum_requirements) - set(
            pkg_resources.Requirement(r).key
            for r in build_requirements
        )
        if missing_requirements:
            def format_reqs(rs):
                return ' and '.join(map(repr, sorted(rs)))
            logger.warning(
                "Missing build time requirements in pyproject.toml for %s: "
                "%s.", self.req, format_reqs(missing_requirements)
            )
            logger.warning(
                "This version of pip does not implement PEP 517 so it cannot "
                "build a wheel without %s.", format_reqs(minimum_requirements)
            )

        if should_isolate:
            # NOTE(review): the empty `with` block appears to exist only to
            # trigger creation of the build environment before the build
            # requirements are installed into it — confirm before changing.
            with self.req.build_env:
                pass
            _install_build_reqs(finder, self.req.build_env.path,
                                build_requirements)
        else:
            self.req.build_env = NoOpBuildEnvironment(no_clean=False)

        self.req.run_egg_info()
        self.req.assert_source_matches_version()
class Installed(DistAbstraction):
    """DistAbstraction for requirements already satisfied by an install."""

    def dist(self, finder):
        # The already-installed distribution recorded during resolution.
        return self.req.satisfied_by

    def prep_for_dist(self, finder):
        # Nothing to prepare; the package is installed already.
        pass
class RequirementPreparer(object): | |
"""Prepares a Requirement | |
""" | |
def __init__(self, build_dir, download_dir, src_dir, wheel_download_dir,
             progress_bar, build_isolation):
    """Record the directory layout and options used to prepare requirements.

    :param build_dir: where sdists are unpacked and built.
    :param download_dir: where still-packed archives are written; None means
        they are discarded after unpacking.
    :param src_dir: where editable checkouts are placed.
    :param wheel_download_dir: where still-packed .whl files are written;
        None means they go to ``download_dir`` instead.
    :param progress_bar: progress bar style used while downloading.
    :param build_isolation: whether PEP 518 build isolation is allowed.
    """
    super(RequirementPreparer, self).__init__()

    self.src_dir = src_dir
    self.build_dir = build_dir

    # Where still packed archives should be written to. If None, they are
    # not saved, and are deleted immediately after unpacking.
    self.download_dir = download_dir

    # Where still-packed .whl files should be written to. If None, they are
    # written to the download_dir parameter. Separate to download_dir to
    # permit only keeping wheel archives for pip wheel.
    if wheel_download_dir:
        wheel_download_dir = normalize_path(wheel_download_dir)
    self.wheel_download_dir = wheel_download_dir

    # NOTE
    # download_dir and wheel_download_dir overlap semantically and may
    # be combined if we're willing to have non-wheel archives present in
    # the wheelhouse output by 'pip wheel'.

    self.progress_bar = progress_bar

    # Is build isolation allowed?
    self.build_isolation = build_isolation
@property
def _download_should_save(self):
    """Whether downloaded archives should be persisted to download_dir.

    Raises InstallationError when a download directory is configured but
    does not exist.  (Guard-clause form resolves the original TODO about
    reducing indentation; behavior is unchanged.)
    """
    if not self.download_dir:
        return False

    self.download_dir = expanduser(self.download_dir)
    if os.path.exists(self.download_dir):
        return True

    logger.critical('Could not find download directory')
    raise InstallationError(
        "Could not find or access download directory '%s'"
        % display_path(self.download_dir))
def prepare_linked_requirement(self, req, session, finder, | |
upgrade_allowed, require_hashes): | |
"""Prepare a requirement that would be obtained from req.link | |
""" | |
# TODO: Breakup into smaller functions | |
if req.link and req.link.scheme == 'file': | |
path = url_to_path(req.link.url) | |
logger.info('Processing %s', display_path(path)) | |
else: | |
logger.info('Collecting %s', req) | |
with indent_log(): | |
# @@ if filesystem packages are not marked | |
# editable in a req, a non deterministic error | |
# occurs when the script attempts to unpack the | |
# build directory | |
req.ensure_has_source_dir(self.build_dir) | |
# If a checkout exists, it's unwise to keep going. version | |
# inconsistencies are logged later, but do not fail the | |
# installation. | |
# FIXME: this won't upgrade when there's an existing | |
# package unpacked in `req.source_dir` | |
# package unpacked in `req.source_dir` | |
if os.path.exists(os.path.join(req.source_dir, 'setup.py')): | |
raise PreviousBuildDirError( | |
"pip can't proceed with requirements '%s' due to a" | |
" pre-existing build directory (%s). This is " | |
"likely due to a previous installation that failed" | |
". pip is being responsible and not assuming it " | |
"can delete this. Please delete it and try again." | |
% (req, req.source_dir) | |
) | |
req.populate_link(finder, upgrade_allowed, require_hashes) | |
# We can't hit this spot and have populate_link return None. | |
# req.satisfied_by is None here (because we're | |
# guarded) and upgrade has no impact except when satisfied_by | |
# is not None. | |
# Then inside find_requirement existing_applicable -> False | |
# If no new versions are found, DistributionNotFound is raised, | |
# otherwise a result is guaranteed. | |
assert req.link | |
link = req.link | |
# Now that we have the real link, we can tell what kind of | |
# requirements we have and raise some more informative errors | |
# than otherwise. (For example, we can raise VcsHashUnsupported | |
# for a VCS URL rather than HashMissing.) | |
if require_hashes: | |
# We could check these first 2 conditions inside | |
# unpack_url and save repetition of conditions, but then | |
# we would report less-useful error messages for | |
# unhashable requirements, complaining that there's no | |
# hash provided. | |
if is_vcs_url(link): | |
raise VcsHashUnsupported() | |
elif is_file_url(link) and is_dir_url(link): | |
raise DirectoryUrlHashUnsupported() | |
if not req.original_link and not req.is_pinned: | |
# Unpinned packages are asking for trouble when a new | |
# version is uploaded. This isn't a security check, but | |
# it saves users a surprising hash mismatch in the | |
# future. | |
# | |
# file:/// URLs aren't pinnable, so don't complain | |
# about them not being pinned. | |
raise HashUnpinned() | |
hashes = req.hashes(trust_internet=not require_hashes) | |
if require_hashes and not hashes: | |
# Known-good hashes are missing for this requirement, so | |
# shim it with a facade object that will provoke hash | |
# computation and then raise a HashMissing exception | |
# showing the user what the hash should be. | |
hashes = MissingHashes() | |
try: | |
download_dir = self.download_dir | |
# We always delete unpacked sdists after pip ran. | |
autodelete_unpacked = True | |
if req.link.is_wheel and self.wheel_download_dir: | |
# when doing 'pip wheel` we download wheels to a | |
# dedicated dir. | |
download_dir = self.wheel_download_dir | |
if req.link.is_wheel: | |
if download_dir: | |
# When downloading, we only unpack wheels to get | |
# metadata. | |
autodelete_unpacked = True | |
else: | |
# When installing a wheel, we use the unpacked | |
# wheel. | |
autodelete_unpacked = False | |
unpack_url( | |
req.link, req.source_dir, | |
download_dir, autodelete_unpacked, | |
session=session, hashes=hashes, | |
progress_bar=self.progress_bar | |
) | |
except requests.HTTPError as exc: | |
logger.critical( | |
'Could not install requirement %s because of error %s', | |
req, | |
exc, | |
) | |
raise InstallationError( | |
'Could not install requirement %s because of HTTP ' | |
'error %s for URL %s' % | |
(req, exc, req.link) | |
) | |
abstract_dist = make_abstract_dist(req) | |
abstract_dist.prep_for_dist(finder, self.build_isolation) | |
if self._download_should_save: | |
# Make a .zip of the source_dir we already created. | |
if req.link.scheme in vcs.all_schemes: | |
req.archive(self.download_dir) | |
return abstract_dist | |
def prepare_editable_requirement(self, req, require_hashes, use_user_site, | |
finder): | |
"""Prepare an editable requirement | |
""" | |
assert req.editable, "cannot prepare a non-editable req as editable" | |
logger.info('Obtaining %s', req) | |
with indent_log(): | |
if require_hashes: | |
raise InstallationError( | |
'The editable requirement %s cannot be installed when ' | |
'requiring hashes, because there is no single file to ' | |
'hash.' % req | |
) | |
req.ensure_has_source_dir(self.src_dir) | |
req.update_editable(not self._download_should_save) | |
abstract_dist = make_abstract_dist(req) | |
abstract_dist.prep_for_dist(finder, self.build_isolation) | |
if self._download_should_save: | |
req.archive(self.download_dir) | |
req.check_if_exists(use_user_site) | |
return abstract_dist | |
def prepare_installed_requirement(self, req, require_hashes, skip_reason): | |
"""Prepare an already-installed requirement | |
""" | |
assert req.satisfied_by, "req should have been satisfied but isn't" | |
assert skip_reason is not None, ( | |
"did not get skip reason skipped but req.satisfied_by " | |
"is set to %r" % (req.satisfied_by,) | |
) | |
logger.info( | |
'Requirement %s: %s (%s)', | |
skip_reason, req, req.satisfied_by.version | |
) | |
with indent_log(): | |
if require_hashes: | |
logger.debug( | |
'Since it is already installed, we are trusting this ' | |
'package without checking its hash. To ensure a ' | |
'completely repeatable environment, install into an ' | |
'empty virtualenv.' | |
) | |
abstract_dist = Installed(req) | |
return abstract_dist |
from __future__ import absolute_import | |
import logging | |
from .req_install import InstallRequirement | |
from .req_set import RequirementSet | |
from .req_file import parse_requirements | |
from pip._internal.utils.logging import indent_log | |
__all__ = [ | |
"RequirementSet", "InstallRequirement", | |
"parse_requirements", "install_given_reqs", | |
] | |
logger = logging.getLogger(__name__) | |
def install_given_reqs(to_install, install_options, global_options=(),
                       *args, **kwargs):
    """
    Install everything in the given list.

    (to be called after having downloaded and unpacked the packages)
    """
    if not to_install:
        return to_install

    logger.info(
        'Installing collected packages: %s',
        ', '.join([req.name for req in to_install]),
    )

    with indent_log():
        for requirement in to_install:
            # If this requirement replaces an installed distribution,
            # uninstall the old one first, keeping the removed files
            # around so the uninstall can be rolled back on failure.
            uninstalled_pathset = None
            if requirement.conflicts_with:
                logger.info(
                    'Found existing installation: %s',
                    requirement.conflicts_with,
                )
                with indent_log():
                    uninstalled_pathset = requirement.uninstall(
                        auto_confirm=True
                    )
            try:
                requirement.install(
                    install_options,
                    global_options,
                    *args,
                    **kwargs
                )
            except:  # noqa: E722 -- deliberately broad; re-raised below
                # The install failed: put the previously-installed
                # version's files back before propagating the error.
                rollback_needed = (
                    requirement.conflicts_with and
                    not requirement.install_succeeded
                )
                if rollback_needed:
                    uninstalled_pathset.rollback()
                raise
            else:
                # The install worked, so the old files can be discarded
                # for good.
                commit_needed = (
                    requirement.conflicts_with and
                    requirement.install_succeeded
                )
                if commit_needed:
                    uninstalled_pathset.commit()
            requirement.remove_temporary_source()

    return to_install
""" | |
Requirements file parsing | |
""" | |
from __future__ import absolute_import | |
import optparse | |
import os | |
import re | |
import shlex | |
import sys | |
from pip._vendor.six.moves import filterfalse | |
from pip._vendor.six.moves.urllib import parse as urllib_parse | |
from pip._internal import cmdoptions | |
from pip._internal.download import get_file_content | |
from pip._internal.exceptions import RequirementsFileParseError | |
from pip._internal.req.req_install import InstallRequirement | |
__all__ = ['parse_requirements'] | |
# Matches the scheme prefix of http(s)/file URLs (case-insensitive).
SCHEME_RE = re.compile(r'^(http|https|file):', re.I)
# Matches a '#'-comment (at line start or after whitespace) up to end of line.
COMMENT_RE = re.compile(r'(^|\s)+#.*$')

# Matches environment variable-style values in '${MY_VARIABLE_1}' with the
# variable name consisting of only uppercase letters, digits or the '_'
# (underscore). This follows the POSIX standard defined in IEEE Std 1003.1,
# 2013 Edition.
ENV_VAR_RE = re.compile(r'(?P<var>\$\{(?P<name>[A-Z0-9_]+)\})')

# Options that may appear on a requirements-file line and affect file-level
# or finder state (index URLs, find-links, nested files, ...).
SUPPORTED_OPTIONS = [
    cmdoptions.constraints,
    cmdoptions.editable,
    cmdoptions.requirements,
    cmdoptions.no_index,
    cmdoptions.index_url,
    cmdoptions.find_links,
    cmdoptions.extra_index_url,
    cmdoptions.always_unzip,
    cmdoptions.no_binary,
    cmdoptions.only_binary,
    cmdoptions.pre,
    cmdoptions.process_dependency_links,
    cmdoptions.trusted_host,
    cmdoptions.require_hashes,
]

# options to be passed to requirements (scoped to a single requirement line)
SUPPORTED_OPTIONS_REQ = [
    cmdoptions.install_options,
    cmdoptions.global_options,
    cmdoptions.hash,
]

# the 'dest' string values of the per-requirement options above
SUPPORTED_OPTIONS_REQ_DEST = [o().dest for o in SUPPORTED_OPTIONS_REQ]
def parse_requirements(filename, finder=None, comes_from=None, options=None,
                       session=None, constraint=False, wheel_cache=None):
    """Parse a requirements file and yield InstallRequirement instances.

    :param filename: Path or url of requirements file.
    :param finder: Instance of pip.index.PackageFinder.
    :param comes_from: Origin description of requirements.
    :param options: cli options.
    :param session: Instance of pip.download.PipSession.
    :param constraint: If true, parsing a constraint file rather than
        requirements file.
    :param wheel_cache: Instance of pip.wheel.WheelCache
    """
    # A session is mandatory: files may live behind http(s) URLs.
    if session is None:
        raise TypeError(
            "parse_requirements() missing 1 required keyword argument: "
            "'session'"
        )

    _, content = get_file_content(
        filename, comes_from=comes_from, session=session
    )

    # preprocess() joins continuations, strips comments, applies the
    # skip-regex and expands ${ENV_VARS}; process_line() then turns each
    # remaining logical line into zero or more requirements.
    for line_number, line in preprocess(content, options):
        for requirement in process_line(line, filename, line_number, finder,
                                        comes_from, options, session,
                                        wheel_cache, constraint=constraint):
            yield requirement
def preprocess(content, options):
    """Split, filter, and join lines, and return a line iterator

    :param content: the content of the requirements file
    :param options: cli options
    """
    # Each stage wraps the previous one, so the transformations are
    # applied lazily and in order as the caller iterates.
    numbered_lines = enumerate(content.splitlines(), start=1)
    joined = join_lines(numbered_lines)
    without_comments = ignore_comments(joined)
    filtered = skip_regex(without_comments, options)
    return expand_env_variables(filtered)
def process_line(line, filename, line_number, finder=None, comes_from=None,
                 options=None, session=None, wheel_cache=None,
                 constraint=False):
    """Process a single requirements line; This can result in creating/yielding
    requirements, or updating the finder.

    For lines that contain requirements, the only options that have an effect
    are from SUPPORTED_OPTIONS_REQ, and they are scoped to the
    requirement. Other options from SUPPORTED_OPTIONS may be present, but are
    ignored.

    For lines that do not contain requirements, the only options that have an
    effect are from SUPPORTED_OPTIONS. Options from SUPPORTED_OPTIONS_REQ may
    be present, but are ignored. These lines may contain multiple options
    (although our docs imply only one is supported), and all are parsed and
    affect the finder.

    :param constraint: If True, parsing a constraints file.
    :param options: OptionParser options that we may update
    """
    # Build a one-off optparse parser whose error handler reports the
    # offending line, then parse only the option part of the line.
    parser = build_parser(line)
    defaults = parser.get_default_values()
    defaults.index_url = None
    if finder:
        # `finder.format_control` will be updated during parsing
        defaults.format_control = finder.format_control
    args_str, options_str = break_args_options(line)
    if sys.version_info < (2, 7, 3):
        # Prior to 2.7.3, shlex cannot deal with unicode entries
        options_str = options_str.encode('utf8')
    opts, _ = parser.parse_args(shlex.split(options_str), defaults)

    # preserve for the nested code path
    line_comes_from = '%s %s (line %s)' % (
        '-c' if constraint else '-r', filename, line_number,
    )

    # yield a line requirement
    if args_str:
        isolated = options.isolated_mode if options else False
        if options:
            cmdoptions.check_install_build_global(options, opts)
        # get the options that apply to requirements
        req_options = {}
        for dest in SUPPORTED_OPTIONS_REQ_DEST:
            if dest in opts.__dict__ and opts.__dict__[dest]:
                req_options[dest] = opts.__dict__[dest]
        yield InstallRequirement.from_line(
            args_str, line_comes_from, constraint=constraint,
            isolated=isolated, options=req_options, wheel_cache=wheel_cache
        )

    # yield an editable requirement
    elif opts.editables:
        isolated = options.isolated_mode if options else False
        yield InstallRequirement.from_editable(
            opts.editables[0], comes_from=line_comes_from,
            constraint=constraint, isolated=isolated, wheel_cache=wheel_cache
        )

    # parse a nested requirements file (recursively delegates back to
    # parse_requirements and re-yields everything it produces)
    elif opts.requirements or opts.constraints:
        if opts.requirements:
            req_path = opts.requirements[0]
            nested_constraint = False
        else:
            req_path = opts.constraints[0]
            nested_constraint = True
        # original file is over http
        if SCHEME_RE.search(filename):
            # do a url join so relative paths work
            req_path = urllib_parse.urljoin(filename, req_path)
        # original file and nested file are paths
        elif not SCHEME_RE.search(req_path):
            # do a join so relative paths work
            req_path = os.path.join(os.path.dirname(filename), req_path)
        # TODO: Why not use `comes_from='-r {} (line {})'` here as well?
        parser = parse_requirements(
            req_path, finder, comes_from, options, session,
            constraint=nested_constraint, wheel_cache=wheel_cache
        )
        for req in parser:
            yield req

    # percolate hash-checking option upward
    elif opts.require_hashes:
        options.require_hashes = opts.require_hashes

    # set finder options
    elif finder:
        if opts.index_url:
            finder.index_urls = [opts.index_url]
        if opts.no_index is True:
            finder.index_urls = []
        if opts.extra_index_urls:
            finder.index_urls.extend(opts.extra_index_urls)
        if opts.find_links:
            # FIXME: it would be nice to keep track of the source
            # of the find_links: support a find-links local path
            # relative to a requirements file.
            value = opts.find_links[0]
            req_dir = os.path.dirname(os.path.abspath(filename))
            relative_to_reqs_file = os.path.join(req_dir, value)
            if os.path.exists(relative_to_reqs_file):
                value = relative_to_reqs_file
            finder.find_links.append(value)
        if opts.pre:
            finder.allow_all_prereleases = True
        if opts.process_dependency_links:
            finder.process_dependency_links = True
        if opts.trusted_hosts:
            finder.secure_origins.extend(
                ("*", host, "*") for host in opts.trusted_hosts)
def break_args_options(line):
    """Break up the line into an args and options string. We only want to shlex
    (and then optparse) the options, not the args. args can contain markers
    which are corrupted by shlex.

    :param line: one (logical) line of a requirements file.
    :return: ``(args, options)`` -- the leading non-option tokens joined by
        spaces, and everything from the first option-looking token
        (``-``/``--`` prefix) onward, also joined by spaces.
    """
    tokens = line.split(' ')
    args = []
    options = tokens[:]
    for token in tokens:
        # startswith('-') already covers '--long-options'; the previous
        # extra startswith('--') check was redundant.
        if token.startswith('-'):
            break
        else:
            args.append(token)
            options.pop(0)
    return ' '.join(args), ' '.join(options)
def build_parser(line):
    """
    Return a parser for parsing requirement lines
    """
    parser = optparse.OptionParser(add_help_option=False)

    # Register every file-level and per-requirement option.
    for make_option in SUPPORTED_OPTIONS + SUPPORTED_OPTIONS_REQ:
        parser.add_option(make_option())

    # By default optparse sys.exits on parsing errors. We want to wrap
    # that in our own exception.
    def parser_exit(self, msg):
        # add offending line
        msg = 'Invalid requirement: %s\n%s' % (line, msg)
        raise RequirementsFileParseError(msg)

    parser.exit = parser_exit

    return parser
def join_lines(lines_enum):
    """Joins a line ending in '\' with the previous line (except when following
    comments). The joined line takes on the index of the first line.
    """
    fragments = []           # pieces of an in-progress continuation
    first_line_number = None  # line number the joined line will report

    for line_number, line in lines_enum:
        is_comment = bool(COMMENT_RE.match(line))

        if line.endswith('\\') and not is_comment:
            # Continuation: stash the fragment (minus backslashes) and
            # keep reading.
            if not fragments:
                first_line_number = line_number
            fragments.append(line.strip('\\'))
            continue

        if is_comment:
            # Leading space ensures the comment regex still matches once
            # the fragments are concatenated together.
            line = ' ' + line
        if fragments:
            fragments.append(line)
            yield first_line_number, ''.join(fragments)
            fragments = []
        else:
            yield line_number, line

    # A trailing continuation with no terminating line still gets emitted.
    if fragments:
        yield first_line_number, ''.join(fragments)
# TODO: handle space after '\'. | |
def ignore_comments(lines_enum):
    """
    Strips comments and filter empty lines.
    """
    for line_number, line in lines_enum:
        stripped = COMMENT_RE.sub('', line).strip()
        # Lines that were blank (or pure comment) are dropped entirely.
        if stripped:
            yield line_number, stripped
def skip_regex(lines_enum, options):
    """
    Skip lines that match '--skip-requirements-regex' pattern

    Note: the regex pattern is only built once
    """
    skip_pattern = options.skip_requirements_regex if options else None
    if not skip_pattern:
        # Nothing to filter: hand the iterator back untouched.
        return lines_enum
    matcher = re.compile(skip_pattern)
    return filterfalse(lambda e: matcher.search(e[1]), lines_enum)
def expand_env_variables(lines_enum):
    """Replace all environment variables that can be retrieved via `os.getenv`.

    The only allowed format for environment variables defined in the
    requirement file is `${MY_VARIABLE_1}` to ensure two things:

    1. Strings that contain a `$` aren't accidentally (partially) expanded.
    2. Ensure consistency across platforms for requirement files.

    These points are the result of a discussion on the `github pull
    request #3514 <https://github.com/pypa/pip/pull/3514>`_.

    Valid characters in variable names follow the `POSIX standard
    <http://pubs.opengroup.org/onlinepubs/9699919799/>`_ and are limited
    to uppercase letter, digits and the `_` (underscore).
    """
    for line_number, line in lines_enum:
        expanded = line
        for env_var, var_name in ENV_VAR_RE.findall(line):
            value = os.getenv(var_name)
            # Unset (or empty) variables are left verbatim in the line.
            if value:
                expanded = expanded.replace(env_var, value)
        yield line_number, expanded
from __future__ import absolute_import | |
import logging | |
import os | |
import re | |
import shutil | |
import sys | |
import sysconfig | |
import traceback | |
import warnings | |
import zipfile | |
from distutils.util import change_root | |
from email.parser import FeedParser # type: ignore | |
from pip._vendor import pkg_resources, pytoml, six | |
from pip._vendor.packaging import specifiers | |
from pip._vendor.packaging.markers import Marker | |
from pip._vendor.packaging.requirements import InvalidRequirement, Requirement | |
from pip._vendor.packaging.utils import canonicalize_name | |
from pip._vendor.packaging.version import parse as parse_version | |
from pip._vendor.packaging.version import Version | |
from pip._vendor.pkg_resources import RequirementParseError, parse_requirements | |
from pip._internal import wheel | |
from pip._internal.build_env import BuildEnvironment | |
from pip._internal.compat import native_str | |
from pip._internal.download import ( | |
is_archive_file, is_url, path_to_url, url_to_path, | |
) | |
from pip._internal.exceptions import InstallationError, UninstallationError | |
from pip._internal.locations import ( | |
PIP_DELETE_MARKER_FILENAME, running_under_virtualenv, | |
) | |
from pip._internal.req.req_uninstall import UninstallPathSet | |
from pip._internal.utils.deprecation import RemovedInPip11Warning | |
from pip._internal.utils.hashes import Hashes | |
from pip._internal.utils.logging import indent_log | |
from pip._internal.utils.misc import ( | |
_make_build_dir, ask_path_exists, backup_dir, call_subprocess, | |
display_path, dist_in_site_packages, dist_in_usersite, ensure_dir, | |
get_installed_version, is_installable_dir, read_text_file, rmtree, | |
) | |
from pip._internal.utils.setuptools_build import SETUPTOOLS_SHIM | |
from pip._internal.utils.temp_dir import TempDirectory | |
from pip._internal.utils.ui import open_spinner | |
from pip._internal.vcs import vcs | |
from pip._internal.wheel import Wheel, move_wheel_files | |
# Module-level logger for this file.
logger = logging.getLogger(__name__)

# Comparison operators ('==', '>=', ...) recognised by the packaging
# specifier machinery; used below to improve the error message for
# inputs like 'pkg=1.0'.
operators = specifiers.Specifier._operators.keys()
def _strip_extras(path): | |
m = re.match(r'^(.+)(\[[^\]]+\])$', path) | |
extras = None | |
if m: | |
path_no_extras = m.group(1) | |
extras = m.group(2) | |
else: | |
path_no_extras = path | |
return path_no_extras, extras | |
class InstallRequirement(object): | |
""" | |
Represents something that may be installed later on, may have information | |
    about where to fetch the relevant requirement and also contains logic for
installing the said requirement. | |
""" | |
    def __init__(self, req, comes_from, source_dir=None, editable=False,
                 link=None, update=True, markers=None,
                 isolated=False, options=None, wheel_cache=None,
                 constraint=False, extras=()):
        """
        :param req: parsed packaging ``Requirement``, or None for unnamed
            requirements (e.g. a bare URL with no egg fragment).
        :param comes_from: origin of this requirement (a string or another
            InstallRequirement), used for error/log messages.
        :param source_dir: unpacked source location, if already known.
        :param editable: True for '-e' requirements.
        :param link: Link to fetch from; derived from ``req.url`` if absent.
        :param update: whether an editable checkout should be refreshed.
        :param markers: environment markers; taken from ``req`` if None.
        :param isolated: --isolated mode (ignore per-user config).
        :param options: per-requirement options (install/global/hash).
        :param wheel_cache: cache consulted later by populate_link().
        :param constraint: True when this came from a constraints file.
        :param extras: explicit extras override; otherwise from ``req``.
        """
        assert req is None or isinstance(req, Requirement), req
        self.req = req
        self.comes_from = comes_from
        self.constraint = constraint
        if source_dir is not None:
            self.source_dir = os.path.normpath(os.path.abspath(source_dir))
        else:
            self.source_dir = None
        self.editable = editable

        self._wheel_cache = wheel_cache
        if link is not None:
            self.link = self.original_link = link
        else:
            # Imported here to avoid a circular import at module load time.
            from pip._internal.index import Link
            self.link = self.original_link = req and req.url and Link(req.url)

        if extras:
            self.extras = extras
        elif req:
            self.extras = {
                pkg_resources.safe_extra(extra) for extra in req.extras
            }
        else:
            self.extras = set()
        if markers is not None:
            self.markers = markers
        else:
            self.markers = req and req.marker
        self._egg_info_path = None
        # This holds the pkg_resources.Distribution object if this requirement
        # is already available:
        self.satisfied_by = None
        # This holds the pkg_resources.Distribution object if this requirement
        # conflicts with another installed distribution:
        self.conflicts_with = None
        # Temporary build location
        self._temp_build_dir = TempDirectory(kind="req-build")
        # Used to store the global directory where the _temp_build_dir should
        # have been created. Cf _correct_build_location method.
        self._ideal_build_dir = None
        # True if the editable should be updated:
        self.update = update
        # Set to True after successful installation
        self.install_succeeded = None
        # UninstallPathSet of uninstalled distribution (for possible rollback)
        self.uninstalled_pathset = None
        self.options = options if options else {}
        # Set to True after successful preparation of this requirement
        self.prepared = False
        # True when the requirement was requested directly (command line /
        # requirements file) rather than pulled in as a dependency.
        self.is_direct = False

        self.isolated = isolated
        self.build_env = BuildEnvironment(no_clean=True)
@classmethod | |
def from_editable(cls, editable_req, comes_from=None, isolated=False, | |
options=None, wheel_cache=None, constraint=False): | |
from pip._internal.index import Link | |
name, url, extras_override = parse_editable(editable_req) | |
if url.startswith('file:'): | |
source_dir = url_to_path(url) | |
else: | |
source_dir = None | |
if name is not None: | |
try: | |
req = Requirement(name) | |
except InvalidRequirement: | |
raise InstallationError("Invalid requirement: '%s'" % name) | |
else: | |
req = None | |
return cls( | |
req, comes_from, source_dir=source_dir, | |
editable=True, | |
link=Link(url), | |
constraint=constraint, | |
isolated=isolated, | |
options=options if options else {}, | |
wheel_cache=wheel_cache, | |
extras=extras_override or (), | |
) | |
@classmethod | |
def from_req(cls, req, comes_from=None, isolated=False, wheel_cache=None): | |
try: | |
req = Requirement(req) | |
except InvalidRequirement: | |
raise InstallationError("Invalid requirement: '%s'" % req) | |
if req.url: | |
raise InstallationError( | |
"Direct url requirement (like %s) are not allowed for " | |
"dependencies" % req | |
) | |
return cls(req, comes_from, isolated=isolated, wheel_cache=wheel_cache) | |
    @classmethod
    def from_line(
            cls, name, comes_from=None, isolated=False, options=None,
            wheel_cache=None, constraint=False):
        """Creates an InstallRequirement from a name, which might be a
        requirement, directory containing 'setup.py', filename, or URL.
        """
        from pip._internal.index import Link

        # Split off an environment marker ('; python_version < "3"').
        # For URLs the separator must be '; ' (with a space) because a
        # bare ';' can legitimately appear inside a URL.
        if is_url(name):
            marker_sep = '; '
        else:
            marker_sep = ';'
        if marker_sep in name:
            name, markers = name.split(marker_sep, 1)
            markers = markers.strip()
            if not markers:
                markers = None
            else:
                markers = Marker(markers)
        else:
            markers = None
        name = name.strip()
        req = None
        path = os.path.normpath(os.path.abspath(name))
        link = None
        extras = None

        # Classify the name: URL, local directory, local archive, or a
        # plain requirement specifier.
        if is_url(name):
            link = Link(name)
        else:
            p, extras = _strip_extras(path)
            looks_like_dir = os.path.isdir(p) and (
                os.path.sep in name or
                (os.path.altsep is not None and os.path.altsep in name) or
                name.startswith('.')
            )
            if looks_like_dir:
                if not is_installable_dir(p):
                    raise InstallationError(
                        "Directory %r is not installable. File 'setup.py' "
                        "not found." % name
                    )
                link = Link(path_to_url(p))
            elif is_archive_file(p):
                if not os.path.isfile(p):
                    logger.warning(
                        'Requirement %r looks like a filename, but the '
                        'file does not exist',
                        name
                    )
                link = Link(path_to_url(p))

        # it's a local file, dir, or url
        if link:
            # Handle relative file URLs
            if link.scheme == 'file' and re.search(r'\.\./', link.url):
                link = Link(
                    path_to_url(os.path.normpath(os.path.abspath(link.path))))
            # wheel file
            if link.is_wheel:
                wheel = Wheel(link.filename)  # can raise InvalidWheelFilename
                req = "%s==%s" % (wheel.name, wheel.version)
            else:
                # set the req to the egg fragment. when it's not there, this
                # will become an 'unnamed' requirement
                req = link.egg_fragment

        # a requirement specifier
        else:
            req = name

        if extras:
            # Parse the bracketed extras via a throwaway requirement.
            extras = Requirement("placeholder" + extras.lower()).extras
        else:
            extras = ()
        if req is not None:
            try:
                req = Requirement(req)
            except InvalidRequirement:
                # Build the most helpful error message we can for the
                # common mistakes (path-like input, '=' instead of '==').
                if os.path.sep in req:
                    add_msg = "It looks like a path."
                    add_msg += deduce_helpful_msg(req)
                elif '=' in req and not any(op in req for op in operators):
                    add_msg = "= is not a valid operator. Did you mean == ?"
                else:
                    add_msg = traceback.format_exc()
                raise InstallationError(
                    "Invalid requirement: '%s'\n%s" % (req, add_msg))

        return cls(
            req, comes_from, link=link, markers=markers,
            isolated=isolated,
            options=options if options else {},
            wheel_cache=wheel_cache,
            constraint=constraint,
            extras=extras,
        )
def __str__(self): | |
if self.req: | |
s = str(self.req) | |
if self.link: | |
s += ' from %s' % self.link.url | |
else: | |
s = self.link.url if self.link else None | |
if self.satisfied_by is not None: | |
s += ' in %s' % display_path(self.satisfied_by.location) | |
if self.comes_from: | |
if isinstance(self.comes_from, six.string_types): | |
comes_from = self.comes_from | |
else: | |
comes_from = self.comes_from.from_path() | |
if comes_from: | |
s += ' (from %s)' % comes_from | |
return s | |
    def __repr__(self):
        # Include the editable flag so logs can distinguish '-e' reqs.
        return '<%s object: %s editable=%r>' % (
            self.__class__.__name__, str(self), self.editable)
def populate_link(self, finder, upgrade, require_hashes): | |
"""Ensure that if a link can be found for this, that it is found. | |
Note that self.link may still be None - if Upgrade is False and the | |
requirement is already installed. | |
If require_hashes is True, don't use the wheel cache, because cached | |
wheels, always built locally, have different hashes than the files | |
downloaded from the index server and thus throw false hash mismatches. | |
Furthermore, cached wheels at present have undeterministic contents due | |
to file modification times. | |
""" | |
if self.link is None: | |
self.link = finder.find_requirement(self, upgrade) | |
if self._wheel_cache is not None and not require_hashes: | |
old_link = self.link | |
self.link = self._wheel_cache.get(self.link, self.name) | |
if old_link != self.link: | |
logger.debug('Using cached wheel link: %s', self.link) | |
    @property
    def specifier(self):
        # Version specifier set of the underlying packaging Requirement
        # (e.g. '>=1.0,<2.0'); assumes self.req is not None when accessed.
        return self.req.specifier
@property | |
def is_pinned(self): | |
"""Return whether I am pinned to an exact version. | |
For example, some-package==1.2 is pinned; some-package>1.2 is not. | |
""" | |
specifiers = self.specifier | |
return (len(specifiers) == 1 and | |
next(iter(specifiers)).operator in {'==', '==='}) | |
def from_path(self): | |
if self.req is None: | |
return None | |
s = str(self.req) | |
if self.comes_from: | |
if isinstance(self.comes_from, six.string_types): | |
comes_from = self.comes_from | |
else: | |
comes_from = self.comes_from.from_path() | |
if comes_from: | |
s += '->' + comes_from | |
return s | |
    def build_location(self, build_dir):
        """Return the directory this requirement should be built in,
        creating it if necessary.

        For requirements whose name is not known yet, a fresh temporary
        directory is used and the intended ``build_dir`` is remembered in
        ``_ideal_build_dir`` so ``_correct_build_location`` can move it
        later, once egg_info has revealed the name.
        """
        assert build_dir is not None
        if self._temp_build_dir.path is not None:
            return self._temp_build_dir.path
        if self.req is None:
            # for requirement via a path to a directory: the name of the
            # package is not available yet so we create a temp directory
            # Once run_egg_info will have run, we'll be able
            # to fix it via _correct_build_location
            # Some systems have /tmp as a symlink which confuses custom
            # builds (such as numpy). Thus, we ensure that the real path
            # is returned.
            self._temp_build_dir.create()
            self._ideal_build_dir = build_dir

            return self._temp_build_dir.path
        if self.editable:
            name = self.name.lower()
        else:
            name = self.name
        # FIXME: Is there a better place to create the build_dir? (hg and bzr
        # need this)
        if not os.path.exists(build_dir):
            logger.debug('Creating directory %s', build_dir)
            _make_build_dir(build_dir)
        return os.path.join(build_dir, name)
    def _correct_build_location(self):
        """Move self._temp_build_dir to self._ideal_build_dir/self.req.name

        For some requirements (e.g. a path to a directory), the name of the
        package is not available until we run egg_info, so the build_location
        will return a temporary directory and store the _ideal_build_dir.

        This is only called by self.egg_info_path to fix the temporary build
        directory.
        """
        if self.source_dir is not None:
            return
        assert self.req is not None
        assert self._temp_build_dir.path
        assert self._ideal_build_dir.path
        old_location = self._temp_build_dir.path
        # Clear the temp path first so build_location() below computes the
        # proper named location instead of returning the old temporary one.
        self._temp_build_dir.path = None

        new_location = self.build_location(self._ideal_build_dir)
        if os.path.exists(new_location):
            raise InstallationError(
                'A package already exists in %s; please remove it to continue'
                % display_path(new_location))
        logger.debug(
            'Moving package %s from %s to new location %s',
            self, display_path(old_location), display_path(new_location),
        )
        shutil.move(old_location, new_location)
        self._temp_build_dir.path = new_location

        # Everything derived from the old location is now stale.
        self._ideal_build_dir = None
        self.source_dir = os.path.normpath(os.path.abspath(new_location))
        self._egg_info_path = None
    @property
    def name(self):
        # Project name normalised by pkg_resources; None for unnamed
        # requirements (e.g. a bare URL without an egg fragment).
        if self.req is None:
            return None
        return native_str(pkg_resources.safe_name(self.req.name))
@property | |
def setup_py_dir(self): | |
return os.path.join( | |
self.source_dir, | |
self.link and self.link.subdirectory_fragment or '') | |
@property | |
def setup_py(self): | |
assert self.source_dir, "No source dir for %s" % self | |
setup_py = os.path.join(self.setup_py_dir, 'setup.py') | |
# Python2 __file__ should not be unicode | |
if six.PY2 and isinstance(setup_py, six.text_type): | |
setup_py = setup_py.encode(sys.getfilesystemencoding()) | |
return setup_py | |
@property | |
def pyproject_toml(self): | |
assert self.source_dir, "No source dir for %s" % self | |
pp_toml = os.path.join(self.setup_py_dir, 'pyproject.toml') | |
# Python2 __file__ should not be unicode | |
if six.PY2 and isinstance(pp_toml, six.text_type): | |
pp_toml = pp_toml.encode(sys.getfilesystemencoding()) | |
return pp_toml | |
def get_pep_518_info(self): | |
"""Get a list of the packages required to build the project, if any, | |
and a flag indicating whether pyproject.toml is present, indicating | |
that the build should be isolated. | |
Build requirements can be specified in a pyproject.toml, as described | |
in PEP 518. If this file exists but doesn't specify build | |
requirements, pip will default to installing setuptools and wheel. | |
""" | |
if os.path.isfile(self.pyproject_toml): | |
with open(self.pyproject_toml) as f: | |
pp_toml = pytoml.load(f) | |
build_sys = pp_toml.get('build-system', {}) | |
return (build_sys.get('requires', ['setuptools', 'wheel']), True) | |
return (['setuptools', 'wheel'], False) | |
    def run_egg_info(self):
        """Generate metadata by running ``setup.py egg_info`` for this
        requirement, then fill in or sanity-check ``self.req`` against the
        produced PKG-INFO.
        """
        assert self.source_dir
        if self.name:
            logger.debug(
                'Running setup.py (path:%s) egg_info for package %s',
                self.setup_py, self.name,
            )
        else:
            logger.debug(
                'Running setup.py (path:%s) egg_info for package from %s',
                self.setup_py, self.link,
            )
        with indent_log():
            # Run setup.py through the SETUPTOOLS_SHIM wrapper.
            script = SETUPTOOLS_SHIM % self.setup_py
            base_cmd = [sys.executable, '-c', script]
            if self.isolated:
                base_cmd += ["--no-user-cfg"]
            egg_info_cmd = base_cmd + ['egg_info']
            # We can't put the .egg-info files at the root, because then the
            # source code will be mistaken for an installed egg, causing
            # problems
            if self.editable:
                egg_base_option = []
            else:
                egg_info_dir = os.path.join(self.setup_py_dir, 'pip-egg-info')
                ensure_dir(egg_info_dir)
                egg_base_option = ['--egg-base', 'pip-egg-info']
            with self.build_env:
                call_subprocess(
                    egg_info_cmd + egg_base_option,
                    cwd=self.setup_py_dir,
                    show_stdout=False,
                    command_desc='python setup.py egg_info')
        if not self.req:
            # Unnamed requirement: synthesize one from the metadata. Use
            # '==' when the version parses as a Version, otherwise the
            # arbitrary-equality operator '==='.
            if isinstance(parse_version(self.pkg_info()["Version"]), Version):
                op = "=="
            else:
                op = "==="
            self.req = Requirement(
                "".join([
                    self.pkg_info()["Name"],
                    op,
                    self.pkg_info()["Version"],
                ])
            )
            self._correct_build_location()
        else:
            metadata_name = canonicalize_name(self.pkg_info()["Name"])
            if canonicalize_name(self.req.name) != metadata_name:
                # The #egg= fragment disagrees with the generated metadata;
                # warn and trust the metadata name.
                logger.warning(
                    'Running setup.py (path:%s) egg_info for package %s '
                    'produced metadata for project name %s. Fix your '
                    '#egg=%s fragments.',
                    self.setup_py, self.name, metadata_name, self.name
                )
                self.req = Requirement(metadata_name)
def egg_info_data(self, filename): | |
if self.satisfied_by is not None: | |
if not self.satisfied_by.has_metadata(filename): | |
return None | |
return self.satisfied_by.get_metadata(filename) | |
assert self.source_dir | |
filename = self.egg_info_path(filename) | |
if not os.path.exists(filename): | |
return None | |
data = read_text_file(filename) | |
return data | |
def egg_info_path(self, filename): | |
if self._egg_info_path is None: | |
if self.editable: | |
base = self.source_dir | |
else: | |
base = os.path.join(self.setup_py_dir, 'pip-egg-info') | |
filenames = os.listdir(base) | |
if self.editable: | |
filenames = [] | |
for root, dirs, files in os.walk(base): | |
for dir in vcs.dirnames: | |
if dir in dirs: | |
dirs.remove(dir) | |
# Iterate over a copy of ``dirs``, since mutating | |
# a list while iterating over it can cause trouble. | |
# (See https://github.com/pypa/pip/pull/462.) | |
for dir in list(dirs): | |
# Don't search in anything that looks like a virtualenv | |
# environment | |
if ( | |
os.path.lexists( | |
os.path.join(root, dir, 'bin', 'python') | |
) or | |
os.path.exists( | |
os.path.join( | |
root, dir, 'Scripts', 'Python.exe' | |
) | |
)): | |
dirs.remove(dir) | |
# Also don't search through tests | |
elif dir == 'test' or dir == 'tests': | |
dirs.remove(dir) | |
filenames.extend([os.path.join(root, dir) | |
for dir in dirs]) | |
filenames = [f for f in filenames if f.endswith('.egg-info')] | |
if not filenames: | |
raise InstallationError( | |
'No files/directories in %s (from %s)' % (base, filename) | |
) | |
assert filenames, \ | |
"No files/directories in %s (from %s)" % (base, filename) | |
# if we have more than one match, we pick the toplevel one. This | |
# can easily be the case if there is a dist folder which contains | |
# an extracted tarball for testing purposes. | |
if len(filenames) > 1: | |
filenames.sort( | |
key=lambda x: x.count(os.path.sep) + | |
(os.path.altsep and x.count(os.path.altsep) or 0) | |
) | |
self._egg_info_path = os.path.join(base, filenames[0]) | |
return os.path.join(self._egg_info_path, filename) | |
def pkg_info(self): | |
p = FeedParser() | |
data = self.egg_info_data('PKG-INFO') | |
if not data: | |
logger.warning( | |
'No PKG-INFO file found in %s', | |
display_path(self.egg_info_path('PKG-INFO')), | |
) | |
p.feed(data or '') | |
return p.close() | |
    # Matches a bracketed section header like "[extra]"; the non-greedy
    # group captures the section name.
    _requirements_section_re = re.compile(r'\[(.*?)\]')
@property | |
def installed_version(self): | |
return get_installed_version(self.name) | |
def assert_source_matches_version(self): | |
assert self.source_dir | |
version = self.pkg_info()['version'] | |
if self.req.specifier and version not in self.req.specifier: | |
logger.warning( | |
'Requested %s, but installing version %s', | |
self, | |
version, | |
) | |
else: | |
logger.debug( | |
'Source in %s has version %s, which satisfies requirement %s', | |
display_path(self.source_dir), | |
version, | |
self, | |
) | |
    def update_editable(self, obtain=True):
        """Refresh an editable VCS checkout located at ``self.source_dir``.

        :param obtain: when True, obtain the repository via the VCS
            backend; when False, export it instead.
        """
        if not self.link:
            logger.debug(
                "Cannot update repository at %s; repository location is "
                "unknown",
                self.source_dir,
            )
            return
        assert self.editable
        assert self.source_dir
        if self.link.scheme == 'file':
            # Static paths don't get updated
            return
        # Editable VCS URLs look like "<vcs>+<real url>".
        assert '+' in self.link.url, "bad url: %r" % self.link.url
        if not self.update:
            return
        vc_type, url = self.link.url.split('+', 1)
        backend = vcs.get_backend(vc_type)
        if backend:
            vcs_backend = backend(self.link.url)
            if obtain:
                vcs_backend.obtain(self.source_dir)
            else:
                vcs_backend.export(self.source_dir)
        else:
            assert 0, (
                'Unexpected version control type (in %s): %s'
                % (self.link, vc_type))
def uninstall(self, auto_confirm=False, verbose=False, | |
use_user_site=False): | |
""" | |
Uninstall the distribution currently satisfying this requirement. | |
Prompts before removing or modifying files unless | |
``auto_confirm`` is True. | |
Refuses to delete or modify files outside of ``sys.prefix`` - | |
thus uninstallation within a virtual environment can only | |
modify that virtual environment, even if the virtualenv is | |
linked to global site-packages. | |
""" | |
if not self.check_if_exists(use_user_site): | |
logger.warning("Skipping %s as it is not installed.", self.name) | |
return | |
dist = self.satisfied_by or self.conflicts_with | |
uninstalled_pathset = UninstallPathSet.from_dist(dist) | |
uninstalled_pathset.remove(auto_confirm, verbose) | |
return uninstalled_pathset | |
def archive(self, build_dir): | |
assert self.source_dir | |
create_archive = True | |
archive_name = '%s-%s.zip' % (self.name, self.pkg_info()["version"]) | |
archive_path = os.path.join(build_dir, archive_name) | |
if os.path.exists(archive_path): | |
response = ask_path_exists( | |
'The file %s exists. (i)gnore, (w)ipe, (b)ackup, (a)bort ' % | |
display_path(archive_path), ('i', 'w', 'b', 'a')) | |
if response == 'i': | |
create_archive = False | |
elif response == 'w': | |
logger.warning('Deleting %s', display_path(archive_path)) | |
os.remove(archive_path) | |
elif response == 'b': | |
dest_file = backup_dir(archive_path) | |
logger.warning( | |
'Backing up %s to %s', | |
display_path(archive_path), | |
display_path(dest_file), | |
) | |
shutil.move(archive_path, dest_file) | |
elif response == 'a': | |
sys.exit(-1) | |
if create_archive: | |
zip = zipfile.ZipFile( | |
archive_path, 'w', zipfile.ZIP_DEFLATED, | |
allowZip64=True | |
) | |
dir = os.path.normcase(os.path.abspath(self.setup_py_dir)) | |
for dirpath, dirnames, filenames in os.walk(dir): | |
if 'pip-egg-info' in dirnames: | |
dirnames.remove('pip-egg-info') | |
for dirname in dirnames: | |
dirname = os.path.join(dirpath, dirname) | |
name = self._clean_zip_name(dirname, dir) | |
zipdir = zipfile.ZipInfo(self.name + '/' + name + '/') | |
zipdir.external_attr = 0x1ED << 16 # 0o755 | |
zip.writestr(zipdir, '') | |
for filename in filenames: | |
if filename == PIP_DELETE_MARKER_FILENAME: | |
continue | |
filename = os.path.join(dirpath, filename) | |
name = self._clean_zip_name(filename, dir) | |
zip.write(filename, self.name + '/' + name) | |
zip.close() | |
logger.info('Saved %s', display_path(archive_path)) | |
def _clean_zip_name(self, name, prefix): | |
assert name.startswith(prefix + os.path.sep), ( | |
"name %r doesn't start with prefix %r" % (name, prefix) | |
) | |
name = name[len(prefix) + 1:] | |
name = name.replace(os.path.sep, '/') | |
return name | |
def match_markers(self, extras_requested=None): | |
if not extras_requested: | |
# Provide an extra to safely evaluate the markers | |
# without matching any extra | |
extras_requested = ('',) | |
if self.markers is not None: | |
return any( | |
self.markers.evaluate({'extra': extra}) | |
for extra in extras_requested) | |
else: | |
return True | |
    def install(self, install_options, global_options=None, root=None,
                home=None, prefix=None, warn_script_location=True,
                use_user_site=False, pycompile=True):
        """Install this requirement.

        Editable requirements are delegated to install_editable(); wheels
        are moved into place; everything else runs ``setup.py install``
        with an install record, which is then rewritten relative to the
        egg-info directory as 'installed-files.txt'.
        """
        global_options = global_options if global_options is not None else []
        if self.editable:
            self.install_editable(
                install_options, global_options, prefix=prefix,
            )
            return
        if self.is_wheel:
            version = wheel.wheel_version(self.source_dir)
            wheel.check_compatibility(version, self.name)
            self.move_wheel_files(
                self.source_dir, root=root, prefix=prefix, home=home,
                warn_script_location=warn_script_location,
                use_user_site=use_user_site, pycompile=pycompile,
            )
            self.install_succeeded = True
            return
        # Extend the list of global and install options passed on to
        # the setup.py call with the ones from the requirements file.
        # Options specified in requirements file override those
        # specified on the command line, since the last option given
        # to setup.py is the one that is used.
        global_options = list(global_options) + \
            self.options.get('global_options', [])
        install_options = list(install_options) + \
            self.options.get('install_options', [])
        if self.isolated:
            global_options = global_options + ["--no-user-cfg"]
        with TempDirectory(kind="record") as temp_dir:
            record_filename = os.path.join(temp_dir.path, 'install-record.txt')
            install_args = self.get_install_args(
                global_options, record_filename, root, prefix, pycompile,
            )
            msg = 'Running setup.py install for %s' % (self.name,)
            with open_spinner(msg) as spinner:
                with indent_log():
                    with self.build_env:
                        call_subprocess(
                            install_args + install_options,
                            cwd=self.setup_py_dir,
                            show_stdout=False,
                            spinner=spinner,
                        )
            # A missing record means setup.py did not actually install.
            if not os.path.exists(record_filename):
                logger.debug('Record file %s not found', record_filename)
                return
            self.install_succeeded = True

            def prepend_root(path):
                # Re-root absolute paths when installing under --root.
                if root is None or not os.path.isabs(path):
                    return path
                else:
                    return change_root(root, path)
            # Locate the installed .egg-info directory from the record.
            with open(record_filename) as f:
                for line in f:
                    directory = os.path.dirname(line)
                    if directory.endswith('.egg-info'):
                        egg_info_dir = prepend_root(directory)
                        break
                else:
                    logger.warning(
                        'Could not find .egg-info directory in install record'
                        ' for %s',
                        self,
                    )
                    # FIXME: put the record somewhere
                    # FIXME: should this be an error?
                    return
            # Rewrite the record as paths relative to the egg-info dir.
            new_lines = []
            with open(record_filename) as f:
                for line in f:
                    filename = line.strip()
                    if os.path.isdir(filename):
                        filename += os.path.sep
                    new_lines.append(
                        os.path.relpath(prepend_root(filename), egg_info_dir)
                    )
            new_lines.sort()
            ensure_dir(egg_info_dir)
            inst_files_path = os.path.join(egg_info_dir, 'installed-files.txt')
            with open(inst_files_path, 'w') as f:
                f.write('\n'.join(new_lines) + '\n')
def ensure_has_source_dir(self, parent_dir): | |
"""Ensure that a source_dir is set. | |
This will create a temporary build dir if the name of the requirement | |
isn't known yet. | |
:param parent_dir: The ideal pip parent_dir for the source_dir. | |
Generally src_dir for editables and build_dir for sdists. | |
:return: self.source_dir | |
""" | |
if self.source_dir is None: | |
self.source_dir = self.build_location(parent_dir) | |
return self.source_dir | |
def get_install_args(self, global_options, record_filename, root, prefix, | |
pycompile): | |
install_args = [sys.executable, "-u"] | |
install_args.append('-c') | |
install_args.append(SETUPTOOLS_SHIM % self.setup_py) | |
install_args += list(global_options) + \ | |
['install', '--record', record_filename] | |
install_args += ['--single-version-externally-managed'] | |
if root is not None: | |
install_args += ['--root', root] | |
if prefix is not None: | |
install_args += ['--prefix', prefix] | |
if pycompile: | |
install_args += ["--compile"] | |
else: | |
install_args += ["--no-compile"] | |
if running_under_virtualenv(): | |
py_ver_str = 'python' + sysconfig.get_python_version() | |
install_args += ['--install-headers', | |
os.path.join(sys.prefix, 'include', 'site', | |
py_ver_str, self.name)] | |
return install_args | |
def remove_temporary_source(self): | |
"""Remove the source files from this requirement, if they are marked | |
for deletion""" | |
if self.source_dir and os.path.exists( | |
os.path.join(self.source_dir, PIP_DELETE_MARKER_FILENAME)): | |
logger.debug('Removing source in %s', self.source_dir) | |
rmtree(self.source_dir) | |
self.source_dir = None | |
self._temp_build_dir.cleanup() | |
self.build_env.cleanup() | |
    def install_editable(self, install_options,
                         global_options=(), prefix=None):
        """Install this requirement in editable mode by running
        ``setup.py develop --no-deps``.
        """
        logger.info('Running setup.py develop for %s', self.name)
        if self.isolated:
            global_options = list(global_options) + ["--no-user-cfg"]
        if prefix:
            prefix_param = ['--prefix={}'.format(prefix)]
            install_options = list(install_options) + prefix_param
        with indent_log():
            # FIXME: should we do --install-headers here too?
            with self.build_env:
                call_subprocess(
                    [
                        sys.executable,
                        '-c',
                        SETUPTOOLS_SHIM % self.setup_py
                    ] +
                    list(global_options) +
                    ['develop', '--no-deps'] +
                    list(install_options),
                    cwd=self.setup_py_dir,
                    show_stdout=False,
                )
        self.install_succeeded = True
    def check_if_exists(self, use_user_site):
        """Find an installed distribution that satisfies or conflicts
        with this requirement, and set self.satisfied_by or
        self.conflicts_with appropriately.

        :return: True when an installed distribution was found (whether it
            satisfies or conflicts), False otherwise.
        """
        if self.req is None:
            return False
        try:
            # get_distribution() will resolve the entire list of requirements
            # anyway, and we've already determined that we need the requirement
            # in question, so strip the marker so that we don't try to
            # evaluate it.
            no_marker = Requirement(str(self.req))
            no_marker.marker = None
            self.satisfied_by = pkg_resources.get_distribution(str(no_marker))
            if self.editable and self.satisfied_by:
                self.conflicts_with = self.satisfied_by
                # when installing editables, nothing pre-existing should ever
                # satisfy
                self.satisfied_by = None
            return True
        except pkg_resources.DistributionNotFound:
            return False
        except pkg_resources.VersionConflict:
            # Something with this name is installed but its version
            # conflicts; record it so it can be replaced.
            existing_dist = pkg_resources.get_distribution(
                self.req.name
            )
            if use_user_site:
                if dist_in_usersite(existing_dist):
                    self.conflicts_with = existing_dist
                elif (running_under_virtualenv() and
                        dist_in_site_packages(existing_dist)):
                    raise InstallationError(
                        "Will not install to the user site because it will "
                        "lack sys.path precedence to %s in %s" %
                        (existing_dist.project_name, existing_dist.location)
                    )
            else:
                self.conflicts_with = existing_dist
        return True
@property | |
def is_wheel(self): | |
return self.link and self.link.is_wheel | |
def move_wheel_files(self, wheeldir, root=None, home=None, prefix=None, | |
warn_script_location=True, use_user_site=False, | |
pycompile=True): | |
move_wheel_files( | |
self.name, self.req, wheeldir, | |
user=use_user_site, | |
home=home, | |
root=root, | |
prefix=prefix, | |
pycompile=pycompile, | |
isolated=self.isolated, | |
warn_script_location=warn_script_location, | |
) | |
def get_dist(self): | |
"""Return a pkg_resources.Distribution built from self.egg_info_path""" | |
egg_info = self.egg_info_path('').rstrip(os.path.sep) | |
base_dir = os.path.dirname(egg_info) | |
metadata = pkg_resources.PathMetadata(base_dir, egg_info) | |
dist_name = os.path.splitext(os.path.basename(egg_info))[0] | |
return pkg_resources.Distribution( | |
os.path.dirname(egg_info), | |
project_name=dist_name, | |
metadata=metadata, | |
) | |
@property | |
def has_hash_options(self): | |
"""Return whether any known-good hashes are specified as options. | |
These activate --require-hashes mode; hashes specified as part of a | |
URL do not. | |
""" | |
return bool(self.options.get('hashes', {})) | |
def hashes(self, trust_internet=True): | |
"""Return a hash-comparer that considers my option- and URL-based | |
hashes to be known-good. | |
Hashes in URLs--ones embedded in the requirements file, not ones | |
downloaded from an index server--are almost peers with ones from | |
flags. They satisfy --require-hashes (whether it was implicitly or | |
explicitly activated) but do not activate it. md5 and sha224 are not | |
allowed in flags, which should nudge people toward good algos. We | |
always OR all hashes together, even ones from URLs. | |
:param trust_internet: Whether to trust URL-based (#md5=...) hashes | |
downloaded from the internet, as by populate_link() | |
""" | |
good_hashes = self.options.get('hashes', {}).copy() | |
link = self.link if trust_internet else self.original_link | |
if link and link.hash: | |
good_hashes.setdefault(link.hash_name, []).append(link.hash) | |
return Hashes(good_hashes) | |
def _strip_postfix(req): | |
""" | |
Strip req postfix ( -dev, 0.2, etc ) | |
""" | |
# FIXME: use package_to_requirement? | |
match = re.search(r'^(.*?)(?:-dev|-\d.*)$', req) | |
if match: | |
# Strip off -dev, -0.2, etc. | |
warnings.warn( | |
"#egg cleanup for editable urls will be dropped in the future", | |
RemovedInPip11Warning, | |
) | |
req = match.group(1) | |
return req | |
def parse_editable(editable_req):
    """Parses an editable requirement into:
        - a requirement name
        - an URL
        - extras
        - editable options
    Accepted requirements:
        svn+http://blahblah@rev#egg=Foobar[baz]&subdirectory=version_subdir
        .[some_extra]
    """
    from pip._internal.index import Link
    url = editable_req
    # If a file path is specified with extras, strip off the extras.
    url_no_extras, extras = _strip_extras(url)
    if os.path.isdir(url_no_extras):
        if not os.path.exists(os.path.join(url_no_extras, 'setup.py')):
            raise InstallationError(
                "Directory %r is not installable. File 'setup.py' not found." %
                url_no_extras
            )
        # Treating it as code that has already been checked out
        url_no_extras = path_to_url(url_no_extras)
    if url_no_extras.lower().startswith('file:'):
        package_name = Link(url_no_extras).egg_fragment
        if extras:
            return (
                package_name,
                url_no_extras,
                Requirement("placeholder" + extras.lower()).extras,
            )
        else:
            return package_name, url_no_extras, None
    # Prefix bare VCS URLs (e.g. "git:...") with their VCS type
    # ("git+git:...") so the '+' split below works.
    for version_control in vcs:
        if url.lower().startswith('%s:' % version_control):
            url = '%s+%s' % (version_control, url)
            break
    if '+' not in url:
        raise InstallationError(
            '%s should either be a path to a local project or a VCS url '
            'beginning with svn+, git+, hg+, or bzr+' %
            editable_req
        )
    vc_type = url.split('+', 1)[0].lower()
    if not vcs.get_backend(vc_type):
        error_message = 'For --editable=%s only ' % editable_req + \
            ', '.join([backend.name + '+URL' for backend in vcs.backends]) + \
            ' is currently supported'
        raise InstallationError(error_message)
    # The requirement name must come from the #egg= fragment.
    package_name = Link(url).egg_fragment
    if not package_name:
        raise InstallationError(
            "Could not detect requirement name for '%s', please specify one "
            "with #egg=your_package_name" % editable_req
        )
    return _strip_postfix(package_name), url, None
def deduce_helpful_msg(req):
    """Returns helpful msg in case requirements file does not exist,
    or cannot be parsed.

    :params req: Requirements file path
    """
    if not os.path.exists(req):
        return " File '%s' does not exist." % (req)
    msg = " It does exist."
    # Probe the first line to see whether this is actually a requirements
    # file the user meant to pass with '-r'.
    try:
        with open(req, 'r') as fp:
            next(parse_requirements(fp.read()))
    except RequirementParseError:
        logger.debug("Cannot parse '%s' as requirements \
            file" % (req), exc_info=1)
    else:
        msg += (
            " The argument you provided (%s) appears to be a"
            " requirements file. If that is the case, use the"
            " '-r' flag to install the packages specified within it."
            % (req)
        )
    return msg
from __future__ import absolute_import | |
import logging | |
from collections import OrderedDict | |
from pip._internal.exceptions import InstallationError | |
from pip._internal.utils.logging import indent_log | |
from pip._internal.wheel import Wheel | |
logger = logging.getLogger(__name__) | |
class RequirementSet(object):
    """An ordered collection of InstallRequirements, keyed by project name,
    with bookkeeping for lowercase aliases, unnamed requirements, downloads
    and build-directory cleanup.
    """

    def __init__(self, require_hashes=False):
        """Create a RequirementSet.

        :param require_hashes: Whether hash-checking mode is enabled for
            the requirements in this set.
        """
        # Ordered so requirements are processed in insertion order.
        self.requirements = OrderedDict()
        self.require_hashes = require_hashes
        # Mapping of alias: real_name
        self.requirement_aliases = {}
        # URL/path requirements whose name is not known yet.
        self.unnamed_requirements = []
        self.successfully_downloaded = []
        # Requirements whose temporary sources should be removed later.
        self.reqs_to_cleanup = []

    def __str__(self):
        # Only user-supplied (top-level) requirements are shown.
        reqs = [req for req in self.requirements.values()
                if not req.comes_from]
        reqs.sort(key=lambda req: req.name.lower())
        return ' '.join([str(req.req) for req in reqs])

    def __repr__(self):
        reqs = [req for req in self.requirements.values()]
        reqs.sort(key=lambda req: req.name.lower())
        reqs_str = ', '.join([str(req.req) for req in reqs])
        return ('<%s object; %d requirement(s): %s>'
                % (self.__class__.__name__, len(reqs), reqs_str))

    def add_requirement(self, install_req, parent_req_name=None,
                        extras_requested=None):
        """Add install_req as a requirement to install.

        :param parent_req_name: The name of the requirement that needed this
            added. The name is used because when multiple unnamed requirements
            resolve to the same name, we could otherwise end up with dependency
            links that point outside the Requirements set. parent_req must
            already be added. Note that None implies that this is a user
            supplied requirement, vs an inferred one.
        :param extras_requested: an iterable of extras used to evaluate the
            environment markers.
        :return: Additional requirements to scan. That is either [] if
            the requirement is not applicable, or [install_req] if the
            requirement is applicable and has just been added.
        """
        name = install_req.name
        if not install_req.match_markers(extras_requested):
            logger.info("Ignoring %s: markers '%s' don't match your "
                        "environment", install_req.name,
                        install_req.markers)
            return [], None
        # This check has to come after we filter requirements with the
        # environment markers.
        if install_req.link and install_req.link.is_wheel:
            wheel = Wheel(install_req.link.filename)
            if not wheel.supported():
                raise InstallationError(
                    "%s is not a supported wheel on this platform." %
                    wheel.filename
                )
        # This next bit is really a sanity check.
        assert install_req.is_direct == (parent_req_name is None), (
            "a direct req shouldn't have a parent and also, "
            "a non direct req should have a parent"
        )
        if not name:
            # url or path requirement w/o an egg fragment
            self.unnamed_requirements.append(install_req)
            return [install_req], None
        else:
            try:
                existing_req = self.get_requirement(name)
            except KeyError:
                existing_req = None
            # Two user-supplied requirements for the same project with
            # matching extras but differing specifiers conflict.
            if (parent_req_name is None and existing_req and not
                    existing_req.constraint and
                    existing_req.extras == install_req.extras and not
                    existing_req.req.specifier == install_req.req.specifier):
                raise InstallationError(
                    'Double requirement given: %s (already in %s, name=%r)'
                    % (install_req, existing_req, name))
            if not existing_req:
                # Add requirement
                self.requirements[name] = install_req
                # FIXME: what about other normalizations? E.g., _ vs. -?
                if name.lower() != name:
                    self.requirement_aliases[name.lower()] = name
                result = [install_req]
            else:
                # Assume there's no need to scan, and that we've already
                # encountered this for scanning.
                result = []
                if not install_req.constraint and existing_req.constraint:
                    if (install_req.link and not (existing_req.link and
                       install_req.link.path == existing_req.link.path)):
                        self.reqs_to_cleanup.append(install_req)
                        raise InstallationError(
                            "Could not satisfy constraints for '%s': "
                            "installation from path or url cannot be "
                            "constrained to a version" % name,
                        )
                    # If we're now installing a constraint, mark the existing
                    # object for real installation.
                    existing_req.constraint = False
                    existing_req.extras = tuple(
                        sorted(set(existing_req.extras).union(
                            set(install_req.extras))))
                    logger.debug("Setting %s extras to: %s",
                                 existing_req, existing_req.extras)
                    # And now we need to scan this.
                    result = [existing_req]
                # Canonicalise to the already-added object for the backref
                # check below.
                install_req = existing_req
            # We return install_req here to allow for the caller to add it to
            # the dependency information for the parent package.
            return result, install_req

    def has_requirement(self, project_name):
        """True when a non-constraint requirement with this name (or its
        lowercase alias) is already present."""
        name = project_name.lower()
        if (name in self.requirements and
                not self.requirements[name].constraint or
                name in self.requirement_aliases and
                not self.requirements[self.requirement_aliases[name]].constraint):
            return True
        return False

    @property
    def has_requirements(self):
        """Truthy when any non-constraint or unnamed requirements exist."""
        return list(req for req in self.requirements.values() if not
                    req.constraint) or self.unnamed_requirements

    def get_requirement(self, project_name):
        """Look up a requirement by exact name, lowercase name, or alias.

        :raises KeyError: when no requirement with that name exists.
        """
        for name in project_name, project_name.lower():
            if name in self.requirements:
                return self.requirements[name]
            if name in self.requirement_aliases:
                return self.requirements[self.requirement_aliases[name]]
        raise KeyError("No project with the name %r" % project_name)

    def cleanup_files(self):
        """Clean up files, remove builds."""
        logger.debug('Cleaning up...')
        with indent_log():
            for req in self.reqs_to_cleanup:
                req.remove_temporary_source()
from __future__ import absolute_import | |
import csv | |
import functools | |
import logging | |
import os | |
import sys | |
import sysconfig | |
from pip._vendor import pkg_resources | |
from pip._internal.compat import WINDOWS, cache_from_source, uses_pycache | |
from pip._internal.exceptions import UninstallationError | |
from pip._internal.locations import bin_py, bin_user | |
from pip._internal.utils.logging import indent_log | |
from pip._internal.utils.misc import ( | |
FakeFile, ask, dist_in_usersite, dist_is_local, egg_link_path, is_local, | |
normalize_path, renames, | |
) | |
from pip._internal.utils.temp_dir import TempDirectory | |
logger = logging.getLogger(__name__) | |
def _script_names(dist, script_name, is_gui):
    """Create the fully qualified name of the files created by
    {console,gui}_scripts for the given ``dist``.
    Returns the list of file names
    """
    bin_dir = bin_user if dist_in_usersite(dist) else bin_py
    exe_name = os.path.join(bin_dir, script_name)
    paths_to_remove = [exe_name]
    if WINDOWS:
        paths_to_remove.append(exe_name + '.exe')
        paths_to_remove.append(exe_name + '.exe.manifest')
        script_suffix = '-script.pyw' if is_gui else '-script.py'
        paths_to_remove.append(exe_name + script_suffix)
    return paths_to_remove
def _unique(fn): | |
@functools.wraps(fn) | |
def unique(*args, **kw): | |
seen = set() | |
for item in fn(*args, **kw): | |
if item not in seen: | |
seen.add(item) | |
yield item | |
return unique | |
@_unique
def uninstallation_paths(dist):
    """
    Yield all the uninstallation paths for dist based on RECORD-without-.pyc

    Yield paths to all the files in RECORD. For each .py file in RECORD, add
    the .pyc in the same directory.

    UninstallPathSet.add() takes care of the __pycache__ .pyc.
    """
    reader = csv.reader(FakeFile(dist.get_metadata_lines('RECORD')))
    for row in reader:
        path = os.path.join(dist.location, row[0])
        yield path
        if path.endswith('.py'):
            # Legacy sibling .pyc next to the .py file.
            directory, filename = os.path.split(path)
            yield os.path.join(directory, filename[:-3] + '.pyc')
def compact(paths):
    """Compact a path set to contain the minimal number of paths
    necessary to contain all paths in the set. If /a/path/ and
    /a/path/to/a/file.txt are both in the set, leave only the
    shorter path."""
    sep = os.path.sep
    short_paths = set()
    # Shortest-first: a path can only be covered by a shorter path that
    # was already accepted.
    for path in sorted(paths, key=len):
        # True when an already-kept path is a directory prefix of `path`
        # (the '*' suffix comes from compress_for_output_listing entries).
        # NOTE: this local was previously named `should_add`, which was the
        # inverse of its meaning.
        is_covered = any(
            path.startswith(shortpath.rstrip("*")) and
            path[len(shortpath.rstrip("*").rstrip(sep))] == sep
            for shortpath in short_paths
        )
        if not is_covered:
            short_paths.add(path)
    return short_paths
def compress_for_output_listing(paths):
    """Returns a tuple of 2 sets of which paths to display to user

    The first set contains paths that would be deleted. Files of a package
    are not added and the top-level directory of the package has a '*' added
    at the end - to signify that all it's contents are removed.

    The second set contains files that would have been skipped in the above
    folders.
    """
    will_skip = set()
    folders = set()
    files = set()
    # Partition the recorded paths into package folders and plain files.
    for path in list(paths):
        if path.endswith(".pyc"):
            continue
        if path.endswith("__init__.py") or ".dist-info" in path:
            folders.add(os.path.dirname(path))
        files.add(path)
    folders = compact(folders)
    # Walk the package folders with os.walk to find on-disk files that are
    # absent from the record - these would be skipped.
    for folder in folders:
        for dirpath, _, dirfiles in os.walk(folder):
            for fname in dirfiles:
                if fname.endswith(".pyc"):
                    continue
                file_ = os.path.normcase(os.path.join(dirpath, fname))
                if os.path.isfile(file_) and file_ not in files:
                    will_skip.add(file_)
    will_remove = files | {os.path.join(folder, "*") for folder in folders}
    return will_remove, will_skip
class UninstallPathSet(object):
    """A set of file paths to be removed in the uninstallation of a
    requirement."""
    def __init__(self, dist):
        # Absolute, normalized paths scheduled for removal.
        self.paths = set()
        # Paths we refused to touch because _permitted() said no.
        self._refuse = set()
        # Maps a .pth file path -> UninstallPthEntries tracking the
        # entries to strip from it.
        self.pth = {}
        self.dist = dist
        # Stash area: remove() moves files here so rollback() can
        # restore them.
        self.save_dir = TempDirectory(kind="uninstall")
        # Original locations of everything moved into the stash.
        self._moved_paths = []
    def _permitted(self, path):
        """
        Return True if the given path is one we are permitted to
        remove/modify, False otherwise.

        """
        return is_local(path)
    def add(self, path):
        """Schedule ``path`` for removal, if it exists and is permitted.

        Also schedules the compiled cache file for ``.py`` sources.
        """
        head, tail = os.path.split(path)

        # we normalize the head to resolve parent directory symlinks, but not
        # the tail, since we only want to uninstall symlinks, not their targets
        path = os.path.join(normalize_path(head), os.path.normcase(tail))

        if not os.path.exists(path):
            return
        if self._permitted(path):
            self.paths.add(path)
        else:
            self._refuse.add(path)

        # __pycache__ files can show up after 'installed-files.txt' is created,
        # due to imports
        if os.path.splitext(path)[1] == '.py' and uses_pycache:
            self.add(cache_from_source(path))
    def add_pth(self, pth_file, entry):
        """Schedule ``entry`` for removal from the .pth file ``pth_file``."""
        pth_file = normalize_path(pth_file)
        if self._permitted(pth_file):
            if pth_file not in self.pth:
                self.pth[pth_file] = UninstallPthEntries(pth_file)
            self.pth[pth_file].add(entry)
        else:
            self._refuse.add(pth_file)
    def _stash(self, path):
        # Mirror ``path`` (minus any drive letter) under the temporary
        # save dir, preserving its directory structure.
        return os.path.join(
            self.save_dir.path, os.path.splitdrive(path)[1].lstrip(os.path.sep)
        )
    def remove(self, auto_confirm=False, verbose=False):
        """Remove paths in ``self.paths`` with confirmation (unless
        ``auto_confirm`` is True)."""

        if not self.paths:
            logger.info(
                "Can't uninstall '%s'. No files were found to uninstall.",
                self.dist.project_name,
            )
            return

        dist_name_version = (
            self.dist.project_name + "-" + self.dist.version
        )
        logger.info('Uninstalling %s:', dist_name_version)

        with indent_log():
            if auto_confirm or self._allowed_to_proceed(verbose):
                self.save_dir.create()

                # Files are moved (not deleted) into the stash so that
                # rollback() can renames() them back.
                for path in sorted(compact(self.paths)):
                    new_path = self._stash(path)
                    logger.debug('Removing file or directory %s', path)
                    self._moved_paths.append(path)
                    renames(path, new_path)
                for pth in self.pth.values():
                    pth.remove()

                logger.info('Successfully uninstalled %s', dist_name_version)
    def _allowed_to_proceed(self, verbose):
        """Display which files would be deleted and prompt for confirmation
        """

        def _display(msg, paths):
            if not paths:
                return

            logger.info(msg)
            with indent_log():
                for path in sorted(compact(paths)):
                    logger.info(path)

        if not verbose:
            will_remove, will_skip = compress_for_output_listing(self.paths)
        else:
            # In verbose mode, display all the files that are going to be
            # deleted.
            will_remove = list(self.paths)
            will_skip = set()

        _display('Would remove:', will_remove)
        _display('Would not remove (might be manually added):', will_skip)
        _display('Would not remove (outside of prefix):', self._refuse)

        return ask('Proceed (y/n)? ', ('y', 'n')) == 'y'
    def rollback(self):
        """Rollback the changes previously made by remove()."""
        if self.save_dir.path is None:
            logger.error(
                "Can't roll back %s; was not uninstalled",
                self.dist.project_name,
            )
            return False
        logger.info('Rolling back uninstall of %s', self.dist.project_name)
        # Move every stashed file back to where it came from.
        for path in self._moved_paths:
            tmp_path = self._stash(path)
            logger.debug('Replacing %s', path)
            renames(tmp_path, path)
        for pth in self.pth.values():
            pth.rollback()
    def commit(self):
        """Remove temporary save dir: rollback will no longer be possible."""
        self.save_dir.cleanup()
        self._moved_paths = []
    @classmethod
    def from_dist(cls, dist):
        """Build the UninstallPathSet for an installed distribution.

        Handles the different installation layouts (egg-info with
        installed-files.txt or top_level.txt, dist-info, easy_install
        eggs, develop installs) plus distutils scripts and console/gui
        entry points.  Returns an empty set when the dist is outside the
        local environment or lives in the standard library.
        """
        dist_path = normalize_path(dist.location)
        if not dist_is_local(dist):
            logger.info(
                "Not uninstalling %s at %s, outside environment %s",
                dist.key,
                dist_path,
                sys.prefix,
            )
            return cls(dist)

        if dist_path in {p for p in {sysconfig.get_path("stdlib"),
                                     sysconfig.get_path("platstdlib")}
                         if p}:
            logger.info(
                "Not uninstalling %s at %s, as it is in the standard library.",
                dist.key,
                dist_path,
            )
            return cls(dist)

        paths_to_remove = cls(dist)
        develop_egg_link = egg_link_path(dist)
        develop_egg_link_egg_info = '{}.egg-info'.format(
            pkg_resources.to_filename(dist.project_name))
        egg_info_exists = dist.egg_info and os.path.exists(dist.egg_info)
        # Special case for distutils installed package
        distutils_egg_info = getattr(dist._provider, 'path', None)

        # Uninstall cases order do matter as in the case of 2 installs of the
        # same package, pip needs to uninstall the currently detected version
        if (egg_info_exists and dist.egg_info.endswith('.egg-info') and
                not dist.egg_info.endswith(develop_egg_link_egg_info)):
            # if dist.egg_info.endswith(develop_egg_link_egg_info), we
            # are in fact in the develop_egg_link case
            paths_to_remove.add(dist.egg_info)
            if dist.has_metadata('installed-files.txt'):
                # Entries are relative to the egg-info directory.
                for installed_file in dist.get_metadata(
                        'installed-files.txt').splitlines():
                    path = os.path.normpath(
                        os.path.join(dist.egg_info, installed_file)
                    )
                    paths_to_remove.add(path)
            # FIXME: need a test for this elif block
            # occurs with --single-version-externally-managed/--record outside
            # of pip
            elif dist.has_metadata('top_level.txt'):
                if dist.has_metadata('namespace_packages.txt'):
                    namespaces = dist.get_metadata('namespace_packages.txt')
                else:
                    namespaces = []
                for top_level_pkg in [
                        p for p
                        in dist.get_metadata('top_level.txt').splitlines()
                        if p and p not in namespaces]:
                    path = os.path.join(dist.location, top_level_pkg)
                    paths_to_remove.add(path)
                    paths_to_remove.add(path + '.py')
                    paths_to_remove.add(path + '.pyc')
                    paths_to_remove.add(path + '.pyo')

        elif distutils_egg_info:
            raise UninstallationError(
                "Cannot uninstall {!r}. It is a distutils installed project "
                "and thus we cannot accurately determine which files belong "
                "to it which would lead to only a partial uninstall.".format(
                    dist.project_name,
                )
            )

        elif dist.location.endswith('.egg'):
            # package installed by easy_install
            # We cannot match on dist.egg_name because it can slightly vary
            # i.e. setuptools-0.6c11-py2.6.egg vs setuptools-0.6rc11-py2.6.egg
            paths_to_remove.add(dist.location)
            easy_install_egg = os.path.split(dist.location)[1]
            easy_install_pth = os.path.join(os.path.dirname(dist.location),
                                            'easy-install.pth')
            paths_to_remove.add_pth(easy_install_pth, './' + easy_install_egg)

        elif egg_info_exists and dist.egg_info.endswith('.dist-info'):
            for path in uninstallation_paths(dist):
                paths_to_remove.add(path)

        elif develop_egg_link:
            # develop egg
            with open(develop_egg_link, 'r') as fh:
                link_pointer = os.path.normcase(fh.readline().strip())
            assert (link_pointer == dist.location), (
                'Egg-link %s does not match installed location of %s '
                '(at %s)' % (link_pointer, dist.project_name, dist.location)
            )
            paths_to_remove.add(develop_egg_link)
            easy_install_pth = os.path.join(os.path.dirname(develop_egg_link),
                                            'easy-install.pth')
            paths_to_remove.add_pth(easy_install_pth, dist.location)

        else:
            logger.debug(
                'Not sure how to uninstall: %s - Check: %s',
                dist, dist.location,
            )

        # find distutils scripts= scripts
        if dist.has_metadata('scripts') and dist.metadata_isdir('scripts'):
            for script in dist.metadata_listdir('scripts'):
                if dist_in_usersite(dist):
                    bin_dir = bin_user
                else:
                    bin_dir = bin_py
                paths_to_remove.add(os.path.join(bin_dir, script))
                if WINDOWS:
                    paths_to_remove.add(os.path.join(bin_dir, script) + '.bat')

        # find console_scripts
        _scripts_to_remove = []
        console_scripts = dist.get_entry_map(group='console_scripts')
        for name in console_scripts.keys():
            _scripts_to_remove.extend(_script_names(dist, name, False))
        # find gui_scripts
        gui_scripts = dist.get_entry_map(group='gui_scripts')
        for name in gui_scripts.keys():
            _scripts_to_remove.extend(_script_names(dist, name, True))

        for s in _scripts_to_remove:
            paths_to_remove.add(s)

        return paths_to_remove
class UninstallPthEntries(object):
    """Tracks entries to strip from a single ``.pth`` file, keeping the
    original contents around so the edit can be rolled back."""

    def __init__(self, pth_file):
        if not os.path.isfile(pth_file):
            raise UninstallationError(
                "Cannot remove entries from nonexistent file %s" % pth_file
            )
        self.file = pth_file
        self.entries = set()
        self._saved_lines = None

    def add(self, entry):
        """Register one entry for later removal from the file."""
        entry = os.path.normcase(entry)
        # On Windows, os.path.normcase converts the entry to use
        # backslashes.  That is correct for entries describing absolute
        # paths outside of site-packages, but all the others use forward
        # slashes.
        if WINDOWS and not os.path.splitdrive(entry)[0]:
            entry = entry.replace('\\', '/')
        self.entries.add(entry)

    def remove(self):
        """Rewrite the file with every registered entry stripped out."""
        logger.debug('Removing pth entries from %s:', self.file)
        with open(self.file, 'rb') as fh:
            # windows uses '\r\n' with py3k, but uses '\n' with py2.x
            contents = fh.readlines()
            self._saved_lines = contents
        has_crlf = any(b'\r\n' in line for line in contents)
        endline = '\r\n' if has_crlf else '\n'
        terminator = endline.encode("utf-8")
        # handle missing trailing newline
        if contents and not contents[-1].endswith(terminator):
            contents[-1] = contents[-1] + terminator
        for entry in self.entries:
            try:
                logger.debug('Removing entry: %s', entry)
                contents.remove((entry + endline).encode("utf-8"))
            except ValueError:
                # Entry not present in the file; nothing to strip.
                pass
        with open(self.file, 'wb') as fh:
            fh.writelines(contents)

    def rollback(self):
        """Restore the file to its pre-``remove()`` contents.

        Returns False (after logging an error) when remove() never ran.
        """
        if self._saved_lines is None:
            logger.error(
                'Cannot roll back changes to %s, none were made', self.file
            )
            return False
        logger.debug('Rolling %s back to previous state', self.file)
        with open(self.file, 'wb') as fh:
            fh.writelines(self._saved_lines)
        return True
"""Dependency Resolution | |
The dependency resolution in pip is performed as follows: | |
for top-level requirements: | |
a. only one spec allowed per project, regardless of conflicts or not. | |
otherwise a "double requirement" exception is raised | |
b. they override sub-dependency requirements. | |
for sub-dependencies | |
a. "first found, wins" (where the order is breadth first) | |
""" | |
import logging | |
from collections import defaultdict | |
from itertools import chain | |
from pip._internal.exceptions import ( | |
BestVersionAlreadyInstalled, DistributionNotFound, HashError, HashErrors, | |
UnsupportedPythonVersion, | |
) | |
from pip._internal.req.req_install import InstallRequirement | |
from pip._internal.utils.logging import indent_log | |
from pip._internal.utils.misc import dist_in_usersite, ensure_dir | |
from pip._internal.utils.packaging import check_dist_requires_python | |
logger = logging.getLogger(__name__) | |
class Resolver(object):
    """Resolves which packages need to be installed/uninstalled to perform \
    the requested operation without breaking the requirements of any package.
    """

    # The only values accepted for ``upgrade_strategy``.
    _allowed_strategies = {"eager", "only-if-needed", "to-satisfy-only"}

    def __init__(self, preparer, session, finder, wheel_cache, use_user_site,
                 ignore_dependencies, ignore_installed, ignore_requires_python,
                 force_reinstall, isolated, upgrade_strategy):
        super(Resolver, self).__init__()
        assert upgrade_strategy in self._allowed_strategies

        self.preparer = preparer
        self.finder = finder
        self.session = session

        # NOTE: This would eventually be replaced with a cache that can give
        # information about both sdist and wheels transparently.
        self.wheel_cache = wheel_cache

        self.require_hashes = None  # This is set in resolve

        self.upgrade_strategy = upgrade_strategy
        self.force_reinstall = force_reinstall
        self.isolated = isolated
        self.ignore_dependencies = ignore_dependencies
        self.ignore_installed = ignore_installed
        self.ignore_requires_python = ignore_requires_python
        self.use_user_site = use_user_site

        # Maps a parent requirement's name to the InstallRequirements
        # discovered as its dependencies; consumed by
        # get_installation_order().
        self._discovered_dependencies = defaultdict(list)

    def resolve(self, requirement_set):
        """Resolve what operations need to be done

        As a side-effect of this method, the packages (and their dependencies)
        are downloaded, unpacked and prepared for installation. This
        preparation is done by ``pip.operations.prepare``.

        Once PyPI has static dependency metadata available, it would be
        possible to move the preparation to become a step separated from
        dependency resolution.
        """
        # make the wheelhouse
        if self.preparer.wheel_download_dir:
            ensure_dir(self.preparer.wheel_download_dir)

        # If any top-level requirement has a hash specified, enter
        # hash-checking mode, which requires hashes from all.
        root_reqs = (
            requirement_set.unnamed_requirements +
            list(requirement_set.requirements.values())
        )
        self.require_hashes = (
            requirement_set.require_hashes or
            any(req.has_hash_options for req in root_reqs)
        )

        # Display where finder is looking for packages
        locations = self.finder.get_formatted_locations()
        if locations:
            logger.info(locations)

        # Actually prepare the files, and collect any exceptions. Most hash
        # exceptions cannot be checked ahead of time, because
        # req.populate_link() needs to be called before we can make decisions
        # based on link type.
        # ``discovered_reqs`` is extended inside the loop while the chain
        # is being iterated, which yields a breadth-first traversal of
        # the dependency tree.
        discovered_reqs = []
        hash_errors = HashErrors()
        for req in chain(root_reqs, discovered_reqs):
            try:
                discovered_reqs.extend(
                    self._resolve_one(requirement_set, req)
                )
            except HashError as exc:
                exc.req = req
                hash_errors.append(exc)

        if hash_errors:
            raise hash_errors

    def _is_upgrade_allowed(self, req):
        """Return True when the configured upgrade strategy lets ``req``
        be upgraded (for "only-if-needed", only direct requirements)."""
        if self.upgrade_strategy == "to-satisfy-only":
            return False
        elif self.upgrade_strategy == "eager":
            return True
        else:
            assert self.upgrade_strategy == "only-if-needed"
            return req.is_direct

    def _set_req_to_reinstall(self, req):
        """
        Set a requirement to be installed.
        """
        # Don't uninstall the conflict if doing a user install and the
        # conflict is not a user install.
        if not self.use_user_site or dist_in_usersite(req.satisfied_by):
            req.conflicts_with = req.satisfied_by
        req.satisfied_by = None

    # XXX: Stop passing requirement_set for options
    def _check_skip_installed(self, req_to_install):
        """Check if req_to_install should be skipped.

        This will check if the req is installed, and whether we should upgrade
        or reinstall it, taking into account all the relevant user options.

        After calling this req_to_install will only have satisfied_by set to
        None if the req_to_install is to be upgraded/reinstalled etc. Any
        other value will be a dist recording the current thing installed that
        satisfies the requirement.

        Note that for vcs urls and the like we can't assess skipping in this
        routine - we simply identify that we need to pull the thing down,
        then later on it is pulled down and introspected to assess upgrade/
        reinstalls etc.

        :return: A text reason for why it was skipped, or None.
        """
        if self.ignore_installed:
            return None

        req_to_install.check_if_exists(self.use_user_site)
        if not req_to_install.satisfied_by:
            return None

        if self.force_reinstall:
            self._set_req_to_reinstall(req_to_install)
            return None

        if not self._is_upgrade_allowed(req_to_install):
            if self.upgrade_strategy == "only-if-needed":
                return 'not upgraded as not directly required'
            return 'already satisfied'

        # Check for the possibility of an upgrade.  For link-based
        # requirements we have to pull the tree down and inspect to assess
        # the version #, so it's handled way down.
        if not req_to_install.link:
            try:
                self.finder.find_requirement(req_to_install, upgrade=True)
            except BestVersionAlreadyInstalled:
                # Then the best version is installed.
                return 'already up-to-date'
            except DistributionNotFound:
                # No distribution found, so we squash the error.  It will
                # be raised later when we re-try later to do the install.
                # Why don't we just raise here?
                pass

        self._set_req_to_reinstall(req_to_install)
        return None

    def _get_abstract_dist_for(self, req):
        """Takes a InstallRequirement and returns a single AbstractDist \
        representing a prepared variant of the same.
        """
        assert self.require_hashes is not None, (
            "require_hashes should have been set in Resolver.resolve()"
        )

        if req.editable:
            return self.preparer.prepare_editable_requirement(
                req, self.require_hashes, self.use_user_site, self.finder,
            )

        # satisfied_by is only evaluated by calling _check_skip_installed,
        # so it must be None here.
        assert req.satisfied_by is None
        skip_reason = self._check_skip_installed(req)

        if req.satisfied_by:
            return self.preparer.prepare_installed_requirement(
                req, self.require_hashes, skip_reason
            )

        upgrade_allowed = self._is_upgrade_allowed(req)
        abstract_dist = self.preparer.prepare_linked_requirement(
            req, self.session, self.finder, upgrade_allowed,
            self.require_hashes
        )

        # NOTE
        # The following portion is for determining if a certain package is
        # going to be re-installed/upgraded or not and reporting to the user.
        # This should probably get cleaned up in a future refactor.

        # req.req is only avail after unpack for URL
        # pkgs repeat check_if_exists to uninstall-on-upgrade
        # (#14)
        if not self.ignore_installed:
            req.check_if_exists(self.use_user_site)

        if req.satisfied_by:
            should_modify = (
                self.upgrade_strategy != "to-satisfy-only" or
                self.force_reinstall or
                self.ignore_installed or
                req.link.scheme == 'file'
            )
            if should_modify:
                self._set_req_to_reinstall(req)
            else:
                logger.info(
                    'Requirement already satisfied (use --upgrade to upgrade):'
                    ' %s', req,
                )

        return abstract_dist

    def _resolve_one(self, requirement_set, req_to_install):
        """Prepare a single requirements file.

        :return: A list of additional InstallRequirements to also install.
        """
        # Tell user what we are doing for this requirement:
        # obtain (editable), skipping, processing (local url), collecting
        # (remote url or package name)
        if req_to_install.constraint or req_to_install.prepared:
            return []

        req_to_install.prepared = True

        # register tmp src for cleanup in case something goes wrong
        requirement_set.reqs_to_cleanup.append(req_to_install)

        abstract_dist = self._get_abstract_dist_for(req_to_install)

        # Parse and return dependencies
        dist = abstract_dist.dist(self.finder)
        try:
            check_dist_requires_python(dist)
        except UnsupportedPythonVersion as err:
            if self.ignore_requires_python:
                logger.warning(err.args[0])
            else:
                raise

        more_reqs = []

        def add_req(subreq, extras_requested):
            # Register ``subreq`` (a dependency of ``req_to_install``)
            # with the requirement set and record it for install-order
            # tracking; anything that needs another scan goes into
            # ``more_reqs``.
            sub_install_req = InstallRequirement.from_req(
                str(subreq),
                req_to_install,
                isolated=self.isolated,
                wheel_cache=self.wheel_cache,
            )
            parent_req_name = req_to_install.name
            to_scan_again, add_to_parent = requirement_set.add_requirement(
                sub_install_req,
                parent_req_name=parent_req_name,
                extras_requested=extras_requested,
            )
            if parent_req_name and add_to_parent:
                self._discovered_dependencies[parent_req_name].append(
                    add_to_parent
                )
            more_reqs.extend(to_scan_again)

        with indent_log():
            # We add req_to_install before its dependencies, so that we
            # can refer to it when adding dependencies.
            if not requirement_set.has_requirement(req_to_install.name):
                # 'unnamed' requirements will get added here
                req_to_install.is_direct = True
                requirement_set.add_requirement(
                    req_to_install, parent_req_name=None,
                )

            if not self.ignore_dependencies:
                if req_to_install.extras:
                    logger.debug(
                        "Installing extra requirements: %r",
                        ','.join(req_to_install.extras),
                    )
                # Warn about requested extras the dist does not provide.
                missing_requested = sorted(
                    set(req_to_install.extras) - set(dist.extras)
                )
                for missing in missing_requested:
                    logger.warning(
                        '%s does not provide the extra \'%s\'',
                        dist, missing
                    )

                available_requested = sorted(
                    set(dist.extras) & set(req_to_install.extras)
                )
                for subreq in dist.requires(available_requested):
                    add_req(subreq, extras_requested=available_requested)

            if not req_to_install.editable and not req_to_install.satisfied_by:
                # XXX: --no-install leads this to report 'Successfully
                # downloaded' for only non-editable reqs, even though we took
                # action on them.
                requirement_set.successfully_downloaded.append(req_to_install)

        return more_reqs

    def get_installation_order(self, req_set):
        """Create the installation order.

        The installation order is topological - requirements are installed
        before the requiring thing. We break cycles at an arbitrary point,
        and make no other guarantees.
        """
        # The current implementation, which we may change at any point
        # installs the user specified things in the order given, except when
        # dependencies must come earlier to achieve topological order.
        order = []
        ordered_reqs = set()

        def schedule(req):
            # Depth-first post-order walk; ``ordered_reqs`` doubles as the
            # visited set, which is what breaks dependency cycles.
            if req.satisfied_by or req in ordered_reqs:
                return
            if req.constraint:
                return
            ordered_reqs.add(req)
            for dep in self._discovered_dependencies[req.name]:
                schedule(dep)
            order.append(req)

        for install_req in req_set.requirements.values():
            schedule(install_req)
        return order
from __future__ import absolute_import | |
# Process exit codes returned by pip commands.
SUCCESS = 0  # command completed successfully
ERROR = 1  # generic failure
UNKNOWN_ERROR = 2  # unexpected, unhandled failure
VIRTUALENV_NOT_FOUND = 3  # presumably: a required virtualenv was not active — confirm at call sites
PREVIOUS_BUILD_DIR_ERROR = 4  # a pre-existing build directory prevented the operation
NO_MATCHES_FOUND = 23  # no packages matched the query (mirrors grep's exit 23? — NOTE(review): confirm origin)
""" | |
This code was taken from https://github.com/ActiveState/appdirs and modified | |
to suit our purposes. | |
""" | |
from __future__ import absolute_import | |
import os | |
import sys | |
from pip._vendor.six import PY2, text_type | |
from pip._internal.compat import WINDOWS, expanduser | |
def user_cache_dir(appname):
    r"""
    Return full path to the user-specific cache dir for this application.

    "appname" is the name of application.

    Typical user cache directories are:
        macOS:      ~/Library/Caches/<AppName>
        Unix:       ~/.cache/<AppName> (XDG default)
        Windows:    C:\Users\<username>\AppData\Local\<AppName>\Cache

    On Windows the only suggestion in the MSDN docs is that local settings go
    in the `CSIDL_LOCAL_APPDATA` directory. This is identical to the
    non-roaming app data dir (the default returned by `user_data_dir`). Apps
    typically put cache data somewhere *under* the given dir here. Some
    examples:
        ...\Mozilla\Firefox\Profiles\<ProfileName>\Cache
        ...\Acme\SuperApp\Cache\1.0

    OPINION: This function appends "Cache" to the `CSIDL_LOCAL_APPDATA` value.
    """
    if WINDOWS:
        base = os.path.normpath(_get_win_folder("CSIDL_LOCAL_APPDATA"))
        # When using Python 2, return paths as bytes on Windows like we do
        # on other operating systems. See helper function docs for more
        # details.
        if PY2 and isinstance(base, text_type):
            base = _win_path_to_bytes(base)
        # Append the app name plus a Cache directory.
        return os.path.join(base, appname, "Cache")

    if sys.platform == "darwin":
        return os.path.join(expanduser("~/Library/Caches"), appname)

    # XDG default elsewhere.
    base = os.getenv("XDG_CACHE_HOME", expanduser("~/.cache"))
    return os.path.join(base, appname)
def user_data_dir(appname, roaming=False):
    r"""
    Return full path to the user-specific data dir for this application.

    "appname" is the name of application.
        If None, just the system directory is returned.
    "roaming" (boolean, default False) can be set True to use the Windows
        roaming appdata directory. That means that for users on a Windows
        network setup for roaming profiles, this user data will be
        sync'd on login. See
        <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
        for a discussion of issues.

    Typical user data directories are:
        macOS:                  ~/Library/Application Support/<AppName>
                                if it exists, else ~/.config/<AppName>
        Unix:                   ~/.local/share/<AppName>    # or in
                                $XDG_DATA_HOME, if defined
        Win XP (not roaming):   C:\Documents and Settings\<username>\ ...
                                ...Application Data\<AppName>
        Win XP (roaming):       C:\Documents and Settings\<username>\Local ...
                                ...Settings\Application Data\<AppName>
        Win 7  (not roaming):   C:\\Users\<username>\AppData\Local\<AppName>
        Win 7  (roaming):       C:\\Users\<username>\AppData\Roaming\<AppName>

    For Unix, we follow the XDG spec and support $XDG_DATA_HOME.
    That means, by default "~/.local/share/<AppName>".
    """
    if WINDOWS:
        const = roaming and "CSIDL_APPDATA" or "CSIDL_LOCAL_APPDATA"
        path = os.path.join(os.path.normpath(_get_win_folder(const)), appname)
    elif sys.platform == "darwin":
        # Prefer ~/Library/Application Support when that directory already
        # exists; otherwise fall back to the XDG-style ~/.config location.
        # (Previously this join was spelled out twice, once for the isdir
        # probe and once for the value — compute it once instead.)
        support_path = os.path.join(
            expanduser('~/Library/Application Support/'),
            appname,
        )
        if os.path.isdir(support_path):
            path = support_path
        else:
            path = os.path.join(
                expanduser('~/.config/'),
                appname,
            )
    else:
        path = os.path.join(
            os.getenv('XDG_DATA_HOME', expanduser("~/.local/share")),
            appname,
        )

    return path
def user_config_dir(appname, roaming=True):
    """Return full path to the user-specific config dir for this application.

    "appname" is the name of application.
        If None, just the system directory is returned.
    "roaming" (boolean, default True) can be set False to not use the
        Windows roaming appdata directory. That means that for users on a
        Windows network setup for roaming profiles, this user data will be
        sync'd on login. See
        <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
        for a discussion of issues.

    Typical user data directories are:
        macOS:                  same as user_data_dir
        Unix:                   ~/.config/<AppName>
        Win *:                  same as user_data_dir

    For Unix, we follow the XDG spec and support $XDG_CONFIG_HOME.
    That means, by default "~/.config/<AppName>".
    """
    if WINDOWS:
        return user_data_dir(appname, roaming=roaming)
    if sys.platform == "darwin":
        return user_data_dir(appname)
    # XDG spec on other Unixes.
    base = os.getenv('XDG_CONFIG_HOME', expanduser("~/.config"))
    return os.path.join(base, appname)
# for the discussion regarding site_config_dirs locations | |
# see <https://github.com/pypa/pip/issues/1733> | |
def site_config_dirs(appname):
    r"""Return a list of potential user-shared config dirs for this application.

    "appname" is the name of application.

    Typical user config directories are:
        macOS:      /Library/Application Support/<AppName>/
        Unix:       /etc or $XDG_CONFIG_DIRS[i]/<AppName>/ for each value in
                    $XDG_CONFIG_DIRS
        Win XP:     C:\Documents and Settings\All Users\Application ...
                    ...Data\<AppName>\
        Vista:      (Fail! "C:\ProgramData" is a hidden *system* directory
                    on Vista.)
        Win 7:      Hidden, but writeable on Win 7:
                    C:\ProgramData\<AppName>\
    """
    if WINDOWS:
        base = os.path.normpath(_get_win_folder("CSIDL_COMMON_APPDATA"))
        return [os.path.join(base, appname)]

    if sys.platform == 'darwin':
        return [os.path.join('/Library/Application Support', appname)]

    # try looking in $XDG_CONFIG_DIRS
    xdg_config_dirs = os.getenv('XDG_CONFIG_DIRS', '/etc/xdg')
    if xdg_config_dirs:
        dirs = [
            os.path.join(expanduser(entry), appname)
            for entry in xdg_config_dirs.split(os.pathsep)
        ]
    else:
        dirs = []

    # always look in /etc directly as well
    dirs.append('/etc')
    return dirs
# -- Windows support functions -- | |
def _get_win_folder_from_registry(csidl_name):
    """
    This is a fallback technique at best. I'm not sure if using the
    registry for this guarantees us the correct answer for all CSIDL_*
    names.
    """
    import _winreg

    # Registry value name under "Shell Folders" for each supported CSIDL.
    value_name = {
        "CSIDL_APPDATA": "AppData",
        "CSIDL_COMMON_APPDATA": "Common AppData",
        "CSIDL_LOCAL_APPDATA": "Local AppData",
    }[csidl_name]

    key = _winreg.OpenKey(
        _winreg.HKEY_CURRENT_USER,
        r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders"
    )
    folder, _kind = _winreg.QueryValueEx(key, value_name)
    return folder
def _get_win_folder_with_ctypes(csidl_name):
    """Resolve a CSIDL special folder via SHGetFolderPathW."""
    csidl_const = {
        "CSIDL_APPDATA": 26,
        "CSIDL_COMMON_APPDATA": 35,
        "CSIDL_LOCAL_APPDATA": 28,
    }[csidl_name]

    buf = ctypes.create_unicode_buffer(1024)
    ctypes.windll.shell32.SHGetFolderPathW(None, csidl_const, None, 0, buf)

    # Downgrade to short path name if have highbit chars. See
    # <http://bugs.activestate.com/show_bug.cgi?id=85099>.
    if any(ord(ch) > 255 for ch in buf):
        short_buf = ctypes.create_unicode_buffer(1024)
        if ctypes.windll.kernel32.GetShortPathNameW(buf.value, short_buf, 1024):
            buf = short_buf

    return buf.value
# Pick the best available implementation for resolving Windows special
# folders: ctypes when importable, otherwise the registry fallback.
if WINDOWS:
    try:
        import ctypes
        _get_win_folder = _get_win_folder_with_ctypes
    except ImportError:
        _get_win_folder = _get_win_folder_from_registry
def _win_path_to_bytes(path): | |
"""Encode Windows paths to bytes. Only used on Python 2. | |
Motivation is to be consistent with other operating systems where paths | |
are also returned as bytes. This avoids problems mixing bytes and Unicode | |
elsewhere in the codebase. For more details and discussion see | |
<https://github.com/pypa/pip/issues/3463>. | |
If encoding using ASCII and MBCS fails, return the original Unicode path. | |
""" | |
for encoding in ('ASCII', 'MBCS'): | |
try: | |
return path.encode(encoding) | |
except (UnicodeEncodeError, LookupError): | |
pass | |
return path |
""" | |
A module that implements tooling to enable easy warnings about deprecations. | |
""" | |
from __future__ import absolute_import | |
import logging | |
import warnings | |
from pip._internal.utils.typing import MYPY_CHECK_RUNNING | |
if MYPY_CHECK_RUNNING: | |
from typing import Any | |
class PipDeprecationWarning(Warning):
    """Base class for all of pip's deprecation warnings; instances are
    routed to the "pip._internal.deprecations" logger by _showwarning."""
    pass
class Pending(object):
    """Mix-in marking a deprecation that is still pending; _showwarning
    logs these at WARNING level instead of ERROR."""
    pass
class RemovedInPip11Warning(PipDeprecationWarning):
    """Deprecation warning for behavior slated for removal in pip 11."""
    pass
class RemovedInPip12Warning(PipDeprecationWarning, Pending):
    """Pending deprecation warning for behavior slated for removal in
    pip 12; still logged as a warning rather than an error."""
    pass
# Warnings <-> Logging Integration | |
_warnings_showwarning = None # type: Any | |
def _showwarning(message, category, filename, lineno, file=None, line=None):
    """Replacement for warnings.showwarning that routes pip's own
    deprecation warnings through logging and delegates everything else."""
    if file is not None:
        # An explicit stream was requested; defer to the saved handler.
        if _warnings_showwarning is not None:
            _warnings_showwarning(
                message, category, filename, lineno, file, line,
            )
        return

    if not issubclass(category, PipDeprecationWarning):
        _warnings_showwarning(
            message, category, filename, lineno, file, line,
        )
        return

    # We use a specially named logger which will handle all of the
    # deprecation messages for pip.
    logger = logging.getLogger("pip._internal.deprecations")

    # This is purposely using the % formatter here instead of letting
    # the logging module handle the interpolation. This is because we
    # want it to appear as if someone typed this entire message out.
    log_message = "DEPRECATION: %s" % message

    # PipDeprecationWarnings that are Pending still have at least 2
    # versions to go until they are removed so they can just be
    # warnings. Otherwise, they will be removed in the very next
    # version of pip. We want these to be more obvious so we use the
    # ERROR logging level.
    if issubclass(category, Pending):
        logger.warning(log_message)
    else:
        logger.error(log_message)
def install_warning_logger():
    # type: () -> None
    """Install pip's warnings->logging bridge process-wide.

    Enables display of PipDeprecationWarning and swaps
    ``warnings.showwarning`` for ``_showwarning``, saving the original
    handler so non-pip warnings still go through it.  Idempotent: the
    original handler is captured only once.
    """
    # Enable our Deprecation Warnings
    warnings.simplefilter("default", PipDeprecationWarning, append=True)
    global _warnings_showwarning
    if _warnings_showwarning is None:
        _warnings_showwarning = warnings.showwarning
        warnings.showwarning = _showwarning
import codecs | |
import locale | |
import re | |
import sys | |
# Byte-order marks we recognise, paired with the codec that strips them.
BOMS = [
    (codecs.BOM_UTF8, 'utf8'),
    (codecs.BOM_UTF16, 'utf16'),
    (codecs.BOM_UTF16_BE, 'utf16-be'),
    (codecs.BOM_UTF16_LE, 'utf16-le'),
    (codecs.BOM_UTF32, 'utf32'),
    (codecs.BOM_UTF32_BE, 'utf32-be'),
    (codecs.BOM_UTF32_LE, 'utf32-le'),
]
# PEP 263 style declaration, e.g. "# -*- coding: latin-1 -*-".
ENCODING_RE = re.compile(br'coding[:=]\s*([-\w.]+)')
def auto_decode(data):
    """Decode ``data`` (bytes) to text, guessing the encoding.

    Preference order: a recognised BOM, then a PEP 263 coding
    declaration in the first two lines, then the locale's preferred
    encoding (falling back to the interpreter default) — mirroring what
    ``open()`` does on Python 3.
    """
    for bom, encoding in BOMS:
        if data.startswith(bom):
            return data[len(bom):].decode(encoding)
    # Let's check the first two lines as in PEP 263.
    for line in data.split(b'\n')[:2]:
        if line.startswith(b'#'):
            declared = ENCODING_RE.search(line)
            if declared:
                return data.decode(declared.groups()[0].decode('ascii'))
    return data.decode(
        locale.getpreferredencoding(False) or sys.getdefaultencoding(),
    )
import os | |
import os.path | |
from pip._internal.compat import get_path_uid | |
def check_path_owner(path):
    # type: (str) -> bool
    """Return True if the current user can be considered the owner of ``path``.

    Walks up from ``path`` to the first existing ancestor and checks that
    directory.  Root is special-cased so that ``sudo`` without ``-H``
    (where the cache dir still belongs to the invoking user) is handled
    correctly.
    """
    # If we don't have a way to check the effective uid of this process, then
    # we'll just assume that we own the directory.
    if not hasattr(os, "geteuid"):
        return True
    previous = None
    while path != previous:
        if os.path.lexists(path):
            # Check if path is writable by current user.
            if os.geteuid() == 0:
                # Special handling for root user in order to handle properly
                # cases where users use sudo without -H flag.
                try:
                    path_uid = get_path_uid(path)
                except OSError:
                    return False
                return path_uid == 0
            else:
                return os.access(path, os.W_OK)
        else:
            previous, path = path, os.path.dirname(path)
    # BUGFIX: previously fell off the loop returning an implicit None
    # (e.g. for an empty path).  Assume we do not own the path.
    return False
from __future__ import absolute_import | |
import ctypes | |
import re | |
import warnings | |
def glibc_version_string():
    """Returns glibc version string, or None if not using glibc."""
    # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen
    # manpage says, "If filename is NULL, then the returned handle is for the
    # main program". This way we can let the linker do the work to figure out
    # which libc our process is actually using.
    try:
        process_namespace = ctypes.CDLL(None)
    except OSError:
        # BUGFIX: dlopen(NULL) is not available everywhere (notably on
        # Windows, where CDLL(None) raises); no dlopen -> not glibc.
        return None
    try:
        gnu_get_libc_version = process_namespace.gnu_get_libc_version
    except AttributeError:
        # Symbol doesn't exist -> therefore, we are not linked to
        # glibc.
        return None
    # Call gnu_get_libc_version, which returns a string like "2.5"
    gnu_get_libc_version.restype = ctypes.c_char_p
    version_str = gnu_get_libc_version()
    # py2 / py3 compatibility: the C function hands back bytes on py3.
    if not isinstance(version_str, str):
        version_str = version_str.decode("ascii")
    return version_str
# Separated out from have_compatible_glibc for easier unit testing
def check_glibc_version(version_str, required_major, minimum_minor):
    """Return True when ``version_str`` names glibc ``required_major`` with a
    minor version of at least ``minimum_minor``.

    Emits a RuntimeWarning and returns False when the string cannot be
    parsed as "major.minor".
    """
    # A regexp (rather than str.split) discards random junk that may trail
    # the minor version in patched/forked glibcs, e.g. Linaro's
    # "2.20-2014.11".  See gh-3588.
    parsed = re.match(r"(?P<major>[0-9]+)\.(?P<minor>[0-9]+)", version_str)
    if parsed is None:
        warnings.warn("Expected glibc version with 2 components major.minor,"
                      " got: %s" % version_str, RuntimeWarning)
        return False
    major = int(parsed.group("major"))
    minor = int(parsed.group("minor"))
    return major == required_major and minor >= minimum_minor
def have_compatible_glibc(required_major, minimum_minor):
    """True when the running libc is glibc >= required_major.minimum_minor."""
    version_str = glibc_version_string()
    return (version_str is not None and
            check_glibc_version(version_str, required_major, minimum_minor))
# platform.libc_ver() regularly returns completely nonsensical glibc
# versions (it inspects the python binary rather than asking libc itself);
# e.g. it has reported 2.7 / 2.9 on a machine where `ldd --version` says
# 2.22.  That made the linehaul data generated by pip 8.1.2 and earlier
# useless and misleading, so instead of platform we use our own code that
# actually works.
def libc_ver():
    """Try to determine the glibc version

    Returns a tuple of strings (lib, version) which default to empty strings
    in case the lookup fails.
    """
    glibc_version = glibc_version_string()
    if glibc_version is None:
        return ("", "")
    return ("glibc", glibc_version)
from __future__ import absolute_import | |
import hashlib | |
from pip._vendor.six import iteritems, iterkeys, itervalues | |
from pip._internal.exceptions import ( | |
HashMismatch, HashMissing, InstallationError, | |
) | |
from pip._internal.utils.misc import read_chunks | |
# The recommended hash algo of the moment. Change this whenever the state of
# the art changes; it won't hurt backward compatibility.
FAVORITE_HASH = 'sha256'
# Names of hashlib algorithms allowed by the --hash option and ``pip hash``
# Currently, those are the ones at least as collision-resistant as sha256
# (weaker algorithms such as md5/sha1 are deliberately excluded).
STRONG_HASHES = ['sha256', 'sha384', 'sha512']
class Hashes(object):
    """A wrapper that builds multiple hashes at once and checks them against
    known-good values
    """
    def __init__(self, hashes=None):
        """
        :param hashes: A dict of algorithm names pointing to lists of allowed
            hex digests
        """
        self._allowed = hashes if hashes is not None else {}

    def check_against_chunks(self, chunks):
        """Check good hashes against ones built from iterable of chunks of
        data.

        Raise HashMismatch if none match.
        """
        live = {}
        for algorithm in self._allowed:
            try:
                live[algorithm] = hashlib.new(algorithm)
            except (ValueError, TypeError):
                raise InstallationError('Unknown hash name: %s' % algorithm)
        for chunk in chunks:
            for hasher in live.values():
                hasher.update(chunk)
        # Any single matching digest is a success.
        for algorithm, computed in live.items():
            if computed.hexdigest() in self._allowed[algorithm]:
                return
        self._raise(live)

    def _raise(self, gots):
        # Overridden by subclasses to raise a different error.
        raise HashMismatch(self._allowed, gots)

    def check_against_file(self, file):
        """Check good hashes against a file-like object

        Raise HashMismatch if none match.
        """
        return self.check_against_chunks(read_chunks(file))

    def check_against_path(self, path):
        """Open ``path`` in binary mode and check it against the good hashes."""
        with open(path, 'rb') as file:
            return self.check_against_file(file)

    def __nonzero__(self):
        """Return whether I know any known-good hashes."""
        return bool(self._allowed)

    def __bool__(self):
        # Python 3 spelling of __nonzero__.
        return self.__nonzero__()
class MissingHashes(Hashes):
    """A workalike for Hashes used when we're missing a hash for a requirement

    It computes the actual hash of the requirement and raises a HashMissing
    exception showing it to the user.
    """
    def __init__(self):
        """Don't offer the ``hashes`` kwarg."""
        # Seed the parent with our favorite algorithm mapped to an empty
        # digest list: nothing can ever match, so _raise() always fires
        # carrying a freshly computed "gotten" hash for the user to copy.
        super(MissingHashes, self).__init__(hashes={FAVORITE_HASH: []})

    def _raise(self, gots):
        raise HashMissing(gots[FAVORITE_HASH].hexdigest())
from __future__ import absolute_import | |
import contextlib | |
import logging | |
import logging.handlers | |
import os | |
from pip._internal.compat import WINDOWS | |
from pip._internal.utils.misc import ensure_dir | |
try: | |
import threading | |
except ImportError: | |
import dummy_threading as threading # type: ignore | |
try: | |
from pip._vendor import colorama | |
# Lots of different errors can come from this, including SystemError and | |
# ImportError. | |
except Exception: | |
colorama = None | |
# Per-thread log-indentation depth, so nested indent_log() calls in one
# thread do not affect output formatting in another.
_log_state = threading.local()
_log_state.indentation = 0
@contextlib.contextmanager
def indent_log(num=2):
    """
    A context manager which will cause the log output to be indented for any
    log messages emitted inside it.

    :param num: number of spaces added to the current indentation level.
    """
    _log_state.indentation += num
    try:
        yield
    finally:
        # Always restore the previous level, even if the body raised.
        _log_state.indentation -= num
def get_indentation():
    """Return this thread's current log indentation (0 if never set)."""
    try:
        return _log_state.indentation
    except AttributeError:
        return 0
class IndentingFormatter(logging.Formatter):
    """Formatter that prefixes every output line with the current
    indent_log() indentation."""

    def format(self, record):
        """
        Calls the standard formatter, but will indent all of the log messages
        by our current indentation level.
        """
        prefix = " " * get_indentation()
        base = logging.Formatter.format(self, record)
        return "".join(
            prefix + line for line in base.splitlines(True)
        )
def _color_wrap(*colors):
    """Return a callable that surrounds its input with the given ANSI
    ``colors`` and a trailing style reset."""
    def wrapped(inp):
        pieces = list(colors)
        pieces.append(inp)
        pieces.append(colorama.Style.RESET_ALL)
        return "".join(pieces)
    return wrapped
class ColorizedStreamHandler(logging.StreamHandler):
    """StreamHandler that colors WARNING/ERROR records when colorama is
    available and the target stream supports it."""

    # Don't build up a list of colors if we don't have colorama
    if colorama:
        COLORS = [
            # This needs to be in order from highest logging level to lowest.
            (logging.ERROR, _color_wrap(colorama.Fore.RED)),
            (logging.WARNING, _color_wrap(colorama.Fore.YELLOW)),
        ]
    else:
        COLORS = []

    def __init__(self, stream=None, no_color=None):
        logging.StreamHandler.__init__(self, stream)
        # When truthy, suppress coloring regardless of stream capabilities.
        self._no_color = no_color
        if WINDOWS and colorama:
            # Translate ANSI escapes into Win32 console calls.
            self.stream = colorama.AnsiToWin32(self.stream)

    def should_color(self):
        """Return True when output should carry ANSI color codes."""
        # Don't colorize things if we do not have colorama or if told not to
        if not colorama or self._no_color:
            return False
        real_stream = (
            self.stream if not isinstance(self.stream, colorama.AnsiToWin32)
            else self.stream.wrapped
        )
        # If the stream is a tty we should color it
        if hasattr(real_stream, "isatty") and real_stream.isatty():
            return True
        # If we have an ANSI term we should color it
        if os.environ.get("TERM") == "ANSI":
            return True
        # If anything else we should not color it
        return False

    def format(self, record):
        """Format the record, wrapping it in color codes if appropriate."""
        msg = logging.StreamHandler.format(self, record)
        if self.should_color():
            for level, color in self.COLORS:
                if record.levelno >= level:
                    msg = color(msg)
                    break
        return msg
class BetterRotatingFileHandler(logging.handlers.RotatingFileHandler):
    """RotatingFileHandler that creates the log directory if it is missing."""

    def _open(self):
        # Make sure the parent directory exists before the base class
        # tries to open the log file.
        ensure_dir(os.path.dirname(self.baseFilename))
        return logging.handlers.RotatingFileHandler._open(self)
class MaxLevelFilter(logging.Filter):
    """Logging filter that passes only records strictly below a given level."""

    def __init__(self, level):
        # Records at or above this level are rejected.
        self.level = level

    def filter(self, record):
        """Return True when the record should be emitted."""
        return record.levelno < self.level
from __future__ import absolute_import | |
import contextlib | |
import errno | |
import io | |
import locale | |
# we have a submodule named 'logging' which would shadow this if we used the | |
# regular name: | |
import logging as std_logging | |
import os | |
import posixpath | |
import re | |
import shutil | |
import stat | |
import subprocess | |
import sys | |
import tarfile | |
import zipfile | |
from collections import deque | |
from pip._vendor import pkg_resources | |
# NOTE: retrying is not annotated in typeshed as on 2017-07-17, which is | |
# why we ignore the type on this import. | |
from pip._vendor.retrying import retry # type: ignore | |
from pip._vendor.six import PY2 | |
from pip._vendor.six.moves import input | |
from pip._internal.compat import console_to_str, expanduser, stdlib_pkgs | |
from pip._internal.exceptions import InstallationError | |
from pip._internal.locations import ( | |
running_under_virtualenv, site_packages, user_site, virtualenv_no_global, | |
write_delete_marker_file, | |
) | |
if PY2: | |
from io import BytesIO as StringIO | |
else: | |
from io import StringIO | |
# Public names re-exported as the surface of this utilities module.
__all__ = ['rmtree', 'display_path', 'backup_dir',
           'ask', 'splitext',
           'format_size', 'is_installable_dir',
           'is_svn_page', 'file_contents',
           'split_leading_dir', 'has_leading_dir',
           'normalize_path',
           'renames', 'get_prog',
           'unzip_file', 'untar_file', 'unpack_file', 'call_subprocess',
           'captured_stdout', 'ensure_dir',
           'ARCHIVE_EXTENSIONS', 'SUPPORTED_EXTENSIONS',
           'get_installed_version']
logger = std_logging.getLogger(__name__)
# Known archive extensions, grouped by compression family.
BZ2_EXTENSIONS = ('.tar.bz2', '.tbz')
XZ_EXTENSIONS = ('.tar.xz', '.txz', '.tlz', '.tar.lz', '.tar.lzma')
ZIP_EXTENSIONS = ('.zip', '.whl')
TAR_EXTENSIONS = ('.tar.gz', '.tgz', '.tar')
ARCHIVE_EXTENSIONS = (
    ZIP_EXTENSIONS + BZ2_EXTENSIONS + TAR_EXTENSIONS + XZ_EXTENSIONS)
# Formats pip can actually unpack; bz2/xz families are appended below when
# the corresponding stdlib modules import successfully.
SUPPORTED_EXTENSIONS = ZIP_EXTENSIONS + TAR_EXTENSIONS
# Optional compression support: only advertise bz2/xz handling when the
# interpreter was built with the corresponding modules.
try:
    import bz2  # noqa
    SUPPORTED_EXTENSIONS += BZ2_EXTENSIONS
except ImportError:
    logger.debug('bz2 module is not available')
try:
    # Only for Python 3.3+
    import lzma  # noqa
    SUPPORTED_EXTENSIONS += XZ_EXTENSIONS
except ImportError:
    logger.debug('lzma module is not available')
def import_or_raise(pkg_or_module_string, ExceptionType, *args, **kwargs):
    """Import and return the module named ``pkg_or_module_string``.

    On ImportError, raise ``ExceptionType`` constructed with
    ``*args``/``**kwargs`` instead.  Note that, per ``__import__``
    semantics, a dotted name returns the top-level package.
    """
    try:
        return __import__(pkg_or_module_string)
    except ImportError:
        raise ExceptionType(*args, **kwargs)
def ensure_dir(path):
    # type: (str) -> None
    """os.makedirs without failing when the directory already exists."""
    try:
        os.makedirs(path)
    except OSError as e:
        # Re-raise anything other than "already exists".
        if e.errno != errno.EEXIST:
            raise
def get_prog():
    """Return the program name to display in usage/help text.

    When pip is run as ``python -m pip`` (argv[0] is '__main__.py' or
    '-c'), report "<python> -m pip" rather than the raw argv entry;
    fall back to plain 'pip' when argv is unusable.
    """
    try:
        prog = os.path.basename(sys.argv[0])
    except (AttributeError, TypeError, IndexError):
        return 'pip'
    if prog in ('__main__.py', '-c'):
        return "%s -m pip" % sys.executable
    return prog
# Retry every half second for up to 3 seconds
@retry(stop_max_delay=3000, wait_fixed=500)
def rmtree(dir, ignore_errors=False):
    # type: (str, bool) -> None
    """shutil.rmtree with retries and a Windows-friendly handler for
    read-only files (see rmtree_errorhandler)."""
    shutil.rmtree(dir, ignore_errors=ignore_errors,
                  onerror=rmtree_errorhandler)
def rmtree_errorhandler(func, path, exc_info):
    """On Windows, the files in .svn are read-only, so when rmtree() tries to
    remove them, an exception is thrown. We catch that here, remove the
    read-only attribute, and hopefully continue without problems."""
    # if file type currently read only
    if os.stat(path).st_mode & stat.S_IREAD:
        # convert to read/write
        os.chmod(path, stat.S_IWRITE)
        # use the original function to repeat the operation
        func(path)
        return
    else:
        # Not a read-only problem: re-raise the exception currently being
        # handled (we are invoked from shutil.rmtree's except block, so a
        # bare raise re-raises that original error).
        raise
def display_path(path):
    # type: (str) -> str
    """Gives the display value for a given path, making it relative to cwd
    if possible."""
    path = os.path.normcase(os.path.abspath(path))
    if sys.version_info[0] == 2:
        # On Python 2 round-trip through the filesystem encoding so the
        # result is printable even with non-ASCII characters.
        path = path.decode(sys.getfilesystemencoding(), 'replace')
        path = path.encode(sys.getdefaultencoding(), 'replace')
    if path.startswith(os.getcwd() + os.path.sep):
        path = '.' + path[len(os.getcwd()):]
    return path
def backup_dir(dir, ext='.bak'):
    """Figure out the name of a directory to back up the given dir to
    (adding .bak, .bak2, etc)"""
    count = 1
    candidate = dir + ext
    # Keep appending a counter until we find a name that is free.
    while os.path.exists(candidate):
        count += 1
        candidate = dir + ext + str(count)
    return candidate
def ask_path_exists(message, options):
    """Like ask(), but let $PIP_EXISTS_ACTION pre-answer the question."""
    preset_actions = os.environ.get('PIP_EXISTS_ACTION', '').split()
    for action in preset_actions:
        if action in options:
            return action
    return ask(message, options)
def ask(message, options):
    """Ask the message interactively, with the given possible responses"""
    while True:
        # Refuse to prompt when the user explicitly forbade input.
        if os.environ.get('PIP_NO_INPUT'):
            raise Exception(
                'No input was expected ($PIP_NO_INPUT set); question: %s' %
                message
            )
        response = input(message).strip().lower()
        if response in options:
            return response
        print(
            'Your response (%r) was not one of the expected responses: '
            '%s' % (response, ', '.join(options))
        )
def format_size(bytes):
    """Render a byte count as a human-readable MB/kB/bytes string
    (decimal units, matching the original thresholds)."""
    if bytes > 1000 * 1000:
        return '%.1fMB' % (bytes / 1000.0 / 1000)
    if bytes > 10 * 1000:
        return '%ikB' % (bytes / 1000)
    if bytes > 1000:
        return '%.1fkB' % (bytes / 1000.0)
    return '%ibytes' % bytes
def is_installable_dir(path):
    """Return True if `path` is a directory containing a setup.py file."""
    return (os.path.isdir(path) and
            os.path.isfile(os.path.join(path, 'setup.py')))
def is_svn_page(html):
    """
    Returns true if the page appears to be the index page of an svn repository
    """
    revision_title = re.search(r'<title>[^<]*Revision \d+:', html)
    powered_by_svn = re.search(
        r'Powered by (?:<a[^>]*?>)?Subversion', html, re.I)
    return revision_title and powered_by_svn
def file_contents(filename):
    """Read ``filename`` in binary mode and return its contents decoded
    as UTF-8."""
    with open(filename, 'rb') as fp:
        raw = fp.read()
    return raw.decode('utf-8')
def read_chunks(file, size=io.DEFAULT_BUFFER_SIZE):
    """Yield pieces of data from a file-like object until EOF."""
    chunk = file.read(size)
    while chunk:
        yield chunk
        chunk = file.read(size)
def split_leading_dir(path):
    """Split off the first path component.

    Returns a two-item sequence (leading_dir, rest); rest is '' when the
    path has a single component.  Splits on whichever of '/' or '\\'
    appears first.
    """
    path = path.lstrip('/').lstrip('\\')
    has_slash = '/' in path
    has_backslash = '\\' in path
    if has_slash and (not has_backslash or path.find('/') < path.find('\\')):
        return path.split('/', 1)
    if has_backslash:
        return path.split('\\', 1)
    return path, ''
def has_leading_dir(paths):
    """Returns true if all the paths have the same leading path name
    (i.e., everything is in one subdirectory in an archive)"""
    prefixes = set()
    for path in paths:
        prefix, _ = split_leading_dir(path)
        # A component with no leading dir means the archive is not
        # contained in a single subdirectory.
        if not prefix:
            return False
        prefixes.add(prefix)
        if len(prefixes) > 1:
            return False
    return True
def normalize_path(path, resolve_symlinks=True):
    """
    Convert a path to its canonical, case-normalized, absolute version.
    """
    expanded = expanduser(path)
    if resolve_symlinks:
        absolute = os.path.realpath(expanded)
    else:
        absolute = os.path.abspath(expanded)
    return os.path.normcase(absolute)
def splitext(path):
    """Like os.path.splitext, but take off .tar too"""
    base, ext = posixpath.splitext(path)
    if base.lower().endswith('.tar'):
        # Fold '.tar' back into the extension ('.tar.gz', '.tar.bz2', ...).
        return base[:-4], base[-4:] + ext
    return base, ext
def renames(old, new):
    """Like os.renames(), but handles renaming across devices."""
    # Implementation borrowed from os.renames().
    target_dir, target_name = os.path.split(new)
    if target_dir and target_name and not os.path.exists(target_dir):
        os.makedirs(target_dir)
    # shutil.move (unlike os.rename) copes with cross-device moves.
    shutil.move(old, new)
    source_dir, source_name = os.path.split(old)
    if source_dir and source_name:
        try:
            # Prune any now-empty parent directories of the old location.
            os.removedirs(source_dir)
        except OSError:
            pass
def is_local(path):
    """
    Return True if path is within sys.prefix, if we're running in a virtualenv.

    If we're not in a virtualenv, all paths are considered "local."
    """
    return (not running_under_virtualenv() or
            normalize_path(path).startswith(normalize_path(sys.prefix)))
def dist_is_local(dist):
    """
    Return True if given Distribution object is installed locally
    (i.e. within current virtualenv).

    Always True if we're not in a virtualenv.
    """
    location = dist_location(dist)
    return is_local(location)
def dist_in_usersite(dist):
    """
    Return True if given Distribution is installed in user site.
    """
    return normalize_path(dist_location(dist)).startswith(
        normalize_path(user_site))
def dist_in_site_packages(dist):
    """
    Return True if given Distribution is installed in
    sysconfig.get_python_lib().
    """
    norm_location = normalize_path(dist_location(dist))
    return norm_location.startswith(normalize_path(site_packages))
def dist_is_editable(dist):
    """Is distribution an editable install?

    True when some entry of sys.path contains '<project_name>.egg-link'.
    """
    egg_link_name = dist.project_name + '.egg-link'
    return any(
        os.path.isfile(os.path.join(path_item, egg_link_name))
        for path_item in sys.path
    )
def get_installed_distributions(local_only=True,
                                skip=stdlib_pkgs,
                                include_editables=True,
                                editables_only=False,
                                user_only=False):
    """
    Return a list of installed Distribution objects.

    If ``local_only`` is True (default), only return installations
    local to the current virtualenv, if in a virtualenv.

    ``skip`` argument is an iterable of lower-case project names to
    ignore; defaults to stdlib_pkgs

    If ``include_editables`` is False, don't report editables.

    If ``editables_only`` is True , only report editables.

    If ``user_only`` is True , only report installations in the user
    site directory.
    """
    def _always(d):
        # Predicate used for every filter the caller left disabled.
        return True

    local_test = dist_is_local if local_only else _always
    if include_editables:
        editable_test = _always
    else:
        def editable_test(d):
            return not dist_is_editable(d)
    editables_only_test = dist_is_editable if editables_only else _always
    user_test = dist_in_usersite if user_only else _always
    return [
        d for d in pkg_resources.working_set
        if local_test(d) and
        d.key not in skip and
        editable_test(d) and
        editables_only_test(d) and
        user_test(d)
    ]
def egg_link_path(dist):
    """
    Return the path for the .egg-link file if it exists, otherwise, None.

    There's 3 scenarios:
    1) not in a virtualenv
       try to find in site.USER_SITE, then site_packages
    2) in a no-global virtualenv
       try to find in site_packages
    3) in a yes-global virtualenv
       try to find in site_packages, then site.USER_SITE
       (don't look in global location)

    For #1 and #3, there could be odd cases, where there's an egg-link in 2
    locations.

    This method will just return the first one found.
    """
    candidate_sites = []
    if running_under_virtualenv():
        candidate_sites.append(site_packages)
        if not virtualenv_no_global() and user_site:
            candidate_sites.append(user_site)
    else:
        if user_site:
            candidate_sites.append(user_site)
        candidate_sites.append(site_packages)
    link_name = dist.project_name + '.egg-link'
    for site in candidate_sites:
        egglink = os.path.join(site, link_name)
        if os.path.isfile(egglink):
            return egglink
def dist_location(dist):
    """
    Get the site-packages location of this distribution. Generally
    this is dist.location, except in the case of develop-installed
    packages, where dist.location is the source code location, and we
    want to know where the egg-link file is.
    """
    egg_link = egg_link_path(dist)
    return egg_link if egg_link else dist.location
def current_umask():
    """Get the current umask which involves having to set it temporarily."""
    # os.umask() both sets and returns: set a throwaway value, read the
    # old one, then immediately restore it.
    mask = os.umask(0)
    os.umask(mask)
    return mask
def unzip_file(filename, location, flatten=True):
    """
    Unzip the file (with path `filename`) to the destination `location`. All
    files are written based on system defaults and umask (i.e. permissions are
    not preserved), except that regular file members with any execute
    permissions (user, group, or world) have "chmod +x" applied after being
    written. Note that for windows, any execute changes using os.chmod are
    no-ops per the python docs.

    :param flatten: when True and all members share one leading directory,
        strip that directory from the extracted paths.
    """
    ensure_dir(location)
    zipfp = open(filename, 'rb')
    try:
        zip = zipfile.ZipFile(zipfp, allowZip64=True)
        leading = has_leading_dir(zip.namelist()) and flatten
        for info in zip.infolist():
            name = info.filename
            data = zip.read(name)
            fn = name
            if leading:
                fn = split_leading_dir(name)[1]
            fn = os.path.join(location, fn)
            dir = os.path.dirname(fn)
            if fn.endswith('/') or fn.endswith('\\'):
                # A directory
                ensure_dir(fn)
            else:
                ensure_dir(dir)
                fp = open(fn, 'wb')
                try:
                    fp.write(data)
                finally:
                    fp.close()
                # The high 16 bits of external_attr carry the Unix mode.
                mode = info.external_attr >> 16
                # if mode and regular file and any execute permissions for
                # user/group/world?
                if mode and stat.S_ISREG(mode) and mode & 0o111:
                    # make dest file have execute for user/group/world
                    # (chmod +x) no-op on windows per python docs
                    os.chmod(fn, (0o777 - current_umask() | 0o111))
    finally:
        zipfp.close()
def untar_file(filename, location):
    """
    Untar the file (with path `filename`) to the destination `location`.
    All files are written based on system defaults and umask (i.e. permissions
    are not preserved), except that regular file members with any execute
    permissions (user, group, or world) have "chmod +x" applied after being
    written. Note that for windows, any execute changes using os.chmod are
    no-ops per the python docs.
    """
    ensure_dir(location)
    # Pick the tarfile mode from the extension; unknown extensions fall
    # back to transparent compression detection ('r:*').
    if filename.lower().endswith('.gz') or filename.lower().endswith('.tgz'):
        mode = 'r:gz'
    elif filename.lower().endswith(BZ2_EXTENSIONS):
        mode = 'r:bz2'
    elif filename.lower().endswith(XZ_EXTENSIONS):
        mode = 'r:xz'
    elif filename.lower().endswith('.tar'):
        mode = 'r'
    else:
        logger.warning(
            'Cannot determine compression type for file %s', filename,
        )
        mode = 'r:*'
    tar = tarfile.open(filename, mode)
    try:
        # note: python<=2.5 doesn't seem to know about pax headers, filter them
        leading = has_leading_dir([
            member.name for member in tar.getmembers()
            if member.name != 'pax_global_header'
        ])
        for member in tar.getmembers():
            fn = member.name
            if fn == 'pax_global_header':
                continue
            if leading:
                fn = split_leading_dir(fn)[1]
            path = os.path.join(location, fn)
            if member.isdir():
                ensure_dir(path)
            elif member.issym():
                try:
                    # NOTE: relies on tarfile's private API to materialise
                    # the symlink; may need updating for new Python versions.
                    tar._extract_member(member, path)
                except Exception as exc:
                    # Some corrupt tar files seem to produce this
                    # (specifically bad symlinks)
                    logger.warning(
                        'In the tar file %s the member %s is invalid: %s',
                        filename, member.name, exc,
                    )
                    continue
            else:
                try:
                    fp = tar.extractfile(member)
                except (KeyError, AttributeError) as exc:
                    # Some corrupt tar files seem to produce this
                    # (specifically bad symlinks)
                    logger.warning(
                        'In the tar file %s the member %s is invalid: %s',
                        filename, member.name, exc,
                    )
                    continue
                ensure_dir(os.path.dirname(path))
                with open(path, 'wb') as destfp:
                    shutil.copyfileobj(fp, destfp)
                fp.close()
                # Update the timestamp (useful for cython compiled files)
                tar.utime(member, path)
                # member have any execute permissions for user/group/world?
                if member.mode & 0o111:
                    # make dest file have execute for user/group/world
                    # no-op on windows per python docs
                    os.chmod(path, (0o777 - current_umask() | 0o111))
    finally:
        tar.close()
def unpack_file(filename, location, content_type, link):
    """Unpack the archive at ``filename`` into ``location``.

    Dispatches on content type, extension, and file magic: zips
    (including wheels, which are not flattened) are unzipped, tarballs
    untarred, and svn index pages checked out.

    :raises InstallationError: when the archive format cannot be determined.
    """
    filename = os.path.realpath(filename)
    if (content_type == 'application/zip' or
            filename.lower().endswith(ZIP_EXTENSIONS) or
            zipfile.is_zipfile(filename)):
        unzip_file(
            filename,
            location,
            flatten=not filename.endswith('.whl')
        )
    elif (content_type == 'application/x-gzip' or
            tarfile.is_tarfile(filename) or
            filename.lower().endswith(
                TAR_EXTENSIONS + BZ2_EXTENSIONS + XZ_EXTENSIONS)):
        untar_file(filename, location)
    elif (content_type and content_type.startswith('text/html') and
            is_svn_page(file_contents(filename))):
        # We don't really care about this
        from pip._internal.vcs.subversion import Subversion
        Subversion('svn+' + link.url).unpack(location)
    else:
        # FIXME: handle?
        # FIXME: magic signatures?
        logger.critical(
            'Cannot unpack file %s (downloaded from %s, content-type: %s); '
            'cannot detect archive format',
            filename, location, content_type,
        )
        raise InstallationError(
            'Cannot determine archive format of %s' % location
        )
def call_subprocess(cmd, show_stdout=True, cwd=None,
                    on_returncode='raise',
                    command_desc=None,
                    extra_environ=None, unset_environ=None, spinner=None):
    """
    Run ``cmd`` in a subprocess with stderr merged into stdout; when
    show_stdout is False, return the captured combined output as a string.

    Args:
      cmd: the command to run, as a list of arguments.
      show_stdout: let the child write directly to our stdout (rarely
        wanted; see the long comment below).
      cwd: working directory for the child process.
      on_returncode: 'raise' (raise InstallationError on a non-zero exit),
        'warn' (log a warning), or 'ignore'.
      command_desc: display string for logging; derived from cmd if None.
      extra_environ: mapping merged over os.environ for the child.
      unset_environ: an iterable of environment variable names to unset
        prior to calling subprocess.Popen().
      spinner: optional spinner advanced while output is being consumed.
    """
    if unset_environ is None:
        unset_environ = []
    # This function's handling of subprocess output is confusing and I
    # previously broke it terribly, so as penance I will write a long comment
    # explaining things.
    #
    # The obvious thing that affects output is the show_stdout=
    # kwarg. show_stdout=True means, let the subprocess write directly to our
    # stdout. Even though it is nominally the default, it is almost never used
    # inside pip (and should not be used in new code without a very good
    # reason); as of 2016-02-22 it is only used in a few places inside the VCS
    # wrapper code. Ideally we should get rid of it entirely, because it
    # creates a lot of complexity here for a rarely used feature.
    #
    # Most places in pip set show_stdout=False. What this means is:
    # - We connect the child stdout to a pipe, which we read.
    # - By default, we hide the output but show a spinner -- unless the
    #   subprocess exits with an error, in which case we show the output.
    # - If the --verbose option was passed (= loglevel is DEBUG), then we show
    #   the output unconditionally. (But in this case we don't want to show
    #   the output a second time if it turns out that there was an error.)
    #
    # stderr is always merged with stdout (even if show_stdout=True).
    if show_stdout:
        stdout = None
    else:
        stdout = subprocess.PIPE
    if command_desc is None:
        # Build a shell-like rendering of cmd, quoting tricky arguments.
        cmd_parts = []
        for part in cmd:
            if ' ' in part or '\n' in part or '"' in part or "'" in part:
                part = '"%s"' % part.replace('"', '\\"')
            cmd_parts.append(part)
        command_desc = ' '.join(cmd_parts)
    logger.debug("Running command %s", command_desc)
    env = os.environ.copy()
    if extra_environ:
        env.update(extra_environ)
    for name in unset_environ:
        env.pop(name, None)
    try:
        proc = subprocess.Popen(
            cmd, stderr=subprocess.STDOUT, stdin=subprocess.PIPE,
            stdout=stdout, cwd=cwd, env=env,
        )
        # The child gets no stdin; close it immediately.
        proc.stdin.close()
    except Exception as exc:
        logger.critical(
            "Error %s while executing command %s", exc, command_desc,
        )
        raise
    all_output = []
    if stdout is not None:
        while True:
            line = console_to_str(proc.stdout.readline())
            if not line:
                break
            line = line.rstrip()
            all_output.append(line + '\n')
            if logger.getEffectiveLevel() <= std_logging.DEBUG:
                # Show the line immediately
                logger.debug(line)
            else:
                # Update the spinner
                if spinner is not None:
                    spinner.spin()
    try:
        proc.wait()
    finally:
        if proc.stdout:
            proc.stdout.close()
    if spinner is not None:
        if proc.returncode:
            spinner.finish("error")
        else:
            spinner.finish("done")
    if proc.returncode:
        if on_returncode == 'raise':
            if (logger.getEffectiveLevel() > std_logging.DEBUG and
                    not show_stdout):
                logger.info(
                    'Complete output from command %s:', command_desc,
                )
                logger.info(
                    ''.join(all_output) +
                    '\n----------------------------------------'
                )
            raise InstallationError(
                'Command "%s" failed with error code %s in %s'
                % (command_desc, proc.returncode, cwd))
        elif on_returncode == 'warn':
            logger.warning(
                'Command "%s" had error code %s in %s',
                command_desc, proc.returncode, cwd,
            )
        elif on_returncode == 'ignore':
            pass
        else:
            raise ValueError('Invalid value: on_returncode=%s' %
                             repr(on_returncode))
    if not show_stdout:
        return ''.join(all_output)
def read_text_file(filename):
    """Return the contents of *filename* decoded to text.

    Decoding is attempted with utf-8 first, then the preferred system
    encoding (e.g. cp1252 on some Windows machines), then latin1. Decoding
    bytes as latin1 can never fail, so in the worst case the result contains
    some garbage characters rather than raising.
    """
    with open(filename, 'rb') as fp:
        data = fp.read()

    # Try each candidate in order; latin1 guarantees the loop produces text.
    for candidate in ['utf-8', locale.getpreferredencoding(False), 'latin1']:
        try:
            data = data.decode(candidate)
        except UnicodeDecodeError:
            continue
        break

    assert type(data) != bytes  # Latin1 should have worked.
    return data
def _make_build_dir(build_dir):
    # Create the build directory (raises if it already exists) and drop
    # pip's delete-marker file into it.
    # NOTE(review): write_delete_marker_file presumably marks the directory
    # as pip-owned/safe to remove later -- confirm at its definition.
    os.makedirs(build_dir)
    write_delete_marker_file(build_dir)
class FakeFile(object):
    """Wrap a list of lines in an object with readline() to make
    ConfigParser happy."""
    def __init__(self, lines):
        # Generator yielding the given lines one at a time.
        self._gen = (l for l in lines)
    def readline(self):
        try:
            try:
                # Normal path: advance the generator with the next() builtin.
                return next(self._gen)
            except NameError:
                # Fallback for interpreters without the next() builtin
                # (pre-2.6 Python); dead code on Python 3.
                return self._gen.next()
        except StopIteration:
            # File-like contract: empty string signals EOF.
            return ''
    def __iter__(self):
        return self._gen
class StreamWrapper(StringIO):
    """A StringIO that remembers the stream it replaced.

    Used by captured_output() so code that inspects attributes of the real
    stream (e.g. ``encoding``) keeps working while output is captured.
    """

    @classmethod
    def from_stream(cls, orig_stream):
        """Return a new wrapper remembering *orig_stream*.

        Bug fix: the stream is stored on the *instance* rather than on the
        class, so nested/concurrent captures of different streams (stdout
        and stderr, say) no longer clobber each other's orig_stream.
        """
        ret = cls()
        ret.orig_stream = orig_stream
        return ret

    # compileall.compile_dir() needs stdout.encoding to print to stdout
    @property
    def encoding(self):
        return self.orig_stream.encoding
@contextlib.contextmanager
def captured_output(stream_name):
    """Temporarily replace sys.<stream_name> with a StreamWrapper.

    Yields the replacement stream; the original stream is restored on exit.
    Used by captured_stdout/stdin/stderr.
    Taken from Lib/support/__init__.py in the CPython repo.
    """
    original = getattr(sys, stream_name)
    setattr(sys, stream_name, StreamWrapper.from_stream(original))
    try:
        yield getattr(sys, stream_name)
    finally:
        setattr(sys, stream_name, original)
def captured_stdout():
    """Capture the output of sys.stdout:

       with captured_stdout() as stdout:
           print('hello')
       self.assertEqual(stdout.getvalue(), 'hello\n')

    Taken from Lib/support/__init__.py in the CPython repo.
    """
    return captured_output('stdout')
class cached_property(object):
    """A property computed once per instance, then replaced by a plain
    attribute holding the result. Deleting the attribute resets the
    property so the next access recomputes it.
    Source: https://github.com/bottlepy/bottle/blob/0.11.5/bottle.py#L175
    """

    def __init__(self, func):
        # Propagate the wrapped function's docstring, if any.
        self.__doc__ = getattr(func, '__doc__')
        self.func = func

    def __get__(self, obj, cls):
        # Class-level access returns the descriptor itself.
        if obj is None:
            return self
        # Compute once, then shadow this descriptor with an ordinary
        # instance attribute so later lookups never reach __get__ again.
        result = self.func(obj)
        obj.__dict__[self.func.__name__] = result
        return result
def get_installed_version(dist_name, lookup_dirs=None):
    """Get the installed version of dist_name avoiding pkg_resources cache"""
    # Create a requirement that we'll look for inside of setuptools.
    requirement = pkg_resources.Requirement.parse(dist_name)

    # Build a brand-new working set on every call; reusing the module-level
    # cached one could return stale results.
    if lookup_dirs is None:
        working_set = pkg_resources.WorkingSet()
    else:
        working_set = pkg_resources.WorkingSet(lookup_dirs)

    # None when the distribution is not installed.
    dist = working_set.find(requirement)
    return dist.version if dist else None
def consume(iterator):
    """Consume an iterable at C speed."""
    # A deque with maxlen=0 drains the iterator without storing anything.
    deque(iterator, maxlen=0)
def enum(*sequential, **named):
    """Simulate an enum: build a class from member names.

    Positional names are numbered 0..N-1; keyword arguments supply explicit
    values. The returned class also carries a ``reverse_mapping`` attribute
    mapping values back to names.
    """
    members = {name: index for index, name in enumerate(sequential)}
    members.update(named)
    members['reverse_mapping'] = {
        value: key for key, value in members.items()
    }
    return type('Enum', (), members)
from __future__ import absolute_import | |
import datetime | |
import json | |
import logging | |
import os.path | |
import sys | |
from pip._vendor import lockfile | |
from pip._vendor.packaging import version as packaging_version | |
from pip._internal.compat import WINDOWS | |
from pip._internal.index import PackageFinder | |
from pip._internal.locations import USER_CACHE_DIR, running_under_virtualenv | |
from pip._internal.utils.filesystem import check_path_owner | |
from pip._internal.utils.misc import ensure_dir, get_installed_version | |
SELFCHECK_DATE_FMT = "%Y-%m-%dT%H:%M:%SZ" | |
logger = logging.getLogger(__name__) | |
class VirtualenvSelfCheckState(object):
    """Stores pip's self-check state in a JSON file inside sys.prefix
    (i.e. inside the active virtualenv)."""

    def __init__(self):
        self.statefile_path = os.path.join(sys.prefix, "pip-selfcheck.json")
        # Load any previously saved state; a missing or corrupt file just
        # means we start from scratch.
        try:
            with open(self.statefile_path) as statefile:
                self.state = json.load(statefile)
        except (IOError, ValueError):
            self.state = {}

    def save(self, pypi_version, current_time):
        # Attempt to write out our version check file
        payload = {
            "last_check": current_time.strftime(SELFCHECK_DATE_FMT),
            "pypi_version": pypi_version,
        }
        with open(self.statefile_path, "w") as statefile:
            json.dump(payload, statefile, sort_keys=True,
                      separators=(",", ":"))
class GlobalSelfCheckState(object):
    """Stores pip's self-check state in a single shared JSON file under
    USER_CACHE_DIR, keyed by sys.prefix so multiple environments can
    coexist in the same file."""
    def __init__(self):
        self.statefile_path = os.path.join(USER_CACHE_DIR, "selfcheck.json")
        # Load the existing state
        try:
            with open(self.statefile_path) as statefile:
                # Only this environment's entry (keyed by sys.prefix).
                self.state = json.load(statefile)[sys.prefix]
        except (IOError, ValueError, KeyError):
            # Missing file, corrupt JSON, or no entry for this prefix:
            # start with empty state.
            self.state = {}
    def save(self, pypi_version, current_time):
        # Check to make sure that we own the directory
        if not check_path_owner(os.path.dirname(self.statefile_path)):
            return
        # Now that we've ensured the directory is owned by this user, we'll go
        # ahead and make sure that all our directories are created.
        ensure_dir(os.path.dirname(self.statefile_path))
        # Attempt to write out our version check file
        # Hold a lock while doing read-modify-write so concurrent pip
        # processes don't lose each other's entries.
        with lockfile.LockFile(self.statefile_path):
            if os.path.exists(self.statefile_path):
                with open(self.statefile_path) as statefile:
                    state = json.load(statefile)
            else:
                state = {}
            state[sys.prefix] = {
                "last_check": current_time.strftime(SELFCHECK_DATE_FMT),
                "pypi_version": pypi_version,
            }
            with open(self.statefile_path, "w") as statefile:
                json.dump(state, statefile, sort_keys=True,
                          separators=(",", ":"))
def load_selfcheck_statefile():
    """Return the self-check state store appropriate for this environment:
    per-virtualenv when one is active, otherwise the global user cache."""
    if running_under_virtualenv():
        return VirtualenvSelfCheckState()
    return GlobalSelfCheckState()
def pip_version_check(session, options):
    """Check for an update for pip.
    Limit the frequency of checks to once per week. State is stored either in
    the active virtualenv or in the user's USER_CACHE_DIR keyed off the prefix
    of the pip script path.
    """
    installed_version = get_installed_version("pip")
    if not installed_version:
        # pip itself is not installed (e.g. running from a checkout): nothing
        # meaningful to compare against.
        return
    pip_version = packaging_version.parse(installed_version)
    pypi_version = None
    try:
        state = load_selfcheck_statefile()
        current_time = datetime.datetime.utcnow()
        # Determine if we need to refresh the state
        if "last_check" in state.state and "pypi_version" in state.state:
            last_check = datetime.datetime.strptime(
                state.state["last_check"],
                SELFCHECK_DATE_FMT
            )
            # Reuse the cached answer if the last check was under a week ago.
            if (current_time - last_check).total_seconds() < 7 * 24 * 60 * 60:
                pypi_version = state.state["pypi_version"]
        # Refresh the version if we need to or just see if we need to warn
        if pypi_version is None:
            # Lets use PackageFinder to see what the latest pip version is
            finder = PackageFinder(
                find_links=options.find_links,
                index_urls=[options.index_url] + options.extra_index_urls,
                allow_all_prereleases=False,  # Explicitly set to False
                trusted_hosts=options.trusted_hosts,
                process_dependency_links=options.process_dependency_links,
                session=session,
            )
            all_candidates = finder.find_all_candidates("pip")
            if not all_candidates:
                return
            pypi_version = str(
                max(all_candidates, key=lambda c: c.version).version
            )
            # save that we've performed a check
            state.save(pypi_version, current_time)
        remote_version = packaging_version.parse(pypi_version)
        # Determine if our pypi_version is older
        # (skip the warning when only the pre/post/dev segment differs,
        # i.e. the base versions are equal).
        if (pip_version < remote_version and
                pip_version.base_version != remote_version.base_version):
            # Advise "python -m pip" on Windows to avoid issues
            # with overwriting pip.exe.
            if WINDOWS:
                pip_cmd = "python -m pip"
            else:
                pip_cmd = "pip"
            logger.warning(
                "You are using pip version %s, however version %s is "
                "available.\nYou should consider upgrading via the "
                "'%s install --upgrade pip' command.",
                pip_version, pypi_version, pip_cmd
            )
    except Exception:
        # The self-check is best-effort; never let it break the command.
        logger.debug(
            "There was an error checking the latest version of pip",
            exc_info=True,
        )
from __future__ import absolute_import | |
import logging | |
import sys | |
from email.parser import FeedParser # type: ignore | |
from pip._vendor import pkg_resources | |
from pip._vendor.packaging import specifiers, version | |
from pip._internal import exceptions | |
logger = logging.getLogger(__name__) | |
def check_requires_python(requires_python):
    """Return True when the running interpreter satisfies *requires_python*.

    A value of None means the package declared no constraint, which counts
    as compatible. Raises specifiers.InvalidSpecifier when the string is not
    a valid specifier set.
    """
    if requires_python is None:
        # The package provides no information
        return True
    allowed = specifiers.SpecifierSet(requires_python)
    # Compare using major.minor.micro only.
    running = version.parse(
        '.'.join(str(piece) for piece in sys.version_info[:3])
    )
    return running in allowed
def get_metadata(dist):
    """Return the raw metadata text for *dist*, preferring the dist-info
    METADATA file over the legacy egg-info PKG-INFO file.

    Returns None when neither metadata file is present.
    """
    if (isinstance(dist, pkg_resources.DistInfoDistribution) and
            dist.has_metadata('METADATA')):
        return dist.get_metadata('METADATA')
    if dist.has_metadata('PKG-INFO'):
        return dist.get_metadata('PKG-INFO')
    return None
def check_dist_requires_python(dist):
    # Raise UnsupportedPythonVersion when the installed interpreter falls
    # outside the distribution's declared Requires-Python range.
    # NOTE(review): get_metadata() can return None when the distribution has
    # neither METADATA nor PKG-INFO, in which case feed() would fail --
    # presumably every real distribution ships one of the two; confirm.
    metadata = get_metadata(dist)
    feed_parser = FeedParser()
    feed_parser.feed(metadata)
    pkg_info_dict = feed_parser.close()
    requires_python = pkg_info_dict.get('Requires-Python')
    try:
        if not check_requires_python(requires_python):
            raise exceptions.UnsupportedPythonVersion(
                "%s requires Python '%s' but the running Python is %s" % (
                    dist.project_name,
                    requires_python,
                    '.'.join(map(str, sys.version_info[:3])),)
            )
    except specifiers.InvalidSpecifier as e:
        # A malformed Requires-Python entry only warrants a warning, not a
        # hard failure.
        logger.warning(
            "Package %s has an invalid Requires-Python entry %s - %s",
            dist.project_name, requires_python, e,
        )
        return
def get_installer(dist):
    """Return the tool recorded in *dist*'s INSTALLER metadata.

    Yields the first non-blank line, stripped; '' when there is no
    INSTALLER metadata or it contains only blank lines.
    """
    if dist.has_metadata('INSTALLER'):
        for line in dist.get_metadata_lines('INSTALLER'):
            stripped = line.strip()
            if stripped:
                return stripped
    return ''
# Shim to wrap setup.py invocation with setuptools
# The shim imports setuptools before exec()ing the script so that plain
# distutils setup.py files still run under setuptools' machinery, uses
# tokenize.open() when available so PEP 263 encoding declarations are
# honored, and normalizes Windows line endings before compiling.
# NOTE(review): the %r placeholder is presumably interpolated with the path
# to the real setup.py at the call site -- confirm where this is used.
SETUPTOOLS_SHIM = (
    "import setuptools, tokenize;__file__=%r;"
    "f=getattr(tokenize, 'open', open)(__file__);"
    "code=f.read().replace('\\r\\n', '\\n');"
    "f.close();"
    "exec(compile(code, __file__, 'exec'))"
)
from __future__ import absolute_import | |
import logging | |
import os.path | |
import tempfile | |
from pip._internal.utils.misc import rmtree | |
logger = logging.getLogger(__name__) | |
class TempDirectory(object):
    """Owns a temporary directory and knows how to clean it up.

    Usable either as a context manager (the directory is created on entry
    and, when ``delete`` is true, removed on exit) or as a plain object via
    the create()/cleanup() methods.

    Attributes:
        path: location of the created temporary directory, or None.
        delete: whether the directory is removed on context-manager exit.
    """

    def __init__(self, path=None, delete=None, kind="temp"):
        super(TempDirectory, self).__init__()

        # With no explicit path and no explicit delete choice, default to
        # cleaning up the directory we will create.
        if path is None and delete is None:
            delete = True

        self.path = path
        self.delete = delete
        self.kind = kind

    def __repr__(self):
        return "<{} {!r}>".format(self.__class__.__name__, self.path)

    def __enter__(self):
        self.create()
        return self

    def __exit__(self, exc, value, tb):
        if self.delete:
            self.cleanup()

    def create(self):
        """Create the temporary directory and record it in self.path."""
        if self.path is not None:
            logger.debug(
                "Skipped creation of temporary directory: {}".format(self.path)
            )
            return
        # We realpath here because some systems have their default tmpdir
        # symlinked to another directory. This tends to confuse build
        # scripts, so we canonicalize the path by traversing potential
        # symlinks here.
        self.path = os.path.realpath(
            tempfile.mkdtemp(prefix="pip-{}-".format(self.kind))
        )
        logger.debug("Created temporary directory: {}".format(self.path))

    def cleanup(self):
        """Remove the directory (if it still exists) and reset self.path."""
        if self.path is not None and os.path.exists(self.path):
            rmtree(self.path)
        self.path = None
"""For neatly implementing static typing in pip. | |
`mypy` - the static type analysis tool we use - uses the `typing` module, which | |
provides core functionality fundamental to mypy's functioning. | |
Generally, `typing` would be imported at runtime and used in that fashion - | |
it acts as a no-op at runtime and does not have any run-time overhead by | |
design. | |
As it turns out, `typing` is not vendorable - it uses separate sources for | |
Python 2/Python 3. Thus, this codebase can not expect it to be present. | |
To work around this, mypy allows the typing import to be behind a False-y | |
optional to prevent it from running at runtime and type-comments can be used | |
to remove the need for the types to be accessible directly during runtime. | |
This module provides the False-y guard in a nicely named fashion so that a | |
curious maintainer can reach here to read this. | |
In pip, all static-typing related imports should be guarded as follows: | |
from pip.utils.typing import MYPY_CHECK_RUNNING | |
if MYPY_CHECK_RUNNING: | |
from typing import ... | |
Ref: https://github.com/python/mypy/issues/3216 | |
""" | |
MYPY_CHECK_RUNNING = False |
from __future__ import absolute_import, division | |
import contextlib | |
import itertools | |
import logging | |
import sys | |
import time | |
from signal import SIGINT, default_int_handler, signal | |
from pip._vendor import six | |
from pip._vendor.progress.bar import ( | |
Bar, ChargingBar, FillingCirclesBar, FillingSquaresBar, IncrementalBar, | |
ShadyBar, | |
) | |
from pip._vendor.progress.helpers import HIDE_CURSOR, SHOW_CURSOR, WritelnMixin | |
from pip._vendor.progress.spinner import Spinner | |
from pip._internal.compat import WINDOWS | |
from pip._internal.utils.logging import get_indentation | |
from pip._internal.utils.misc import format_size | |
from pip._internal.utils.typing import MYPY_CHECK_RUNNING | |
if MYPY_CHECK_RUNNING: | |
from typing import Any | |
try: | |
from pip._vendor import colorama | |
# Lots of different errors can come from this, including SystemError and | |
# ImportError. | |
except Exception: | |
colorama = None | |
logger = logging.getLogger(__name__) | |
def _select_progress_class(preferred, fallback):
    """Pick *preferred* only when its output file can encode every character
    the bar would draw; otherwise return *fallback* (the plain ASCII bar)."""
    encoding = getattr(preferred.file, "encoding", None)

    # Unknown encoding: assume the stream cannot render unicode bars.
    if not encoding:
        return fallback

    # Gather every character the preferred bar might draw.
    glyphs = [
        getattr(preferred, "empty_fill", six.text_type()),
        getattr(preferred, "fill", six.text_type()),
    ]
    glyphs.extend(getattr(preferred, "phases", []))

    # If the stream's encoding can represent all of them, the fancy bar is
    # safe; otherwise fall back to plaintext.
    try:
        six.text_type().join(glyphs).encode(encoding)
    except UnicodeEncodeError:
        return fallback
    return preferred
_BaseBar = _select_progress_class(IncrementalBar, Bar) # type: Any | |
class InterruptibleMixin(object):
    """
    Helper to ensure that self.finish() gets called on keyboard interrupt.
    This allows downloads to be interrupted without leaving temporary state
    (like hidden cursors) behind.
    This class is similar to the progress library's existing SigIntMixin
    helper, but as of version 1.2, that helper has the following problems:
    1. It calls sys.exit().
    2. It discards the existing SIGINT handler completely.
    3. It leaves its own handler in place even after an uninterrupted finish,
       which will have unexpected delayed effects if the user triggers an
       unrelated keyboard interrupt some time after a progress-displaying
       download has already completed, for example.
    """
    def __init__(self, *args, **kwargs):
        """
        Save the original SIGINT handler for later.
        """
        super(InterruptibleMixin, self).__init__(*args, **kwargs)
        # signal() installs our handler and returns the previous one.
        self.original_handler = signal(SIGINT, self.handle_sigint)
        # If signal() returns None, the previous handler was not installed from
        # Python, and we cannot restore it. This probably should not happen,
        # but if it does, we must restore something sensible instead, at least.
        # The least bad option should be Python's default SIGINT handler, which
        # just raises KeyboardInterrupt.
        if self.original_handler is None:
            self.original_handler = default_int_handler
    def finish(self):
        """
        Restore the original SIGINT handler after finishing.
        This should happen regardless of whether the progress display finishes
        normally, or gets interrupted.
        """
        super(InterruptibleMixin, self).finish()
        signal(SIGINT, self.original_handler)
    def handle_sigint(self, signum, frame):
        """
        Call self.finish() before delegating to the original SIGINT handler.
        This handler should only be in place while the progress display is
        active.
        """
        self.finish()
        self.original_handler(signum, frame)
class SilentBar(Bar):
    # A progress bar that renders nothing: update() is a no-op.
    def update(self):
        pass
class BlueEmojiBar(IncrementalBar):
    # A bar drawn with blue emoji glyphs instead of ASCII fill characters.
    suffix = "%(percent)d%%"
    bar_prefix = " "
    bar_suffix = " "
    # Small blue diamond, large blue diamond, blue circle.
    phases = (u"\U0001F539", u"\U0001F537", u"\U0001F535")  # type: Any
class DownloadProgressMixin(object):
    """Mixin adding human-readable download statistics to a progress bar."""

    def __init__(self, *args, **kwargs):
        super(DownloadProgressMixin, self).__init__(*args, **kwargs)
        # Indent the message so it lines up with pip's log output.
        self.message = (" " * (get_indentation() + 2)) + self.message

    @property
    def downloaded(self):
        # NOTE(review): self.index is presumed to be the bytes downloaded so
        # far (maintained by the progress library) -- confirm.
        return format_size(self.index)

    @property
    def download_speed(self):
        # Avoid zero division errors...
        return "..." if self.avg == 0.0 else format_size(1 / self.avg) + "/s"

    @property
    def pretty_eta(self):
        return "eta %s" % self.eta_td if self.eta else ""

    def iter(self, it, n=1):
        # Yield each item, advancing the display by n per item, and finish
        # the display once the iterable is exhausted.
        for item in it:
            yield item
            self.next(n)
        self.finish()
class WindowsMixin(object):
    """Mixin making progress display work on Windows consoles."""
    def __init__(self, *args, **kwargs):
        # The Windows terminal does not support the hide/show cursor ANSI codes
        # even with colorama. So we'll ensure that hide_cursor is False on
        # Windows.
        # This call needs to go before the super() call, so that hide_cursor
        # is set in time. The base progress bar class writes the "hide cursor"
        # code to the terminal in its init, so if we don't set this soon
        # enough, we get a "hide" with no corresponding "show"...
        if WINDOWS and self.hide_cursor:
            self.hide_cursor = False
        super(WindowsMixin, self).__init__(*args, **kwargs)
        # Check if we are running on Windows and we have the colorama module,
        # if we do then wrap our file with it.
        if WINDOWS and colorama:
            self.file = colorama.AnsiToWin32(self.file)
            # The progress code expects to be able to call self.file.isatty()
            # but the colorama.AnsiToWin32() object doesn't have that, so we'll
            # add it.
            self.file.isatty = lambda: self.file.wrapped.isatty()
            # The progress code expects to be able to call self.file.flush()
            # but the colorama.AnsiToWin32() object doesn't have that, so we'll
            # add it.
            self.file.flush = lambda: self.file.wrapped.flush()
class BaseDownloadProgressBar(WindowsMixin, InterruptibleMixin,
                              DownloadProgressMixin):
    """Shared configuration for all download progress bar variants."""
    # Render to stdout; the %(...)s placeholders are filled from this
    # object's attributes/properties (e.g. the DownloadProgressMixin ones).
    file = sys.stdout
    message = "%(percent)d%%"
    suffix = "%(downloaded)s %(download_speed)s %(pretty_eta)s"
# NOTE: The "type: ignore" comments on the following classes are there to
# work around https://github.com/python/typing/issues/241
# Each class below is just a concrete pairing of BaseDownloadProgressBar
# with one of the progress library's rendering styles; BAR_TYPES maps the
# --progress-bar option values onto them.
class DefaultDownloadProgressBar(BaseDownloadProgressBar,
                                 _BaseBar):  # type: ignore
    pass
class DownloadSilentBar(BaseDownloadProgressBar, SilentBar):  # type: ignore
    pass
class DownloadIncrementalBar(BaseDownloadProgressBar,  # type: ignore
                             IncrementalBar):
    pass
class DownloadChargingBar(BaseDownloadProgressBar,  # type: ignore
                          ChargingBar):
    pass
class DownloadShadyBar(BaseDownloadProgressBar, ShadyBar):  # type: ignore
    pass
class DownloadFillingSquaresBar(BaseDownloadProgressBar,  # type: ignore
                                FillingSquaresBar):
    pass
class DownloadFillingCirclesBar(BaseDownloadProgressBar,  # type: ignore
                                FillingCirclesBar):
    pass
class DownloadBlueEmojiProgressBar(BaseDownloadProgressBar,  # type: ignore
                                   BlueEmojiBar):
    pass
class DownloadProgressSpinner(WindowsMixin, InterruptibleMixin,
                              DownloadProgressMixin, WritelnMixin, Spinner):
    """Spinner shown while downloading when the total size is unknown."""

    file = sys.stdout
    suffix = "%(downloaded)s %(download_speed)s"

    def next_phase(self):
        # Lazily build an endless cycle over the spinner glyphs.
        if not hasattr(self, "_phaser"):
            self._phaser = itertools.cycle(self.phases)
        return next(self._phaser)

    def update(self):
        rendered_message = self.message % self
        phase = self.next_phase()
        rendered_suffix = self.suffix % self
        # message, spinner glyph, and suffix separated by single spaces,
        # omitting the separator next to an empty part.
        pieces = [rendered_message]
        if rendered_message:
            pieces.append(" ")
        pieces.append(phase)
        if rendered_suffix:
            pieces.append(" ")
        pieces.append(rendered_suffix)
        self.writeln(''.join(pieces))
# Maps the --progress-bar option value to a (bar, spinner) class pair:
# the bar is used when the total download size is known, the spinner when
# it is not (see DownloadProgressProvider below).
BAR_TYPES = {
    "off": (DownloadSilentBar, DownloadSilentBar),
    "on": (DefaultDownloadProgressBar, DownloadProgressSpinner),
    "ascii": (DownloadIncrementalBar, DownloadProgressSpinner),
    "pretty": (DownloadFillingCirclesBar, DownloadProgressSpinner),
    "emoji": (DownloadBlueEmojiProgressBar, DownloadProgressSpinner)
}
def DownloadProgressProvider(progress_bar, max=None):
    """Return the bound ``iter`` method of the configured progress display.

    A real bar is used when the total ``max`` is known and non-zero;
    otherwise a spinner is used.
    """
    bar_cls, spinner_cls = BAR_TYPES[progress_bar]
    if max is None or max == 0:
        return spinner_cls().iter
    return bar_cls(max=max).iter
################################################################
# Generic "something is happening" spinners
#
# We don't even try using progress.spinner.Spinner here because it's actually
# simpler to reimplement from scratch than to coerce their code into doing
# what we need.
################################################################
@contextlib.contextmanager
def hidden_cursor(file):
    """Hide the terminal cursor on *file* for the duration of the context."""
    # The Windows terminal does not support the hide/show cursor ANSI codes,
    # even via colorama -- and on non-ttys or under --quiet we don't want to
    # clutter the output with control characters either.
    # See https://github.com/pypa/pip/issues/3418
    use_codes = (
        not WINDOWS and
        file.isatty() and
        logger.getEffectiveLevel() <= logging.INFO
    )
    if not use_codes:
        yield
    else:
        file.write(HIDE_CURSOR)
        try:
            yield
        finally:
            file.write(SHOW_CURSOR)
class RateLimiter(object):
    """Tracks whether a minimum interval has elapsed between updates."""

    def __init__(self, min_update_interval_seconds):
        self._min_update_interval_seconds = min_update_interval_seconds
        # Epoch seconds of the most recent reset(); 0 means "never", so the
        # first ready() check always passes.
        self._last_update = 0

    def ready(self):
        """Return True once enough time has passed since the last reset()."""
        elapsed = time.time() - self._last_update
        return elapsed >= self._min_update_interval_seconds

    def reset(self):
        """Record now as the time of the most recent update."""
        self._last_update = time.time()
class InteractiveSpinner(object):
    """Spinner for real terminals: redraws a short status string in place."""

    def __init__(self, message, file=None, spin_chars="-\\|/",
                 # Empirically, 8 updates/second looks nice
                 min_update_interval_seconds=0.125):
        self._message = message
        if file is None:
            file = sys.stdout
        self._file = file
        self._rate_limiter = RateLimiter(min_update_interval_seconds)
        self._finished = False
        self._spin_cycle = itertools.cycle(spin_chars)
        self._file.write(" " * get_indentation() + self._message + " ... ")
        # Width of the most recently written status, so we know how much
        # to erase next time.
        self._width = 0

    def _write(self, status):
        assert not self._finished
        # Backspace over the previous status, blank it out with spaces, and
        # backspace again so the new status lands in the same spot.
        backup = "\b" * self._width
        self._file.write(backup + " " * self._width + backup)
        self._file.write(status)
        self._width = len(status)
        self._file.flush()
        self._rate_limiter.reset()

    def spin(self):
        if self._finished or not self._rate_limiter.ready():
            return
        self._write(next(self._spin_cycle))

    def finish(self, final_status):
        if self._finished:
            return
        self._write(final_status)
        self._file.write("\n")
        self._file.flush()
        self._finished = True
# Used for dumb terminals, non-interactive installs (no tty), etc.
# We still print updates occasionally (once every 60 seconds by default) to
# act as a keep-alive for systems like Travis-CI that take lack-of-output as
# an indication that a task has frozen.
class NonInteractiveSpinner(object):
    """Spinner that logs occasional status lines instead of animating."""

    def __init__(self, message, min_update_interval_seconds=60):
        self._message = message
        self._finished = False
        self._rate_limiter = RateLimiter(min_update_interval_seconds)
        self._update("started")

    def _update(self, status):
        assert not self._finished
        self._rate_limiter.reset()
        logger.info("%s: %s", self._message, status)

    def spin(self):
        if self._finished or not self._rate_limiter.ready():
            return
        self._update("still running...")

    def finish(self, final_status):
        if self._finished:
            return
        self._update("finished with status '%s'" % (final_status,))
        self._finished = True
@contextlib.contextmanager
def open_spinner(message):
    """Yield a spinner for *message*, finishing it with an appropriate
    status ("done", "canceled", or "error") when the context exits."""
    # The interactive spinner writes straight to sys.stdout but behaves as if
    # it logged at INFO (shown only at INFO or better); the non-interactive
    # one goes through the logging system and so always matches the logging
    # configuration.
    interactive = (
        sys.stdout.isatty() and logger.getEffectiveLevel() <= logging.INFO
    )
    if interactive:
        spinner = InteractiveSpinner(message)
    else:
        spinner = NonInteractiveSpinner(message)
    try:
        with hidden_cursor(sys.stdout):
            yield spinner
    except KeyboardInterrupt:
        spinner.finish("canceled")
        raise
    except Exception:
        spinner.finish("error")
        raise
    else:
        spinner.finish("done")
"""Handles all VCS (version control) support""" | |
from __future__ import absolute_import | |
import copy | |
import errno | |
import logging | |
import os | |
import shutil | |
import sys | |
from pip._vendor.six.moves.urllib import parse as urllib_parse | |
from pip._internal.exceptions import BadCommand | |
from pip._internal.utils.misc import ( | |
display_path, backup_dir, call_subprocess, rmtree, ask_path_exists, | |
) | |
from pip._internal.utils.typing import MYPY_CHECK_RUNNING | |
if MYPY_CHECK_RUNNING: | |
from typing import Dict, Optional, Tuple | |
from pip._internal.basecommand import Command | |
__all__ = ['vcs', 'get_src_requirement'] | |
logger = logging.getLogger(__name__) | |
class RevOptions(object):
    """
    Encapsulates a VCS-specific revision to install, along with any VCS
    install options.
    Instances of this class should be treated as if immutable.
    """

    def __init__(self, vcs, rev=None, extra_args=None):
        """
        Args:
            vcs: a VersionControl object.
            rev: the name of the revision to install.
            extra_args: a list of extra options.
        """
        self.extra_args = [] if extra_args is None else extra_args
        self.rev = rev
        self.vcs = vcs

    def __repr__(self):
        return '<RevOptions {}: rev={!r}>'.format(self.vcs.name, self.rev)

    @property
    def arg_rev(self):
        # Fall back to the backend's default revision when none was given.
        return self.vcs.default_arg_rev if self.rev is None else self.rev

    def to_args(self):
        """
        Return the VCS-specific command arguments.
        """
        rev = self.arg_rev
        rev_args = [] if rev is None else self.vcs.get_base_rev_args(rev)
        return rev_args + self.extra_args

    def to_display(self):
        # Human-readable suffix for log messages; empty without a revision.
        if not self.rev:
            return ''
        return ' (to revision {})'.format(self.rev)

    def make_new(self, rev):
        """
        Make a copy of the current instance, but with a new rev.
        Args:
            rev: the name of the revision for the new object.
        """
        return self.vcs.make_rev_options(rev, extra_args=self.extra_args)
class VcsSupport(object):
    """Registry of the available version-control backends."""
    # Shared across all instances: maps backend name -> backend class.
    _registry = {}  # type: Dict[str, Command]
    schemes = ['ssh', 'git', 'hg', 'bzr', 'sftp', 'svn']
    def __init__(self):
        # Register more schemes with urlparse for various version control
        # systems
        urllib_parse.uses_netloc.extend(self.schemes)
        # Python >= 2.7.4, 3.3 doesn't have uses_fragment
        if getattr(urllib_parse, 'uses_fragment', None):
            urllib_parse.uses_fragment.extend(self.schemes)
        super(VcsSupport, self).__init__()
    def __iter__(self):
        # Iterates over registered backend names.
        return self._registry.__iter__()
    @property
    def backends(self):
        # All registered backend classes.
        return list(self._registry.values())
    @property
    def dirnames(self):
        # Control-directory names (e.g. '.git') of all registered backends.
        return [backend.dirname for backend in self.backends]
    @property
    def all_schemes(self):
        # Union of the URL schemes supported by every registered backend.
        schemes = []
        for backend in self.backends:
            schemes.extend(backend.schemes)
        return schemes
    def register(self, cls):
        # Backends must expose a 'name' attribute to be registered.
        if not hasattr(cls, 'name'):
            logger.warning('Cannot register VCS %s', cls.__name__)
            return
        if cls.name not in self._registry:
            self._registry[cls.name] = cls
            logger.debug('Registered VCS backend: %s', cls.name)
    def unregister(self, cls=None, name=None):
        # Accepts either the backend name or the backend class.
        if name in self._registry:
            del self._registry[name]
        elif cls in self._registry.values():
            del self._registry[cls.name]
        else:
            logger.warning('Cannot unregister because no class or name given')
    def get_backend_name(self, location):
        """
        Return the name of the version control backend if found at given
        location, e.g. vcs.get_backend_name('/path/to/vcs/checkout')
        """
        for vc_type in self._registry.values():
            if vc_type.controls_location(location):
                logger.debug('Determine that %s uses VCS: %s',
                             location, vc_type.name)
                return vc_type.name
        return None
    def get_backend(self, name):
        # Case-insensitive lookup; returns None when unknown.
        name = name.lower()
        if name in self._registry:
            return self._registry[name]
    def get_backend_from_location(self, location):
        # Convenience: detect the backend controlling *location*, or None.
        vc_type = self.get_backend_name(location)
        if vc_type:
            return self.get_backend(vc_type)
        return None
vcs = VcsSupport() | |
class VersionControl(object):
    """Abstract base class for a version-control backend (git, hg, svn, bzr).

    Subclasses set ``name``/``dirname``/``schemes`` and implement the
    command-specific methods; this class provides URL parsing and the
    shared obtain/check_destination workflow.
    """

    name = ''
    dirname = ''
    # List of supported schemes for this Version Control
    schemes = ()  # type: Tuple[str, ...]
    # Iterable of environment variable names to pass to call_subprocess().
    unset_environ = ()  # type: Tuple[str, ...]
    default_arg_rev = None  # type: Optional[str]

    def __init__(self, url=None, *args, **kwargs):
        self.url = url
        super(VersionControl, self).__init__(*args, **kwargs)

    def get_base_rev_args(self, rev):
        """
        Return the base revision arguments for a vcs command.
        Args:
          rev: the name of a revision to install. Cannot be None.
        """
        raise NotImplementedError

    def make_rev_options(self, rev=None, extra_args=None):
        """
        Return a RevOptions object.
        Args:
          rev: the name of a revision to install.
          extra_args: a list of extra options.
        """
        return RevOptions(self, rev, extra_args=extra_args)

    def _is_local_repository(self, repo):
        """
        posix absolute paths start with os.path.sep,
        win32 ones start with drive (like c:\\folder)
        """
        drive, tail = os.path.splitdrive(repo)
        # bool(drive): previously this expression could evaluate to the raw
        # drive string; callers only test truthiness, but return a real bool.
        return repo.startswith(os.path.sep) or bool(drive)

    # See issue #1083 for why this method was introduced:
    # https://github.com/pypa/pip/issues/1083
    def translate_egg_surname(self, surname):
        # For example, Django has branches of the form "stable/1.7.x".
        return surname.replace('/', '_')

    def export(self, location):
        """
        Export the repository at the url to the destination location
        i.e. only download the files, without vcs informations
        """
        raise NotImplementedError

    def get_url_rev(self):
        """
        Returns the correct repository URL and revision by parsing the given
        repository URL
        """
        error_message = (
            "Sorry, '%s' is a malformed VCS url. "
            "The format is <vcs>+<protocol>://<url>, "
            "e.g. svn+http://myrepo/svn/MyApp#egg=MyApp"
        )
        assert '+' in self.url, error_message % self.url
        # Strip the "<vcs>+" prefix, then pull an optional "@<rev>" suffix
        # off the path component.
        url = self.url.split('+', 1)[1]
        scheme, netloc, path, query, frag = urllib_parse.urlsplit(url)
        rev = None
        if '@' in path:
            path, rev = path.rsplit('@', 1)
        url = urllib_parse.urlunsplit((scheme, netloc, path, query, ''))
        return url, rev

    def get_info(self, location):
        """
        Returns (url, revision), where both are strings
        """
        assert not location.rstrip('/').endswith(self.dirname), \
            'Bad directory: %s' % location
        return self.get_url(location), self.get_revision(location)

    def normalize_url(self, url):
        """
        Normalize a URL for comparison by unquoting it and removing any
        trailing slash.
        """
        return urllib_parse.unquote(url).rstrip('/')

    def compare_urls(self, url1, url2):
        """
        Compare two repo URLs for identity, ignoring incidental differences.
        """
        return (self.normalize_url(url1) == self.normalize_url(url2))

    def obtain(self, dest):
        """
        Called when installing or updating an editable package, takes the
        source path of the checkout.
        """
        raise NotImplementedError

    def switch(self, dest, url, rev_options):
        """
        Switch the repo at ``dest`` to point to ``URL``.
        Args:
          rev_options: a RevOptions object.
        """
        raise NotImplementedError

    def update(self, dest, rev_options):
        """
        Update an already-existing repo to the given ``rev_options``.
        Args:
          rev_options: a RevOptions object.
        """
        raise NotImplementedError

    def is_commit_id_equal(self, dest, name):
        """
        Return whether the id of the current commit equals the given name.
        Args:
          dest: the repository directory.
          name: a string name.
        """
        raise NotImplementedError

    def check_destination(self, dest, url, rev_options):
        """
        Prepare a location to receive a checkout/clone.
        Return True if the location is ready for (and requires) a
        checkout/clone, False otherwise.
        Args:
          rev_options: a RevOptions object.
        """
        checkout = True
        prompt = False
        rev_display = rev_options.to_display()
        if os.path.exists(dest):
            checkout = False
            if os.path.exists(os.path.join(dest, self.dirname)):
                # dest is an existing checkout of this VCS type.
                existing_url = self.get_url(dest)
                if self.compare_urls(existing_url, url):
                    logger.debug(
                        '%s in %s exists, and has correct URL (%s)',
                        self.repo_name.title(),
                        display_path(dest),
                        url,
                    )
                    if not self.is_commit_id_equal(dest, rev_options.rev):
                        logger.info(
                            'Updating %s %s%s',
                            display_path(dest),
                            self.repo_name,
                            rev_display,
                        )
                        self.update(dest, rev_options)
                    else:
                        logger.info(
                            'Skipping because already up-to-date.')
                else:
                    # Same VCS, different URL: ask the user what to do.
                    logger.warning(
                        '%s %s in %s exists with URL %s',
                        self.name,
                        self.repo_name,
                        display_path(dest),
                        existing_url,
                    )
                    prompt = ('(s)witch, (i)gnore, (w)ipe, (b)ackup ',
                              ('s', 'i', 'w', 'b'))
            else:
                # dest exists but is not a checkout of this VCS type.
                logger.warning(
                    'Directory %s already exists, and is not a %s %s.',
                    dest,
                    self.name,
                    self.repo_name,
                )
                prompt = ('(i)gnore, (w)ipe, (b)ackup ', ('i', 'w', 'b'))
        if prompt:
            logger.warning(
                'The plan is to install the %s repository %s',
                self.name,
                url,
            )
            response = ask_path_exists('What to do? %s' % prompt[0],
                                       prompt[1])
            if response == 's':
                logger.info(
                    'Switching %s %s to %s%s',
                    self.repo_name,
                    display_path(dest),
                    url,
                    rev_display,
                )
                self.switch(dest, url, rev_options)
            elif response == 'i':
                # do nothing
                pass
            elif response == 'w':
                logger.warning('Deleting %s', display_path(dest))
                rmtree(dest)
                checkout = True
            elif response == 'b':
                dest_dir = backup_dir(dest)
                logger.warning(
                    'Backing up %s to %s', display_path(dest), dest_dir,
                )
                shutil.move(dest, dest_dir)
                checkout = True
            elif response == 'a':
                # NOTE(review): currently unreachable -- 'a' is not offered
                # in either prompt above; kept in case choices ever change.
                sys.exit(-1)
        return checkout

    def unpack(self, location):
        """
        Clean up current location and download the url repository
        (and vcs infos) into location
        """
        if os.path.exists(location):
            rmtree(location)
        self.obtain(location)

    def get_src_requirement(self, dist, location):
        """
        Return a string representing the requirement needed to
        redownload the files currently present in location, something
        like:
          {repository_url}@{revision}#egg={project_name}-{version_identifier}
        """
        raise NotImplementedError

    def get_url(self, location):
        """
        Return the url used at location
        Used in get_info or check_destination
        """
        raise NotImplementedError

    def get_revision(self, location):
        """
        Return the current commit id of the files at the given location.
        """
        raise NotImplementedError

    def run_command(self, cmd, show_stdout=True, cwd=None,
                    on_returncode='raise',
                    command_desc=None,
                    extra_environ=None, spinner=None):
        """
        Run a VCS subcommand
        This is simply a wrapper around call_subprocess that adds the VCS
        command name, and checks that the VCS is available
        """
        cmd = [self.name] + cmd
        try:
            return call_subprocess(cmd, show_stdout, cwd,
                                   on_returncode,
                                   command_desc, extra_environ,
                                   unset_environ=self.unset_environ,
                                   spinner=spinner)
        except OSError as e:
            # errno.ENOENT = no such file or directory
            # In other words, the VCS executable isn't available
            if e.errno == errno.ENOENT:
                raise BadCommand(
                    'Cannot find command %r - do you have '
                    '%r installed and in your '
                    'PATH?' % (self.name, self.name))
            else:
                raise  # re-raise exception if a different error occurred

    @classmethod
    def controls_location(cls, location):
        """
        Check if a location is controlled by the vcs.
        It is meant to be overridden to implement smarter detection
        mechanisms for specific vcs.
        """
        logger.debug('Checking in %s for %s (%s)...',
                     location, cls.dirname, cls.name)
        path = os.path.join(location, cls.dirname)
        return os.path.exists(path)
def get_src_requirement(dist, location):
    """Return a pip requirement string for the editable checkout at
    ``location``, falling back to ``dist.as_requirement()`` when the
    checkout's VCS (or its executable) cannot be determined."""
    version_control = vcs.get_backend_from_location(location)
    if not version_control:
        logger.warning(
            'cannot determine version of editable source in %s (is not SVN '
            'checkout, Git clone, Mercurial clone or Bazaar branch)',
            location,
        )
        return dist.as_requirement()
    try:
        return version_control().get_src_requirement(dist,
                                                     location)
    except BadCommand:
        logger.warning(
            'cannot determine version of editable source in %s '
            '(%s command not found in path)',
            location,
            version_control.name,
        )
        return dist.as_requirement()
from __future__ import absolute_import | |
import logging | |
import os | |
from pip._vendor.six.moves.urllib import parse as urllib_parse | |
from pip._internal.download import path_to_url | |
from pip._internal.utils.misc import display_path, rmtree | |
from pip._internal.utils.temp_dir import TempDirectory | |
from pip._internal.vcs import VersionControl, vcs | |
# Module-level logger for the Bazaar backend.
logger = logging.getLogger(__name__)
class Bazaar(VersionControl):
    """Bazaar (bzr) support, including Launchpad ``bzr+lp`` URLs."""

    name = 'bzr'
    dirname = '.bzr'
    repo_name = 'branch'
    schemes = (
        'bzr', 'bzr+http', 'bzr+https', 'bzr+ssh', 'bzr+sftp', 'bzr+ftp',
        'bzr+lp',
    )

    def __init__(self, url=None, *args, **kwargs):
        super(Bazaar, self).__init__(url, *args, **kwargs)
        # This is only needed for python <2.7.5
        # Register lp but do not expose as a scheme to support bzr+lp.
        if getattr(urllib_parse, 'uses_fragment', None):
            urllib_parse.uses_fragment.extend(['lp'])

    def get_base_rev_args(self, rev):
        # bzr selects revisions with ``-r <rev>``.
        return ['-r', rev]

    def export(self, location):
        """
        Export the Bazaar repository at the url to the destination location
        """
        # Remove the location to make sure Bazaar can export it correctly
        if os.path.exists(location):
            rmtree(location)
        with TempDirectory(kind="export") as temp_dir:
            self.unpack(temp_dir.path)
            self.run_command(
                ['export', location],
                cwd=temp_dir.path, show_stdout=False,
            )

    def switch(self, dest, url, rev_options):
        # Re-point the existing branch at the new parent URL.
        self.run_command(['switch', url], cwd=dest)

    def update(self, dest, rev_options):
        cmd_args = ['pull', '-q'] + rev_options.to_args()
        self.run_command(cmd_args, cwd=dest)

    def obtain(self, dest):
        """Branch the repository into ``dest`` if check_destination allows."""
        url, rev = self.get_url_rev()
        rev_options = self.make_rev_options(rev)
        if self.check_destination(dest, url, rev_options):
            rev_display = rev_options.to_display()
            logger.info(
                'Checking out %s%s to %s',
                url,
                rev_display,
                display_path(dest),
            )
            cmd_args = ['branch', '-q'] + rev_options.to_args() + [url, dest]
            self.run_command(cmd_args)

    def get_url_rev(self):
        # hotfix the URL scheme after removing bzr+ from bzr+ssh:// readd it
        url, rev = super(Bazaar, self).get_url_rev()
        if url.startswith('ssh://'):
            url = 'bzr+' + url
        return url, rev

    def get_url(self, location):
        """Parse ``bzr info`` output for the checkout/parent branch URL."""
        urls = self.run_command(['info'], show_stdout=False, cwd=location)
        for line in urls.splitlines():
            line = line.strip()
            for x in ('checkout of branch: ',
                      'parent branch: '):
                if line.startswith(x):
                    repo = line.split(x)[1]
                    if self._is_local_repository(repo):
                        return path_to_url(repo)
                    return repo
        return None

    def get_revision(self, location):
        # ``bzr revno`` prints the current revision number on its last line.
        revision = self.run_command(
            ['revno'], show_stdout=False, cwd=location,
        )
        return revision.splitlines()[-1]

    def get_src_requirement(self, dist, location):
        """Build ``bzr+<url>@<rev>#egg=<name>`` for the branch, or None."""
        repo = self.get_url(location)
        if not repo:
            return None
        if not repo.lower().startswith('bzr:'):
            repo = 'bzr+' + repo
        egg_project_name = dist.egg_name().split('-', 1)[0]
        current_rev = self.get_revision(location)
        return '%s@%s#egg=%s' % (repo, current_rev, egg_project_name)

    def is_commit_id_equal(self, dest, name):
        """Always assume the versions don't match"""
        return False


vcs.register(Bazaar)
from __future__ import absolute_import | |
import logging | |
import os.path | |
import re | |
from pip._vendor.packaging.version import parse as parse_version | |
from pip._vendor.six.moves.urllib import parse as urllib_parse | |
from pip._vendor.six.moves.urllib import request as urllib_request | |
from pip._internal.compat import samefile | |
from pip._internal.exceptions import BadCommand | |
from pip._internal.utils.misc import display_path | |
from pip._internal.utils.temp_dir import TempDirectory | |
from pip._internal.vcs import VersionControl, vcs | |
# Short aliases for the vendored urllib wrappers used throughout this module.
urlsplit = urllib_parse.urlsplit
urlunsplit = urllib_parse.urlunsplit

# Module-level logger for the Git backend.
logger = logging.getLogger(__name__)
# 40 hex characters, i.e. a full git SHA-1 (not anchored at the end).
HASH_REGEX = re.compile('[a-fA-F0-9]{40}')


def looks_like_hash(sha):
    """Return True if ``sha`` begins with 40 hex characters."""
    return HASH_REGEX.match(sha) is not None
class Git(VersionControl):
    """Git support."""

    name = 'git'
    dirname = '.git'
    repo_name = 'clone'
    schemes = (
        'git', 'git+http', 'git+https', 'git+ssh', 'git+git', 'git+file',
    )
    # Prevent the user's environment variables from interfering with pip:
    # https://github.com/pypa/pip/issues/1130
    unset_environ = ('GIT_DIR', 'GIT_WORK_TREE')
    default_arg_rev = 'HEAD'

    def __init__(self, url=None, *args, **kwargs):
        # Works around an apparent Git bug
        # (see http://article.gmane.org/gmane.comp.version-control.git/146500)
        if url:
            scheme, netloc, path, query, fragment = urlsplit(url)
            if scheme.endswith('file'):
                initial_slashes = path[:-len(path.lstrip('/'))]
                newpath = (
                    initial_slashes +
                    urllib_request.url2pathname(path)
                    .replace('\\', '/').lstrip('/')
                )
                # Rebuild the URL keeping any "git+" prefix outside the
                # urlunsplit call.  (A previous plain urlunsplit assignment
                # here was immediately overwritten and has been removed.)
                after_plus = scheme.find('+') + 1
                url = scheme[:after_plus] + urlunsplit(
                    (scheme[after_plus:], netloc, newpath, query, fragment),
                )
        super(Git, self).__init__(url, *args, **kwargs)

    def get_base_rev_args(self, rev):
        return [rev]

    def get_git_version(self):
        """Return the installed git's version as a packaging version."""
        VERSION_PFX = 'git version '
        version = self.run_command(['version'], show_stdout=False)
        if version.startswith(VERSION_PFX):
            version = version[len(VERSION_PFX):].split()[0]
        else:
            version = ''
        # get first 3 positions of the git version because
        # on windows it is x.y.z.windows.t, and this parses as
        # LegacyVersion which always smaller than a Version.
        version = '.'.join(version.split('.')[:3])
        return parse_version(version)

    def export(self, location):
        """Export the Git repository at the url to the destination location"""
        if not location.endswith('/'):
            location = location + '/'
        with TempDirectory(kind="export") as temp_dir:
            self.unpack(temp_dir.path)
            self.run_command(
                ['checkout-index', '-a', '-f', '--prefix', location],
                show_stdout=False, cwd=temp_dir.path
            )

    def get_revision_sha(self, dest, rev):
        """
        Return a commit hash for the given revision if it names a remote
        branch or tag. Otherwise, return None.
        Args:
          dest: the repository directory.
          rev: the revision name.
        """
        # Pass rev to pre-filter the list.
        output = self.run_command(['show-ref', rev], cwd=dest,
                                  show_stdout=False, on_returncode='ignore')
        refs = {}
        for line in output.strip().splitlines():
            try:
                sha, ref = line.split()
            except ValueError:
                # Include the offending line to simplify troubleshooting if
                # this error ever occurs.
                raise ValueError('unexpected show-ref line: {!r}'.format(line))
            refs[ref] = sha
        branch_ref = 'refs/remotes/origin/{}'.format(rev)
        tag_ref = 'refs/tags/{}'.format(rev)
        return refs.get(branch_ref) or refs.get(tag_ref)

    def check_rev_options(self, dest, rev_options):
        """Check the revision options before checkout.
        Returns a new RevOptions object for the SHA1 of the branch or tag
        if found.
        Args:
          rev_options: a RevOptions object.
        """
        rev = rev_options.arg_rev
        sha = self.get_revision_sha(dest, rev)
        if sha is not None:
            return rev_options.make_new(sha)
        # Do not show a warning for the common case of something that has
        # the form of a Git commit hash.
        if not looks_like_hash(rev):
            logger.warning(
                "Did not find branch or tag '%s', assuming revision or ref.",
                rev,
            )
        return rev_options

    def is_commit_id_equal(self, dest, name):
        """
        Return whether the current commit hash equals the given name.
        Args:
          dest: the repository directory.
          name: a string name.
        """
        if not name:
            # Then avoid an unnecessary subprocess call.
            return False
        return self.get_revision(dest) == name

    def switch(self, dest, url, rev_options):
        """Point the clone's origin at ``url`` and check out the revision."""
        self.run_command(['config', 'remote.origin.url', url], cwd=dest)
        cmd_args = ['checkout', '-q'] + rev_options.to_args()
        self.run_command(cmd_args, cwd=dest)
        self.update_submodules(dest)

    def update(self, dest, rev_options):
        # First fetch changes from the default remote
        if self.get_git_version() >= parse_version('1.9.0'):
            # fetch tags in addition to everything else
            self.run_command(['fetch', '-q', '--tags'], cwd=dest)
        else:
            self.run_command(['fetch', '-q'], cwd=dest)
        # Then reset to wanted revision (maybe even origin/master)
        rev_options = self.check_rev_options(dest, rev_options)
        cmd_args = ['reset', '--hard', '-q'] + rev_options.to_args()
        self.run_command(cmd_args, cwd=dest)
        #: update submodules
        self.update_submodules(dest)

    def obtain(self, dest):
        """Clone the repository into ``dest`` and check out the revision."""
        url, rev = self.get_url_rev()
        rev_options = self.make_rev_options(rev)
        if self.check_destination(dest, url, rev_options):
            rev_display = rev_options.to_display()
            logger.info(
                'Cloning %s%s to %s', url, rev_display, display_path(dest),
            )
            self.run_command(['clone', '-q', url, dest])
            if rev:
                rev_options = self.check_rev_options(dest, rev_options)
                # Only do a checkout if the current commit id doesn't match
                # the requested revision.
                if not self.is_commit_id_equal(dest, rev_options.rev):
                    rev = rev_options.rev
                    # Only fetch the revision if it's a ref
                    if rev.startswith('refs/'):
                        self.run_command(
                            ['fetch', '-q', url] + rev_options.to_args(),
                            cwd=dest,
                        )
                        # Change the revision to the SHA of the ref we fetched
                        rev = 'FETCH_HEAD'
                    self.run_command(['checkout', '-q', rev], cwd=dest)
            #: repo may contain submodules
            self.update_submodules(dest)

    def get_url(self, location):
        """Return URL of the first remote encountered."""
        remotes = self.run_command(
            ['config', '--get-regexp', r'remote\..*\.url'],
            show_stdout=False, cwd=location,
        )
        remotes = remotes.splitlines()
        # Prefer origin's URL if it is configured; otherwise use the first.
        found_remote = remotes[0]
        for remote in remotes:
            if remote.startswith('remote.origin.url '):
                found_remote = remote
                break
        url = found_remote.split(' ')[1]
        return url.strip()

    def get_revision(self, location):
        current_rev = self.run_command(
            ['rev-parse', 'HEAD'], show_stdout=False, cwd=location,
        )
        return current_rev.strip()

    def _get_subdirectory(self, location):
        """Return the relative path of setup.py to the git repo root."""
        # find the repo root
        git_dir = self.run_command(['rev-parse', '--git-dir'],
                                   show_stdout=False, cwd=location).strip()
        if not os.path.isabs(git_dir):
            git_dir = os.path.join(location, git_dir)
        root_dir = os.path.join(git_dir, '..')
        # find setup.py
        orig_location = location
        while not os.path.exists(os.path.join(location, 'setup.py')):
            last_location = location
            location = os.path.dirname(location)
            if location == last_location:
                # We've traversed up to the root of the filesystem without
                # finding setup.py
                logger.warning(
                    "Could not find setup.py for directory %s (tried all "
                    "parent directories)",
                    orig_location,
                )
                return None
        # relative path of setup.py to repo root
        if samefile(root_dir, location):
            return None
        return os.path.relpath(location, root_dir)

    def get_src_requirement(self, dist, location):
        """Build ``git+<url>@<sha>#egg=<name>`` for the clone, or None."""
        repo = self.get_url(location)
        # Bail out *before* prefixing: previously the empty-URL guard ran
        # after 'git+' was prepended, so it could never trigger.
        if not repo:
            return None
        if not repo.lower().startswith('git:'):
            repo = 'git+' + repo
        egg_project_name = dist.egg_name().split('-', 1)[0]
        current_rev = self.get_revision(location)
        req = '%s@%s#egg=%s' % (repo, current_rev, egg_project_name)
        subdirectory = self._get_subdirectory(location)
        if subdirectory:
            req += '&subdirectory=' + subdirectory
        return req

    def get_url_rev(self):
        """
        Prefixes stub URLs like 'user@hostname:user/repo.git' with 'ssh://'.
        That's required because although they use SSH they sometimes doesn't
        work with a ssh:// scheme (e.g. Github). But we need a scheme for
        parsing. Hence we remove it again afterwards and return it as a stub.
        """
        if '://' not in self.url:
            assert 'file:' not in self.url
            self.url = self.url.replace('git+', 'git+ssh://')
            url, rev = super(Git, self).get_url_rev()
            url = url.replace('ssh://', '')
        else:
            url, rev = super(Git, self).get_url_rev()
        return url, rev

    def update_submodules(self, location):
        # No-op when the clone declares no submodules.
        if not os.path.exists(os.path.join(location, '.gitmodules')):
            return
        self.run_command(
            ['submodule', 'update', '--init', '--recursive', '-q'],
            cwd=location,
        )

    @classmethod
    def controls_location(cls, location):
        if super(Git, cls).controls_location(location):
            return True
        try:
            # ``git rev-parse`` exits 0 anywhere inside a work tree, so this
            # also detects locations below the repository root.
            r = cls().run_command(['rev-parse'],
                                  cwd=location,
                                  show_stdout=False,
                                  on_returncode='ignore')
            return not r
        except BadCommand:
            logger.debug("could not determine if %s is under git control "
                         "because git is not available", location)
            return False


vcs.register(Git)
from __future__ import absolute_import | |
import logging | |
import os | |
from pip._vendor.six.moves import configparser | |
from pip._internal.download import path_to_url | |
from pip._internal.utils.misc import display_path | |
from pip._internal.utils.temp_dir import TempDirectory | |
from pip._internal.vcs import VersionControl, vcs | |
# Module-level logger for the Mercurial backend.
logger = logging.getLogger(__name__)
class Mercurial(VersionControl):
    """Mercurial (hg) support."""

    name = 'hg'
    dirname = '.hg'
    repo_name = 'clone'
    schemes = ('hg', 'hg+http', 'hg+https', 'hg+ssh', 'hg+static-http')

    def get_base_rev_args(self, rev):
        return [rev]

    def export(self, location):
        """Export the Hg repository at the url to the destination location"""
        with TempDirectory(kind="export") as temp_dir:
            self.unpack(temp_dir.path)
            self.run_command(
                ['archive', location], show_stdout=False, cwd=temp_dir.path
            )

    def switch(self, dest, url, rev_options):
        """Rewrite the clone's default path in .hg/hgrc, then update."""
        repo_config = os.path.join(dest, self.dirname, 'hgrc')
        # NOTE(review): SafeConfigParser is deprecated on Python 3; kept for
        # Python 2 compatibility in this release.
        config = configparser.SafeConfigParser()
        try:
            config.read(repo_config)
            config.set('paths', 'default', url)
            with open(repo_config, 'w') as config_file:
                config.write(config_file)
        except (OSError, configparser.NoSectionError) as exc:
            logger.warning(
                'Could not switch Mercurial repository to %s: %s', url, exc,
            )
        else:
            cmd_args = ['update', '-q'] + rev_options.to_args()
            self.run_command(cmd_args, cwd=dest)

    def update(self, dest, rev_options):
        self.run_command(['pull', '-q'], cwd=dest)
        cmd_args = ['update', '-q'] + rev_options.to_args()
        self.run_command(cmd_args, cwd=dest)

    def obtain(self, dest):
        """Clone the repository into ``dest`` and update to the revision."""
        url, rev = self.get_url_rev()
        rev_options = self.make_rev_options(rev)
        if self.check_destination(dest, url, rev_options):
            rev_display = rev_options.to_display()
            logger.info(
                'Cloning hg %s%s to %s',
                url,
                rev_display,
                display_path(dest),
            )
            self.run_command(['clone', '--noupdate', '-q', url, dest])
            cmd_args = ['update', '-q'] + rev_options.to_args()
            self.run_command(cmd_args, cwd=dest)

    def get_url(self, location):
        url = self.run_command(
            ['showconfig', 'paths.default'],
            show_stdout=False, cwd=location).strip()
        if self._is_local_repository(url):
            url = path_to_url(url)
        return url.strip()

    def get_revision(self, location):
        """Return the local revision number of the working copy's parent."""
        current_revision = self.run_command(
            ['parents', '--template={rev}'],
            show_stdout=False, cwd=location).strip()
        return current_revision

    def get_revision_hash(self, location):
        """Return the full changeset hash of the working copy's parent."""
        current_rev_hash = self.run_command(
            ['parents', '--template={node}'],
            show_stdout=False, cwd=location).strip()
        return current_rev_hash

    def get_src_requirement(self, dist, location):
        """Build ``hg+<url>@<hash>#egg=<name>`` for the clone, or None."""
        repo = self.get_url(location)
        # Check for an empty URL *before* prefixing: previously the guard
        # ran after 'hg+' was prepended and could never trigger.
        if not repo:
            return None
        if not repo.lower().startswith('hg:'):
            repo = 'hg+' + repo
        egg_project_name = dist.egg_name().split('-', 1)[0]
        current_rev_hash = self.get_revision_hash(location)
        return '%s@%s#egg=%s' % (repo, current_rev_hash, egg_project_name)

    def is_commit_id_equal(self, dest, name):
        """Always assume the versions don't match"""
        return False


vcs.register(Mercurial)
from __future__ import absolute_import | |
import logging | |
import os | |
import re | |
from pip._vendor.six.moves.urllib import parse as urllib_parse | |
from pip._internal.index import Link | |
from pip._internal.utils.logging import indent_log | |
from pip._internal.utils.misc import display_path, rmtree | |
from pip._internal.vcs import VersionControl, vcs | |
# Patterns for extracting URL/revision data from legacy XML-format
# ``.svn/entries`` files and from ``svn info`` / ``svn info --xml`` output.
_svn_xml_url_re = re.compile('url="([^"]+)"')
_svn_rev_re = re.compile(r'committed-rev="(\d+)"')
_svn_url_re = re.compile(r'URL: (.+)')
_svn_revision_re = re.compile(r'Revision: (.+)')
_svn_info_xml_rev_re = re.compile(r'\s*revision="(\d+)"')
_svn_info_xml_url_re = re.compile(r'<url>(.*)</url>')
# Module-level logger for the Subversion backend.
logger = logging.getLogger(__name__)
class Subversion(VersionControl):
    """Subversion (svn) support."""

    name = 'svn'
    dirname = '.svn'
    repo_name = 'checkout'
    schemes = ('svn', 'svn+ssh', 'svn+http', 'svn+https', 'svn+svn')

    def get_base_rev_args(self, rev):
        # svn selects revisions with ``-r <rev>``.
        return ['-r', rev]

    def get_info(self, location):
        """Returns (url, revision), where both are strings"""
        assert not location.rstrip('/').endswith(self.dirname), \
            'Bad directory: %s' % location
        # LANG=C forces untranslated output so the regexes below match.
        output = self.run_command(
            ['info', location],
            show_stdout=False,
            extra_environ={'LANG': 'C'},
        )
        match = _svn_url_re.search(output)
        if not match:
            logger.warning(
                'Cannot determine URL of svn checkout %s',
                display_path(location),
            )
            logger.debug('Output that cannot be parsed: \n%s', output)
            return None, None
        url = match.group(1).strip()
        match = _svn_revision_re.search(output)
        if not match:
            logger.warning(
                'Cannot determine revision of svn checkout %s',
                display_path(location),
            )
            logger.debug('Output that cannot be parsed: \n%s', output)
            return url, None
        return url, match.group(1)

    def export(self, location):
        """Export the svn repository at the url to the destination location"""
        url, rev = self.get_url_rev()
        rev_options = get_rev_options(self, url, rev)
        url = self.remove_auth_from_url(url)
        logger.info('Exporting svn repository %s to %s', url, location)
        with indent_log():
            if os.path.exists(location):
                # Subversion doesn't like to check out over an existing
                # directory --force fixes this, but was only added in svn 1.5
                rmtree(location)
            cmd_args = ['export'] + rev_options.to_args() + [url, location]
            self.run_command(cmd_args, show_stdout=False)

    def switch(self, dest, url, rev_options):
        cmd_args = ['switch'] + rev_options.to_args() + [url, dest]
        self.run_command(cmd_args)

    def update(self, dest, rev_options):
        cmd_args = ['update'] + rev_options.to_args() + [dest]
        self.run_command(cmd_args)

    def obtain(self, dest):
        """Check the repository out into ``dest``, stripping embedded auth
        from the URL (credentials are passed as flags instead)."""
        url, rev = self.get_url_rev()
        rev_options = get_rev_options(self, url, rev)
        url = self.remove_auth_from_url(url)
        if self.check_destination(dest, url, rev_options):
            rev_display = rev_options.to_display()
            logger.info(
                'Checking out %s%s to %s',
                url,
                rev_display,
                display_path(dest),
            )
            cmd_args = ['checkout', '-q'] + rev_options.to_args() + [url, dest]
            self.run_command(cmd_args)

    def get_location(self, dist, dependency_links):
        """Return the first dependency link whose #egg= fragment matches
        ``dist``'s key, or None."""
        for url in dependency_links:
            egg_fragment = Link(url).egg_fragment
            if not egg_fragment:
                continue
            if '-' in egg_fragment:
                # FIXME: will this work when a package has - in the name?
                key = '-'.join(egg_fragment.split('-')[:-1]).lower()
            else:
                key = egg_fragment
            if key == dist.key:
                return url.split('#', 1)[0]
        return None

    def get_revision(self, location):
        """
        Return the maximum revision for all files under a given location
        """
        # Note: taken from setuptools.command.egg_info
        revision = 0
        for base, dirs, files in os.walk(location):
            if self.dirname not in dirs:
                dirs[:] = []
                continue  # no sense walking uncontrolled subdirs
            dirs.remove(self.dirname)
            entries_fn = os.path.join(base, self.dirname, 'entries')
            if not os.path.exists(entries_fn):
                # FIXME: should we warn?
                continue
            dirurl, localrev = self._get_svn_url_rev(base)
            if base == location:
                base = dirurl + '/'   # save the root url
            elif not dirurl or not dirurl.startswith(base):
                dirs[:] = []
                continue  # not part of the same svn tree, skip it
            revision = max(revision, localrev)
        return revision

    def get_url_rev(self):
        # hotfix the URL scheme after removing svn+ from svn+ssh:// readd it
        url, rev = super(Subversion, self).get_url_rev()
        if url.startswith('ssh://'):
            url = 'svn+' + url
        return url, rev

    def get_url(self, location):
        # In cases where the source is in a subdirectory, not alongside
        # setup.py we have to look up in the location until we find a real
        # setup.py
        orig_location = location
        while not os.path.exists(os.path.join(location, 'setup.py')):
            last_location = location
            location = os.path.dirname(location)
            if location == last_location:
                # We've traversed up to the root of the filesystem without
                # finding setup.py
                logger.warning(
                    "Could not find setup.py for directory %s (tried all "
                    "parent directories)",
                    orig_location,
                )
                return None
        return self._get_svn_url_rev(location)[0]

    def _get_svn_url_rev(self, location):
        """Return (url, max_revision) for the checkout at ``location``,
        handling pre-1.7 ``.svn/entries`` formats and ``svn info --xml``."""
        from pip._internal.exceptions import InstallationError

        entries_path = os.path.join(location, self.dirname, 'entries')
        if os.path.exists(entries_path):
            with open(entries_path) as f:
                data = f.read()
        else:  # subversion >= 1.7 does not have the 'entries' file
            data = ''

        if (data.startswith('8') or
                data.startswith('9') or
                data.startswith('10')):
            # Plain-text entries format (svn 1.4-1.6): records separated by
            # form feeds, fields on separate lines.
            data = list(map(str.splitlines, data.split('\n\x0c\n')))
            del data[0][0]  # get rid of the '8'
            url = data[0][3]
            revs = [int(d[9]) for d in data if len(d) > 9 and d[9]] + [0]
        elif data.startswith('<?xml'):
            # XML entries format (svn <= 1.3).
            match = _svn_xml_url_re.search(data)
            if not match:
                raise ValueError('Badly formatted data: %r' % data)
            url = match.group(1)    # get repository URL
            revs = [int(m.group(1)) for m in _svn_rev_re.finditer(data)] + [0]
        else:
            try:
                # subversion >= 1.7
                xml = self.run_command(
                    ['info', '--xml', location],
                    show_stdout=False,
                )
                url = _svn_info_xml_url_re.search(xml).group(1)
                revs = [
                    int(m.group(1)) for m in _svn_info_xml_rev_re.finditer(xml)
                ]
            except InstallationError:
                url, revs = None, []

        if revs:
            rev = max(revs)
        else:
            rev = 0

        return url, rev

    def get_src_requirement(self, dist, location):
        """Build ``svn+<url>@<rev>#egg=<name>`` for the checkout, or None."""
        repo = self.get_url(location)
        if repo is None:
            return None
        # FIXME: why not project name?
        egg_project_name = dist.egg_name().split('-', 1)[0]
        rev = self.get_revision(location)
        return 'svn+%s@%s#egg=%s' % (repo, rev, egg_project_name)

    def is_commit_id_equal(self, dest, name):
        """Always assume the versions don't match"""
        return False

    @staticmethod
    def remove_auth_from_url(url):
        # Return a copy of url with 'username:password@' removed.
        # username/pass params are passed to subversion through flags
        # and are not recognized in the url.

        # parsed url
        purl = urllib_parse.urlsplit(url)
        stripped_netloc = \
            purl.netloc.split('@')[-1]

        # stripped url
        url_pieces = (
            purl.scheme, stripped_netloc, purl.path, purl.query, purl.fragment
        )
        surl = urllib_parse.urlunsplit(url_pieces)
        return surl
def get_rev_options(vcs, url, rev): | |
""" | |
Return a RevOptions object. | |
""" | |
r = urllib_parse.urlsplit(url) | |
if hasattr(r, 'username'): | |
# >= Python-2.5 | |
username, password = r.username, r.password | |
else: | |
netloc = r[1] | |
if '@' in netloc: | |
auth = netloc.split('@')[0] | |
if ':' in auth: | |
username, password = auth.split(':', 1) | |
else: | |
username, password = auth, None | |
else: | |
username, password = None, None | |
extra_args = [] | |
if username: | |
extra_args += ['--username', username] | |
if password: | |
extra_args += ['--password', password] | |
return vcs.make_rev_options(rev, extra_args=extra_args) | |
# Make the Subversion backend discoverable by pip's VCS dispatch machinery.
vcs.register(Subversion)
""" | |
Support for installing and building the "wheel" binary package format. | |
""" | |
from __future__ import absolute_import | |
import collections | |
import compileall | |
import copy | |
import csv | |
import hashlib | |
import logging | |
import os.path | |
import re | |
import shutil | |
import stat | |
import sys | |
import warnings | |
from base64 import urlsafe_b64encode | |
from email.parser import Parser | |
from pip._vendor import pkg_resources | |
from pip._vendor.distlib.scripts import ScriptMaker | |
from pip._vendor.packaging.utils import canonicalize_name | |
from pip._vendor.six import StringIO | |
from pip._internal import pep425tags | |
from pip._internal.build_env import BuildEnvironment | |
from pip._internal.download import path_to_url, unpack_url | |
from pip._internal.exceptions import ( | |
InstallationError, InvalidWheelFilename, UnsupportedWheel, | |
) | |
from pip._internal.locations import ( | |
PIP_DELETE_MARKER_FILENAME, distutils_scheme, | |
) | |
from pip._internal.utils.logging import indent_log | |
from pip._internal.utils.misc import ( | |
call_subprocess, captured_stdout, ensure_dir, read_chunks, | |
) | |
from pip._internal.utils.setuptools_build import SETUPTOOLS_SHIM | |
from pip._internal.utils.temp_dir import TempDirectory | |
from pip._internal.utils.typing import MYPY_CHECK_RUNNING | |
from pip._internal.utils.ui import open_spinner | |
if MYPY_CHECK_RUNNING: | |
from typing import Dict, List, Optional | |
# File extension identifying a built wheel archive.
wheel_ext = '.whl'

# Highest Wheel-Version (major, minor) this installer understands.
VERSION_COMPATIBLE = (1, 0)

# Module-level logger for wheel build/install operations.
logger = logging.getLogger(__name__)
def rehash(path, algo='sha256', blocksize=1 << 20):
    """Return ``(digest, length)`` for the file at ``path``.

    ``digest`` is formatted '<algo>=<urlsafe-b64-no-padding>', the hash
    format used in a wheel's RECORD file; ``length`` is the file size in
    bytes.
    """
    h = hashlib.new(algo)
    length = 0
    with open(path, 'rb') as f:
        # Read in fixed-size chunks so large files are never fully loaded
        # into memory (stdlib replacement for the old read_chunks helper).
        for block in iter(lambda: f.read(blocksize), b''):
            length += len(block)
            h.update(block)
    # BUG FIX: the prefix previously hard-coded 'sha256=' even when a
    # different ``algo`` was requested; use the real algorithm name.
    digest = h.name + '=' + urlsafe_b64encode(
        h.digest()
    ).decode('latin1').rstrip('=')
    return (digest, length)
def open_for_csv(name, mode):
    """Open ``name`` for use with the csv module on both Python 2 and 3.

    Python 2's csv module wants binary mode; Python 3's wants text mode
    with newline translation disabled.
    """
    if sys.version_info[0] < 3:
        extra_mode, kwargs = 'b', {}
    else:
        extra_mode, kwargs = '', {'newline': ''}
    return open(name, mode + extra_mode, **kwargs)
def fix_script(path):
    """Replace a '#!python' shebang in ``path`` with the running interpreter.

    Returns True when the file was rewritten, False when it does not start
    with '#!python'.  Non-file paths fall through and return None.
    """
    # XXX RECORD hashes will need to be updated
    if os.path.isfile(path):
        with open(path, 'rb') as script:
            shebang = script.readline()
            if not shebang.startswith(b'#!python'):
                return False
            body = script.read()
        interpreter = sys.executable.encode(sys.getfilesystemencoding())
        new_shebang = b'#!' + interpreter + os.linesep.encode("ascii")
        with open(path, 'wb') as script:
            script.write(new_shebang)
            script.write(body)
        return True
# Matches a '.dist-info' directory name, capturing the project name and,
# when present, its version (re.VERBOSE: whitespace in the pattern is
# insignificant).
dist_info_re = re.compile(r"""^(?P<namever>(?P<name>.+?)(-(?P<ver>.+?))?)
                              \.dist-info$""", re.VERBOSE)
def root_is_purelib(name, wheeldir):
    """
    Return True if the extracted wheel in wheeldir should go into purelib.
    """
    folded = name.replace("-", "_")
    for entry in os.listdir(wheeldir):
        info = dist_info_re.match(entry)
        if not info or info.group('name') != folded:
            continue
        # Check the WHEEL metadata file inside the matching .dist-info dir.
        with open(os.path.join(wheeldir, entry, 'WHEEL')) as wheel:
            for line in wheel:
                if line.lower().rstrip() == "root-is-purelib: true":
                    return True
    return False
def get_entrypoints(filename):
    """Parse an entry_points.txt file into (console_scripts, gui_scripts).

    Each returned dict maps a script name to its 'module:function' target;
    a missing file yields two empty dicts.
    """
    if not os.path.exists(filename):
        return {}, {}

    # This is done because you can pass a string to entry_points wrappers which
    # means that they may or may not be valid INI files. The attempt here is to
    # strip leading and trailing whitespace in order to make them valid INI
    # files.
    data = StringIO()
    with open(filename) as fp:
        for line in fp:
            data.write(line.strip())
            data.write("\n")
    data.seek(0)

    def normalize(ep):
        """Render an EntryPoint as its 'name', 'module:function' pieces."""
        return str(ep).replace(" ", "").split("=")

    # get the entry points and then the script names
    entry_points = pkg_resources.EntryPoint.parse_map(data)
    console = dict(
        normalize(v)
        for v in entry_points.get('console_scripts', {}).values()
    )
    gui = dict(
        normalize(v)
        for v in entry_points.get('gui_scripts', {}).values()
    )
    return console, gui
def message_about_scripts_not_on_PATH(scripts):
    # type: (List[str]) -> Optional[str]
    """Determine if any scripts are not on PATH and format a warning.
    Returns a warning message if one or more scripts are not on PATH,
    otherwise None.
    """
    if not scripts:
        return None

    # Group the installed script names by their destination directory.
    dir_to_scripts = collections.defaultdict(set)  # type: Dict[str, set]
    for script_path in scripts:
        dir_to_scripts[os.path.dirname(script_path)].add(
            os.path.basename(script_path))

    # Directories already on PATH never produce a warning.
    path_dirs = {
        os.path.normcase(entry)
        for entry in os.environ["PATH"].split(os.pathsep)
    }
    # Neither does the interpreter's own directory: this covers venv
    # invocations that never activated the venv.
    path_dirs.add(os.path.normcase(os.path.dirname(sys.executable)))

    warn_for = {
        directory: names
        for directory, names in dir_to_scripts.items()
        if os.path.normcase(directory) not in path_dirs
    }
    if not warn_for:
        return None

    # Format a message
    msg_lines = []
    for directory, names in warn_for.items():
        names = sorted(names)
        if len(names) == 1:
            start_text = "script {} is".format(names[0])
        else:
            start_text = "scripts {} are".format(
                ", ".join(names[:-1]) + " and " + names[-1]
            )
        msg_lines.append(
            "The {} installed in '{}' which is not on PATH."
            .format(start_text, directory)
        )

    last_line_fmt = (
        "Consider adding {} to PATH or, if you prefer "
        "to suppress this warning, use --no-warn-script-location."
    )
    if len(msg_lines) == 1:
        msg_lines.append(last_line_fmt.format("this directory"))
    else:
        msg_lines.append(last_line_fmt.format("these directories"))

    # Returns the formatted multiline message
    return "\n".join(msg_lines)
def move_wheel_files(name, req, wheeldir, user=False, home=None, root=None,
                     pycompile=True, scheme=None, isolated=False, prefix=None,
                     warn_script_location=True):
    """Install a wheel that has been unpacked into ``wheeldir``.

    :param name: distribution name, used to locate the .dist-info directory
    :param req: the InstallRequirement being installed
    :param wheeldir: directory containing the unpacked wheel contents
    :param pycompile: byte-compile the installed sources when True
    :param scheme: optional pre-computed install scheme; derived from the
        user/home/root/isolated/prefix arguments when not supplied
    :param warn_script_location: warn when console scripts land off PATH
    """
    if not scheme:
        scheme = distutils_scheme(
            name, user=user, home=home, root=root, isolated=isolated,
            prefix=prefix,
        )

    if root_is_purelib(name, wheeldir):
        lib_dir = scheme['purelib']
    else:
        lib_dir = scheme['platlib']

    info_dir = []
    data_dirs = []
    source = wheeldir.rstrip(os.path.sep) + os.path.sep

    # Record details of the files moved
    #   installed = files copied from the wheel to the destination
    #   changed = files changed while installing (scripts #! line typically)
    #   generated = files newly generated during the install (script wrappers)
    installed = {}
    changed = set()
    generated = []

    # Compile all of the pyc files that we're going to be installing
    if pycompile:
        with captured_stdout() as stdout:
            with warnings.catch_warnings():
                warnings.filterwarnings('ignore')
                compileall.compile_dir(source, force=True, quiet=True)
        logger.debug(stdout.getvalue())

    def normpath(src, p):
        """RECORD paths are relative, '/'-separated."""
        return os.path.relpath(src, p).replace(os.path.sep, '/')

    def record_installed(srcfile, destfile, modified=False):
        """Map archive RECORD paths to installation RECORD paths."""
        oldpath = normpath(srcfile, wheeldir)
        newpath = normpath(destfile, lib_dir)
        installed[oldpath] = newpath
        if modified:
            changed.add(destfile)

    def clobber(source, dest, is_base, fixer=None, filter=None):
        """Copy the tree at ``source`` into ``dest``, recording each file."""
        ensure_dir(dest)  # common for the 'include' path

        for dir, subdirs, files in os.walk(source):
            basedir = dir[len(source):].lstrip(os.path.sep)
            destdir = os.path.join(dest, basedir)
            if is_base and basedir.split(os.path.sep, 1)[0].endswith('.data'):
                continue
            for s in subdirs:
                destsubdir = os.path.join(dest, basedir, s)
                if is_base and basedir == '' and destsubdir.endswith('.data'):
                    data_dirs.append(s)
                    continue
                elif (is_base and
                        s.endswith('.dist-info') and
                        canonicalize_name(s).startswith(
                            canonicalize_name(req.name))):
                    assert not info_dir, ('Multiple .dist-info directories: ' +
                                          destsubdir + ', ' +
                                          ', '.join(info_dir))
                    info_dir.append(destsubdir)
            for f in files:
                # Skip unwanted files
                if filter and filter(f):
                    continue
                srcfile = os.path.join(dir, f)
                destfile = os.path.join(dest, basedir, f)
                # directory creation is lazy and after the file filtering above
                # to ensure we don't install empty dirs; empty dirs can't be
                # uninstalled.
                ensure_dir(destdir)

                # We use copyfile (not move, copy, or copy2) to be extra sure
                # that we are not moving directories over (copyfile fails for
                # directories) as well as to ensure that we are not copying
                # over any metadata because we want more control over what
                # metadata we actually copy over.
                shutil.copyfile(srcfile, destfile)

                # Copy over the metadata for the file, currently this only
                # includes the atime and mtime.
                st = os.stat(srcfile)
                if hasattr(os, "utime"):
                    os.utime(destfile, (st.st_atime, st.st_mtime))

                # If our file is executable, then make our destination file
                # executable.
                if os.access(srcfile, os.X_OK):
                    st = os.stat(srcfile)
                    permissions = (
                        st.st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
                    )
                    os.chmod(destfile, permissions)

                # Renamed from 'changed' so the local flag no longer shadows
                # the enclosing 'changed' set used by record_installed().
                file_changed = False
                if fixer:
                    file_changed = fixer(destfile)
                record_installed(srcfile, destfile, file_changed)

    clobber(source, lib_dir, True)

    assert info_dir, "%s .dist-info directory not found" % req

    # Get the defined entry points
    ep_file = os.path.join(info_dir[0], 'entry_points.txt')
    console, gui = get_entrypoints(ep_file)

    def is_entrypoint_wrapper(name):
        # EP, EP.exe and EP-script.py are scripts generated for
        # entry point EP by setuptools
        if name.lower().endswith('.exe'):
            matchname = name[:-4]
        elif name.lower().endswith('-script.py'):
            matchname = name[:-10]
        elif name.lower().endswith(".pya"):
            matchname = name[:-4]
        else:
            matchname = name
        # Ignore setuptools-generated scripts
        return (matchname in console or matchname in gui)

    for datadir in data_dirs:
        fixer = None
        filter = None
        for subdir in os.listdir(os.path.join(wheeldir, datadir)):
            fixer = None
            if subdir == 'scripts':
                fixer = fix_script
                filter = is_entrypoint_wrapper
            source = os.path.join(wheeldir, datadir, subdir)
            dest = scheme[subdir]
            clobber(source, dest, False, fixer=fixer, filter=filter)

    maker = ScriptMaker(None, scheme['scripts'])

    # Ensure old scripts are overwritten.
    # See https://github.com/pypa/pip/issues/1800
    maker.clobber = True

    # Ensure we don't generate any variants for scripts because this is almost
    # never what somebody wants.
    # See https://bitbucket.org/pypa/distlib/issue/35/
    maker.variants = {''}

    # This is required because otherwise distlib creates scripts that are not
    # executable.
    # See https://bitbucket.org/pypa/distlib/issue/32/
    maker.set_mode = True

    # Simplify the script and fix the fact that the default script swallows
    # every single stack trace.
    # See https://bitbucket.org/pypa/distlib/issue/34/
    # See https://bitbucket.org/pypa/distlib/issue/33/
    def _get_script_text(entry):
        if entry.suffix is None:
            raise InstallationError(
                "Invalid script entry point: %s for req: %s - A callable "
                "suffix is required. Cf https://packaging.python.org/en/"
                "latest/distributing.html#console-scripts for more "
                "information." % (entry, req)
            )
        return maker.script_template % {
            "module": entry.prefix,
            "import_name": entry.suffix.split(".")[0],
            "func": entry.suffix,
        }

    maker._get_script_text = _get_script_text
    maker.script_template = r"""# -*- coding: utf-8 -*-
import re
import sys

from %(module)s import %(import_name)s

if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(%(func)s())
"""

    # Special case pip and setuptools to generate versioned wrappers
    #
    # The issue is that some projects (specifically, pip and setuptools) use
    # code in setup.py to create "versioned" entry points - pip2.7 on Python
    # 2.7, pip3.3 on Python 3.3, etc. But these entry points are baked into
    # the wheel metadata at build time, and so if the wheel is installed with
    # a *different* version of Python the entry points will be wrong. The
    # correct fix for this is to enhance the metadata to be able to describe
    # such versioned entry points, but that won't happen till Metadata 2.0 is
    # available.
    # In the meantime, projects using versioned entry points will either have
    # incorrect versioned entry points, or they will not be able to distribute
    # "universal" wheels (i.e., they will need a wheel per Python version).
    #
    # Because setuptools and pip are bundled with _ensurepip and virtualenv,
    # we need to use universal wheels. So, as a stopgap until Metadata 2.0, we
    # override the versioned entry points in the wheel and generate the
    # correct ones. This code is purely a short-term measure until Metadata 2.0
    # is available.
    #
    # To add the level of hack in this section of code, in order to support
    # ensurepip this code will look for an ``ENSUREPIP_OPTIONS`` environment
    # variable which will control which version scripts get installed.
    #
    # ENSUREPIP_OPTIONS=altinstall
    #   - Only pipX.Y and easy_install-X.Y will be generated and installed
    # ENSUREPIP_OPTIONS=install
    #   - pipX.Y, pipX, easy_install-X.Y will be generated and installed. Note
    #     that this option is technically if ENSUREPIP_OPTIONS is set and is
    #     not altinstall
    # DEFAULT
    #   - The default behavior is to install pip, pipX, pipX.Y, easy_install
    #     and easy_install-X.Y.

    # BUG FIX: previously derived from sys.version[:1] / sys.version[:3],
    # which breaks for two-digit versions (Python 3.10 -> '3.1').
    version_major = str(sys.version_info[0])
    version_major_minor = '%d.%d' % sys.version_info[:2]

    pip_script = console.pop('pip', None)
    if pip_script:
        if "ENSUREPIP_OPTIONS" not in os.environ:
            spec = 'pip = ' + pip_script
            generated.extend(maker.make(spec))

        if os.environ.get("ENSUREPIP_OPTIONS", "") != "altinstall":
            spec = 'pip%s = %s' % (version_major, pip_script)
            generated.extend(maker.make(spec))

        spec = 'pip%s = %s' % (version_major_minor, pip_script)
        generated.extend(maker.make(spec))
        # Delete any other versioned pip entry points
        # (\d+ rather than \d so that e.g. 'pip3.10' is also matched).
        pip_ep = [k for k in console if re.match(r'pip(\d+(\.\d+)?)?$', k)]
        for k in pip_ep:
            del console[k]
    easy_install_script = console.pop('easy_install', None)
    if easy_install_script:
        if "ENSUREPIP_OPTIONS" not in os.environ:
            spec = 'easy_install = ' + easy_install_script
            generated.extend(maker.make(spec))

        spec = 'easy_install-%s = %s' % (
            version_major_minor, easy_install_script,
        )
        generated.extend(maker.make(spec))
        # Delete any other versioned easy_install entry points
        easy_install_ep = [
            k for k in console if re.match(r'easy_install(-\d+\.\d+)?$', k)
        ]
        for k in easy_install_ep:
            del console[k]

    # Generate the console and GUI entry points specified in the wheel
    if len(console) > 0:
        generated_console_scripts = maker.make_multiple(
            ['%s = %s' % kv for kv in console.items()]
        )
        generated.extend(generated_console_scripts)

        if warn_script_location:
            msg = message_about_scripts_not_on_PATH(generated_console_scripts)
            if msg is not None:
                # BUG FIX: logger.warn is a deprecated alias of warning().
                logger.warning(msg)

    if len(gui) > 0:
        generated.extend(
            maker.make_multiple(
                ['%s = %s' % kv for kv in gui.items()],
                {'gui': True}
            )
        )

    # Record pip as the installer
    installer = os.path.join(info_dir[0], 'INSTALLER')
    temp_installer = os.path.join(info_dir[0], 'INSTALLER.pip')
    with open(temp_installer, 'wb') as installer_file:
        installer_file.write(b'pip\n')
    shutil.move(temp_installer, installer)
    generated.append(installer)

    # Record details of all files installed
    record = os.path.join(info_dir[0], 'RECORD')
    temp_record = os.path.join(info_dir[0], 'RECORD.pip')
    with open_for_csv(record, 'r') as record_in:
        with open_for_csv(temp_record, 'w+') as record_out:
            reader = csv.reader(record_in)
            writer = csv.writer(record_out)
            for row in reader:
                row[0] = installed.pop(row[0], row[0])
                if row[0] in changed:
                    row[1], row[2] = rehash(row[0])
                writer.writerow(row)
            for f in generated:
                h, l = rehash(f)
                writer.writerow((normpath(f, lib_dir), h, l))
            for f in installed:
                writer.writerow((installed[f], '', ''))
    shutil.move(temp_record, record)
def wheel_version(source_dir):
    """
    Return the Wheel-Version of an extracted wheel, if possible, as a tuple
    of ints (e.g. (1, 0)).
    Otherwise, return False if we couldn't parse / extract it.
    """
    try:
        dist = [d for d in pkg_resources.find_on_path(None, source_dir)][0]

        wheel_data = dist.get_metadata('WHEEL')
        wheel_data = Parser().parsestr(wheel_data)

        version = wheel_data['Wheel-Version'].strip()
        version = tuple(map(int, version.split('.')))
        return version
    except Exception:
        # BUG FIX: was a bare 'except:', which also swallowed SystemExit
        # and KeyboardInterrupt; only genuine errors should yield False.
        return False
def check_compatibility(version, name):
    """
    Raises errors or warns if called with an incompatible Wheel-Version.
    Pip should refuse to install a Wheel-Version that's a major series
    ahead of what it's compatible with (e.g 2.0 > 1.1); and warn when
    installing a version only minor version ahead (e.g 1.2 > 1.1).
    version: a 2-tuple representing a Wheel-Version (Major, Minor)
    name: name of wheel or package to raise exception about
    :raises UnsupportedWheel: when an incompatible Wheel-Version is given
    """
    if not version:
        raise UnsupportedWheel(
            "%s is in an unsupported or invalid wheel" % name
        )

    if version[0] > VERSION_COMPATIBLE[0]:
        # Newer major series: refuse outright.
        raise UnsupportedWheel(
            "%s's Wheel-Version (%s) is not compatible with this version "
            "of pip" % (name, '.'.join(map(str, version)))
        )
    if version > VERSION_COMPATIBLE:
        # Same major series, newer minor: warn but proceed.
        logger.warning(
            'Installing from a newer Wheel-Version (%s)',
            '.'.join(map(str, version)),
        )
class Wheel(object):
    """A wheel file, with the compatibility tags parsed from its filename."""
    # TODO: maybe move the install code into this class

    wheel_file_re = re.compile(
        r"""^(?P<namever>(?P<name>.+?)-(?P<ver>.*?))
        ((-(?P<build>\d[^-]*?))?-(?P<pyver>.+?)-(?P<abi>.+?)-(?P<plat>.+?)
        \.whl|\.dist-info)$""",
        re.VERBOSE
    )

    def __init__(self, filename):
        """
        :raises InvalidWheelFilename: when the filename is invalid for a wheel
        """
        parsed = self.wheel_file_re.match(filename)
        if not parsed:
            raise InvalidWheelFilename(
                "%s is not a valid wheel filename." % filename
            )
        self.filename = filename
        # we'll assume "_" means "-" due to wheel naming scheme
        # (https://github.com/pypa/pip/issues/1150)
        self.name = parsed.group('name').replace('_', '-')
        self.version = parsed.group('ver').replace('_', '-')
        self.build_tag = parsed.group('build')
        self.pyversions = parsed.group('pyver').split('.')
        self.abis = parsed.group('abi').split('.')
        self.plats = parsed.group('plat').split('.')

        # All the tag combinations from this file
        self.file_tags = {
            (py, abi, plat)
            for py in self.pyversions
            for abi in self.abis
            for plat in self.plats
        }

    def support_index_min(self, tags=None):
        """
        Return the lowest index that one of the wheel's file_tag combinations
        achieves in the supported_tags list e.g. if there are 8 supported tags,
        and one of the file tags is first in the list, then return 0. Returns
        None is the wheel is not supported.
        """
        if tags is None:  # for mock
            tags = pep425tags.get_supported()
        positions = [tags.index(tag) for tag in self.file_tags if tag in tags]
        if not positions:
            return None
        return min(positions)

    def supported(self, tags=None):
        """Is this wheel supported on this system?"""
        if tags is None:  # for mock
            tags = pep425tags.get_supported()
        return any(tag in self.file_tags for tag in tags)
class WheelBuilder(object):
    """Build wheels from a RequirementSet."""

    def __init__(self, finder, preparer, wheel_cache,
                 build_options=None, global_options=None, no_clean=False):
        """
        :param finder: PackageFinder, consulted for format-control settings
        :param preparer: RequirementPreparer; supplies wheel_download_dir
            and build_dir
        :param wheel_cache: WheelCache providing output paths for autobuilds
        :param build_options: extra args appended to 'setup.py bdist_wheel'
        :param global_options: extra args placed before the setup.py command
        :param no_clean: when True, do not clean build directories
        """
        self.finder = finder
        self.preparer = preparer
        self.wheel_cache = wheel_cache

        self._wheel_dir = preparer.wheel_download_dir

        self.build_options = build_options or []
        self.global_options = global_options or []
        self.no_clean = no_clean

    def _build_one(self, req, output_dir, python_tag=None):
        """Build one wheel.

        :return: The filename of the built wheel, or None if the build failed.
        """
        # Install build deps into temporary directory (PEP 518)
        with req.build_env:
            return self._build_one_inside_env(req, output_dir,
                                              python_tag=python_tag)

    def _build_one_inside_env(self, req, output_dir, python_tag=None):
        """Run the build in a temp dir, then move the wheel to output_dir."""
        with TempDirectory(kind="wheel") as temp_dir:
            if self.__build_one(req, temp_dir.path, python_tag=python_tag):
                try:
                    wheel_name = os.listdir(temp_dir.path)[0]
                    wheel_path = os.path.join(output_dir, wheel_name)
                    shutil.move(
                        os.path.join(temp_dir.path, wheel_name), wheel_path
                    )
                    logger.info('Stored in directory: %s', output_dir)
                    return wheel_path
                except Exception:
                    # BUG FIX: was a bare 'except:', which also swallowed
                    # SystemExit and KeyboardInterrupt.
                    pass
            # Ignore return, we can't do anything else useful.
            self._clean_one(req)
            return None

    def _base_setup_args(self, req):
        """Return the interpreter + setup.py shim argv prefix for req."""
        # NOTE: Eventually, we'd want to also -S to the flags here, when we're
        # isolating. Currently, it breaks Python in virtualenvs, because it
        # relies on site.py to find parts of the standard library outside the
        # virtualenv.
        return [
            sys.executable, '-u', '-c',
            SETUPTOOLS_SHIM % req.setup_py
        ] + list(self.global_options)

    def __build_one(self, req, tempd, python_tag=None):
        """Invoke 'setup.py bdist_wheel' for req; return True on success."""
        base_args = self._base_setup_args(req)

        spin_message = 'Running setup.py bdist_wheel for %s' % (req.name,)
        with open_spinner(spin_message) as spinner:
            logger.debug('Destination directory: %s', tempd)
            wheel_args = base_args + ['bdist_wheel', '-d', tempd] \
                + self.build_options

            if python_tag is not None:
                wheel_args += ["--python-tag", python_tag]

            try:
                call_subprocess(wheel_args, cwd=req.setup_py_dir,
                                show_stdout=False, spinner=spinner)
                return True
            except Exception:
                # BUG FIX: was a bare 'except:'; let SystemExit and
                # KeyboardInterrupt propagate instead of being reported
                # as a build failure.
                spinner.finish("error")
                logger.error('Failed building wheel for %s', req.name)
                return False

    def _clean_one(self, req):
        """Run 'setup.py clean --all' for req; return True on success."""
        base_args = self._base_setup_args(req)

        logger.info('Running setup.py clean for %s', req.name)
        clean_args = base_args + ['clean', '--all']
        try:
            call_subprocess(clean_args, cwd=req.source_dir, show_stdout=False)
            return True
        except Exception:
            # BUG FIX: was a bare 'except:' (see __build_one).
            logger.error('Failed cleaning build dir for %s', req.name)
            return False

    def build(self, requirements, session, autobuilding=False):
        """Build wheels.

        :param requirements: the requirements to consider building
        :param session: the PipSession used for unpacking built wheels
        :param autobuilding: if True, build into the wheel cache and replace
            each sdist with its newly built wheel in preparation for
            installation; if False, build into the wheel download directory.
        :return: True if all the wheels built correctly.
        """
        from pip._internal import index

        building_is_possible = self._wheel_dir or (
            autobuilding and self.wheel_cache.cache_dir
        )
        assert building_is_possible

        buildset = []
        for req in requirements:
            if req.constraint:
                continue
            if req.is_wheel:
                if not autobuilding:
                    logger.info(
                        'Skipping %s, due to already being wheel.', req.name,
                    )
            elif autobuilding and req.editable:
                pass
            elif autobuilding and not req.source_dir:
                pass
            elif autobuilding and req.link and not req.link.is_artifact:
                # VCS checkout. Build wheel just for this run.
                buildset.append((req, True))
            else:
                ephem_cache = False
                if autobuilding:
                    link = req.link
                    base, ext = link.splitext()
                    if index.egg_info_matches(base, None, link) is None:
                        # E.g. local directory. Build wheel just for this run.
                        ephem_cache = True
                    if "binary" not in index.fmt_ctl_formats(
                            self.finder.format_control,
                            canonicalize_name(req.name)):
                        logger.info(
                            "Skipping bdist_wheel for %s, due to binaries "
                            "being disabled for it.", req.name,
                        )
                        continue
                buildset.append((req, ephem_cache))

        if not buildset:
            return True

        # Build the wheels.
        logger.info(
            'Building wheels for collected packages: %s',
            ', '.join([req.name for (req, _) in buildset]),
        )
        _cache = self.wheel_cache  # shorter name
        with indent_log():
            build_success, build_failure = [], []
            for req, ephem in buildset:
                python_tag = None
                if autobuilding:
                    python_tag = pep425tags.implementation_tag
                    if ephem:
                        output_dir = _cache.get_ephem_path_for_link(req.link)
                    else:
                        output_dir = _cache.get_path_for_link(req.link)
                    try:
                        ensure_dir(output_dir)
                    except OSError as e:
                        logger.warning("Building wheel for %s failed: %s",
                                       req.name, e)
                        build_failure.append(req)
                        continue
                else:
                    output_dir = self._wheel_dir
                wheel_file = self._build_one(
                    req, output_dir,
                    python_tag=python_tag,
                )
                if wheel_file:
                    build_success.append(req)
                    if autobuilding:
                        # XXX: This is mildly duplicative with prepare_files,
                        # but not close enough to pull out to a single common
                        # method.
                        # The code below assumes temporary source dirs -
                        # prevent it doing bad things.
                        if req.source_dir and not os.path.exists(os.path.join(
                                req.source_dir, PIP_DELETE_MARKER_FILENAME)):
                            raise AssertionError(
                                "bad source dir - missing marker")
                        # Delete the source we built the wheel from
                        req.remove_temporary_source()
                        # set the build directory again - name is known from
                        # the work prepare_files did.
                        req.source_dir = req.build_location(
                            self.preparer.build_dir
                        )
                        # Update the link for this.
                        req.link = index.Link(path_to_url(wheel_file))
                        assert req.link.is_wheel
                        # extract the wheel into the dir
                        unpack_url(
                            req.link, req.source_dir, None, False,
                            session=session,
                        )
                else:
                    build_failure.append(req)

        # notify success/failure
        if build_success:
            logger.info(
                'Successfully built %s',
                ' '.join([req.name for req in build_success]),
            )
        if build_failure:
            logger.info(
                'Failed to build %s',
                ' '.join([req.name for req in build_failure]),
            )
        # Return True if all builds were successful
        return len(build_failure) == 0
""" | |
pip._vendor is for vendoring dependencies of pip to prevent needing pip to | |
depend on something external. | |
Files inside of pip._vendor should be considered immutable and should only be | |
updated to versions from upstream. | |
""" | |
from __future__ import absolute_import | |
import glob | |
import os.path | |
import sys | |
# Downstream redistributors which have debundled our dependencies should also
# patch this value to be true. This will trigger the additional patching
# to cause things like "six" to be available as pip.
# (pip's own distribution ships with this set to False.)
DEBUNDLED = False

# By default, look in this directory for a bunch of .whl files which we will
# add to the beginning of sys.path before attempting to import anything. This
# is done to support downstream re-distributors like Debian and Fedora who
# wish to create their own Wheels for our dependencies to aid in debundling.
WHEEL_DIR = os.path.abspath(os.path.dirname(__file__))
# Define a small helper function to alias our vendored modules to the real ones | |
# if the vendored ones do not exist. This idea of this was taken from | |
# https://github.com/kennethreitz/requests/pull/2567. | |
def vendored(modulename):
    """Alias ``pip._vendor.<modulename>`` to the identically named top-level
    module when the vendored copy cannot be imported (debundled installs)."""
    vendored_name = "{0}.{1}".format(__name__, modulename)

    try:
        __import__(vendored_name, globals(), locals(), level=0)
    except ImportError:
        try:
            __import__(modulename, globals(), locals(), level=0)
        except ImportError:
            # We can just silently allow import failures to pass here. If we
            # got to this point it means that ``import pip._vendor.whatever``
            # failed and so did ``import whatever``. Since we're importing this
            # upfront in an attempt to alias imports, not erroring here will
            # just mean we get a regular import error whenever pip *actually*
            # tries to import one of these modules to use it, which actually
            # gives us a better error message than we would have otherwise
            # gotten.
            pass
        else:
            # Register the real module under the vendored name and attach it
            # as an attribute of the vendored parent package.
            real_module = sys.modules[modulename]
            sys.modules[vendored_name] = real_module
            parent_name, _, attr_name = vendored_name.rpartition(".")
            setattr(sys.modules[parent_name], attr_name, real_module)
# If we're operating in a debundled setup, then we want to go ahead and trigger
# the aliasing of our vendored libraries as well as looking for wheels to add
# to our sys.path. This will cause all of this code to be a no-op typically
# however downstream redistributors can enable it in a consistent way across
# all platforms.
if DEBUNDLED:
    # Actually look inside of WHEEL_DIR to find .whl files and add them to the
    # front of our sys.path.
    sys.path[:] = glob.glob(os.path.join(WHEEL_DIR, "*.whl")) + sys.path

    # Actually alias all of our vendored dependencies.
    # NOTE: order matters here — each parent package is aliased before its
    # submodules, because vendored() setattr's onto the parent in sys.modules.
    vendored("cachecontrol")
    vendored("colorama")
    vendored("distlib")
    vendored("distro")
    vendored("html5lib")
    vendored("lockfile")
    vendored("six")
    vendored("six.moves")
    vendored("six.moves.urllib")
    vendored("six.moves.urllib.parse")
    vendored("packaging")
    vendored("packaging.version")
    vendored("packaging.specifiers")
    vendored("pkg_resources")
    vendored("progress")
    vendored("pytoml")
    vendored("retrying")
    vendored("requests")
    vendored("requests.packages")
    vendored("requests.packages.urllib3")
    vendored("requests.packages.urllib3._collections")
    vendored("requests.packages.urllib3.connection")
    vendored("requests.packages.urllib3.connectionpool")
    vendored("requests.packages.urllib3.contrib")
    vendored("requests.packages.urllib3.contrib.ntlmpool")
    vendored("requests.packages.urllib3.contrib.pyopenssl")
    vendored("requests.packages.urllib3.exceptions")
    vendored("requests.packages.urllib3.fields")
    vendored("requests.packages.urllib3.filepost")
    vendored("requests.packages.urllib3.packages")
    vendored("requests.packages.urllib3.packages.ordered_dict")
    vendored("requests.packages.urllib3.packages.six")
    vendored("requests.packages.urllib3.packages.ssl_match_hostname")
    vendored("requests.packages.urllib3.packages.ssl_match_hostname."
             "_implementation")
    vendored("requests.packages.urllib3.poolmanager")
    vendored("requests.packages.urllib3.request")
    vendored("requests.packages.urllib3.response")
    vendored("requests.packages.urllib3.util")
    vendored("requests.packages.urllib3.util.connection")
    vendored("requests.packages.urllib3.util.request")
    vendored("requests.packages.urllib3.util.response")
    vendored("requests.packages.urllib3.util.retry")
    vendored("requests.packages.urllib3.util.ssl_")
    vendored("requests.packages.urllib3.util.timeout")
    vendored("requests.packages.urllib3.util.url")
#!/usr/bin/env python | |
# -*- coding: utf-8 -*- | |
# Copyright (c) 2005-2010 ActiveState Software Inc. | |
# Copyright (c) 2013 Eddy Petrișor | |
"""Utilities for determining application-specific dirs. | |
See <http://github.com/ActiveState/appdirs> for details and usage. | |
""" | |
# Dev Notes: | |
# - MSDN on where to store app data files: | |
# http://support.microsoft.com/default.aspx?scid=kb;en-us;310294#XSLTH3194121123120121120120 | |
# - Mac OS X: http://developer.apple.com/documentation/MacOSX/Conceptual/BPFileSystem/index.html | |
# - XDG spec for Un*x: http://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html | |
# Version of the vendored appdirs library itself (not of pip).
__version_info__ = (1, 4, 3)
__version__ = '.'.join(map(str, __version_info__))


import sys
import os

# True when running under Python 3; used to alias ``unicode`` below.
PY3 = sys.version_info[0] == 3

if PY3:
    unicode = str

if sys.platform.startswith('java'):
    # Jython reports 'java...' in sys.platform; derive the underlying OS
    # from platform.java_ver() and map it to a *sys.platform*-style string.
    import platform
    os_name = platform.java_ver()[3][0]
    if os_name.startswith('Windows'):  # "Windows XP", "Windows 7", etc.
        system = 'win32'
    elif os_name.startswith('Mac'):  # "Mac OS X", etc.
        system = 'darwin'
    else:  # "Linux", "SunOS", "FreeBSD", etc.
        # Setting this to "linux2" is not ideal, but only Windows or Mac
        # are actually checked for and the rest of the module expects
        # *sys.platform* style strings.
        system = 'linux2'
else:
    system = sys.platform
def user_data_dir(appname=None, appauthor=None, version=None, roaming=False):
    r"""Full path of the per-user data directory for this application.

    "appname" is the name of application.
        When None, only the bare system directory is returned.
    "appauthor" (Windows only) is the distributing company/body for the
        application. Defaults to appname; pass False to suppress the
        author path segment entirely.
    "version" is an optional version path element, typically
        "<major>.<minor>", appended so multiple versions of the app can
        run independently. Ignored unless appname is given.
    "roaming" (Windows only, default False) selects the roaming AppData
        directory, which is synchronised on login for users on networks
        configured with roaming profiles. See
        <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
        for a discussion of issues.

    Typical user data directories:
        Mac OS X:               ~/Library/Application Support/<AppName>
        Unix:                   ~/.local/share/<AppName>    # or $XDG_DATA_HOME, if defined
        Win XP (not roaming):   C:\Documents and Settings\<username>\Application Data\<AppAuthor>\<AppName>
        Win XP (roaming):       C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>
        Win 7  (not roaming):   C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>
        Win 7  (roaming):       C:\Users\<username>\AppData\Roaming\<AppAuthor>\<AppName>

    On Unix the XDG spec is followed and $XDG_DATA_HOME is honoured, so the
    default is "~/.local/share/<AppName>".
    """
    if system == "win32":
        if appauthor is None:
            appauthor = appname
        folder = "CSIDL_APPDATA" if roaming else "CSIDL_LOCAL_APPDATA"
        path = os.path.normpath(_get_win_folder(folder))
        if appname:
            segments = [path, appname] if appauthor is False else [path, appauthor, appname]
            path = os.path.join(*segments)
    elif system == 'darwin':
        path = os.path.expanduser('~/Library/Application Support/')
        if appname:
            path = os.path.join(path, appname)
    else:
        path = os.getenv('XDG_DATA_HOME', os.path.expanduser("~/.local/share"))
        if appname:
            path = os.path.join(path, appname)
    if appname and version:
        path = os.path.join(path, version)
    return path
def site_data_dir(appname=None, appauthor=None, version=None, multipath=False):
    r"""Return full path to the user-shared data dir for this application.

    "appname" is the name of application.
        If None, just the system directory is returned.
    "appauthor" (only used on Windows) is the name of the
        appauthor or distributing body for this application. Typically
        it is the owning company name. This falls back to appname. You may
        pass False to disable it.
    "version" is an optional version path element to append to the
        path. You might want to use this if you want multiple versions
        of your app to be able to run independently. If used, this
        would typically be "<major>.<minor>".
        Only applied when appname is present.
    "multipath" is an optional parameter only applicable to *nix
        which indicates that the entire list of data dirs should be
        returned. By default, the first item from XDG_DATA_DIRS is
        returned, or '/usr/local/share/<AppName>',
        if XDG_DATA_DIRS is not set

    Typical site data directories are:
        Mac OS X:   /Library/Application Support/<AppName>
        Unix:       /usr/local/share/<AppName> or /usr/share/<AppName>
        Win XP:     C:\Documents and Settings\All Users\Application Data\<AppAuthor>\<AppName>
        Vista:      (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.)
        Win 7:      C:\ProgramData\<AppAuthor>\<AppName>   # Hidden, but writeable on Win 7.

    For Unix, this is using the $XDG_DATA_DIRS[0] default.

    WARNING: Do not use this on Windows. See the Vista-Fail note above for why.
    """
    if system == "win32":
        if appauthor is None:
            appauthor = appname
        path = os.path.normpath(_get_win_folder("CSIDL_COMMON_APPDATA"))
        if appname:
            if appauthor is not False:
                path = os.path.join(path, appauthor, appname)
            else:
                path = os.path.join(path, appname)
    elif system == 'darwin':
        path = os.path.expanduser('/Library/Application Support')
        if appname:
            path = os.path.join(path, appname)
    else:
        # XDG default for $XDG_DATA_DIRS
        # only first, if multipath is False
        path = os.getenv('XDG_DATA_DIRS',
                         os.pathsep.join(['/usr/local/share', '/usr/share']))
        pathlist = [os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep)]
        if appname:
            if version:
                # Version is folded into the appname segment here, so the
                # trailing version join below is correctly skipped.
                appname = os.path.join(appname, version)
            pathlist = [os.sep.join([x, appname]) for x in pathlist]
        if multipath:
            path = os.pathsep.join(pathlist)
        else:
            path = pathlist[0]
        # NOTE: the *nix branch returns early; version was already applied above.
        return path

    if appname and version:
        path = os.path.join(path, version)
    return path
def user_config_dir(appname=None, appauthor=None, version=None, roaming=False):
    r"""Full path of the per-user config directory for this application.

    "appname" is the name of application.
        When None, only the bare system directory is returned.
    "appauthor" (Windows only) is the distributing company/body for the
        application. Defaults to appname; pass False to suppress it.
    "version" is an optional version path element, typically
        "<major>.<minor>", appended so multiple versions can run
        independently. Ignored unless appname is given.
    "roaming" (Windows only, default False) selects the roaming AppData
        directory, which is synchronised on login for roaming profiles. See
        <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
        for a discussion of issues.

    Typical user config directories:
        Mac OS X:   same as user_data_dir
        Unix:       ~/.config/<AppName>     # or $XDG_CONFIG_HOME, if defined
        Win *:      same as user_data_dir

    On Unix the XDG spec is followed and $XDG_CONFIG_HOME is honoured, so
    the default is "~/.config/<AppName>".
    """
    if system in ("win32", "darwin"):
        path = user_data_dir(appname, appauthor, None, roaming)
    else:
        base = os.getenv('XDG_CONFIG_HOME', os.path.expanduser("~/.config"))
        path = os.path.join(base, appname) if appname else base
    if appname and version:
        path = os.path.join(path, version)
    return path
def site_config_dir(appname=None, appauthor=None, version=None, multipath=False):
    r"""Return full path to the user-shared config dir for this application.

    "appname" is the name of application.
        If None, just the system directory is returned.
    "appauthor" (only used on Windows) is the name of the
        appauthor or distributing body for this application. Typically
        it is the owning company name. This falls back to appname. You may
        pass False to disable it.
    "version" is an optional version path element to append to the
        path. You might want to use this if you want multiple versions
        of your app to be able to run independently. If used, this
        would typically be "<major>.<minor>".
        Only applied when appname is present.
    "multipath" is an optional parameter only applicable to *nix
        which indicates that the entire list of config dirs should be
        returned. By default, the first item from XDG_CONFIG_DIRS is
        returned, or '/etc/xdg/<AppName>', if XDG_CONFIG_DIRS is not set

    Typical site config directories are:
        Mac OS X:   same as site_data_dir
        Unix:       /etc/xdg/<AppName> or $XDG_CONFIG_DIRS[i]/<AppName> for each value in
                    $XDG_CONFIG_DIRS
        Win *:      same as site_data_dir
        Vista:      (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.)

    For Unix, this is using the $XDG_CONFIG_DIRS[0] default, if multipath=False

    WARNING: Do not use this on Windows. See the Vista-Fail note above for why.
    """
    if system in ["win32", "darwin"]:
        path = site_data_dir(appname, appauthor)
        if appname and version:
            path = os.path.join(path, version)
    else:
        # XDG default for $XDG_CONFIG_DIRS
        # only first, if multipath is False
        path = os.getenv('XDG_CONFIG_DIRS', '/etc/xdg')
        pathlist = [os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep)]
        if appname:
            if version:
                # Version becomes part of the appname segment for every entry.
                appname = os.path.join(appname, version)
            pathlist = [os.sep.join([x, appname]) for x in pathlist]
        if multipath:
            path = os.pathsep.join(pathlist)
        else:
            path = pathlist[0]
    return path
def user_cache_dir(appname=None, appauthor=None, version=None, opinion=True):
    r"""Return full path to the user-specific cache dir for this application.

    "appname" is the name of application.
        If None, just the system directory is returned.
    "appauthor" (only used on Windows) is the name of the
        appauthor or distributing body for this application. Typically
        it is the owning company name. This falls back to appname. You may
        pass False to disable it.
    "version" is an optional version path element to append to the
        path. You might want to use this if you want multiple versions
        of your app to be able to run independently. If used, this
        would typically be "<major>.<minor>".
        Only applied when appname is present.
    "opinion" (boolean) can be False to disable the appending of
        "Cache" to the base app data dir for Windows. See
        discussion below.

    Typical user cache directories are:
        Mac OS X:   ~/Library/Caches/<AppName>
        Unix:       ~/.cache/<AppName> (XDG default)
        Win XP:     C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>\Cache
        Vista:      C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>\Cache

    On Windows the only suggestion in the MSDN docs is that local settings go in
    the `CSIDL_LOCAL_APPDATA` directory. This is identical to the non-roaming
    app data dir (the default returned by `user_data_dir` above). Apps typically
    put cache data somewhere *under* the given dir here. Some examples:
        ...\Mozilla\Firefox\Profiles\<ProfileName>\Cache
        ...\Acme\SuperApp\Cache\1.0
    OPINION: This function appends "Cache" to the `CSIDL_LOCAL_APPDATA` value.
    This can be disabled with the `opinion=False` option.
    """
    if system == "win32":
        if appauthor is None:
            appauthor = appname
        path = os.path.normpath(_get_win_folder("CSIDL_LOCAL_APPDATA"))
        if appname:
            if appauthor is not False:
                path = os.path.join(path, appauthor, appname)
            else:
                path = os.path.join(path, appname)
            # "Cache" is only appended when an appname was given.
            if opinion:
                path = os.path.join(path, "Cache")
    elif system == 'darwin':
        path = os.path.expanduser('~/Library/Caches')
        if appname:
            path = os.path.join(path, appname)
    else:
        path = os.getenv('XDG_CACHE_HOME', os.path.expanduser('~/.cache'))
        if appname:
            path = os.path.join(path, appname)
    if appname and version:
        path = os.path.join(path, version)
    return path
def user_state_dir(appname=None, appauthor=None, version=None, roaming=False):
    r"""Full path of the per-user state directory for this application.

    "appname" is the name of application.
        When None, only the bare system directory is returned.
    "appauthor" (Windows only) is the distributing company/body for the
        application. Defaults to appname; pass False to suppress it.
    "version" is an optional version path element, typically
        "<major>.<minor>", appended so multiple versions can run
        independently. Ignored unless appname is given.
    "roaming" (Windows only, default False) selects the roaming AppData
        directory, which is synchronised on login for roaming profiles. See
        <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
        for a discussion of issues.

    Typical user state directories:
        Mac OS X:   same as user_data_dir
        Unix:       ~/.local/state/<AppName>   # or $XDG_STATE_HOME, if defined
        Win *:      same as user_data_dir

    On Unix this follows the Debian proposal
    <https://wiki.debian.org/XDGBaseDirectorySpecification#state>
    extending the XDG spec with $XDG_STATE_HOME, so the default is
    "~/.local/state/<AppName>".
    """
    if system in ("win32", "darwin"):
        path = user_data_dir(appname, appauthor, None, roaming)
    else:
        base = os.getenv('XDG_STATE_HOME', os.path.expanduser("~/.local/state"))
        path = os.path.join(base, appname) if appname else base
    if appname and version:
        path = os.path.join(path, version)
    return path
def user_log_dir(appname=None, appauthor=None, version=None, opinion=True):
    r"""Return full path to the user-specific log dir for this application.

    "appname" is the name of application.
        If None, just the system directory is returned.
    "appauthor" (only used on Windows) is the name of the
        appauthor or distributing body for this application. Typically
        it is the owning company name. This falls back to appname. You may
        pass False to disable it.
    "version" is an optional version path element to append to the
        path. You might want to use this if you want multiple versions
        of your app to be able to run independently. If used, this
        would typically be "<major>.<minor>".
        Only applied when appname is present.
    "opinion" (boolean) can be False to disable the appending of
        "Logs" to the base app data dir for Windows, and "log" to the
        base cache dir for Unix. See discussion below.

    Typical user log directories are:
        Mac OS X:   ~/Library/Logs/<AppName>
        Unix:       ~/.cache/<AppName>/log  # or under $XDG_CACHE_HOME if defined
        Win XP:     C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>\Logs
        Vista:      C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>\Logs

    On Windows the only suggestion in the MSDN docs is that local settings
    go in the `CSIDL_LOCAL_APPDATA` directory. (Note: I'm interested in
    examples of what some windows apps use for a logs dir.)

    OPINION: This function appends "Logs" to the `CSIDL_LOCAL_APPDATA`
    value for Windows and appends "log" to the user cache dir for Unix.
    This can be disabled with the `opinion=False` option.
    """
    if system == "darwin":
        path = os.path.expanduser('~/Library/Logs')
        # BUG FIX: the original unconditionally joined appname here, so
        # appname=None raised TypeError instead of returning the system
        # directory as documented.
        if appname:
            path = os.path.join(path, appname)
    elif system == "win32":
        path = user_data_dir(appname, appauthor, version)
        # version was already applied by user_data_dir; prevent a second join.
        version = False
        if opinion:
            path = os.path.join(path, "Logs")
    else:
        path = user_cache_dir(appname, appauthor, version)
        # version was already applied by user_cache_dir; prevent a second join.
        version = False
        if opinion:
            path = os.path.join(path, "log")
    if appname and version:
        path = os.path.join(path, version)
    return path
class AppDirs(object):
    """Convenience wrapper for getting application dirs.

    Stores the common arguments once and exposes each module-level
    ``*_dir`` helper as a read-only property.
    """

    def __init__(self, appname=None, appauthor=None, version=None,
                 roaming=False, multipath=False):
        # Arguments are stored verbatim and forwarded to the helpers below.
        self.appname = appname
        self.appauthor = appauthor
        self.version = version
        self.roaming = roaming        # Windows only; see user_data_dir.
        self.multipath = multipath    # *nix only; see site_data_dir.

    @property
    def user_data_dir(self):
        return user_data_dir(self.appname, self.appauthor,
                             version=self.version, roaming=self.roaming)

    @property
    def site_data_dir(self):
        return site_data_dir(self.appname, self.appauthor,
                             version=self.version, multipath=self.multipath)

    @property
    def user_config_dir(self):
        return user_config_dir(self.appname, self.appauthor,
                               version=self.version, roaming=self.roaming)

    @property
    def site_config_dir(self):
        return site_config_dir(self.appname, self.appauthor,
                               version=self.version, multipath=self.multipath)

    @property
    def user_cache_dir(self):
        return user_cache_dir(self.appname, self.appauthor,
                              version=self.version)

    @property
    def user_state_dir(self):
        return user_state_dir(self.appname, self.appauthor,
                              version=self.version)

    @property
    def user_log_dir(self):
        return user_log_dir(self.appname, self.appauthor,
                            version=self.version)
#---- internal support stuff | |
def _get_win_folder_from_registry(csidl_name):
    """Look up a Windows shell folder path in the registry.

    This is a fallback technique at best: it is not certain that the
    registry gives the correct answer for all CSIDL_* names.
    """
    if PY3:
        import winreg as _winreg
    else:
        import _winreg

    shell_folder_name = {
        "CSIDL_APPDATA": "AppData",
        "CSIDL_COMMON_APPDATA": "Common AppData",
        "CSIDL_LOCAL_APPDATA": "Local AppData",
    }[csidl_name]

    key = _winreg.OpenKey(
        _winreg.HKEY_CURRENT_USER,
        r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders"
    )
    # Renamed from the original "dir, type" locals, which shadowed builtins.
    directory, _value_type = _winreg.QueryValueEx(key, shell_folder_name)
    return directory
def _get_win_folder_with_pywin32(csidl_name):
    """Resolve a CSIDL_* folder path via the pywin32 shell bindings."""
    from win32com.shell import shellcon, shell
    dir = shell.SHGetFolderPath(0, getattr(shellcon, csidl_name), 0, 0)
    # Try to make this a unicode path because SHGetFolderPath does
    # not return unicode strings when there is unicode data in the
    # path.
    try:
        dir = unicode(dir)

        # Downgrade to short path name if have highbit chars. See
        # <http://bugs.activestate.com/show_bug.cgi?id=85099>.
        has_high_char = False
        for c in dir:
            if ord(c) > 255:
                has_high_char = True
                break
        if has_high_char:
            try:
                import win32api
                dir = win32api.GetShortPathName(dir)
            except ImportError:
                # win32api missing: keep the long (possibly highbit) path.
                pass
    except UnicodeError:
        pass
    return dir
def _get_win_folder_with_ctypes(csidl_name):
    """Resolve a CSIDL_* folder path via the Win32 API using ctypes only."""
    import ctypes

    # Numeric CSIDL constants (see shlobj.h) for the names this module uses.
    csidl_const = {
        "CSIDL_APPDATA": 26,
        "CSIDL_COMMON_APPDATA": 35,
        "CSIDL_LOCAL_APPDATA": 28,
    }[csidl_name]

    buf = ctypes.create_unicode_buffer(1024)
    ctypes.windll.shell32.SHGetFolderPathW(None, csidl_const, None, 0, buf)

    # Downgrade to short path name if have highbit chars. See
    # <http://bugs.activestate.com/show_bug.cgi?id=85099>.
    has_high_char = False
    for c in buf:
        if ord(c) > 255:
            has_high_char = True
            break
    if has_high_char:
        buf2 = ctypes.create_unicode_buffer(1024)
        if ctypes.windll.kernel32.GetShortPathNameW(buf.value, buf2, 1024):
            buf = buf2

    return buf.value
def _get_win_folder_with_jna(csidl_name):
    """Resolve a CSIDL_* folder path under Jython via the JNA Win32 bindings."""
    import array
    from com.sun import jna
    from com.sun.jna.platform import win32

    buf_size = win32.WinDef.MAX_PATH * 2
    buf = array.zeros('c', buf_size)
    shell = win32.Shell32.INSTANCE
    shell.SHGetFolderPath(None, getattr(win32.ShlObj, csidl_name), None, win32.ShlObj.SHGFP_TYPE_CURRENT, buf)
    dir = jna.Native.toString(buf.tostring()).rstrip("\0")

    # Downgrade to short path name if have highbit chars. See
    # <http://bugs.activestate.com/show_bug.cgi?id=85099>.
    has_high_char = False
    for c in dir:
        if ord(c) > 255:
            has_high_char = True
            break
    if has_high_char:
        buf = array.zeros('c', buf_size)
        kernel = win32.Kernel32.INSTANCE
        if kernel.GetShortPathName(dir, buf, buf_size):
            dir = jna.Native.toString(buf.tostring()).rstrip("\0")

    return dir
if system == "win32":
    # Pick the best available Windows folder lookup implementation:
    # ctypes (CPython), then JNA (Jython), finally the registry fallback.
    try:
        from ctypes import windll
        _get_win_folder = _get_win_folder_with_ctypes
    except ImportError:
        try:
            import com.sun.jna
            _get_win_folder = _get_win_folder_with_jna
        except ImportError:
            _get_win_folder = _get_win_folder_from_registry
#---- self test code

if __name__ == "__main__":
    # Smoke test: print every directory helper with and without the
    # optional version/appauthor arguments.
    appname = "MyApp"
    appauthor = "MyCompany"
    props = ("user_data_dir",
             "user_config_dir",
             "user_cache_dir",
             "user_state_dir",
             "user_log_dir",
             "site_data_dir",
             "site_config_dir")

    print("-- app dirs %s --" % __version__)

    print("-- app dirs (with optional 'version')")
    dirs = AppDirs(appname, appauthor, version="1.0")
    for prop in props:
        print("%s: %s" % (prop, getattr(dirs, prop)))

    print("\n-- app dirs (without optional 'version')")
    dirs = AppDirs(appname, appauthor)
    for prop in props:
        print("%s: %s" % (prop, getattr(dirs, prop)))

    print("\n-- app dirs (without optional 'appauthor')")
    dirs = AppDirs(appname)
    for prop in props:
        print("%s: %s" % (prop, getattr(dirs, prop)))

    print("\n-- app dirs (with disabled 'appauthor')")
    dirs = AppDirs(appname, appauthor=False)
    for prop in props:
        print("%s: %s" % (prop, getattr(dirs, prop)))
"""CacheControl import Interface. | |
Make it easy to import from cachecontrol without long namespaces. | |
""" | |
__author__ = 'Eric Larson' | |
__email__ = '[email protected]' | |
__version__ = '0.12.4' | |
from .wrapper import CacheControl | |
from .adapter import CacheControlAdapter | |
from .controller import CacheController |
import logging | |
from pip._vendor import requests | |
from pip._vendor.cachecontrol.adapter import CacheControlAdapter | |
from pip._vendor.cachecontrol.cache import DictCache | |
from pip._vendor.cachecontrol.controller import logger | |
from argparse import ArgumentParser | |
def setup_logging():
    """Route the cachecontrol controller logger to stderr at DEBUG level."""
    logger.setLevel(logging.DEBUG)
    handler = logging.StreamHandler()
    logger.addHandler(handler)
def get_session():
    """Build a requests Session that caches HTTP(S) traffic in memory.

    The caching adapter's controller is exposed as ``sess.cache_controller``
    so callers can cache and inspect responses manually.
    """
    adapter = CacheControlAdapter(
        DictCache(),
        cache_etags=True,
        serializer=None,
        heuristic=None,
    )
    sess = requests.Session()
    sess.mount('http://', adapter)
    sess.mount('https://', adapter)

    sess.cache_controller = adapter.controller
    return sess
def get_args():
    """Parse the command line: one positional argument, the URL to cache."""
    cli = ArgumentParser()
    cli.add_argument('url', help='The URL to try and cache')
    return cli.parse_args()
def main(args=None):
    """Fetch ``args.url``, cache the response, and report the cache result.

    "args" may be a pre-parsed namespace with a ``url`` attribute; when None
    the command line is parsed.  (BUG FIX: the original unconditionally
    re-parsed sys.argv, silently ignoring the ``args`` parameter.)
    """
    if args is None:
        args = get_args()

    sess = get_session()

    # Make a request to get a response
    resp = sess.get(args.url)

    # Turn on logging
    setup_logging()

    # try setting the cache
    sess.cache_controller.cache_response(resp.request, resp.raw)

    # Now try to get it
    if sess.cache_controller.cached_request(resp.request):
        print('Cached!')
    else:
        print('Not cached :(')


if __name__ == '__main__':
    main()
import types | |
import functools | |
import zlib | |
from pip._vendor.requests.adapters import HTTPAdapter | |
from .controller import CacheController | |
from .cache import DictCache | |
from .filewrapper import CallbackFileWrapper | |
class CacheControlAdapter(HTTPAdapter):
    """HTTPAdapter that serves and stores responses per HTTP caching rules."""

    # Successful responses to these methods invalidate the cached entry
    # for the request URL.
    invalidating_methods = set(['PUT', 'DELETE'])

    def __init__(self, cache=None,
                 cache_etags=True,
                 controller_class=None,
                 serializer=None,
                 heuristic=None,
                 cacheable_methods=None,
                 *args, **kw):
        super(CacheControlAdapter, self).__init__(*args, **kw)
        self.cache = cache or DictCache()
        self.heuristic = heuristic
        # Only these HTTP methods are ever served from / stored in the cache.
        self.cacheable_methods = cacheable_methods or ('GET',)

        controller_factory = controller_class or CacheController
        self.controller = controller_factory(
            self.cache,
            cache_etags=cache_etags,
            serializer=serializer,
        )

    def send(self, request, cacheable_methods=None, **kw):
        """
        Send a request. Use the request information to see if it
        exists in the cache and cache the response if we need to and can.
        """
        cacheable = cacheable_methods or self.cacheable_methods
        if request.method in cacheable:
            try:
                cached_response = self.controller.cached_request(request)
            except zlib.error:
                # A corrupt cached body is treated as a plain cache miss.
                cached_response = None
            if cached_response:
                return self.build_response(request, cached_response,
                                           from_cache=True)

            # check for etags and add headers if appropriate
            request.headers.update(
                self.controller.conditional_headers(request)
            )

        resp = super(CacheControlAdapter, self).send(request, **kw)

        return resp

    def build_response(self, request, response, from_cache=False,
                       cacheable_methods=None):
        """
        Build a response by making a request or using the cache.

        This will end up calling send and returning a potentially
        cached response
        """
        cacheable = cacheable_methods or self.cacheable_methods
        if not from_cache and request.method in cacheable:
            # Check for any heuristics that might update headers
            # before trying to cache.
            if self.heuristic:
                response = self.heuristic.apply(response)

            # apply any expiration heuristics
            if response.status == 304:
                # We must have sent an ETag request. This could mean
                # that we've been expired already or that we simply
                # have an etag. In either case, we want to try and
                # update the cache if that is the case.
                cached_response = self.controller.update_cached_response(
                    request, response
                )

                if cached_response is not response:
                    from_cache = True

                # We are done with the server response, read a
                # possible response body (compliant servers will
                # not return one, but we cannot be 100% sure) and
                # release the connection back to the pool.
                response.read(decode_content=False)
                response.release_conn()

                response = cached_response

            # We always cache the 301 responses
            elif response.status == 301:
                self.controller.cache_response(request, response)
            else:
                # Wrap the response file with a wrapper that will cache the
                # response when the stream has been consumed.
                response._fp = CallbackFileWrapper(
                    response._fp,
                    functools.partial(
                        self.controller.cache_response,
                        request,
                        response,
                    )
                )
                if response.chunked:
                    super_update_chunk_length = response._update_chunk_length

                    def _update_chunk_length(self):
                        # Trigger the cache callback once the final chunk
                        # of a chunked body has been consumed.
                        super_update_chunk_length()
                        if self.chunk_left == 0:
                            self._fp._close()
                    response._update_chunk_length = types.MethodType(_update_chunk_length, response)

        resp = super(CacheControlAdapter, self).build_response(
            request, response
        )

        # See if we should invalidate the cache.
        if request.method in self.invalidating_methods and resp.ok:
            cache_url = self.controller.cache_url(request.url)
            self.cache.delete(cache_url)

        # Give the request a from_cache attr to let people use it
        resp.from_cache = from_cache

        return resp

    def close(self):
        self.cache.close()
        super(CacheControlAdapter, self).close()
""" | |
The cache object API for implementing caches. The default is a thread | |
safe in-memory dictionary. | |
""" | |
from threading import Lock | |
class BaseCache(object):
    """Abstract cache interface used by CacheController.

    Subclasses must implement get/set/delete; close() is an optional hook
    for caches holding real resources (files, sockets, connections).
    """

    def get(self, key):
        # BUG FIX: the original methods did ``raise NotImplemented()``.
        # NotImplemented is a constant, not an exception type, so calling
        # it raised "TypeError: 'NotImplementedType' object is not callable"
        # instead of the intended NotImplementedError.
        raise NotImplementedError()

    def set(self, key, value):
        raise NotImplementedError()

    def delete(self, key):
        raise NotImplementedError()

    def close(self):
        pass


class DictCache(BaseCache):
    """Thread-safe in-memory cache backed by a plain dict."""

    def __init__(self, init_dict=None):
        self.lock = Lock()
        self.data = init_dict or {}

    def get(self, key):
        # dict reads are atomic under the GIL, so no lock is taken here.
        return self.data.get(key, None)

    def set(self, key, value):
        with self.lock:
            self.data.update({key: value})

    def delete(self, key):
        with self.lock:
            if key in self.data:
                self.data.pop(key)
from .file_cache import FileCache # noqa | |
from .redis_cache import RedisCache # noqa |
import hashlib | |
import os | |
from textwrap import dedent | |
from ..cache import BaseCache | |
from ..controller import CacheController | |
try:
    FileNotFoundError
except NameError:
    # py2.X
    # Python 2 has no FileNotFoundError; alias it to OSError so the
    # except clauses below work on both majors (catching slightly more
    # broadly on Python 2).
    FileNotFoundError = OSError
def _secure_open_write(filename, fmode): | |
# We only want to write to this file, so open it in write only mode | |
flags = os.O_WRONLY | |
# os.O_CREAT | os.O_EXCL will fail if the file already exists, so we only | |
# will open *new* files. | |
# We specify this because we want to ensure that the mode we pass is the | |
# mode of the file. | |
flags |= os.O_CREAT | os.O_EXCL | |
# Do not follow symlinks to prevent someone from making a symlink that | |
# we follow and insecurely open a cache file. | |
if hasattr(os, "O_NOFOLLOW"): | |
flags |= os.O_NOFOLLOW | |
# On Windows we'll mark this file as binary | |
if hasattr(os, "O_BINARY"): | |
flags |= os.O_BINARY | |
# Before we open our file, we want to delete any existing file that is | |
# there | |
try: | |
os.remove(filename) | |
except (IOError, OSError): | |
# The file must not exist already, so we can just skip ahead to opening | |
pass | |
# Open our file, the use of os.O_CREAT | os.O_EXCL will ensure that if a | |
# race condition happens between the os.remove and this line, that an | |
# error will be raised. Because we utilize a lockfile this should only | |
# happen if someone is attempting to attack us. | |
fd = os.open(filename, flags, fmode) | |
try: | |
return os.fdopen(fd, "wb") | |
except: | |
# An error occurred wrapping our FD in a file object | |
os.close(fd) | |
raise | |
class FileCache(BaseCache):
    """BaseCache implementation that stores each entry as a file on disk.

    Keys are hashed (sha224) and fanned out over a shallow directory tree.
    Writes are guarded by a lockfile and use a secure exclusive-create open.
    """

    def __init__(self, directory, forever=False, filemode=0o0600,
                 dirmode=0o0700, use_dir_lock=None, lock_class=None):
        if use_dir_lock is not None and lock_class is not None:
            raise ValueError("Cannot use use_dir_lock and lock_class together")

        try:
            from pip._vendor.lockfile import LockFile
            from pip._vendor.lockfile.mkdirlockfile import MkdirLockFile
        except ImportError:
            notice = dedent("""
            NOTE: In order to use the FileCache you must have
            lockfile installed. You can install it via pip:
              pip install lockfile
            """)
            raise ImportError(notice)
        else:
            if use_dir_lock:
                lock_class = MkdirLockFile
            elif lock_class is None:
                lock_class = LockFile

        self.directory = directory
        self.forever = forever      # when True, delete() is a no-op
        self.filemode = filemode
        self.dirmode = dirmode
        self.lock_class = lock_class

    @staticmethod
    def encode(x):
        # Hash keys so arbitrary URLs become safe, fixed-length file names.
        return hashlib.sha224(x.encode()).hexdigest()

    def _fn(self, name):
        # NOTE: This method should not change as some may depend on it.
        # See: https://github.com/ionrock/cachecontrol/issues/63
        hashed = self.encode(name)
        parts = list(hashed[:5]) + [hashed]
        return os.path.join(self.directory, *parts)

    def get(self, key):
        """Return the cached bytes for *key*, or None when absent."""
        name = self._fn(key)
        # BUG FIX: the original checked os.path.exists() and then opened the
        # file, racing with a concurrent delete() between check and open
        # (TOCTOU).  Opening directly and treating a missing file as a miss
        # removes the race.
        try:
            with open(name, 'rb') as fh:
                return fh.read()
        except FileNotFoundError:
            return None

    def set(self, key, value):
        name = self._fn(key)

        # Make sure the directory exists
        try:
            os.makedirs(os.path.dirname(name), self.dirmode)
        except (IOError, OSError):
            pass

        with self.lock_class(name) as lock:
            # Write our actual file
            with _secure_open_write(lock.path, self.filemode) as fh:
                fh.write(value)

    def delete(self, key):
        name = self._fn(key)
        if not self.forever:
            try:
                os.remove(name)
            except FileNotFoundError:
                pass
def url_to_file_path(url, filecache):
    """Return the file cache path based on the URL.

    "filecache" is a FileCache instance whose hashing scheme determines
    the on-disk location.  This does not ensure the file exists!
    """
    key = CacheController.cache_url(url)
    return filecache._fn(key)
from __future__ import division | |
from datetime import datetime | |
from pip._vendor.cachecontrol.cache import BaseCache | |
def total_seconds(td):
    """Return *td*'s duration in whole seconds (Python 2.6 compatibility)."""
    if hasattr(td, 'total_seconds'):
        return int(td.total_seconds())

    # Manual fallback for timedeltas lacking total_seconds().
    micros = td.microseconds
    whole = td.seconds + td.days * 24 * 3600
    return int((micros + whole * 10**6) / 10**6)
class RedisCache(BaseCache):
    """Cache backend that stores entries in a Redis connection."""

    def __init__(self, conn):
        self.conn = conn

    def get(self, key):
        return self.conn.get(key)

    def set(self, key, value, expires=None):
        # *expires* is an absolute datetime; convert it to a relative TTL
        # for Redis SETEX. Without it, store the value without expiry.
        if expires:
            remaining = expires - datetime.utcnow()
            self.conn.setex(key, total_seconds(remaining), value)
        else:
            self.conn.set(key, value)

    def delete(self, key):
        self.conn.delete(key)

    def clear(self):
        """Helper for clearing all the keys in a database. Use with
        caution!"""
        for key in self.conn.keys():
            self.conn.delete(key)

    def close(self):
        """Redis uses connection pooling, no need to close the connection."""
# Python 3 moved urljoin into urllib.parse; fall back to the Python 2 module.
try:
    from urllib.parse import urljoin
except ImportError:
    from urlparse import urljoin


# Prefer the C pickle implementation on Python 2 (cPickle); Python 3's
# pickle uses its C accelerator automatically.
try:
    import cPickle as pickle
except ImportError:
    import pickle


# Handle the case where the requests module has been patched to not have
# urllib3 bundled as part of its source.
try:
    from pip._vendor.requests.packages.urllib3.response import HTTPResponse
except ImportError:
    from pip._vendor.urllib3.response import HTTPResponse

try:
    from pip._vendor.requests.packages.urllib3.util import is_fp_closed
except ImportError:
    from pip._vendor.urllib3.util import is_fp_closed

# Replicate some six behaviour: text_type is the unicode string type on
# either major Python version (``unicode`` raises NameError on Python 3).
try:
    text_type = unicode
except NameError:
    text_type = str
""" | |
The httplib2 algorithms ported for use with requests. | |
""" | |
import logging | |
import re | |
import calendar | |
import time | |
from email.utils import parsedate_tz | |
from pip._vendor.requests.structures import CaseInsensitiveDict | |
from .cache import DictCache | |
from .serialize import Serializer | |
logger = logging.getLogger(__name__) | |
URI = re.compile(r"^(([^:/?#]+):)?(//([^/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))?")


def parse_uri(uri):
    """Split *uri* into (scheme, authority, path, query, fragment).

    Uses the regex given in Appendix B of RFC 3986.
    """
    g = URI.match(uri).groups()
    # Indices 1/3/6/8 are the inner captures (0/2/5/7 include the
    # delimiters, e.g. "scheme:"); 4 is the path, which has no wrapper.
    return (g[1], g[3], g[4], g[6], g[8])
class CacheController(object):
    """An interface to see if request should cached or not.
    """

    def __init__(self, cache=None, cache_etags=True, serializer=None,
                 status_codes=None):
        # cache: a BaseCache-style store; defaults to an in-memory DictCache.
        self.cache = cache or DictCache()
        # When True, responses carrying an ETag are cached even without
        # freshness information (see cache_response).
        self.cache_etags = cache_etags
        self.serializer = serializer or Serializer()
        # Response status codes that may be stored at all.
        self.cacheable_status_codes = status_codes or (200, 203, 300, 301)

    @classmethod
    def _urlnorm(cls, uri):
        """Normalize the URL to create a safe key for the cache"""
        (scheme, authority, path, query, fragment) = parse_uri(uri)
        if not scheme or not authority:
            raise Exception("Only absolute URIs are allowed. uri = %s" % uri)

        # Scheme and host are case-insensitive per RFC 3986.
        scheme = scheme.lower()
        authority = authority.lower()

        if not path:
            path = "/"

        # Could do syntax based normalization of the URI before
        # computing the digest. See Section 6.2.2 of Std 66.
        request_uri = query and "?".join([path, query]) or path
        defrag_uri = scheme + "://" + authority + request_uri

        return defrag_uri

    @classmethod
    def cache_url(cls, uri):
        # The cache key for a URI is simply its normalized form.
        return cls._urlnorm(uri)

    def parse_cache_control(self, headers):
        """Parse a Cache-Control header into a {directive: value} dict.

        Unknown directives are logged and ignored; values are coerced via
        the type in ``known_directives`` and invalid or missing required
        values are logged (the directive is still recorded with None when
        it is valueless or optional).
        """
        known_directives = {
            # https://tools.ietf.org/html/rfc7234#section-5.2
            'max-age': (int, True,),
            'max-stale': (int, False,),
            'min-fresh': (int, True,),
            'no-cache': (None, False,),
            'no-store': (None, False,),
            'no-transform': (None, False,),
            'only-if-cached': (None, False,),
            'must-revalidate': (None, False,),
            'public': (None, False,),
            'private': (None, False,),
            'proxy-revalidate': (None, False,),
            's-maxage': (int, True,)
        }

        # Accept either header capitalization (plain dicts are not
        # case-insensitive).
        cc_headers = headers.get('cache-control',
                                 headers.get('Cache-Control', ''))

        retval = {}

        for cc_directive in cc_headers.split(','):
            parts = cc_directive.split('=', 1)
            directive = parts[0].strip()

            try:
                typ, required = known_directives[directive]
            except KeyError:
                logger.debug('Ignoring unknown cache-control directive: %s',
                             directive)
                continue

            if not typ or not required:
                retval[directive] = None
            if typ:
                try:
                    retval[directive] = typ(parts[1].strip())
                except IndexError:
                    if required:
                        logger.debug('Missing value for cache-control '
                                     'directive: %s', directive)
                except ValueError:
                    logger.debug('Invalid value for cache-control directive '
                                 '%s, must be %s', directive, typ.__name__)

        return retval

    def cached_request(self, request):
        """
        Return a cached response if it exists in the cache, otherwise
        return False.
        """
        cache_url = self.cache_url(request.url)
        logger.debug('Looking up "%s" in the cache', cache_url)
        cc = self.parse_cache_control(request.headers)

        # Bail out if the request insists on fresh data
        if 'no-cache' in cc:
            logger.debug('Request header has "no-cache", cache bypassed')
            return False

        if 'max-age' in cc and cc['max-age'] == 0:
            logger.debug('Request header has "max_age" as 0, cache bypassed')
            return False

        # Request allows serving from the cache, let's see if we find something
        cache_data = self.cache.get(cache_url)
        if cache_data is None:
            logger.debug('No cache entry available')
            return False

        # Check whether it can be deserialized
        resp = self.serializer.loads(request, cache_data)
        if not resp:
            logger.warning('Cache entry deserialization failed, entry ignored')
            return False

        # If we have a cached 301, return it immediately. We don't
        # need to test our response for other headers b/c it is
        # intrinsically "cacheable" as it is Permanent.
        # See:
        #   https://tools.ietf.org/html/rfc7231#section-6.4.2
        #
        # Client can try to refresh the value by repeating the request
        # with cache busting headers as usual (ie no-cache).
        if resp.status == 301:
            msg = ('Returning cached "301 Moved Permanently" response '
                   '(ignoring date and etag information)')
            logger.debug(msg)
            return resp

        headers = CaseInsensitiveDict(resp.headers)
        if not headers or 'date' not in headers:
            if 'etag' not in headers:
                # Without date or etag, the cached response can never be used
                # and should be deleted.
                logger.debug('Purging cached response: no date or etag')
                self.cache.delete(cache_url)
            logger.debug('Ignoring cached response: no date')
            return False

        now = time.time()
        date = calendar.timegm(
            parsedate_tz(headers['date'])
        )
        current_age = max(0, now - date)
        logger.debug('Current age based on date: %i', current_age)

        # TODO: There is an assumption that the result will be a
        #       urllib3 response object. This may not be best since we
        #       could probably avoid instantiating or constructing the
        #       response until we know we need it.
        resp_cc = self.parse_cache_control(headers)

        # determine freshness
        freshness_lifetime = 0

        # Check the max-age pragma in the cache control header
        if 'max-age' in resp_cc:
            freshness_lifetime = resp_cc['max-age']
            logger.debug('Freshness lifetime from max-age: %i',
                         freshness_lifetime)

        # If there isn't a max-age, check for an expires header
        elif 'expires' in headers:
            expires = parsedate_tz(headers['expires'])
            if expires is not None:
                expire_time = calendar.timegm(expires) - date
                freshness_lifetime = max(0, expire_time)
                logger.debug("Freshness lifetime from expires: %i",
                             freshness_lifetime)

        # Determine if we are setting freshness limit in the
        # request. Note, this overrides what was in the response.
        if 'max-age' in cc:
            freshness_lifetime = cc['max-age']
            logger.debug('Freshness lifetime from request max-age: %i',
                         freshness_lifetime)

        if 'min-fresh' in cc:
            min_fresh = cc['min-fresh']
            # adjust our current age by our min fresh
            current_age += min_fresh
            logger.debug('Adjusted current age from min-fresh: %i',
                         current_age)

        # Return entry if it is fresh enough
        if freshness_lifetime > current_age:
            logger.debug('The response is "fresh", returning cached response')
            logger.debug('%i > %i', freshness_lifetime, current_age)
            return resp

        # we're not fresh. If we don't have an Etag, clear it out
        if 'etag' not in headers:
            logger.debug(
                'The cached response is "stale" with no etag, purging'
            )
            self.cache.delete(cache_url)

        # return the original handler
        return False

    def conditional_headers(self, request):
        """Return validator headers (If-None-Match / If-Modified-Since)
        derived from any cached response for *request*.
        """
        cache_url = self.cache_url(request.url)
        resp = self.serializer.loads(request, self.cache.get(cache_url))
        new_headers = {}

        if resp:
            headers = CaseInsensitiveDict(resp.headers)

            if 'etag' in headers:
                new_headers['If-None-Match'] = headers['ETag']

            if 'last-modified' in headers:
                new_headers['If-Modified-Since'] = headers['Last-Modified']

        return new_headers

    def cache_response(self, request, response, body=None,
                       status_codes=None):
        """
        Algorithm for caching requests.

        This assumes a requests Response object.
        """
        # From httplib2: Don't cache 206's since we aren't going to
        #                handle byte range requests
        cacheable_status_codes = status_codes or self.cacheable_status_codes
        if response.status not in cacheable_status_codes:
            logger.debug(
                'Status code %s not in %s',
                response.status,
                cacheable_status_codes
            )
            return

        response_headers = CaseInsensitiveDict(response.headers)

        # If we've been given a body, our response has a Content-Length, that
        # Content-Length is valid then we can check to see if the body we've
        # been given matches the expected size, and if it doesn't we'll just
        # skip trying to cache it.
        if (body is not None and
                "content-length" in response_headers and
                response_headers["content-length"].isdigit() and
                int(response_headers["content-length"]) != len(body)):
            return

        cc_req = self.parse_cache_control(request.headers)
        cc = self.parse_cache_control(response_headers)

        cache_url = self.cache_url(request.url)
        logger.debug('Updating cache with response from "%s"', cache_url)

        # Delete it from the cache if we happen to have it stored there
        no_store = False
        if 'no-store' in cc:
            no_store = True
            logger.debug('Response header has "no-store"')
        if 'no-store' in cc_req:
            no_store = True
            logger.debug('Request header has "no-store"')
        if no_store and self.cache.get(cache_url):
            logger.debug('Purging existing cache entry to honor "no-store"')
            self.cache.delete(cache_url)

        # If we've been given an etag, then keep the response
        if self.cache_etags and 'etag' in response_headers:
            logger.debug('Caching due to etag')
            self.cache.set(
                cache_url,
                self.serializer.dumps(request, response, body=body),
            )

        # Add to the cache any 301s. We do this before looking at
        # the Date headers.
        elif response.status == 301:
            logger.debug('Caching permanant redirect')
            self.cache.set(
                cache_url,
                self.serializer.dumps(request, response)
            )

        # Add to the cache if the response headers demand it. If there
        # is no date header then we can't do anything about expiring
        # the cache.
        elif 'date' in response_headers:
            # cache when there is a max-age > 0
            if 'max-age' in cc and cc['max-age'] > 0:
                logger.debug('Caching b/c date exists and max-age > 0')
                self.cache.set(
                    cache_url,
                    self.serializer.dumps(request, response, body=body),
                )

            # If the request can expire, it means we should cache it
            # in the meantime.
            elif 'expires' in response_headers:
                if response_headers['expires']:
                    logger.debug('Caching b/c of expires header')
                    self.cache.set(
                        cache_url,
                        self.serializer.dumps(request, response, body=body),
                    )

    def update_cached_response(self, request, response):
        """On a 304 we will get a new set of headers that we want to
        update our cached value with, assuming we have one.

        This should only ever be called when we've sent an ETag and
        gotten a 304 as the response.
        """
        cache_url = self.cache_url(request.url)

        cached_response = self.serializer.loads(
            request,
            self.cache.get(cache_url)
        )

        if not cached_response:
            # we didn't have a cached response
            return response

        # Lets update our headers with the headers from the new request:
        # http://tools.ietf.org/html/draft-ietf-httpbis-p4-conditional-26#section-4.1
        #
        # The server isn't supposed to send headers that would make
        # the cached body invalid. But... just in case, we'll be sure
        # to strip out ones we know that might be problmatic due to
        # typical assumptions.
        excluded_headers = [
            "content-length",
        ]

        cached_response.headers.update(
            dict((k, v) for k, v in response.headers.items()
                 if k.lower() not in excluded_headers)
        )

        # we want a 200 b/c we have content via the cache
        cached_response.status = 200

        # update our cache
        self.cache.set(
            cache_url,
            self.serializer.dumps(request, cached_response),
        )

        return cached_response
from io import BytesIO | |
class CallbackFileWrapper(object):
    """
    Small wrapper around a fp object which will tee everything read into a
    buffer, and when that file is closed it will execute a callback with the
    contents of that buffer.

    All attributes are proxied to the underlying file object.

    This class uses members with a double underscore (__) leading prefix so as
    not to accidentally shadow an attribute.
    """

    def __init__(self, fp, callback):
        # __buf accumulates every byte read; it is handed to __callback once
        # the underlying fp is exhausted (see _close).
        self.__buf = BytesIO()
        self.__fp = fp
        self.__callback = callback

    def __getattr__(self, name):
        # The vagaries of garbage collection means that self.__fp is
        # not always set. By using __getattribute__ and the private
        # name[0] allows looking up the attribute value and raising an
        # AttributeError when it doesn't exist. This stops things from
        # infinitely recursing calls to getattr in the case where
        # self.__fp hasn't been set.
        #
        # [0] https://docs.python.org/2/reference/expressions.html#atom-identifiers
        fp = self.__getattribute__('_CallbackFileWrapper__fp')
        return getattr(fp, name)

    def __is_fp_closed(self):
        # Best-effort check for whether the wrapped fp is exhausted; the
        # attribute probed differs between fp implementations.
        try:
            return self.__fp.fp is None
        except AttributeError:
            pass

        try:
            return self.__fp.closed
        except AttributeError:
            pass

        # We just don't cache it then.
        # TODO: Add some logging here...
        return False

    def _close(self):
        # Fire the callback (at most once) with the full buffered content.
        if self.__callback:
            self.__callback(self.__buf.getvalue())

        # We assign this to None here, because otherwise we can get into
        # really tricky problems where the CPython interpreter dead locks
        # because the callback is holding a reference to something which
        # has a __del__ method. Setting this to None breaks the cycle
        # and allows the garbage collector to do it's thing normally.
        self.__callback = None

    def read(self, amt=None):
        # Tee whatever is read into our buffer, and finish up when the
        # underlying fp reports exhaustion.
        data = self.__fp.read(amt)
        self.__buf.write(data)
        if self.__is_fp_closed():
            self._close()

        return data

    def _safe_read(self, amt):
        data = self.__fp._safe_read(amt)
        if amt == 2 and data == b'\r\n':
            # urllib executes this read to toss the CRLF at the end
            # of the chunk.
            return data

        self.__buf.write(data)
        if self.__is_fp_closed():
            self._close()

        return data
import calendar | |
import time | |
from email.utils import formatdate, parsedate, parsedate_tz | |
from datetime import datetime, timedelta | |
TIME_FMT = "%a, %d %b %Y %H:%M:%S GMT" | |
def expire_after(delta, date=None):
    """Return *date* (default: the current UTC time) advanced by *delta*."""
    base = date or datetime.utcnow()
    return base + delta
def datetime_to_header(dt):
    """Format a (UTC) datetime as an RFC 2822 date string for HTTP headers."""
    epoch_seconds = calendar.timegm(dt.timetuple())
    return formatdate(epoch_seconds)
class BaseHeuristic(object):
    """Base class for cache heuristics: subclasses adjust response headers
    to make otherwise-uncacheable responses cacheable.
    """

    def warning(self, response):
        """
        Return a valid 1xx warning header value describing the cache
        adjustments.

        The response is provided too allow warnings like 113
        http://tools.ietf.org/html/rfc7234#section-5.5.4 where we need
        to explicitly say response is over 24 hours old.
        """
        return '110 - "Response is Stale"'

    def update_headers(self, response):
        """Update the response headers with any new headers.

        NOTE: This SHOULD always include some Warning header to
              signify that the response was cached by the client, not
              by way of the provided headers.
        """
        return {}

    def apply(self, response):
        """Apply this heuristic's header adjustments to *response*."""
        extra = self.update_headers(response)

        if extra:
            response.headers.update(extra)
            warning = self.warning(response)
            # A Warning header is only added when the heuristic changed
            # something and provided a warning value.
            if warning is not None:
                response.headers.update({'Warning': warning})

        return response
class OneDayCache(BaseHeuristic):
    """
    Cache the response by providing an expires 1 day in the
    future.
    """

    def update_headers(self, response):
        extra = {}

        if 'expires' not in response.headers:
            # Anchor the one-day lifetime on the server's Date header.
            served = datetime(*parsedate(response.headers['date'])[:6])
            extra['expires'] = datetime_to_header(
                expire_after(timedelta(days=1), date=served))
            extra['cache-control'] = 'public'

        return extra
class ExpiresAfter(BaseHeuristic):
    """
    Cache **all** requests for a defined time period.
    """

    def __init__(self, **kw):
        # Keyword arguments are passed straight to timedelta (days=, hours=…).
        self.delta = timedelta(**kw)

    def update_headers(self, response):
        return {
            'expires': datetime_to_header(expire_after(self.delta)),
            'cache-control': 'public',
        }

    def warning(self, response):
        return ('110 - Automatically cached for %s. Response might be stale'
                % self.delta)
class LastModified(BaseHeuristic):
    """
    If there is no Expires header already, fall back on Last-Modified
    using the heuristic from
    http://tools.ietf.org/html/rfc7234#section-4.2.2
    to calculate a reasonable value.

    Firefox also does something like this per
    https://developer.mozilla.org/en-US/docs/Web/HTTP/Caching_FAQ
    http://lxr.mozilla.org/mozilla-release/source/netwerk/protocol/http/nsHttpResponseHead.cpp#397
    Unlike mozilla we limit this to 24-hr.
    """
    cacheable_by_default_statuses = set([
        200, 203, 204, 206, 300, 301, 404, 405, 410, 414, 501
    ])

    def update_headers(self, resp):
        headers = resp.headers

        # Bail out when an explicit freshness policy already exists, the
        # status is not heuristically cacheable, or the needed dates are
        # absent.
        if 'expires' in headers:
            return {}
        if 'cache-control' in headers and headers['cache-control'] != 'public':
            return {}
        if resp.status not in self.cacheable_by_default_statuses:
            return {}
        if 'date' not in headers or 'last-modified' not in headers:
            return {}

        date = calendar.timegm(parsedate_tz(headers['date']))
        last_modified = parsedate(headers['last-modified'])
        if date is None or last_modified is None:
            return {}

        now = time.time()
        current_age = max(0, now - date)
        # Freshness is a tenth of the Date/Last-Modified gap, capped at 24h.
        delta = date - calendar.timegm(last_modified)
        freshness_lifetime = max(0, min(delta / 10, 24 * 3600))
        if freshness_lifetime <= current_age:
            return {}

        return {'expires': time.strftime(TIME_FMT,
                                         time.gmtime(date + freshness_lifetime))}

    def warning(self, resp):
        # No Warning header: this heuristic follows the RFC's own guidance.
        return None
import base64 | |
import io | |
import json | |
import zlib | |
from pip._vendor import msgpack | |
from pip._vendor.requests.structures import CaseInsensitiveDict | |
from .compat import HTTPResponse, pickle, text_type | |
def _b64_decode_bytes(b): | |
return base64.b64decode(b.encode("ascii")) | |
def _b64_decode_str(s): | |
return _b64_decode_bytes(s).decode("utf8") | |
class Serializer(object):
    """(De)serializes cached HTTP responses for storage in a cache backend.

    Entries are written in the current "v4" (msgpack) format, prefixed with
    a ``cc=N`` version tag; ``loads`` dispatches on that tag so entries from
    older formats are either loaded or treated as cache misses.
    """

    def dumps(self, request, response, body=None):
        """Serialize *response* (a urllib3 HTTPResponse) for *request*.

        When *body* is None the response is fully read here; callers
        normally pass the already-consumed body explicitly.
        """
        response_headers = CaseInsensitiveDict(response.headers)

        if body is None:
            body = response.read(decode_content=False)

            # NOTE: 99% sure this is dead code. I'm only leaving it
            #       here b/c I don't have a test yet to prove
            #       it. Basically, before using
            #       `cachecontrol.filewrapper.CallbackFileWrapper`,
            #       this made an effort to reset the file handle. The
            #       `CallbackFileWrapper` short circuits this code by
            #       setting the body as the content is consumed, the
            #       result being a `body` argument is *always* passed
            #       into cache_response, and in turn,
            #       `Serializer.dump`.
            response._fp = io.BytesIO(body)

        # NOTE: This is all a bit weird, but it's really important that on
        #       Python 2.x these objects are unicode and not str, even when
        #       they contain only ascii. The problem here is that msgpack
        #       understands the difference between unicode and bytes and we
        #       have it set to differentiate between them, however Python 2
        #       doesn't know the difference. Forcing these to unicode will be
        #       enough to have msgpack know the difference.
        data = {
            u"response": {
                u"body": body,
                u"headers": dict(
                    (text_type(k), text_type(v))
                    for k, v in response.headers.items()
                ),
                u"status": response.status,
                u"version": response.version,
                u"reason": text_type(response.reason),
                u"strict": response.strict,
                u"decode_content": response.decode_content,
            },
        }

        # Construct our vary headers: record the request header value for
        # every header named by the response's Vary header.
        data[u"vary"] = {}
        if u"vary" in response_headers:
            varied_headers = response_headers[u'vary'].split(',')
            for header in varied_headers:
                header = header.strip()
                header_value = request.headers.get(header, None)
                if header_value is not None:
                    header_value = text_type(header_value)
                data[u"vary"][header] = header_value

        return b",".join([b"cc=4", msgpack.dumps(data, use_bin_type=True)])

    def loads(self, request, data):
        """Deserialize *data* into a response for *request*.

        Returns None (i.e. a cache miss) for empty data or unknown formats.
        """
        # Short circuit if we've been given an empty set of data
        if not data:
            return

        # Determine what version of the serializer the data was serialized
        # with
        try:
            ver, data = data.split(b",", 1)
        except ValueError:
            ver = b"cc=0"

        # Make sure that our "ver" is actually a version and isn't a false
        # positive from a , being in the data stream.
        if ver[:3] != b"cc=":
            data = ver + data
            ver = b"cc=0"

        # Get the version number out of the cc=N
        ver = ver.split(b"=", 1)[-1].decode("ascii")

        # Dispatch to the actual load method for the given version
        try:
            return getattr(self, "_loads_v{0}".format(ver))(request, data)
        except AttributeError:
            # This is a version we don't have a loads function for, so we'll
            # just treat it as a miss and return None
            return

    def prepare_response(self, request, cached):
        """Verify our vary headers match and construct a real urllib3
        HTTPResponse object.
        """
        # Special case the '*' Vary value as it means we cannot actually
        # determine if the cached response is suitable for this request.
        if "*" in cached.get("vary", {}):
            return

        # Ensure that the Vary headers for the cached response match our
        # request
        for header, value in cached.get("vary", {}).items():
            if request.headers.get(header, None) != value:
                return

        body_raw = cached["response"].pop("body")

        headers = CaseInsensitiveDict(data=cached['response']['headers'])
        if headers.get('transfer-encoding', '') == 'chunked':
            # NOTE(review): presumably dropped because the cached body is
            # already de-chunked and keeping the header would trigger
            # re-decoding on replay — confirm against urllib3 behavior.
            headers.pop('transfer-encoding')

        cached['response']['headers'] = headers

        try:
            body = io.BytesIO(body_raw)
        except TypeError:
            # This can happen if cachecontrol serialized to v1 format (pickle)
            # using Python 2. A Python 2 str(byte string) will be unpickled as
            # a Python 3 str (unicode string), which will cause the above to
            # fail with:
            #
            #     TypeError: 'str' does not support the buffer interface
            body = io.BytesIO(body_raw.encode('utf8'))

        return HTTPResponse(
            body=body,
            preload_content=False,
            **cached["response"]
        )

    def _loads_v0(self, request, data):
        # The original legacy cache data. This doesn't contain enough
        # information to construct everything we need, so we'll treat this as
        # a miss.
        return

    def _loads_v1(self, request, data):
        # v1 entries were pickled; a corrupt pickle is treated as a miss.
        try:
            cached = pickle.loads(data)
        except ValueError:
            return

        return self.prepare_response(request, cached)

    def _loads_v2(self, request, data):
        # v2 entries are zlib-compressed JSON with base64-encoded fields.
        try:
            cached = json.loads(zlib.decompress(data).decode("utf8"))
        except (ValueError, zlib.error):
            return

        # We need to decode the items that we've base64 encoded
        cached["response"]["body"] = _b64_decode_bytes(
            cached["response"]["body"]
        )
        cached["response"]["headers"] = dict(
            (_b64_decode_str(k), _b64_decode_str(v))
            for k, v in cached["response"]["headers"].items()
        )
        cached["response"]["reason"] = _b64_decode_str(
            cached["response"]["reason"],
        )
        cached["vary"] = dict(
            (_b64_decode_str(k), _b64_decode_str(v) if v is not None else v)
            for k, v in cached["vary"].items()
        )

        return self.prepare_response(request, cached)

    def _loads_v3(self, request, data):
        # Due to Python 2 encoding issues, it's impossible to know for sure
        # exactly how to load v3 entries, thus we'll treat these as a miss so
        # that they get rewritten out as v4 entries.
        return

    def _loads_v4(self, request, data):
        # v4 (current) entries are msgpack; corrupt payloads are a miss.
        try:
            cached = msgpack.loads(data, encoding='utf-8')
        except ValueError:
            return

        return self.prepare_response(request, cached)
from .adapter import CacheControlAdapter | |
from .cache import DictCache | |
def CacheControl(sess,
                 cache=None,
                 cache_etags=True,
                 serializer=None,
                 heuristic=None,
                 controller_class=None,
                 adapter_class=None,
                 cacheable_methods=None):
    """Wrap a requests Session so its HTTP(S) traffic goes through a cache.

    Returns the same session with a caching adapter mounted for both the
    http:// and https:// prefixes.
    """
    adapter_cls = adapter_class or CacheControlAdapter
    adapter = adapter_cls(
        cache or DictCache(),
        cache_etags=cache_etags,
        serializer=serializer,
        heuristic=heuristic,
        controller_class=controller_class,
        cacheable_methods=cacheable_methods
    )
    for prefix in ('http://', 'https://'):
        sess.mount(prefix, adapter)

    return sess
# Public API of the certifi package: re-export the bundle locators.
from .core import where, old_where

# Release date of the bundled CA certificate collection (YYYY.MM.DD).
__version__ = "2018.01.18"
# Print the filesystem path of certifi's bundled CA certificate file.
from certifi import where

bundle_path = where()
print(bundle_path)
# Issuer: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA | |
# Subject: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA | |
# Label: "GlobalSign Root CA" | |
# Serial: 4835703278459707669005204 | |
# MD5 Fingerprint: 3e:45:52:15:09:51:92:e1:b7:5d:37:9f:b1:87:29:8a | |
# SHA1 Fingerprint: b1:bc:96:8b:d4:f4:9d:62:2a:a8:9a:81:f2:15:01:52:a4:1d:82:9c | |
# SHA256 Fingerprint: eb:d4:10:40:e4:bb:3e:c7:42:c9:e3:81:d3:1e:f2:a4:1a:48:b6:68:5c:96:e7:ce:f3:c1:df:6c:d4:33:1c:99 | |
-----BEGIN CERTIFICATE----- | |
MIIDdTCCAl2gAwIBAgILBAAAAAABFUtaw5QwDQYJKoZIhvcNAQEFBQAwVzELMAkG | |
A1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2ExEDAOBgNVBAsTB1Jv | |
b3QgQ0ExGzAZBgNVBAMTEkdsb2JhbFNpZ24gUm9vdCBDQTAeFw05ODA5MDExMjAw | |
MDBaFw0yODAxMjgxMjAwMDBaMFcxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9i | |
YWxTaWduIG52LXNhMRAwDgYDVQQLEwdSb290IENBMRswGQYDVQQDExJHbG9iYWxT | |
aWduIFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDaDuaZ | |
jc6j40+Kfvvxi4Mla+pIH/EqsLmVEQS98GPR4mdmzxzdzxtIK+6NiY6arymAZavp | |
xy0Sy6scTHAHoT0KMM0VjU/43dSMUBUc71DuxC73/OlS8pF94G3VNTCOXkNz8kHp | |
1Wrjsok6Vjk4bwY8iGlbKk3Fp1S4bInMm/k8yuX9ifUSPJJ4ltbcdG6TRGHRjcdG | |
snUOhugZitVtbNV4FpWi6cgKOOvyJBNPc1STE4U6G7weNLWLBYy5d4ux2x8gkasJ | |
U26Qzns3dLlwR5EiUWMWea6xrkEmCMgZK9FGqkjWZCrXgzT/LCrBbBlDSgeF59N8 | |
9iFo7+ryUp9/k5DPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8E | |
BTADAQH/MB0GA1UdDgQWBBRge2YaRQ2XyolQL30EzTSo//z9SzANBgkqhkiG9w0B | |
AQUFAAOCAQEA1nPnfE920I2/7LqivjTFKDK1fPxsnCwrvQmeU79rXqoRSLblCKOz | |
yj1hTdNGCbM+w6DjY1Ub8rrvrTnhQ7k4o+YviiY776BQVvnGCv04zcQLcFGUl5gE | |
38NflNUVyRRBnMRddWQVDf9VMOyGj/8N7yy5Y0b2qvzfvGn9LhJIZJrglfCm7ymP | |
AbEVtQwdpf5pLGkkeB6zpxxxYu7KyJesF12KwvhHhm4qxFYxldBniYUr+WymXUad | |
DKqC5JlR3XC321Y9YeRq4VzW9v493kHMB65jUr9TU/Qr6cf9tveCX4XSQRjbgbME | |
HMUfpIBvFSDJ3gyICh3WZlXi/EjJKSZp4A== | |
-----END CERTIFICATE----- | |
# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R2 | |
# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R2 | |
# Label: "GlobalSign Root CA - R2" | |
# Serial: 4835703278459682885658125 | |
# MD5 Fingerprint: 94:14:77:7e:3e:5e:fd:8f:30:bd:41:b0:cf:e7:d0:30 | |
# SHA1 Fingerprint: 75:e0:ab:b6:13:85:12:27:1c:04:f8:5f:dd:de:38:e4:b7:24:2e:fe | |
# SHA256 Fingerprint: ca:42:dd:41:74:5f:d0:b8:1e:b9:02:36:2c:f9:d8:bf:71:9d:a1:bd:1b:1e:fc:94:6f:5b:4c:99:f4:2c:1b:9e | |
-----BEGIN CERTIFICATE----- | |
MIIDujCCAqKgAwIBAgILBAAAAAABD4Ym5g0wDQYJKoZIhvcNAQEFBQAwTDEgMB4G | |
A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjIxEzARBgNVBAoTCkdsb2JhbFNp | |
Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDYxMjE1MDgwMDAwWhcNMjExMjE1 | |
MDgwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMjETMBEG | |
A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI | |
hvcNAQEBBQADggEPADCCAQoCggEBAKbPJA6+Lm8omUVCxKs+IVSbC9N/hHD6ErPL | |
v4dfxn+G07IwXNb9rfF73OX4YJYJkhD10FPe+3t+c4isUoh7SqbKSaZeqKeMWhG8 | |
eoLrvozps6yWJQeXSpkqBy+0Hne/ig+1AnwblrjFuTosvNYSuetZfeLQBoZfXklq | |
tTleiDTsvHgMCJiEbKjNS7SgfQx5TfC4LcshytVsW33hoCmEofnTlEnLJGKRILzd | |
C9XZzPnqJworc5HGnRusyMvo4KD0L5CLTfuwNhv2GXqF4G3yYROIXJ/gkwpRl4pa | |
zq+r1feqCapgvdzZX99yqWATXgAByUr6P6TqBwMhAo6CygPCm48CAwEAAaOBnDCB | |
mTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUm+IH | |
V2ccHsBqBt5ZtJot39wZhi4wNgYDVR0fBC8wLTAroCmgJ4YlaHR0cDovL2NybC5n | |
bG9iYWxzaWduLm5ldC9yb290LXIyLmNybDAfBgNVHSMEGDAWgBSb4gdXZxwewGoG | |
3lm0mi3f3BmGLjANBgkqhkiG9w0BAQUFAAOCAQEAmYFThxxol4aR7OBKuEQLq4Gs | |
J0/WwbgcQ3izDJr86iw8bmEbTUsp9Z8FHSbBuOmDAGJFtqkIk7mpM0sYmsL4h4hO | |
291xNBrBVNpGP+DTKqttVCL1OmLNIG+6KYnX3ZHu01yiPqFbQfXf5WRDLenVOavS | |
ot+3i9DAgBkcRcAtjOj4LaR0VknFBbVPFd5uRHg5h6h+u/N5GJG79G+dwfCMNYxd | |
AfvDbbnvRG15RjF+Cv6pgsH/76tuIMRQyV+dTZsXjAzlAcmgQWpzU/qlULRuJQ/7 | |
TBj0/VLZjmmx6BEP3ojY+x1J96relc8geMJgEtslQIxq/H5COEBkEveegeGTLg== | |
-----END CERTIFICATE----- | |
# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only | |
# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only | |
# Label: "Verisign Class 3 Public Primary Certification Authority - G3" | |
# Serial: 206684696279472310254277870180966723415 | |
# MD5 Fingerprint: cd:68:b6:a7:c7:c4:ce:75:e0:1d:4f:57:44:61:92:09 | |
# SHA1 Fingerprint: 13:2d:0d:45:53:4b:69:97:cd:b2:d5:c3:39:e2:55:76:60:9b:5c:c6 | |
# SHA256 Fingerprint: eb:04:cf:5e:b1:f3:9a:fa:76:2f:2b:b1:20:f2:96:cb:a5:20:c1:b9:7d:b1:58:95:65:b8:1c:b9:a1:7b:72:44 | |
-----BEGIN CERTIFICATE----- | |
MIIEGjCCAwICEQCbfgZJoz5iudXukEhxKe9XMA0GCSqGSIb3DQEBBQUAMIHKMQsw | |
CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZl | |
cmlTaWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWdu | |
LCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlT | |
aWduIENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3Jp | |
dHkgLSBHMzAeFw05OTEwMDEwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMIHKMQswCQYD | |
VQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlT | |
aWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJ | |
bmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlTaWdu | |
IENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg | |
LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMu6nFL8eB8aHm8b | |
N3O9+MlrlBIwT/A2R/XQkQr1F8ilYcEWQE37imGQ5XYgwREGfassbqb1EUGO+i2t | |
KmFZpGcmTNDovFJbcCAEWNF6yaRpvIMXZK0Fi7zQWM6NjPXr8EJJC52XJ2cybuGu | |
kxUccLwgTS8Y3pKI6GyFVxEa6X7jJhFUokWWVYPKMIno3Nij7SqAP395ZVc+FSBm | |
CC+Vk7+qRy+oRpfwEuL+wgorUeZ25rdGt+INpsyow0xZVYnm6FNcHOqd8GIWC6fJ | |
Xwzw3sJ2zq/3avL6QaaiMxTJ5Xpj055iN9WFZZ4O5lMkdBteHRJTW8cs54NJOxWu | |
imi5V5cCAwEAATANBgkqhkiG9w0BAQUFAAOCAQEAERSWwauSCPc/L8my/uRan2Te | |
2yFPhpk0djZX3dAVL8WtfxUfN2JzPtTnX84XA9s1+ivbrmAJXx5fj267Cz3qWhMe | |
DGBvtcC1IyIuBwvLqXTLR7sdwdela8wv0kL9Sd2nic9TutoAWii/gt/4uhMdUIaC | |
/Y4wjylGsB49Ndo4YhYYSq3mtlFs3q9i6wHQHiT+eo8SGhJouPtmmRQURVyu565p | |
F4ErWjfJXir0xuKhXFSbplQAz/DxwceYMBo7Nhbbo27q/a2ywtrvAkcTisDxszGt | |
TxzhT5yvDwyd93gN2PQ1VoDat20Xj50egWTh/sVFuq1ruQp6Tk9LhO5L8X3dEQ== | |
-----END CERTIFICATE----- | |
# Issuer: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited | |
# Subject: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited | |
# Label: "Entrust.net Premium 2048 Secure Server CA" | |
# Serial: 946069240 | |
# MD5 Fingerprint: ee:29:31:bc:32:7e:9a:e6:e8:b5:f7:51:b4:34:71:90 | |
# SHA1 Fingerprint: 50:30:06:09:1d:97:d4:f5:ae:39:f7:cb:e7:92:7d:7d:65:2d:34:31 | |
# SHA256 Fingerprint: 6d:c4:71:72:e0:1c:bc:b0:bf:62:58:0d:89:5f:e2:b8:ac:9a:d4:f8:73:80:1e:0c:10:b9:c8:37:d2:1e:b1:77 | |
-----BEGIN CERTIFICATE----- | |
MIIEKjCCAxKgAwIBAgIEOGPe+DANBgkqhkiG9w0BAQUFADCBtDEUMBIGA1UEChML | |
RW50cnVzdC5uZXQxQDA+BgNVBAsUN3d3dy5lbnRydXN0Lm5ldC9DUFNfMjA0OCBp | |
bmNvcnAuIGJ5IHJlZi4gKGxpbWl0cyBsaWFiLikxJTAjBgNVBAsTHChjKSAxOTk5 | |
IEVudHJ1c3QubmV0IExpbWl0ZWQxMzAxBgNVBAMTKkVudHJ1c3QubmV0IENlcnRp | |
ZmljYXRpb24gQXV0aG9yaXR5ICgyMDQ4KTAeFw05OTEyMjQxNzUwNTFaFw0yOTA3 | |
MjQxNDE1MTJaMIG0MRQwEgYDVQQKEwtFbnRydXN0Lm5ldDFAMD4GA1UECxQ3d3d3 | |
LmVudHJ1c3QubmV0L0NQU18yMDQ4IGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxp | |
YWIuKTElMCMGA1UECxMcKGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDEzMDEG | |
A1UEAxMqRW50cnVzdC5uZXQgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgKDIwNDgp | |
MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArU1LqRKGsuqjIAcVFmQq | |
K0vRvwtKTY7tgHalZ7d4QMBzQshowNtTK91euHaYNZOLGp18EzoOH1u3Hs/lJBQe | |
sYGpjX24zGtLA/ECDNyrpUAkAH90lKGdCCmziAv1h3edVc3kw37XamSrhRSGlVuX | |
MlBvPci6Zgzj/L24ScF2iUkZ/cCovYmjZy/Gn7xxGWC4LeksyZB2ZnuU4q941mVT | |
XTzWnLLPKQP5L6RQstRIzgUyVYr9smRMDuSYB3Xbf9+5CFVghTAp+XtIpGmG4zU/ | |
HoZdenoVve8AjhUiVBcAkCaTvA5JaJG/+EfTnZVCwQ5N328mz8MYIWJmQ3DW1cAH | |
4QIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV | |
HQ4EFgQUVeSB0RGAvtiJuQijMfmhJAkWuXAwDQYJKoZIhvcNAQEFBQADggEBADub | |
j1abMOdTmXx6eadNl9cZlZD7Bh/KM3xGY4+WZiT6QBshJ8rmcnPyT/4xmf3IDExo | |
U8aAghOY+rat2l098c5u9hURlIIM7j+VrxGrD9cv3h8Dj1csHsm7mhpElesYT6Yf | |
zX1XEC+bBAlahLVu2B064dae0Wx5XnkcFMXj0EyTO2U87d89vqbllRrDtRnDvV5b | |
u/8j72gZyxKTJ1wDLW8w0B62GqzeWvfRqqgnpv55gcR5mTNXuhKwqeBCbJPKVt7+ | |
bYQLCIt+jerXmCHG8+c8eS9enNFMFY3h7CI3zJpDC5fcgJCNs2ebb0gIFVbPv/Er | |
fF6adulZkMV8gzURZVE= | |
-----END CERTIFICATE----- | |
# Issuer: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust | |
# Subject: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust | |
# Label: "Baltimore CyberTrust Root" | |
# Serial: 33554617 | |
# MD5 Fingerprint: ac:b6:94:a5:9c:17:e0:d7:91:52:9b:b1:97:06:a6:e4 | |
# SHA1 Fingerprint: d4:de:20:d0:5e:66:fc:53:fe:1a:50:88:2c:78:db:28:52:ca:e4:74 | |
# SHA256 Fingerprint: 16:af:57:a9:f6:76:b0:ab:12:60:95:aa:5e:ba:de:f2:2a:b3:11:19:d6:44:ac:95:cd:4b:93:db:f3:f2:6a:eb | |
-----BEGIN CERTIFICATE----- | |
MIIDdzCCAl+gAwIBAgIEAgAAuTANBgkqhkiG9w0BAQUFADBaMQswCQYDVQQGEwJJ | |
RTESMBAGA1UEChMJQmFsdGltb3JlMRMwEQYDVQQLEwpDeWJlclRydXN0MSIwIAYD | |
VQQDExlCYWx0aW1vcmUgQ3liZXJUcnVzdCBSb290MB4XDTAwMDUxMjE4NDYwMFoX | |
DTI1MDUxMjIzNTkwMFowWjELMAkGA1UEBhMCSUUxEjAQBgNVBAoTCUJhbHRpbW9y | |
ZTETMBEGA1UECxMKQ3liZXJUcnVzdDEiMCAGA1UEAxMZQmFsdGltb3JlIEN5YmVy | |
VHJ1c3QgUm9vdDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKMEuyKr | |
mD1X6CZymrV51Cni4eiVgLGw41uOKymaZN+hXe2wCQVt2yguzmKiYv60iNoS6zjr | |
IZ3AQSsBUnuId9Mcj8e6uYi1agnnc+gRQKfRzMpijS3ljwumUNKoUMMo6vWrJYeK | |
mpYcqWe4PwzV9/lSEy/CG9VwcPCPwBLKBsua4dnKM3p31vjsufFoREJIE9LAwqSu | |
XmD+tqYF/LTdB1kC1FkYmGP1pWPgkAx9XbIGevOF6uvUA65ehD5f/xXtabz5OTZy | |
dc93Uk3zyZAsuT3lySNTPx8kmCFcB5kpvcY67Oduhjprl3RjM71oGDHweI12v/ye | |
jl0qhqdNkNwnGjkCAwEAAaNFMEMwHQYDVR0OBBYEFOWdWTCCR1jMrPoIVDaGezq1 | |
BE3wMBIGA1UdEwEB/wQIMAYBAf8CAQMwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3 | |
DQEBBQUAA4IBAQCFDF2O5G9RaEIFoN27TyclhAO992T9Ldcw46QQF+vaKSm2eT92 | |
9hkTI7gQCvlYpNRhcL0EYWoSihfVCr3FvDB81ukMJY2GQE/szKN+OMY3EU/t3Wgx | |
jkzSswF07r51XgdIGn9w/xZchMB5hbgF/X++ZRGjD8ACtPhSNzkE1akxehi/oCr0 | |
Epn3o0WC4zxe9Z2etciefC7IpJ5OCBRLbf1wbWsaY71k5h+3zvDyny67G7fyUIhz | |
ksLi4xaNmjICq44Y3ekQEe5+NauQrz4wlHrQMz2nZQ/1/I6eYs9HRCwBXbsdtTLS | |
R9I4LtD+gdwyah617jzV/OeBHRnDJELqYzmp | |
-----END CERTIFICATE----- | |
# Issuer: CN=AddTrust External CA Root O=AddTrust AB OU=AddTrust External TTP Network | |
# Subject: CN=AddTrust External CA Root O=AddTrust AB OU=AddTrust External TTP Network | |
# Label: "AddTrust External Root" | |
# Serial: 1 | |
# MD5 Fingerprint: 1d:35:54:04:85:78:b0:3f:42:42:4d:bf:20:73:0a:3f | |
# SHA1 Fingerprint: 02:fa:f3:e2:91:43:54:68:60:78:57:69:4d:f5:e4:5b:68:85:18:68 | |
# SHA256 Fingerprint: 68:7f:a4:51:38:22:78:ff:f0:c8:b1:1f:8d:43:d5:76:67:1c:6e:b2:bc:ea:b4:13:fb:83:d9:65:d0:6d:2f:f2 | |
-----BEGIN CERTIFICATE----- | |
MIIENjCCAx6gAwIBAgIBATANBgkqhkiG9w0BAQUFADBvMQswCQYDVQQGEwJTRTEU | |
MBIGA1UEChMLQWRkVHJ1c3QgQUIxJjAkBgNVBAsTHUFkZFRydXN0IEV4dGVybmFs | |
IFRUUCBOZXR3b3JrMSIwIAYDVQQDExlBZGRUcnVzdCBFeHRlcm5hbCBDQSBSb290 | |
MB4XDTAwMDUzMDEwNDgzOFoXDTIwMDUzMDEwNDgzOFowbzELMAkGA1UEBhMCU0Ux | |
FDASBgNVBAoTC0FkZFRydXN0IEFCMSYwJAYDVQQLEx1BZGRUcnVzdCBFeHRlcm5h | |
bCBUVFAgTmV0d29yazEiMCAGA1UEAxMZQWRkVHJ1c3QgRXh0ZXJuYWwgQ0EgUm9v | |
dDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALf3GjPm8gAELTngTlvt | |
H7xsD821+iO2zt6bETOXpClMfZOfvUq8k+0DGuOPz+VtUFrWlymUWoCwSXrbLpX9 | |
uMq/NzgtHj6RQa1wVsfwTz/oMp50ysiQVOnGXw94nZpAPA6sYapeFI+eh6FqUNzX | |
mk6vBbOmcZSccbNQYArHE504B4YCqOmoaSYYkKtMsE8jqzpPhNjfzp/haW+710LX | |
a0Tkx63ubUFfclpxCDezeWWkWaCUN/cALw3CknLa0Dhy2xSoRcRdKn23tNbE7qzN | |
E0S3ySvdQwAl+mG5aWpYIxG3pzOPVnVZ9c0p10a3CitlttNCbxWyuHv77+ldU9U0 | |
WicCAwEAAaOB3DCB2TAdBgNVHQ4EFgQUrb2YejS0Jvf6xCZU7wO94CTLVBowCwYD | |
VR0PBAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wgZkGA1UdIwSBkTCBjoAUrb2YejS0 | |
Jvf6xCZU7wO94CTLVBqhc6RxMG8xCzAJBgNVBAYTAlNFMRQwEgYDVQQKEwtBZGRU | |
cnVzdCBBQjEmMCQGA1UECxMdQWRkVHJ1c3QgRXh0ZXJuYWwgVFRQIE5ldHdvcmsx | |
IjAgBgNVBAMTGUFkZFRydXN0IEV4dGVybmFsIENBIFJvb3SCAQEwDQYJKoZIhvcN | |
AQEFBQADggEBALCb4IUlwtYj4g+WBpKdQZic2YR5gdkeWxQHIzZlj7DYd7usQWxH | |
YINRsPkyPef89iYTx4AWpb9a/IfPeHmJIZriTAcKhjW88t5RxNKWt9x+Tu5w/Rw5 | |
6wwCURQtjr0W4MHfRnXnJK3s9EK0hZNwEGe6nQY1ShjTK3rMUUKhemPR5ruhxSvC | |
Nr4TDea9Y355e6cJDUCrat2PisP29owaQgVR1EX1n6diIWgVIEM8med8vSTYqZEX | |
c4g/VhsxOBi0cQ+azcgOno4uG+GMmIPLHzHxREzGBHNJdmAPx/i9F4BrLunMTA5a | |
mnkPIAou1Z5jJh5VkpTYghdae9C8x49OhgQ= | |
-----END CERTIFICATE----- | |
# Issuer: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. | |
# Subject: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. | |
# Label: "Entrust Root Certification Authority" | |
# Serial: 1164660820 | |
# MD5 Fingerprint: d6:a5:c3:ed:5d:dd:3e:00:c1:3d:87:92:1f:1d:3f:e4 | |
# SHA1 Fingerprint: b3:1e:b1:b7:40:e3:6c:84:02:da:dc:37:d4:4d:f5:d4:67:49:52:f9 | |
# SHA256 Fingerprint: 73:c1:76:43:4f:1b:c6:d5:ad:f4:5b:0e:76:e7:27:28:7c:8d:e5:76:16:c1:e6:e6:14:1a:2b:2c:bc:7d:8e:4c | |
-----BEGIN CERTIFICATE----- | |
MIIEkTCCA3mgAwIBAgIERWtQVDANBgkqhkiG9w0BAQUFADCBsDELMAkGA1UEBhMC | |
VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xOTA3BgNVBAsTMHd3dy5lbnRydXN0 | |
Lm5ldC9DUFMgaXMgaW5jb3Jwb3JhdGVkIGJ5IHJlZmVyZW5jZTEfMB0GA1UECxMW | |
KGMpIDIwMDYgRW50cnVzdCwgSW5jLjEtMCsGA1UEAxMkRW50cnVzdCBSb290IENl | |
cnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA2MTEyNzIwMjM0MloXDTI2MTEyNzIw | |
NTM0MlowgbAxCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMTkw | |
NwYDVQQLEzB3d3cuZW50cnVzdC5uZXQvQ1BTIGlzIGluY29ycG9yYXRlZCBieSBy | |
ZWZlcmVuY2UxHzAdBgNVBAsTFihjKSAyMDA2IEVudHJ1c3QsIEluYy4xLTArBgNV | |
BAMTJEVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASIwDQYJ | |
KoZIhvcNAQEBBQADggEPADCCAQoCggEBALaVtkNC+sZtKm9I35RMOVcF7sN5EUFo | |
Nu3s/poBj6E4KPz3EEZmLk0eGrEaTsbRwJWIsMn/MYszA9u3g3s+IIRe7bJWKKf4 | |
4LlAcTfFy0cOlypowCKVYhXbR9n10Cv/gkvJrT7eTNuQgFA/CYqEAOwwCj0Yzfv9 | |
KlmaI5UXLEWeH25DeW0MXJj+SKfFI0dcXv1u5x609mhF0YaDW6KKjbHjKYD+JXGI | |
rb68j6xSlkuqUY3kEzEZ6E5Nn9uss2rVvDlUccp6en+Q3X0dgNmBu1kmwhH+5pPi | |
94DkZfs0Nw4pgHBNrziGLp5/V6+eF67rHMsoIV+2HNjnogQi+dPa2MsCAwEAAaOB | |
sDCBrTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zArBgNVHRAEJDAi | |
gA8yMDA2MTEyNzIwMjM0MlqBDzIwMjYxMTI3MjA1MzQyWjAfBgNVHSMEGDAWgBRo | |
kORnpKZTgMeGZqTx90tD+4S9bTAdBgNVHQ4EFgQUaJDkZ6SmU4DHhmak8fdLQ/uE | |
vW0wHQYJKoZIhvZ9B0EABBAwDhsIVjcuMTo0LjADAgSQMA0GCSqGSIb3DQEBBQUA | |
A4IBAQCT1DCw1wMgKtD5Y+iRDAUgqV8ZyntyTtSx29CW+1RaGSwMCPeyvIWonX9t | |
O1KzKtvn1ISMY/YPyyYBkVBs9F8U4pN0wBOeMDpQ47RgxRzwIkSNcUesyBrJ6Zua | |
AGAT/3B+XxFNSRuzFVJ7yVTav52Vr2ua2J7p8eRDjeIRRDq/r72DQnNSi6q7pynP | |
9WQcCk3RvKqsnyrQ/39/2n3qse0wJcGE2jTSW3iDVuycNsMm4hH2Z0kdkquM++v/ | |
eu6FSqdQgPCnXEqULl8FmTxSQeDNtGPPAUO6nIPcj2A781q0tHuu2guQOHXvgR1m | |
0vdXcDazv/wor3ElhVsT/h5/WrQ8 | |
-----END CERTIFICATE----- | |
# Issuer: CN=GeoTrust Global CA O=GeoTrust Inc. | |
# Subject: CN=GeoTrust Global CA O=GeoTrust Inc. | |
# Label: "GeoTrust Global CA" | |
# Serial: 144470 | |
# MD5 Fingerprint: f7:75:ab:29:fb:51:4e:b7:77:5e:ff:05:3c:99:8e:f5 | |
# SHA1 Fingerprint: de:28:f4:a4:ff:e5:b9:2f:a3:c5:03:d1:a3:49:a7:f9:96:2a:82:12 | |
# SHA256 Fingerprint: ff:85:6a:2d:25:1d:cd:88:d3:66:56:f4:50:12:67:98:cf:ab:aa:de:40:79:9c:72:2d:e4:d2:b5:db:36:a7:3a | |
-----BEGIN CERTIFICATE----- | |
MIIDVDCCAjygAwIBAgIDAjRWMA0GCSqGSIb3DQEBBQUAMEIxCzAJBgNVBAYTAlVT | |
MRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMRswGQYDVQQDExJHZW9UcnVzdCBHbG9i | |
YWwgQ0EwHhcNMDIwNTIxMDQwMDAwWhcNMjIwNTIxMDQwMDAwWjBCMQswCQYDVQQG | |
EwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEbMBkGA1UEAxMSR2VvVHJ1c3Qg | |
R2xvYmFsIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2swYYzD9 | |
9BcjGlZ+W988bDjkcbd4kdS8odhM+KhDtgPpTSEHCIjaWC9mOSm9BXiLnTjoBbdq | |
fnGk5sRgprDvgOSJKA+eJdbtg/OtppHHmMlCGDUUna2YRpIuT8rxh0PBFpVXLVDv | |
iS2Aelet8u5fa9IAjbkU+BQVNdnARqN7csiRv8lVK83Qlz6cJmTM386DGXHKTubU | |
1XupGc1V3sjs0l44U+VcT4wt/lAjNvxm5suOpDkZALeVAjmRCw7+OC7RHQWa9k0+ | |
bw8HHa8sHo9gOeL6NlMTOdReJivbPagUvTLrGAMoUgRx5aszPeE4uwc2hGKceeoW | |
MPRfwCvocWvk+QIDAQABo1MwUTAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTA | |
ephojYn7qwVkDBF9qn1luMrMTjAfBgNVHSMEGDAWgBTAephojYn7qwVkDBF9qn1l | |
uMrMTjANBgkqhkiG9w0BAQUFAAOCAQEANeMpauUvXVSOKVCUn5kaFOSPeCpilKIn | |
Z57QzxpeR+nBsqTP3UEaBU6bS+5Kb1VSsyShNwrrZHYqLizz/Tt1kL/6cdjHPTfS | |
tQWVYrmm3ok9Nns4d0iXrKYgjy6myQzCsplFAMfOEVEiIuCl6rYVSAlk6l5PdPcF | |
PseKUgzbFbS9bZvlxrFUaKnjaZC2mqUPuLk/IH2uSrW4nOQdtqvmlKXBx4Ot2/Un | |
hw4EbNX/3aBd7YdStysVAq45pmp06drE57xNNB6pXE0zX5IJL4hmXXeXxx12E6nV | |
5fEWCRE11azbJHFwLJhWC9kXtNHjUStedejV0NxPNO3CBWaAocvmMw== | |
-----END CERTIFICATE----- | |
# Issuer: CN=GeoTrust Universal CA O=GeoTrust Inc. | |
# Subject: CN=GeoTrust Universal CA O=GeoTrust Inc. | |
# Label: "GeoTrust Universal CA" | |
# Serial: 1 | |
# MD5 Fingerprint: 92:65:58:8b:a2:1a:31:72:73:68:5c:b4:a5:7a:07:48 | |
# SHA1 Fingerprint: e6:21:f3:35:43:79:05:9a:4b:68:30:9d:8a:2f:74:22:15:87:ec:79 | |
# SHA256 Fingerprint: a0:45:9b:9f:63:b2:25:59:f5:fa:5d:4c:6d:b3:f9:f7:2f:f1:93:42:03:35:78:f0:73:bf:1d:1b:46:cb:b9:12 | |
-----BEGIN CERTIFICATE----- | |
MIIFaDCCA1CgAwIBAgIBATANBgkqhkiG9w0BAQUFADBFMQswCQYDVQQGEwJVUzEW | |
MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEeMBwGA1UEAxMVR2VvVHJ1c3QgVW5pdmVy | |
c2FsIENBMB4XDTA0MDMwNDA1MDAwMFoXDTI5MDMwNDA1MDAwMFowRTELMAkGA1UE | |
BhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xHjAcBgNVBAMTFUdlb1RydXN0 | |
IFVuaXZlcnNhbCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAKYV | |
VaCjxuAfjJ0hUNfBvitbtaSeodlyWL0AG0y/YckUHUWCq8YdgNY96xCcOq9tJPi8 | |
cQGeBvV8Xx7BDlXKg5pZMK4ZyzBIle0iN430SppyZj6tlcDgFgDgEB8rMQ7XlFTT | |
QjOgNB0eRXbdT8oYN+yFFXoZCPzVx5zw8qkuEKmS5j1YPakWaDwvdSEYfyh3peFh | |
F7em6fgemdtzbvQKoiFs7tqqhZJmr/Z6a4LauiIINQ/PQvE1+mrufislzDoR5G2v | |
c7J2Ha3QsnhnGqQ5HFELZ1aD/ThdDc7d8Lsrlh/eezJS/R27tQahsiFepdaVaH/w | |
mZ7cRQg+59IJDTWU3YBOU5fXtQlEIGQWFwMCTFMNaN7VqnJNk22CDtucvc+081xd | |
VHppCZbW2xHBjXWotM85yM48vCR85mLK4b19p71XZQvk/iXttmkQ3CgaRr0BHdCX | |
teGYO8A3ZNY9lO4L4fUorgtWv3GLIylBjobFS1J72HGrH4oVpjuDWtdYAVHGTEHZ | |
f9hBZ3KiKN9gg6meyHv8U3NyWfWTehd2Ds735VzZC1U0oqpbtWpU5xPKV+yXbfRe | |
Bi9Fi1jUIxaS5BZuKGNZMN9QAZxjiRqf2xeUgnA3wySemkfWWspOqGmJch+RbNt+ | |
nhutxx9z3SxPGWX9f5NAEC7S8O08ni4oPmkmM8V7AgMBAAGjYzBhMA8GA1UdEwEB | |
/wQFMAMBAf8wHQYDVR0OBBYEFNq7LqqwDLiIJlF0XG0D08DYj3rWMB8GA1UdIwQY | |
MBaAFNq7LqqwDLiIJlF0XG0D08DYj3rWMA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG | |
9w0BAQUFAAOCAgEAMXjmx7XfuJRAyXHEqDXsRh3ChfMoWIawC/yOsjmPRFWrZIRc | |
aanQmjg8+uUfNeVE44B5lGiku8SfPeE0zTBGi1QrlaXv9z+ZhP015s8xxtxqv6fX | |
IwjhmF7DWgh2qaavdy+3YL1ERmrvl/9zlcGO6JP7/TG37FcREUWbMPEaiDnBTzyn | |
ANXH/KttgCJwpQzgXQQpAvvLoJHRfNbDflDVnVi+QTjruXU8FdmbyUqDWcDaU/0z | |
uzYYm4UPFd3uLax2k7nZAY1IEKj79TiG8dsKxr2EoyNB3tZ3b4XUhRxQ4K5RirqN | |
Pnbiucon8l+f725ZDQbYKxek0nxru18UGkiPGkzns0ccjkxFKyDuSN/n3QmOGKja | |
QI2SJhFTYXNd673nxE0pN2HrrDktZy4W1vUAg4WhzH92xH3kt0tm7wNFYGm2DFKW | |
koRepqO1pD4r2czYG0eq8kTaT/kD6PAUyz/zg97QwVTjt+gKN02LIFkDMBmhLMi9 | |
ER/frslKxfMnZmaGrGiR/9nmUxwPi1xpZQomyB40w11Re9epnAahNt3ViZS82eQt | |
DF4JbAiXfKM9fJP/P6EUp8+1Xevb2xzEdt+Iub1FBZUbrvxGakyvSOPOrg/Sfuvm | |
bJxPgWp6ZKy7PtXny3YuxadIwVyQD8vIP/rmMuGNG2+k5o7Y+SlIis5z/iw= | |
-----END CERTIFICATE----- | |
# Issuer: CN=GeoTrust Universal CA 2 O=GeoTrust Inc. | |
# Subject: CN=GeoTrust Universal CA 2 O=GeoTrust Inc. | |
# Label: "GeoTrust Universal CA 2" | |
# Serial: 1 | |
# MD5 Fingerprint: 34:fc:b8:d0:36:db:9e:14:b3:c2:f2:db:8f:e4:94:c7 | |
# SHA1 Fingerprint: 37:9a:19:7b:41:85:45:35:0c:a6:03:69:f3:3c:2e:af:47:4f:20:79 | |
# SHA256 Fingerprint: a0:23:4f:3b:c8:52:7c:a5:62:8e:ec:81:ad:5d:69:89:5d:a5:68:0d:c9:1d:1c:b8:47:7f:33:f8:78:b9:5b:0b | |
-----BEGIN CERTIFICATE----- | |
MIIFbDCCA1SgAwIBAgIBATANBgkqhkiG9w0BAQUFADBHMQswCQYDVQQGEwJVUzEW | |
MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEgMB4GA1UEAxMXR2VvVHJ1c3QgVW5pdmVy | |
c2FsIENBIDIwHhcNMDQwMzA0MDUwMDAwWhcNMjkwMzA0MDUwMDAwWjBHMQswCQYD | |
VQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEgMB4GA1UEAxMXR2VvVHJ1 | |
c3QgVW5pdmVyc2FsIENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoIC | |
AQCzVFLByT7y2dyxUxpZKeexw0Uo5dfR7cXFS6GqdHtXr0om/Nj1XqduGdt0DE81 | |
WzILAePb63p3NeqqWuDW6KFXlPCQo3RWlEQwAx5cTiuFJnSCegx2oG9NzkEtoBUG | |
FF+3Qs17j1hhNNwqCPkuwwGmIkQcTAeC5lvO0Ep8BNMZcyfwqph/Lq9O64ceJHdq | |
XbboW0W63MOhBW9Wjo8QJqVJwy7XQYci4E+GymC16qFjwAGXEHm9ADwSbSsVsaxL | |
se4YuU6W3Nx2/zu+z18DwPw76L5GG//aQMJS9/7jOvdqdzXQ2o3rXhhqMcceujwb | |
KNZrVMaqW9eiLBsZzKIC9ptZvTdrhrVtgrrY6slWvKk2WP0+GfPtDCapkzj4T8Fd | |
IgbQl+rhrcZV4IErKIM6+vR7IVEAvlI4zs1meaj0gVbi0IMJR1FbUGrP20gaXT73 | |
y/Zl92zxlfgCOzJWgjl6W70viRu/obTo/3+NjN8D8WBOWBFM66M/ECuDmgFz2ZRt | |
hAAnZqzwcEAJQpKtT5MNYQlRJNiS1QuUYbKHsu3/mjX/hVTK7URDrBs8FmtISgoc | |
QIgfksILAAX/8sgCSqSqqcyZlpwvWOB94b67B9xfBHJcMTTD7F8t4D1kkCLm0ey4 | |
Lt1ZrtmhN79UNdxzMk+MBB4zsslG8dhcyFVQyWi9qLo2CQIDAQABo2MwYTAPBgNV | |
HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR281Xh+qQ2+/CfXGJx7Tz0RzgQKzAfBgNV | |
HSMEGDAWgBR281Xh+qQ2+/CfXGJx7Tz0RzgQKzAOBgNVHQ8BAf8EBAMCAYYwDQYJ | |
KoZIhvcNAQEFBQADggIBAGbBxiPz2eAubl/oz66wsCVNK/g7WJtAJDday6sWSf+z | |
dXkzoS9tcBc0kf5nfo/sm+VegqlVHy/c1FEHEv6sFj4sNcZj/NwQ6w2jqtB8zNHQ | |
L1EuxBRa3ugZ4T7GzKQp5y6EqgYweHZUcyiYWTjgAA1i00J9IZ+uPTqM1fp3DRgr | |
Fg5fNuH8KrUwJM/gYwx7WBr+mbpCErGR9Hxo4sjoryzqyX6uuyo9DRXcNJW2GHSo | |
ag/HtPQTxORb7QrSpJdMKu0vbBKJPfEncKpqA1Ihn0CoZ1Dy81of398j9tx4TuaY | |
T1U6U+Pv8vSfx3zYWK8pIpe44L2RLrB27FcRz+8pRPPphXpgY+RdM4kX2TGq2tbz | |
GDVyz4crL2MjhF2EjD9XoIj8mZEoJmmZ1I+XRL6O1UixpCgp8RW04eWe3fiPpm8m | |
1wk8OhwRDqZsN/etRIcsKMfYdIKz0G9KV7s1KSegi+ghp4dkNl3M2Basx7InQJJV | |
OCiNUW7dFGdTbHFcJoRNdVq2fmBWqU2t+5sel/MN2dKXVHfaPRK34B7vCAas+YWH | |
6aLcr34YEoP9VhdBLtUpgn2Z9DH2canPLAEnpQW5qrJITirvn5NSUZU8UnOOVkwX | |
QMAJKOSLakhT2+zNVVXxxvjpoixMptEmX36vWkzaH6byHCx+rgIW0lbQL1dTR+iS | |
-----END CERTIFICATE----- | |
# Issuer: CN=Visa eCommerce Root O=VISA OU=Visa International Service Association | |
# Subject: CN=Visa eCommerce Root O=VISA OU=Visa International Service Association | |
# Label: "Visa eCommerce Root" | |
# Serial: 25952180776285836048024890241505565794 | |
# MD5 Fingerprint: fc:11:b8:d8:08:93:30:00:6d:23:f9:7e:eb:52:1e:02 | |
# SHA1 Fingerprint: 70:17:9b:86:8c:00:a4:fa:60:91:52:22:3f:9f:3e:32:bd:e0:05:62 | |
# SHA256 Fingerprint: 69:fa:c9:bd:55:fb:0a:c7:8d:53:bb:ee:5c:f1:d5:97:98:9f:d0:aa:ab:20:a2:51:51:bd:f1:73:3e:e7:d1:22 | |
-----BEGIN CERTIFICATE----- | |
MIIDojCCAoqgAwIBAgIQE4Y1TR0/BvLB+WUF1ZAcYjANBgkqhkiG9w0BAQUFADBr | |
MQswCQYDVQQGEwJVUzENMAsGA1UEChMEVklTQTEvMC0GA1UECxMmVmlzYSBJbnRl | |
cm5hdGlvbmFsIFNlcnZpY2UgQXNzb2NpYXRpb24xHDAaBgNVBAMTE1Zpc2EgZUNv | |
bW1lcmNlIFJvb3QwHhcNMDIwNjI2MDIxODM2WhcNMjIwNjI0MDAxNjEyWjBrMQsw | |
CQYDVQQGEwJVUzENMAsGA1UEChMEVklTQTEvMC0GA1UECxMmVmlzYSBJbnRlcm5h | |
dGlvbmFsIFNlcnZpY2UgQXNzb2NpYXRpb24xHDAaBgNVBAMTE1Zpc2EgZUNvbW1l | |
cmNlIFJvb3QwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvV95WHm6h | |
2mCxlCfLF9sHP4CFT8icttD0b0/Pmdjh28JIXDqsOTPHH2qLJj0rNfVIsZHBAk4E | |
lpF7sDPwsRROEW+1QK8bRaVK7362rPKgH1g/EkZgPI2h4H3PVz4zHvtH8aoVlwdV | |
ZqW1LS7YgFmypw23RuwhY/81q6UCzyr0TP579ZRdhE2o8mCP2w4lPJ9zcc+U30rq | |
299yOIzzlr3xF7zSujtFWsan9sYXiwGd/BmoKoMWuDpI/k4+oKsGGelT84ATB+0t | |
vz8KPFUgOSwsAGl0lUq8ILKpeeUYiZGo3BxN77t+Nwtd/jmliFKMAGzsGHxBvfaL | |
dXe6YJ2E5/4tAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD | |
AgEGMB0GA1UdDgQWBBQVOIMPPyw/cDMezUb+B4wg4NfDtzANBgkqhkiG9w0BAQUF | |
AAOCAQEAX/FBfXxcCLkr4NWSR/pnXKUTwwMhmytMiUbPWU3J/qVAtmPN3XEolWcR | |
zCSs00Rsca4BIGsDoo8Ytyk6feUWYFN4PMCvFYP3j1IzJL1kk5fui/fbGKhtcbP3 | |
LBfQdCVp9/5rPJS+TUtBjE7ic9DjkCJzQ83z7+pzzkWKsKZJ/0x9nXGIxHYdkFsd | |
7v3M9+79YKWxehZx0RbQfBI8bGmX265fOZpwLwU8GUYEmSA20GBuYQa7FkKMcPcw | |
++DbZqMAAb3mLNqRX6BGi01qnD093QVG/na/oAo85ADmJ7f/hC3euiInlhBx6yLt | |
398znM/jra6O1I7mT1GvFpLgXPYHDw== | |
-----END CERTIFICATE----- | |
# Issuer: CN=AAA Certificate Services O=Comodo CA Limited | |
# Subject: CN=AAA Certificate Services O=Comodo CA Limited | |
# Label: "Comodo AAA Services root" | |
# Serial: 1 | |
# MD5 Fingerprint: 49:79:04:b0:eb:87:19:ac:47:b0:bc:11:51:9b:74:d0 | |
# SHA1 Fingerprint: d1:eb:23:a4:6d:17:d6:8f:d9:25:64:c2:f1:f1:60:17:64:d8:e3:49 | |
# SHA256 Fingerprint: d7:a7:a0:fb:5d:7e:27:31:d7:71:e9:48:4e:bc:de:f7:1d:5f:0c:3e:0a:29:48:78:2b:c8:3e:e0:ea:69:9e:f4 | |
-----BEGIN CERTIFICATE----- | |
MIIEMjCCAxqgAwIBAgIBATANBgkqhkiG9w0BAQUFADB7MQswCQYDVQQGEwJHQjEb | |
MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow | |
GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDEhMB8GA1UEAwwYQUFBIENlcnRpZmlj | |
YXRlIFNlcnZpY2VzMB4XDTA0MDEwMTAwMDAwMFoXDTI4MTIzMTIzNTk1OVowezEL | |
MAkGA1UEBhMCR0IxGzAZBgNVBAgMEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE | |
BwwHU2FsZm9yZDEaMBgGA1UECgwRQ29tb2RvIENBIExpbWl0ZWQxITAfBgNVBAMM | |
GEFBQSBDZXJ0aWZpY2F0ZSBTZXJ2aWNlczCCASIwDQYJKoZIhvcNAQEBBQADggEP | |
ADCCAQoCggEBAL5AnfRu4ep2hxxNRUSOvkbIgwadwSr+GB+O5AL686tdUIoWMQua | |
BtDFcCLNSS1UY8y2bmhGC1Pqy0wkwLxyTurxFa70VJoSCsN6sjNg4tqJVfMiWPPe | |
3M/vg4aijJRPn2jymJBGhCfHdr/jzDUsi14HZGWCwEiwqJH5YZ92IFCokcdmtet4 | |
YgNW8IoaE+oxox6gmf049vYnMlhvB/VruPsUK6+3qszWY19zjNoFmag4qMsXeDZR | |
rOme9Hg6jc8P2ULimAyrL58OAd7vn5lJ8S3frHRNG5i1R8XlKdH5kBjHYpy+g8cm | |
ez6KJcfA3Z3mNWgQIJ2P2N7Sw4ScDV7oL8kCAwEAAaOBwDCBvTAdBgNVHQ4EFgQU | |
oBEKIz6W8Qfs4q8p74Klf9AwpLQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQF | |
MAMBAf8wewYDVR0fBHQwcjA4oDagNIYyaHR0cDovL2NybC5jb21vZG9jYS5jb20v | |
QUFBQ2VydGlmaWNhdGVTZXJ2aWNlcy5jcmwwNqA0oDKGMGh0dHA6Ly9jcmwuY29t | |
b2RvLm5ldC9BQUFDZXJ0aWZpY2F0ZVNlcnZpY2VzLmNybDANBgkqhkiG9w0BAQUF | |
AAOCAQEACFb8AvCb6P+k+tZ7xkSAzk/ExfYAWMymtrwUSWgEdujm7l3sAg9g1o1Q | |
GE8mTgHj5rCl7r+8dFRBv/38ErjHT1r0iWAFf2C3BUrz9vHCv8S5dIa2LX1rzNLz | |
Rt0vxuBqw8M0Ayx9lt1awg6nCpnBBYurDC/zXDrPbDdVCYfeU0BsWO/8tqtlbgT2 | |
G9w84FoVxp7Z8VlIMCFlA2zs6SFz7JsDoeA3raAVGI/6ugLOpyypEBMs1OUIJqsi | |
l2D4kF501KKaU73yqWjgom7C12yxow+ev+to51byrvLjKzg6CYG1a4XXvi3tPxq3 | |
smPi9WIsgtRqAEFQ8TmDn5XpNpaYbg== | |
-----END CERTIFICATE----- | |
# Issuer: CN=QuoVadis Root Certification Authority O=QuoVadis Limited OU=Root Certification Authority | |
# Subject: CN=QuoVadis Root Certification Authority O=QuoVadis Limited OU=Root Certification Authority | |
# Label: "QuoVadis Root CA" | |
# Serial: 985026699 | |
# MD5 Fingerprint: 27:de:36:fe:72:b7:00:03:00:9d:f4:f0:1e:6c:04:24 | |
# SHA1 Fingerprint: de:3f:40:bd:50:93:d3:9b:6c:60:f6:da:bc:07:62:01:00:89:76:c9 | |
# SHA256 Fingerprint: a4:5e:de:3b:bb:f0:9c:8a:e1:5c:72:ef:c0:72:68:d6:93:a2:1c:99:6f:d5:1e:67:ca:07:94:60:fd:6d:88:73 | |
-----BEGIN CERTIFICATE----- | |
MIIF0DCCBLigAwIBAgIEOrZQizANBgkqhkiG9w0BAQUFADB/MQswCQYDVQQGEwJC | |
TTEZMBcGA1UEChMQUXVvVmFkaXMgTGltaXRlZDElMCMGA1UECxMcUm9vdCBDZXJ0 | |
aWZpY2F0aW9uIEF1dGhvcml0eTEuMCwGA1UEAxMlUXVvVmFkaXMgUm9vdCBDZXJ0 | |
aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wMTAzMTkxODMzMzNaFw0yMTAzMTcxODMz | |
MzNaMH8xCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMSUw | |
IwYDVQQLExxSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MS4wLAYDVQQDEyVR | |
dW9WYWRpcyBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG | |
9w0BAQEFAAOCAQ8AMIIBCgKCAQEAv2G1lVO6V/z68mcLOhrfEYBklbTRvM16z/Yp | |
li4kVEAkOPcahdxYTMukJ0KX0J+DisPkBgNbAKVRHnAEdOLB1Dqr1607BxgFjv2D | |
rOpm2RgbaIr1VxqYuvXtdj182d6UajtLF8HVj71lODqV0D1VNk7feVcxKh7YWWVJ | |
WCCYfqtffp/p1k3sg3Spx2zY7ilKhSoGFPlU5tPaZQeLYzcS19Dsw3sgQUSj7cug | |
F+FxZc4dZjH3dgEZyH0DWLaVSR2mEiboxgx24ONmy+pdpibu5cxfvWenAScOospU | |
xbF6lR1xHkopigPcakXBpBlebzbNw6Kwt/5cOOJSvPhEQ+aQuwIDAQABo4ICUjCC | |
Ak4wPQYIKwYBBQUHAQEEMTAvMC0GCCsGAQUFBzABhiFodHRwczovL29jc3AucXVv | |
dmFkaXNvZmZzaG9yZS5jb20wDwYDVR0TAQH/BAUwAwEB/zCCARoGA1UdIASCAREw | |
ggENMIIBCQYJKwYBBAG+WAABMIH7MIHUBggrBgEFBQcCAjCBxxqBxFJlbGlhbmNl | |
IG9uIHRoZSBRdW9WYWRpcyBSb290IENlcnRpZmljYXRlIGJ5IGFueSBwYXJ0eSBh | |
c3N1bWVzIGFjY2VwdGFuY2Ugb2YgdGhlIHRoZW4gYXBwbGljYWJsZSBzdGFuZGFy | |
ZCB0ZXJtcyBhbmQgY29uZGl0aW9ucyBvZiB1c2UsIGNlcnRpZmljYXRpb24gcHJh | |
Y3RpY2VzLCBhbmQgdGhlIFF1b1ZhZGlzIENlcnRpZmljYXRlIFBvbGljeS4wIgYI | |
KwYBBQUHAgEWFmh0dHA6Ly93d3cucXVvdmFkaXMuYm0wHQYDVR0OBBYEFItLbe3T | |
KbkGGew5Oanwl4Rqy+/fMIGuBgNVHSMEgaYwgaOAFItLbe3TKbkGGew5Oanwl4Rq | |
y+/foYGEpIGBMH8xCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1p | |
dGVkMSUwIwYDVQQLExxSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MS4wLAYD | |
VQQDEyVRdW9WYWRpcyBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggQ6tlCL | |
MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQUFAAOCAQEAitQUtf70mpKnGdSk | |
fnIYj9lofFIk3WdvOXrEql494liwTXCYhGHoG+NpGA7O+0dQoE7/8CQfvbLO9Sf8 | |
7C9TqnN7Az10buYWnuulLsS/VidQK2K6vkscPFVcQR0kvoIgR13VRH56FmjffU1R | |
cHhXHTMe/QKZnAzNCgVPx7uOpHX6Sm2xgI4JVrmcGmD+XcHXetwReNDWXcG31a0y | |
mQM6isxUJTkxgXsTIlG6Rmyhu576BGxJJnSP0nPrzDCi5upZIof4l/UO/erMkqQW | |
xFIY6iHOsfHmhIHluqmGKPJDWl0Snawe2ajlCmqnf6CHKc/yiU3U7MXi5nrQNiOK | |
SnQ2+Q== | |
-----END CERTIFICATE----- | |
# Issuer: CN=QuoVadis Root CA 2 O=QuoVadis Limited | |
# Subject: CN=QuoVadis Root CA 2 O=QuoVadis Limited | |
# Label: "QuoVadis Root CA 2" | |
# Serial: 1289 | |
# MD5 Fingerprint: 5e:39:7b:dd:f8:ba:ec:82:e9:ac:62:ba:0c:54:00:2b | |
# SHA1 Fingerprint: ca:3a:fb:cf:12:40:36:4b:44:b2:16:20:88:80:48:39:19:93:7c:f7 | |
# SHA256 Fingerprint: 85:a0:dd:7d:d7:20:ad:b7:ff:05:f8:3d:54:2b:20:9d:c7:ff:45:28:f7:d6:77:b1:83:89:fe:a5:e5:c4:9e:86 | |
-----BEGIN CERTIFICATE----- | |
MIIFtzCCA5+gAwIBAgICBQkwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x | |
GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv | |
b3QgQ0EgMjAeFw0wNjExMjQxODI3MDBaFw0zMTExMjQxODIzMzNaMEUxCzAJBgNV | |
BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W | |
YWRpcyBSb290IENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCa | |
GMpLlA0ALa8DKYrwD4HIrkwZhR0In6spRIXzL4GtMh6QRr+jhiYaHv5+HBg6XJxg | |
Fyo6dIMzMH1hVBHL7avg5tKifvVrbxi3Cgst/ek+7wrGsxDp3MJGF/hd/aTa/55J | |
WpzmM+Yklvc/ulsrHHo1wtZn/qtmUIttKGAr79dgw8eTvI02kfN/+NsRE8Scd3bB | |
rrcCaoF6qUWD4gXmuVbBlDePSHFjIuwXZQeVikvfj8ZaCuWw419eaxGrDPmF60Tp | |
+ARz8un+XJiM9XOva7R+zdRcAitMOeGylZUtQofX1bOQQ7dsE/He3fbE+Ik/0XX1 | |
ksOR1YqI0JDs3G3eicJlcZaLDQP9nL9bFqyS2+r+eXyt66/3FsvbzSUr5R/7mp/i | |
Ucw6UwxI5g69ybR2BlLmEROFcmMDBOAENisgGQLodKcftslWZvB1JdxnwQ5hYIiz | |
PtGo/KPaHbDRsSNU30R2be1B2MGyIrZTHN81Hdyhdyox5C315eXbyOD/5YDXC2Og | |
/zOhD7osFRXql7PSorW+8oyWHhqPHWykYTe5hnMz15eWniN9gqRMgeKh0bpnX5UH | |
oycR7hYQe7xFSkyyBNKr79X9DFHOUGoIMfmR2gyPZFwDwzqLID9ujWc9Otb+fVuI | |
yV77zGHcizN300QyNQliBJIWENieJ0f7OyHj+OsdWwIDAQABo4GwMIGtMA8GA1Ud | |
EwEB/wQFMAMBAf8wCwYDVR0PBAQDAgEGMB0GA1UdDgQWBBQahGK8SEwzJQTU7tD2 | |
A8QZRtGUazBuBgNVHSMEZzBlgBQahGK8SEwzJQTU7tD2A8QZRtGUa6FJpEcwRTEL | |
MAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMT | |
ElF1b1ZhZGlzIFJvb3QgQ0EgMoICBQkwDQYJKoZIhvcNAQEFBQADggIBAD4KFk2f | |
BluornFdLwUvZ+YTRYPENvbzwCYMDbVHZF34tHLJRqUDGCdViXh9duqWNIAXINzn | |
g/iN/Ae42l9NLmeyhP3ZRPx3UIHmfLTJDQtyU/h2BwdBR5YM++CCJpNVjP4iH2Bl | |
fF/nJrP3MpCYUNQ3cVX2kiF495V5+vgtJodmVjB3pjd4M1IQWK4/YY7yarHvGH5K | |
WWPKjaJW1acvvFYfzznB4vsKqBUsfU16Y8Zsl0Q80m/DShcK+JDSV6IZUaUtl0Ha | |
B0+pUNqQjZRG4T7wlP0QADj1O+hA4bRuVhogzG9Yje0uRY/W6ZM/57Es3zrWIozc | |
hLsib9D45MY56QSIPMO661V6bYCZJPVsAfv4l7CUW+v90m/xd2gNNWQjrLhVoQPR | |
TUIZ3Ph1WVaj+ahJefivDrkRoHy3au000LYmYjgahwz46P0u05B/B5EqHdZ+XIWD | |
mbA4CD/pXvk1B+TJYm5Xf6dQlfe6yJvmjqIBxdZmv3lh8zwc4bmCXF2gw+nYSL0Z | |
ohEUGW6yhhtoPkg3Goi3XZZenMfvJ2II4pEZXNLxId26F0KCl3GBUzGpn/Z9Yr9y | |
4aOTHcyKJloJONDO1w2AFrR4pTqHTI2KpdVGl/IsELm8VCLAAVBpQ570su9t+Oza | |
8eOx79+Rj1QqCyXBJhnEUhAFZdWCEOrCMc0u | |
-----END CERTIFICATE----- | |
# Issuer: CN=QuoVadis Root CA 3 O=QuoVadis Limited | |
# Subject: CN=QuoVadis Root CA 3 O=QuoVadis Limited | |
# Label: "QuoVadis Root CA 3" | |
# Serial: 1478 | |
# MD5 Fingerprint: 31:85:3c:62:94:97:63:b9:aa:fd:89:4e:af:6f:e0:cf | |
# SHA1 Fingerprint: 1f:49:14:f7:d8:74:95:1d:dd:ae:02:c0:be:fd:3a:2d:82:75:51:85 | |
# SHA256 Fingerprint: 18:f1:fc:7f:20:5d:f8:ad:dd:eb:7f:e0:07:dd:57:e3:af:37:5a:9c:4d:8d:73:54:6b:f4:f1:fe:d1:e1:8d:35 | |
-----BEGIN CERTIFICATE----- | |
MIIGnTCCBIWgAwIBAgICBcYwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x | |
GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv | |
b3QgQ0EgMzAeFw0wNjExMjQxOTExMjNaFw0zMTExMjQxOTA2NDRaMEUxCzAJBgNV | |
BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W | |
YWRpcyBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDM | |
V0IWVJzmmNPTTe7+7cefQzlKZbPoFog02w1ZkXTPkrgEQK0CSzGrvI2RaNggDhoB | |
4hp7Thdd4oq3P5kazethq8Jlph+3t723j/z9cI8LoGe+AaJZz3HmDyl2/7FWeUUr | |
H556VOijKTVopAFPD6QuN+8bv+OPEKhyq1hX51SGyMnzW9os2l2ObjyjPtr7guXd | |
8lyyBTNvijbO0BNO/79KDDRMpsMhvVAEVeuxu537RR5kFd5VAYwCdrXLoT9Cabwv | |
vWhDFlaJKjdhkf2mrk7AyxRllDdLkgbvBNDInIjbC3uBr7E9KsRlOni27tyAsdLT | |
mZw67mtaa7ONt9XOnMK+pUsvFrGeaDsGb659n/je7Mwpp5ijJUMv7/FfJuGITfhe | |
btfZFG4ZM2mnO4SJk8RTVROhUXhA+LjJou57ulJCg54U7QVSWllWp5f8nT8KKdjc | |
T5EOE7zelaTfi5m+rJsziO+1ga8bxiJTyPbH7pcUsMV8eFLI8M5ud2CEpukqdiDt | |
WAEXMJPpGovgc2PZapKUSU60rUqFxKMiMPwJ7Wgic6aIDFUhWMXhOp8q3crhkODZ | |
c6tsgLjoC2SToJyMGf+z0gzskSaHirOi4XCPLArlzW1oUevaPwV/izLmE1xr/l9A | |
4iLItLRkT9a6fUg+qGkM17uGcclzuD87nSVL2v9A6wIDAQABo4IBlTCCAZEwDwYD | |
VR0TAQH/BAUwAwEB/zCB4QYDVR0gBIHZMIHWMIHTBgkrBgEEAb5YAAMwgcUwgZMG | |
CCsGAQUFBwICMIGGGoGDQW55IHVzZSBvZiB0aGlzIENlcnRpZmljYXRlIGNvbnN0 | |
aXR1dGVzIGFjY2VwdGFuY2Ugb2YgdGhlIFF1b1ZhZGlzIFJvb3QgQ0EgMyBDZXJ0 | |
aWZpY2F0ZSBQb2xpY3kgLyBDZXJ0aWZpY2F0aW9uIFByYWN0aWNlIFN0YXRlbWVu | |
dC4wLQYIKwYBBQUHAgEWIWh0dHA6Ly93d3cucXVvdmFkaXNnbG9iYWwuY29tL2Nw | |
czALBgNVHQ8EBAMCAQYwHQYDVR0OBBYEFPLAE+CCQz777i9nMpY1XNu4ywLQMG4G | |
A1UdIwRnMGWAFPLAE+CCQz777i9nMpY1XNu4ywLQoUmkRzBFMQswCQYDVQQGEwJC | |
TTEZMBcGA1UEChMQUXVvVmFkaXMgTGltaXRlZDEbMBkGA1UEAxMSUXVvVmFkaXMg | |
Um9vdCBDQSAzggIFxjANBgkqhkiG9w0BAQUFAAOCAgEAT62gLEz6wPJv92ZVqyM0 | |
7ucp2sNbtrCD2dDQ4iH782CnO11gUyeim/YIIirnv6By5ZwkajGxkHon24QRiSem | |
d1o417+shvzuXYO8BsbRd2sPbSQvS3pspweWyuOEn62Iix2rFo1bZhfZFvSLgNLd | |
+LJ2w/w4E6oM3kJpK27zPOuAJ9v1pkQNn1pVWQvVDVJIxa6f8i+AxeoyUDUSly7B | |
4f/xI4hROJ/yZlZ25w9Rl6VSDE1JUZU2Pb+iSwwQHYaZTKrzchGT5Or2m9qoXadN | |
t54CrnMAyNojA+j56hl0YgCUyyIgvpSnWbWCar6ZeXqp8kokUvd0/bpO5qgdAm6x | |
DYBEwa7TIzdfu4V8K5Iu6H6li92Z4b8nby1dqnuH/grdS/yO9SbkbnBCbjPsMZ57 | |
k8HkyWkaPcBrTiJt7qtYTcbQQcEr6k8Sh17rRdhs9ZgC06DYVYoGmRmioHfRMJ6s | |
zHXug/WwYjnPbFfiTNKRCw51KBuav/0aQ/HKd/s7j2G4aSgWQgRecCocIdiP4b0j | |
Wy10QJLZYxkNc91pvGJHvOB0K7Lrfb5BG7XARsWhIstfTsEokt4YutUqKLsRixeT | |
mJlglFwjz1onl14LBQaTNx47aTbrqZ5hHY8y2o4M1nQ+ewkk2gF3R8Q7zTSMmfXK | |
4SVhM7JZG+Ju1zdXtg2pEto= | |
-----END CERTIFICATE----- | |
# Issuer: O=SECOM Trust.net OU=Security Communication RootCA1 | |
# Subject: O=SECOM Trust.net OU=Security Communication RootCA1 | |
# Label: "Security Communication Root CA" | |
# Serial: 0 | |
# MD5 Fingerprint: f1:bc:63:6a:54:e0:b5:27:f5:cd:e7:1a:e3:4d:6e:4a | |
# SHA1 Fingerprint: 36:b1:2b:49:f9:81:9e:d7:4c:9e:bc:38:0f:c6:56:8f:5d:ac:b2:f7 | |
# SHA256 Fingerprint: e7:5e:72:ed:9f:56:0e:ec:6e:b4:80:00:73:a4:3f:c3:ad:19:19:5a:39:22:82:01:78:95:97:4a:99:02:6b:6c | |
-----BEGIN CERTIFICATE----- | |
MIIDWjCCAkKgAwIBAgIBADANBgkqhkiG9w0BAQUFADBQMQswCQYDVQQGEwJKUDEY | |
MBYGA1UEChMPU0VDT00gVHJ1c3QubmV0MScwJQYDVQQLEx5TZWN1cml0eSBDb21t | |
dW5pY2F0aW9uIFJvb3RDQTEwHhcNMDMwOTMwMDQyMDQ5WhcNMjMwOTMwMDQyMDQ5 | |
WjBQMQswCQYDVQQGEwJKUDEYMBYGA1UEChMPU0VDT00gVHJ1c3QubmV0MScwJQYD | |
VQQLEx5TZWN1cml0eSBDb21tdW5pY2F0aW9uIFJvb3RDQTEwggEiMA0GCSqGSIb3 | |
DQEBAQUAA4IBDwAwggEKAoIBAQCzs/5/022x7xZ8V6UMbXaKL0u/ZPtM7orw8yl8 | |
9f/uKuDp6bpbZCKamm8sOiZpUQWZJtzVHGpxxpp9Hp3dfGzGjGdnSj74cbAZJ6kJ | |
DKaVv0uMDPpVmDvY6CKhS3E4eayXkmmziX7qIWgGmBSWh9JhNrxtJ1aeV+7AwFb9 | |
Ms+k2Y7CI9eNqPPYJayX5HA49LY6tJ07lyZDo6G8SVlyTCMwhwFY9k6+HGhWZq/N | |
QV3Is00qVUarH9oe4kA92819uZKAnDfdDJZkndwi92SL32HeFZRSFaB9UslLqCHJ | |
xrHty8OVYNEP8Ktw+N/LTX7s1vqr2b1/VPKl6Xn62dZ2JChzAgMBAAGjPzA9MB0G | |
A1UdDgQWBBSgc0mZaNyFW2XjmygvV5+9M7wHSDALBgNVHQ8EBAMCAQYwDwYDVR0T | |
AQH/BAUwAwEB/zANBgkqhkiG9w0BAQUFAAOCAQEAaECpqLvkT115swW1F7NgE+vG | |
kl3g0dNq/vu+m22/xwVtWSDEHPC32oRYAmP6SBbvT6UL90qY8j+eG61Ha2POCEfr | |
Uj94nK9NrvjVT8+amCoQQTlSxN3Zmw7vkwGusi7KaEIkQmywszo+zenaSMQVy+n5 | |
Bw+SUEmK3TGXX8npN6o7WWWXlDLJs58+OmJYxUmtYg5xpTKqL8aJdkNAExNnPaJU | |
JRDL8Try2frbSVa7pv6nQTXD4IhhyYjH3zYQIphZ6rBK+1YWc26sTfcioU+tHXot | |
RSflMMFe8toTyyVCUZVHA4xsIcx0Qu1T/zOLjw9XARYvz6buyXAiFL39vmwLAw== | |
-----END CERTIFICATE----- | |
# Issuer: CN=Sonera Class2 CA O=Sonera | |
# Subject: CN=Sonera Class2 CA O=Sonera | |
# Label: "Sonera Class 2 Root CA" | |
# Serial: 29 | |
# MD5 Fingerprint: a3:ec:75:0f:2e:88:df:fa:48:01:4e:0b:5c:48:6f:fb | |
# SHA1 Fingerprint: 37:f7:6d:e6:07:7c:90:c5:b1:3e:93:1a:b7:41:10:b4:f2:e4:9a:27 | |
# SHA256 Fingerprint: 79:08:b4:03:14:c1:38:10:0b:51:8d:07:35:80:7f:fb:fc:f8:51:8a:00:95:33:71:05:ba:38:6b:15:3d:d9:27 | |
-----BEGIN CERTIFICATE----- | |
MIIDIDCCAgigAwIBAgIBHTANBgkqhkiG9w0BAQUFADA5MQswCQYDVQQGEwJGSTEP | |
MA0GA1UEChMGU29uZXJhMRkwFwYDVQQDExBTb25lcmEgQ2xhc3MyIENBMB4XDTAx | |
MDQwNjA3Mjk0MFoXDTIxMDQwNjA3Mjk0MFowOTELMAkGA1UEBhMCRkkxDzANBgNV | |
BAoTBlNvbmVyYTEZMBcGA1UEAxMQU29uZXJhIENsYXNzMiBDQTCCASIwDQYJKoZI | |
hvcNAQEBBQADggEPADCCAQoCggEBAJAXSjWdyvANlsdE+hY3/Ei9vX+ALTU74W+o | |
Z6m/AxxNjG8yR9VBaKQTBME1DJqEQ/xcHf+Js+gXGM2RX/uJ4+q/Tl18GybTdXnt | |
5oTjV+WtKcT0OijnpXuENmmz/V52vaMtmdOQTiMofRhj8VQ7Jp12W5dCsv+u8E7s | |
3TmVToMGf+dJQMjFAbJUWmYdPfz56TwKnoG4cPABi+QjVHzIrviQHgCWctRUz2Ej | |
vOr7nQKV0ba5cTppCD8PtOFCx4j1P5iop7oc4HFx71hXgVB6XGt0Rg6DA5jDjqhu | |
8nYybieDwnPz3BjotJPqdURrBGAgcVeHnfO+oJAjPYok4doh28MCAwEAAaMzMDEw | |
DwYDVR0TAQH/BAUwAwEB/zARBgNVHQ4ECgQISqCqWITTXjwwCwYDVR0PBAQDAgEG | |
MA0GCSqGSIb3DQEBBQUAA4IBAQBazof5FnIVV0sd2ZvnoiYw7JNn39Yt0jSv9zil | |
zqsWuasvfDXLrNAPtEwr/IDva4yRXzZ299uzGxnq9LIR/WFxRL8oszodv7ND6J+/ | |
3DEIcbCdjdY0RzKQxmUk96BKfARzjzlvF4xytb1LyHr4e4PDKE6cCepnP7JnBBvD | |
FNr450kkkdAdavphOe9r5yF1BgfYErQhIHBCcYHaPJo2vqZbDWpsmh+Re/n570K6 | |
Tk6ezAyNlNzZRZxe7EJQY670XcSxEtzKO6gunRRaBXW37Ndj4ro1tgQIkejanZz2 | |
ZrUYrAqmVCY0M9IbwdR/GjqOC6oybtv8TyWf2TLHllpwrN9M | |
-----END CERTIFICATE----- | |
# Issuer: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com | |
# Subject: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com | |
# Label: "XRamp Global CA Root" | |
# Serial: 107108908803651509692980124233745014957 | |
# MD5 Fingerprint: a1:0b:44:b3:ca:10:d8:00:6e:9d:0f:d8:0f:92:0a:d1 | |
# SHA1 Fingerprint: b8:01:86:d1:eb:9c:86:a5:41:04:cf:30:54:f3:4c:52:b7:e5:58:c6 | |
# SHA256 Fingerprint: ce:cd:dc:90:50:99:d8:da:df:c5:b1:d2:09:b7:37:cb:e2:c1:8c:fb:2c:10:c0:ff:0b:cf:0d:32:86:fc:1a:a2 | |
-----BEGIN CERTIFICATE----- | |
MIIEMDCCAxigAwIBAgIQUJRs7Bjq1ZxN1ZfvdY+grTANBgkqhkiG9w0BAQUFADCB | |
gjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3dy54cmFtcHNlY3VyaXR5LmNvbTEk | |
MCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2VydmljZXMgSW5jMS0wKwYDVQQDEyRY | |
UmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQxMTAxMTcx | |
NDA0WhcNMzUwMTAxMDUzNzE5WjCBgjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3 | |
dy54cmFtcHNlY3VyaXR5LmNvbTEkMCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2Vy | |
dmljZXMgSW5jMS0wKwYDVQQDEyRYUmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBB | |
dXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCYJB69FbS6 | |
38eMpSe2OAtp87ZOqCwuIR1cRN8hXX4jdP5efrRKt6atH67gBhbim1vZZ3RrXYCP | |
KZ2GG9mcDZhtdhAoWORlsH9KmHmf4MMxfoArtYzAQDsRhtDLooY2YKTVMIJt2W7Q | |
DxIEM5dfT2Fa8OT5kavnHTu86M/0ay00fOJIYRyO82FEzG+gSqmUsE3a56k0enI4 | |
qEHMPJQRfevIpoy3hsvKMzvZPTeL+3o+hiznc9cKV6xkmxnr9A8ECIqsAxcZZPRa | |
JSKNNCyy9mgdEm3Tih4U2sSPpuIjhdV6Db1q4Ons7Be7QhtnqiXtRYMh/MHJfNVi | |
PvryxS3T/dRlAgMBAAGjgZ8wgZwwEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0P | |
BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMZPoj0GY4QJnM5i5ASs | |
jVy16bYbMDYGA1UdHwQvMC0wK6ApoCeGJWh0dHA6Ly9jcmwueHJhbXBzZWN1cml0 | |
eS5jb20vWEdDQS5jcmwwEAYJKwYBBAGCNxUBBAMCAQEwDQYJKoZIhvcNAQEFBQAD | |
ggEBAJEVOQMBG2f7Shz5CmBbodpNl2L5JFMn14JkTpAuw0kbK5rc/Kh4ZzXxHfAR | |
vbdI4xD2Dd8/0sm2qlWkSLoC295ZLhVbO50WfUfXN+pfTXYSNrsf16GBBEYgoyxt | |
qZ4Bfj8pzgCT3/3JknOJiWSe5yvkHJEs0rnOfc5vMZnT5r7SHpDwCRR5XCOrTdLa | |
IR9NmXmd4c8nnxCbHIgNsIpkQTG4DmyQJKSbXHGPurt+HBvbaoAPIbzp26a3QPSy | |
i6mx5O+aGtA9aZnuqCij4Tyz8LIRnM98QObd50N9otg6tamN8jSZxNQQ4Qb9CYQQ | |
O+7ETPTsJ3xCwnR8gooJybQDJbw= | |
-----END CERTIFICATE----- | |
# Issuer: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority | |
# Subject: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority | |
# Label: "Go Daddy Class 2 CA" | |
# Serial: 0 | |
# MD5 Fingerprint: 91:de:06:25:ab:da:fd:32:17:0c:bb:25:17:2a:84:67 | |
# SHA1 Fingerprint: 27:96:ba:e6:3f:18:01:e2:77:26:1b:a0:d7:77:70:02:8f:20:ee:e4 | |
# SHA256 Fingerprint: c3:84:6b:f2:4b:9e:93:ca:64:27:4c:0e:c6:7c:1e:cc:5e:02:4f:fc:ac:d2:d7:40:19:35:0e:81:fe:54:6a:e4 | |
-----BEGIN CERTIFICATE----- | |
MIIEADCCAuigAwIBAgIBADANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEh | |
MB8GA1UEChMYVGhlIEdvIERhZGR5IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBE | |
YWRkeSBDbGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA0MDYyOTE3 | |
MDYyMFoXDTM0MDYyOTE3MDYyMFowYzELMAkGA1UEBhMCVVMxITAfBgNVBAoTGFRo | |
ZSBHbyBEYWRkeSBHcm91cCwgSW5jLjExMC8GA1UECxMoR28gRGFkZHkgQ2xhc3Mg | |
MiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASAwDQYJKoZIhvcNAQEBBQADggEN | |
ADCCAQgCggEBAN6d1+pXGEmhW+vXX0iG6r7d/+TvZxz0ZWizV3GgXne77ZtJ6XCA | |
PVYYYwhv2vLM0D9/AlQiVBDYsoHUwHU9S3/Hd8M+eKsaA7Ugay9qK7HFiH7Eux6w | |
wdhFJ2+qN1j3hybX2C32qRe3H3I2TqYXP2WYktsqbl2i/ojgC95/5Y0V4evLOtXi | |
EqITLdiOr18SPaAIBQi2XKVlOARFmR6jYGB0xUGlcmIbYsUfb18aQr4CUWWoriMY | |
avx4A6lNf4DD+qta/KFApMoZFv6yyO9ecw3ud72a9nmYvLEHZ6IVDd2gWMZEewo+ | |
YihfukEHU1jPEX44dMX4/7VpkI+EdOqXG68CAQOjgcAwgb0wHQYDVR0OBBYEFNLE | |
sNKR1EwRcbNhyz2h/t2oatTjMIGNBgNVHSMEgYUwgYKAFNLEsNKR1EwRcbNhyz2h | |
/t2oatTjoWekZTBjMQswCQYDVQQGEwJVUzEhMB8GA1UEChMYVGhlIEdvIERhZGR5 | |
IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBEYWRkeSBDbGFzcyAyIENlcnRpZmlj | |
YXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQAD | |
ggEBADJL87LKPpH8EsahB4yOd6AzBhRckB4Y9wimPQoZ+YeAEW5p5JYXMP80kWNy | |
OO7MHAGjHZQopDH2esRU1/blMVgDoszOYtuURXO1v0XJJLXVggKtI3lpjbi2Tc7P | |
TMozI+gciKqdi0FuFskg5YmezTvacPd+mSYgFFQlq25zheabIZ0KbIIOqPjCDPoQ | |
HmyW74cNxA9hi63ugyuV+I6ShHI56yDqg+2DzZduCLzrTia2cyvk0/ZM/iZx4mER | |
dEr/VxqHD3VILs9RaRegAhJhldXRQLIQTO7ErBBDpqWeCtWVYpoNz4iCxTIM5Cuf | |
ReYNnyicsbkqWletNw+vHX/bvZ8= | |
-----END CERTIFICATE----- | |
# Issuer: O=Starfield Technologies, Inc. OU=Starfield Class 2 Certification Authority | |
# Subject: O=Starfield Technologies, Inc. OU=Starfield Class 2 Certification Authority | |
# Label: "Starfield Class 2 CA" | |
# Serial: 0 | |
# MD5 Fingerprint: 32:4a:4b:bb:c8:63:69:9b:be:74:9a:c6:dd:1d:46:24 | |
# SHA1 Fingerprint: ad:7e:1c:28:b0:64:ef:8f:60:03:40:20:14:c3:d0:e3:37:0e:b5:8a | |
# SHA256 Fingerprint: 14:65:fa:20:53:97:b8:76:fa:a6:f0:a9:95:8e:55:90:e4:0f:cc:7f:aa:4f:b7:c2:c8:67:75:21:fb:5f:b6:58 | |
-----BEGIN CERTIFICATE----- | |
MIIEDzCCAvegAwIBAgIBADANBgkqhkiG9w0BAQUFADBoMQswCQYDVQQGEwJVUzEl | |
MCMGA1UEChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMp | |
U3RhcmZpZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQw | |
NjI5MTczOTE2WhcNMzQwNjI5MTczOTE2WjBoMQswCQYDVQQGEwJVUzElMCMGA1UE | |
ChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMpU3RhcmZp | |
ZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggEgMA0GCSqGSIb3 | |
DQEBAQUAA4IBDQAwggEIAoIBAQC3Msj+6XGmBIWtDBFk385N78gDGIc/oav7PKaf | |
8MOh2tTYbitTkPskpD6E8J7oX+zlJ0T1KKY/e97gKvDIr1MvnsoFAZMej2YcOadN | |
+lq2cwQlZut3f+dZxkqZJRRU6ybH838Z1TBwj6+wRir/resp7defqgSHo9T5iaU0 | |
X9tDkYI22WY8sbi5gv2cOj4QyDvvBmVmepsZGD3/cVE8MC5fvj13c7JdBmzDI1aa | |
K4UmkhynArPkPw2vCHmCuDY96pzTNbO8acr1zJ3o/WSNF4Azbl5KXZnJHoe0nRrA | |
1W4TNSNe35tfPe/W93bC6j67eA0cQmdrBNj41tpvi/JEoAGrAgEDo4HFMIHCMB0G | |
A1UdDgQWBBS/X7fRzt0fhvRbVazc1xDCDqmI5zCBkgYDVR0jBIGKMIGHgBS/X7fR | |
zt0fhvRbVazc1xDCDqmI56FspGowaDELMAkGA1UEBhMCVVMxJTAjBgNVBAoTHFN0 | |
YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAsTKVN0YXJmaWVsZCBD | |
bGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8w | |
DQYJKoZIhvcNAQEFBQADggEBAAWdP4id0ckaVaGsafPzWdqbAYcaT1epoXkJKtv3 | |
L7IezMdeatiDh6GX70k1PncGQVhiv45YuApnP+yz3SFmH8lU+nLMPUxA2IGvd56D | |
eruix/U0F47ZEUD0/CwqTRV/p2JdLiXTAAsgGh1o+Re49L2L7ShZ3U0WixeDyLJl | |
xy16paq8U4Zt3VekyvggQQto8PT7dL5WXXp59fkdheMtlb71cZBDzI0fmgAKhynp | |
VSJYACPq4xJDKVtHCN2MQWplBqjlIapBtJUhlbl90TSrE9atvNziPTnNvT51cKEY | |
WQPJIrSPnNVeKtelttQKbfi3QBFGmh95DmK/D5fs4C8fF5Q= | |
-----END CERTIFICATE----- | |
# Issuer: O=Government Root Certification Authority | |
# Subject: O=Government Root Certification Authority | |
# Label: "Taiwan GRCA" | |
# Serial: 42023070807708724159991140556527066870 | |
# MD5 Fingerprint: 37:85:44:53:32:45:1f:20:f0:f3:95:e1:25:c4:43:4e | |
# SHA1 Fingerprint: f4:8b:11:bf:de:ab:be:94:54:20:71:e6:41:de:6b:be:88:2b:40:b9 | |
# SHA256 Fingerprint: 76:00:29:5e:ef:e8:5b:9e:1f:d6:24:db:76:06:2a:aa:ae:59:81:8a:54:d2:77:4c:d4:c0:b2:c0:11:31:e1:b3 | |
-----BEGIN CERTIFICATE----- | |
MIIFcjCCA1qgAwIBAgIQH51ZWtcvwgZEpYAIaeNe9jANBgkqhkiG9w0BAQUFADA/ | |
MQswCQYDVQQGEwJUVzEwMC4GA1UECgwnR292ZXJubWVudCBSb290IENlcnRpZmlj | |
YXRpb24gQXV0aG9yaXR5MB4XDTAyMTIwNTEzMjMzM1oXDTMyMTIwNTEzMjMzM1ow | |
PzELMAkGA1UEBhMCVFcxMDAuBgNVBAoMJ0dvdmVybm1lbnQgUm9vdCBDZXJ0aWZp | |
Y2F0aW9uIEF1dGhvcml0eTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIB | |
AJoluOzMonWoe/fOW1mKydGGEghU7Jzy50b2iPN86aXfTEc2pBsBHH8eV4qNw8XR | |
IePaJD9IK/ufLqGU5ywck9G/GwGHU5nOp/UKIXZ3/6m3xnOUT0b3EEk3+qhZSV1q | |
gQdW8or5BtD3cCJNtLdBuTK4sfCxw5w/cP1T3YGq2GN49thTbqGsaoQkclSGxtKy | |
yhwOeYHWtXBiCAEuTk8O1RGvqa/lmr/czIdtJuTJV6L7lvnM4T9TjGxMfptTCAts | |
F/tnyMKtsc2AtJfcdgEWFelq16TheEfOhtX7MfP6Mb40qij7cEwdScevLJ1tZqa2 | |
jWR+tSBqnTuBto9AAGdLiYa4zGX+FVPpBMHWXx1E1wovJ5pGfaENda1UhhXcSTvx | |
ls4Pm6Dso3pdvtUqdULle96ltqqvKKyskKw4t9VoNSZ63Pc78/1Fm9G7Q3hub/FC | |
VGqY8A2tl+lSXunVanLeavcbYBT0peS2cWeqH+riTcFCQP5nRhc4L0c/cZyu5SHK | |
YS1tB6iEfC3uUSXxY5Ce/eFXiGvviiNtsea9P63RPZYLhY3Naye7twWb7LuRqQoH | |
EgKXTiCQ8P8NHuJBO9NAOueNXdpm5AKwB1KYXA6OM5zCppX7VRluTI6uSw+9wThN | |
Xo+EHWbNxWCWtFJaBYmOlXqYwZE8lSOyDvR5tMl8wUohAgMBAAGjajBoMB0GA1Ud | |
DgQWBBTMzO/MKWCkO7GStjz6MmKPrCUVOzAMBgNVHRMEBTADAQH/MDkGBGcqBwAE | |
MTAvMC0CAQAwCQYFKw4DAhoFADAHBgVnKgMAAAQUA5vwIhP/lSg209yewDL7MTqK | |
UWUwDQYJKoZIhvcNAQEFBQADggIBAECASvomyc5eMN1PhnR2WPWus4MzeKR6dBcZ | |
TulStbngCnRiqmjKeKBMmo4sIy7VahIkv9Ro04rQ2JyftB8M3jh+Vzj8jeJPXgyf | |
qzvS/3WXy6TjZwj/5cAWtUgBfen5Cv8b5Wppv3ghqMKnI6mGq3ZW6A4M9hPdKmaK | |
ZEk9GhiHkASfQlK3T8v+R0F2Ne//AHY2RTKbxkaFXeIksB7jSJaYV0eUVXoPQbFE | |
JPPB/hprv4j9wabak2BegUqZIJxIZhm1AHlUD7gsL0u8qV1bYH+Mh6XgUmMqvtg7 | |
hUAV/h62ZT/FS9p+tXo1KaMuephgIqP0fSdOLeq0dDzpD6QzDxARvBMB1uUO07+1 | |
EqLhRSPAzAhuYbeJq4PjJB7mXQfnHyA+z2fI56wwbSdLaG5LKlwCCDTb+HbkZ6Mm | |
nD+iMsJKxYEYMRBWqoTvLQr/uB930r+lWKBi5NdLkXWNiYCYfm3LU05er/ayl4WX | |
udpVBrkk7tfGOB5jGxI7leFYrPLfhNVfmS8NVVvmONsuP3LpSIXLuykTjx44Vbnz | |
ssQwmSNOXfJIoRIM3BKQCZBUkQM8R+XVyWXgt0t97EfTsws+rZ7QdAAO671RrcDe | |
LMDDav7v3Aun+kbfYNucpllQdSNpc5Oy+fwC00fmcc4QAu4njIT/rEUNE1yDMuAl | |
pYYsfPQS | |
-----END CERTIFICATE----- | |
# Issuer: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com | |
# Subject: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com | |
# Label: "DigiCert Assured ID Root CA" | |
# Serial: 17154717934120587862167794914071425081 | |
# MD5 Fingerprint: 87:ce:0b:7b:2a:0e:49:00:e1:58:71:9b:37:a8:93:72 | |
# SHA1 Fingerprint: 05:63:b8:63:0d:62:d7:5a:bb:c8:ab:1e:4b:df:b5:a8:99:b2:4d:43 | |
# SHA256 Fingerprint: 3e:90:99:b5:01:5e:8f:48:6c:00:bc:ea:9d:11:1e:e7:21:fa:ba:35:5a:89:bc:f1:df:69:56:1e:3d:c6:32:5c | |
-----BEGIN CERTIFICATE----- | |
MIIDtzCCAp+gAwIBAgIQDOfg5RfYRv6P5WD8G/AwOTANBgkqhkiG9w0BAQUFADBl | |
MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 | |
d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv | |
b3QgQ0EwHhcNMDYxMTEwMDAwMDAwWhcNMzExMTEwMDAwMDAwWjBlMQswCQYDVQQG | |
EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl | |
cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgQ0EwggEi | |
MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCtDhXO5EOAXLGH87dg+XESpa7c | |
JpSIqvTO9SA5KFhgDPiA2qkVlTJhPLWxKISKityfCgyDF3qPkKyK53lTXDGEKvYP | |
mDI2dsze3Tyoou9q+yHyUmHfnyDXH+Kx2f4YZNISW1/5WBg1vEfNoTb5a3/UsDg+ | |
wRvDjDPZ2C8Y/igPs6eD1sNuRMBhNZYW/lmci3Zt1/GiSw0r/wty2p5g0I6QNcZ4 | |
VYcgoc/lbQrISXwxmDNsIumH0DJaoroTghHtORedmTpyoeb6pNnVFzF1roV9Iq4/ | |
AUaG9ih5yLHa5FcXxH4cDrC0kqZWs72yl+2qp/C3xag/lRbQ/6GW6whfGHdPAgMB | |
AAGjYzBhMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW | |
BBRF66Kv9JLLgjEtUYunpyGd823IDzAfBgNVHSMEGDAWgBRF66Kv9JLLgjEtUYun | |
pyGd823IDzANBgkqhkiG9w0BAQUFAAOCAQEAog683+Lt8ONyc3pklL/3cmbYMuRC | |
dWKuh+vy1dneVrOfzM4UKLkNl2BcEkxY5NM9g0lFWJc1aRqoR+pWxnmrEthngYTf | |
fwk8lOa4JiwgvT2zKIn3X/8i4peEH+ll74fg38FnSbNd67IJKusm7Xi+fT8r87cm | |
NW1fiQG2SVufAQWbqz0lwcy2f8Lxb4bG+mRo64EtlOtCt/qMHt1i8b5QZ7dsvfPx | |
H2sMNgcWfzd8qVttevESRmCD1ycEvkvOl77DZypoEd+A5wwzZr8TDRRu838fYxAe | |
+o0bJW1sj6W3YQGx0qMmoRBxna3iw/nDmVG3KwcIzi7mULKn+gpFL6Lw8g== | |
-----END CERTIFICATE----- | |
# Issuer: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com | |
# Subject: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com | |
# Label: "DigiCert Global Root CA" | |
# Serial: 10944719598952040374951832963794454346 | |
# MD5 Fingerprint: 79:e4:a9:84:0d:7d:3a:96:d7:c0:4f:e2:43:4c:89:2e | |
# SHA1 Fingerprint: a8:98:5d:3a:65:e5:e5:c4:b2:d7:d6:6d:40:c6:dd:2f:b1:9c:54:36 | |
# SHA256 Fingerprint: 43:48:a0:e9:44:4c:78:cb:26:5e:05:8d:5e:89:44:b4:d8:4f:96:62:bd:26:db:25:7f:89:34:a4:43:c7:01:61 | |
-----BEGIN CERTIFICATE----- | |
MIIDrzCCApegAwIBAgIQCDvgVpBCRrGhdWrJWZHHSjANBgkqhkiG9w0BAQUFADBh | |
MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 | |
d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBD | |
QTAeFw0wNjExMTAwMDAwMDBaFw0zMTExMTAwMDAwMDBaMGExCzAJBgNVBAYTAlVT | |
MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j | |
b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IENBMIIBIjANBgkqhkiG | |
9w0BAQEFAAOCAQ8AMIIBCgKCAQEA4jvhEXLeqKTTo1eqUKKPC3eQyaKl7hLOllsB | |
CSDMAZOnTjC3U/dDxGkAV53ijSLdhwZAAIEJzs4bg7/fzTtxRuLWZscFs3YnFo97 | |
nh6Vfe63SKMI2tavegw5BmV/Sl0fvBf4q77uKNd0f3p4mVmFaG5cIzJLv07A6Fpt | |
43C/dxC//AH2hdmoRBBYMql1GNXRor5H4idq9Joz+EkIYIvUX7Q6hL+hqkpMfT7P | |
T19sdl6gSzeRntwi5m3OFBqOasv+zbMUZBfHWymeMr/y7vrTC0LUq7dBMtoM1O/4 | |
gdW7jVg/tRvoSSiicNoxBN33shbyTApOB6jtSj1etX+jkMOvJwIDAQABo2MwYTAO | |
BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUA95QNVbR | |
TLtm8KPiGxvDl7I90VUwHwYDVR0jBBgwFoAUA95QNVbRTLtm8KPiGxvDl7I90VUw | |
DQYJKoZIhvcNAQEFBQADggEBAMucN6pIExIK+t1EnE9SsPTfrgT1eXkIoyQY/Esr | |
hMAtudXH/vTBH1jLuG2cenTnmCmrEbXjcKChzUyImZOMkXDiqw8cvpOp/2PV5Adg | |
06O/nVsJ8dWO41P0jmP6P6fbtGbfYmbW0W5BjfIttep3Sp+dWOIrWcBAI+0tKIJF | |
PnlUkiaY4IBIqDfv8NZ5YBberOgOzW6sRBc4L0na4UU+Krk2U886UAb3LujEV0ls | |
YSEY1QSteDwsOoBrp+uvFRTp2InBuThs4pFsiv9kuXclVzDAGySj4dzp30d8tbQk | |
CAUw7C29C79Fv1C5qfPrmAESrciIxpg0X40KPMbp1ZWVbd4= | |
-----END CERTIFICATE----- | |
# Issuer: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com | |
# Subject: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com | |
# Label: "DigiCert High Assurance EV Root CA" | |
# Serial: 3553400076410547919724730734378100087 | |
# MD5 Fingerprint: d4:74:de:57:5c:39:b2:d3:9c:85:83:c5:c0:65:49:8a | |
# SHA1 Fingerprint: 5f:b7:ee:06:33:e2:59:db:ad:0c:4c:9a:e6:d3:8f:1a:61:c7:dc:25 | |
# SHA256 Fingerprint: 74:31:e5:f4:c3:c1:ce:46:90:77:4f:0b:61:e0:54:40:88:3b:a9:a0:1e:d0:0b:a6:ab:d7:80:6e:d3:b1:18:cf | |
-----BEGIN CERTIFICATE----- | |
MIIDxTCCAq2gAwIBAgIQAqxcJmoLQJuPC3nyrkYldzANBgkqhkiG9w0BAQUFADBs | |
MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 | |
d3cuZGlnaWNlcnQuY29tMSswKQYDVQQDEyJEaWdpQ2VydCBIaWdoIEFzc3VyYW5j | |
ZSBFViBSb290IENBMB4XDTA2MTExMDAwMDAwMFoXDTMxMTExMDAwMDAwMFowbDEL | |
MAkGA1UEBhMCVVMxFTATBgNVBAoTDERpZ2lDZXJ0IEluYzEZMBcGA1UECxMQd3d3 | |
LmRpZ2ljZXJ0LmNvbTErMCkGA1UEAxMiRGlnaUNlcnQgSGlnaCBBc3N1cmFuY2Ug | |
RVYgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMbM5XPm | |
+9S75S0tMqbf5YE/yc0lSbZxKsPVlDRnogocsF9ppkCxxLeyj9CYpKlBWTrT3JTW | |
PNt0OKRKzE0lgvdKpVMSOO7zSW1xkX5jtqumX8OkhPhPYlG++MXs2ziS4wblCJEM | |
xChBVfvLWokVfnHoNb9Ncgk9vjo4UFt3MRuNs8ckRZqnrG0AFFoEt7oT61EKmEFB | |
Ik5lYYeBQVCmeVyJ3hlKV9Uu5l0cUyx+mM0aBhakaHPQNAQTXKFx01p8VdteZOE3 | |
hzBWBOURtCmAEvF5OYiiAhF8J2a3iLd48soKqDirCmTCv2ZdlYTBoSUeh10aUAsg | |
EsxBu24LUTi4S8sCAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQF | |
MAMBAf8wHQYDVR0OBBYEFLE+w2kD+L9HAdSYJhoIAu9jZCvDMB8GA1UdIwQYMBaA | |
FLE+w2kD+L9HAdSYJhoIAu9jZCvDMA0GCSqGSIb3DQEBBQUAA4IBAQAcGgaX3Nec | |
nzyIZgYIVyHbIUf4KmeqvxgydkAQV8GK83rZEWWONfqe/EW1ntlMMUu4kehDLI6z | |
eM7b41N5cdblIZQB2lWHmiRk9opmzN6cN82oNLFpmyPInngiK3BD41VHMWEZ71jF | |
hS9OMPagMRYjyOfiZRYzy78aG6A9+MpeizGLYAiJLQwGXFK3xPkKmNEVX58Svnw2 | |
Yzi9RKR/5CYrCsSXaQ3pjOLAEFe4yHYSkVXySGnYvCoCWw9E1CAx2/S6cCZdkGCe | |
vEsXCS+0yx5DaMkHJ8HSXPfqIbloEpw8nL+e/IBcm2PN7EeqJSdnoDfzAIJ9VNep | |
+OkuE6N36B9K | |
-----END CERTIFICATE----- | |
# Issuer: CN=Class 2 Primary CA O=Certplus | |
# Subject: CN=Class 2 Primary CA O=Certplus | |
# Label: "Certplus Class 2 Primary CA" | |
# Serial: 177770208045934040241468760488327595043 | |
# MD5 Fingerprint: 88:2c:8c:52:b8:a2:3c:f3:f7:bb:03:ea:ae:ac:42:0b | |
# SHA1 Fingerprint: 74:20:74:41:72:9c:dd:92:ec:79:31:d8:23:10:8d:c2:81:92:e2:bb | |
# SHA256 Fingerprint: 0f:99:3c:8a:ef:97:ba:af:56:87:14:0e:d5:9a:d1:82:1b:b4:af:ac:f0:aa:9a:58:b5:d5:7a:33:8a:3a:fb:cb | |
-----BEGIN CERTIFICATE----- | |
MIIDkjCCAnqgAwIBAgIRAIW9S/PY2uNp9pTXX8OlRCMwDQYJKoZIhvcNAQEFBQAw | |
PTELMAkGA1UEBhMCRlIxETAPBgNVBAoTCENlcnRwbHVzMRswGQYDVQQDExJDbGFz | |
cyAyIFByaW1hcnkgQ0EwHhcNOTkwNzA3MTcwNTAwWhcNMTkwNzA2MjM1OTU5WjA9 | |
MQswCQYDVQQGEwJGUjERMA8GA1UEChMIQ2VydHBsdXMxGzAZBgNVBAMTEkNsYXNz | |
IDIgUHJpbWFyeSBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANxQ | |
ltAS+DXSCHh6tlJw/W/uz7kRy1134ezpfgSN1sxvc0NXYKwzCkTsA18cgCSR5aiR | |
VhKC9+Ar9NuuYS6JEI1rbLqzAr3VNsVINyPi8Fo3UjMXEuLRYE2+L0ER4/YXJQyL | |
kcAbmXuZVg2v7tK8R1fjeUl7NIknJITesezpWE7+Tt9avkGtrAjFGA7v0lPubNCd | |
EgETjdyAYveVqUSISnFOYFWe2yMZeVYHDD9jC1yw4r5+FfyUM1hBOHTE4Y+L3yas | |
H7WLO7dDWWuwJKZtkIvEcupdM5i3y95ee++U8Rs+yskhwcWYAqqi9lt3m/V+llU0 | |
HGdpwPFC40es/CgcZlUCAwEAAaOBjDCBiTAPBgNVHRMECDAGAQH/AgEKMAsGA1Ud | |
DwQEAwIBBjAdBgNVHQ4EFgQU43Mt38sOKAze3bOkynm4jrvoMIkwEQYJYIZIAYb4 | |
QgEBBAQDAgEGMDcGA1UdHwQwMC4wLKAqoCiGJmh0dHA6Ly93d3cuY2VydHBsdXMu | |
Y29tL0NSTC9jbGFzczIuY3JsMA0GCSqGSIb3DQEBBQUAA4IBAQCnVM+IRBnL39R/ | |
AN9WM2K191EBkOvDP9GIROkkXe/nFL0gt5o8AP5tn9uQ3Nf0YtaLcF3n5QRIqWh8 | |
yfFC82x/xXp8HVGIutIKPidd3i1RTtMTZGnkLuPT55sJmabglZvOGtd/vjzOUrMR | |
FcEPF80Du5wlFbqidon8BvEY0JNLDnyCt6X09l/+7UCmnYR0ObncHoUW2ikbhiMA | |
ybuJfm6AiB4vFLQDJKgybwOaRywwvlbGp0ICcBvqQNi6BQNwB6SW//1IMwrh3KWB | |
kJtN3X3n57LNXMhqlfil9o3EXXgIvnsG1knPGTZQIy4I5p4FTUcY1Rbpsda2ENW7 | |
l7+ijrRU | |
-----END CERTIFICATE----- | |
# Issuer: CN=DST Root CA X3 O=Digital Signature Trust Co. | |
# Subject: CN=DST Root CA X3 O=Digital Signature Trust Co. | |
# Label: "DST Root CA X3" | |
# Serial: 91299735575339953335919266965803778155 | |
# MD5 Fingerprint: 41:03:52:dc:0f:f7:50:1b:16:f0:02:8e:ba:6f:45:c5 | |
# SHA1 Fingerprint: da:c9:02:4f:54:d8:f6:df:94:93:5f:b1:73:26:38:ca:6a:d7:7c:13 | |
# SHA256 Fingerprint: 06:87:26:03:31:a7:24:03:d9:09:f1:05:e6:9b:cf:0d:32:e1:bd:24:93:ff:c6:d9:20:6d:11:bc:d6:77:07:39 | |
-----BEGIN CERTIFICATE----- | |
MIIDSjCCAjKgAwIBAgIQRK+wgNajJ7qJMDmGLvhAazANBgkqhkiG9w0BAQUFADA/ | |
MSQwIgYDVQQKExtEaWdpdGFsIFNpZ25hdHVyZSBUcnVzdCBDby4xFzAVBgNVBAMT | |
DkRTVCBSb290IENBIFgzMB4XDTAwMDkzMDIxMTIxOVoXDTIxMDkzMDE0MDExNVow | |
PzEkMCIGA1UEChMbRGlnaXRhbCBTaWduYXR1cmUgVHJ1c3QgQ28uMRcwFQYDVQQD | |
Ew5EU1QgUm9vdCBDQSBYMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEB | |
AN+v6ZdQCINXtMxiZfaQguzH0yxrMMpb7NnDfcdAwRgUi+DoM3ZJKuM/IUmTrE4O | |
rz5Iy2Xu/NMhD2XSKtkyj4zl93ewEnu1lcCJo6m67XMuegwGMoOifooUMM0RoOEq | |
OLl5CjH9UL2AZd+3UWODyOKIYepLYYHsUmu5ouJLGiifSKOeDNoJjj4XLh7dIN9b | |
xiqKqy69cK3FCxolkHRyxXtqqzTWMIn/5WgTe1QLyNau7Fqckh49ZLOMxt+/yUFw | |
7BZy1SbsOFU5Q9D8/RhcQPGX69Wam40dutolucbY38EVAjqr2m7xPi71XAicPNaD | |
aeQQmxkqtilX4+U9m5/wAl0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNV | |
HQ8BAf8EBAMCAQYwHQYDVR0OBBYEFMSnsaR7LHH62+FLkHX/xBVghYkQMA0GCSqG | |
SIb3DQEBBQUAA4IBAQCjGiybFwBcqR7uKGY3Or+Dxz9LwwmglSBd49lZRNI+DT69 | |
ikugdB/OEIKcdBodfpga3csTS7MgROSR6cz8faXbauX+5v3gTt23ADq1cEmv8uXr | |
AvHRAosZy5Q6XkjEGB5YGV8eAlrwDPGxrancWYaLbumR9YbK+rlmM6pZW87ipxZz | |
R8srzJmwN0jP41ZL9c8PDHIyh8bwRLtTcm1D9SZImlJnt1ir/md2cXjbDaJWFBM5 | |
JDGFoqgCWjBH4d1QB7wCCZAA62RjYJsWvIjJEubSfZGL+T0yjWW06XyxV3bqxbYo | |
Ob8VZRzI9neWagqNdwvYkQsEjgfbKbYK7p2CNTUQ | |
-----END CERTIFICATE----- | |
# Issuer: CN=SwissSign Gold CA - G2 O=SwissSign AG | |
# Subject: CN=SwissSign Gold CA - G2 O=SwissSign AG | |
# Label: "SwissSign Gold CA - G2" | |
# Serial: 13492815561806991280 | |
# MD5 Fingerprint: 24:77:d9:a8:91:d1:3b:fa:88:2d:c2:ff:f8:cd:33:93 | |
# SHA1 Fingerprint: d8:c5:38:8a:b7:30:1b:1b:6e:d4:7a:e6:45:25:3a:6f:9f:1a:27:61 | |
# SHA256 Fingerprint: 62:dd:0b:e9:b9:f5:0a:16:3e:a0:f8:e7:5c:05:3b:1e:ca:57:ea:55:c8:68:8f:64:7c:68:81:f2:c8:35:7b:95 | |
-----BEGIN CERTIFICATE----- | |
MIIFujCCA6KgAwIBAgIJALtAHEP1Xk+wMA0GCSqGSIb3DQEBBQUAMEUxCzAJBgNV | |
BAYTAkNIMRUwEwYDVQQKEwxTd2lzc1NpZ24gQUcxHzAdBgNVBAMTFlN3aXNzU2ln | |
biBHb2xkIENBIC0gRzIwHhcNMDYxMDI1MDgzMDM1WhcNMzYxMDI1MDgzMDM1WjBF | |
MQswCQYDVQQGEwJDSDEVMBMGA1UEChMMU3dpc3NTaWduIEFHMR8wHQYDVQQDExZT | |
d2lzc1NpZ24gR29sZCBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC | |
CgKCAgEAr+TufoskDhJuqVAtFkQ7kpJcyrhdhJJCEyq8ZVeCQD5XJM1QiyUqt2/8 | |
76LQwB8CJEoTlo8jE+YoWACjR8cGp4QjK7u9lit/VcyLwVcfDmJlD909Vopz2q5+ | |
bbqBHH5CjCA12UNNhPqE21Is8w4ndwtrvxEvcnifLtg+5hg3Wipy+dpikJKVyh+c | |
6bM8K8vzARO/Ws/BtQpgvd21mWRTuKCWs2/iJneRjOBiEAKfNA+k1ZIzUd6+jbqE | |
emA8atufK+ze3gE/bk3lUIbLtK/tREDFylqM2tIrfKjuvqblCqoOpd8FUrdVxyJd | |
MmqXl2MT28nbeTZ7hTpKxVKJ+STnnXepgv9VHKVxaSvRAiTysybUa9oEVeXBCsdt | |
MDeQKuSeFDNeFhdVxVu1yzSJkvGdJo+hB9TGsnhQ2wwMC3wLjEHXuendjIj3o02y | |
MszYF9rNt85mndT9Xv+9lz4pded+p2JYryU0pUHHPbwNUMoDAw8IWh+Vc3hiv69y | |
FGkOpeUDDniOJihC8AcLYiAQZzlG+qkDzAQ4embvIIO1jEpWjpEA/I5cgt6IoMPi | |
aG59je883WX0XaxR7ySArqpWl2/5rX3aYT+YdzylkbYcjCbaZaIJbcHiVOO5ykxM | |
gI93e2CaHt+28kgeDrpOVG2Y4OGiGqJ3UM/EY5LsRxmd6+ZrzsECAwEAAaOBrDCB | |
qTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUWyV7 | |
lqRlUX64OfPAeGZe6Drn8O4wHwYDVR0jBBgwFoAUWyV7lqRlUX64OfPAeGZe6Drn | |
8O4wRgYDVR0gBD8wPTA7BglghXQBWQECAQEwLjAsBggrBgEFBQcCARYgaHR0cDov | |
L3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIBACe6 | |
45R88a7A3hfm5djV9VSwg/S7zV4Fe0+fdWavPOhWfvxyeDgD2StiGwC5+OlgzczO | |
UYrHUDFu4Up+GC9pWbY9ZIEr44OE5iKHjn3g7gKZYbge9LgriBIWhMIxkziWMaa5 | |
O1M/wySTVltpkuzFwbs4AOPsF6m43Md8AYOfMke6UiI0HTJ6CVanfCU2qT1L2sCC | |
bwq7EsiHSycR+R4tx5M/nttfJmtS2S6K8RTGRI0Vqbe/vd6mGu6uLftIdxf+u+yv | |
GPUqUfA5hJeVbG4bwyvEdGB5JbAKJ9/fXtI5z0V9QkvfsywexcZdylU6oJxpmo/a | |
77KwPJ+HbBIrZXAVUjEaJM9vMSNQH4xPjyPDdEFjHFWoFN0+4FFQz/EbMFYOkrCC | |
hdiDyyJkvC24JdVUorgG6q2SpCSgwYa1ShNqR88uC1aVVMvOmttqtKay20EIhid3 | |
92qgQmwLOM7XdVAyksLfKzAiSNDVQTglXaTpXZ/GlHXQRf0wl0OPkKsKx4ZzYEpp | |
Ld6leNcG2mqeSz53OiATIgHQv2ieY2BrNU0LbbqhPcCT4H8js1WtciVORvnSFu+w | |
ZMEBnunKoGqYDs/YYPIvSbjkQuE4NRb0yG5P94FW6LqjviOvrv1vA+ACOzB2+htt | |
Qc8Bsem4yWb02ybzOqR08kkkW8mw0FfB+j564ZfJ | |
-----END CERTIFICATE----- | |
# Issuer: CN=SwissSign Silver CA - G2 O=SwissSign AG | |
# Subject: CN=SwissSign Silver CA - G2 O=SwissSign AG | |
# Label: "SwissSign Silver CA - G2" | |
# Serial: 5700383053117599563 | |
# MD5 Fingerprint: e0:06:a1:c9:7d:cf:c9:fc:0d:c0:56:75:96:d8:62:13 | |
# SHA1 Fingerprint: 9b:aa:e5:9f:56:ee:21:cb:43:5a:be:25:93:df:a7:f0:40:d1:1d:cb | |
# SHA256 Fingerprint: be:6c:4d:a2:bb:b9:ba:59:b6:f3:93:97:68:37:42:46:c3:c0:05:99:3f:a9:8f:02:0d:1d:ed:be:d4:8a:81:d5 | |
-----BEGIN CERTIFICATE----- | |
MIIFvTCCA6WgAwIBAgIITxvUL1S7L0swDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UE | |
BhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMYU3dpc3NTaWdu | |
IFNpbHZlciBDQSAtIEcyMB4XDTA2MTAyNTA4MzI0NloXDTM2MTAyNTA4MzI0Nlow | |
RzELMAkGA1UEBhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMY | |
U3dpc3NTaWduIFNpbHZlciBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8A | |
MIICCgKCAgEAxPGHf9N4Mfc4yfjDmUO8x/e8N+dOcbpLj6VzHVxumK4DV644N0Mv | |
Fz0fyM5oEMF4rhkDKxD6LHmD9ui5aLlV8gREpzn5/ASLHvGiTSf5YXu6t+WiE7br | |
YT7QbNHm+/pe7R20nqA1W6GSy/BJkv6FCgU+5tkL4k+73JU3/JHpMjUi0R86TieF | |
nbAVlDLaYQ1HTWBCrpJH6INaUFjpiou5XaHc3ZlKHzZnu0jkg7Y360g6rw9njxcH | |
6ATK72oxh9TAtvmUcXtnZLi2kUpCe2UuMGoM9ZDulebyzYLs2aFK7PayS+VFheZt | |
eJMELpyCbTapxDFkH4aDCyr0NQp4yVXPQbBH6TCfmb5hqAaEuSh6XzjZG6k4sIN/ | |
c8HDO0gqgg8hm7jMqDXDhBuDsz6+pJVpATqJAHgE2cn0mRmrVn5bi4Y5FZGkECwJ | |
MoBgs5PAKrYYC51+jUnyEEp/+dVGLxmSo5mnJqy7jDzmDrxHB9xzUfFwZC8I+bRH | |
HTBsROopN4WSaGa8gzj+ezku01DwH/teYLappvonQfGbGHLy9YR0SslnxFSuSGTf | |
jNFusB3hB48IHpmccelM2KX3RxIfdNFRnobzwqIjQAtz20um53MGjMGg6cFZrEb6 | |
5i/4z3GcRm25xBWNOHkDRUjvxF3XCO6HOSKGsg0PWEP3calILv3q1h8CAwEAAaOB | |
rDCBqTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU | |
F6DNweRBtjpbO8tFnb0cwpj6hlgwHwYDVR0jBBgwFoAUF6DNweRBtjpbO8tFnb0c | |
wpj6hlgwRgYDVR0gBD8wPTA7BglghXQBWQEDAQEwLjAsBggrBgEFBQcCARYgaHR0 | |
cDovL3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIB | |
AHPGgeAn0i0P4JUw4ppBf1AsX19iYamGamkYDHRJ1l2E6kFSGG9YrVBWIGrGvShp | |
WJHckRE1qTodvBqlYJ7YH39FkWnZfrt4csEGDyrOj4VwYaygzQu4OSlWhDJOhrs9 | |
xCrZ1x9y7v5RoSJBsXECYxqCsGKrXlcSH9/L3XWgwF15kIwb4FDm3jH+mHtwX6WQ | |
2K34ArZv02DdQEsixT2tOnqfGhpHkXkzuoLcMmkDlm4fS/Bx/uNncqCxv1yL5PqZ | |
IseEuRuNI5c/7SXgz2W79WEE790eslpBIlqhn10s6FvJbakMDHiqYMZWjwFaDGi8 | |
aRl5xB9+lwW/xekkUV7U1UtT7dkjWjYDZaPBA61BMPNGG4WQr2W11bHkFlt4dR2X | |
em1ZqSqPe97Dh4kQmUlzeMg9vVE1dCrV8X5pGyq7O70luJpaPXJhkGaH7gzWTdQR | |
dAtq/gsD/KNVV4n+SsuuWxcFyPKNIzFTONItaj+CuY0IavdeQXRuwxF+B6wpYJE/ | |
OMpXEA29MC/HpeZBoNquBYeaoKRlbEwJDIm6uNO5wJOKMPqN5ZprFQFOZ6raYlY+ | |
hAhm0sQ2fac+EPyI4NSA5QC9qvNOBqN6avlicuMJT+ubDgEj8Z+7fNzcbBGXJbLy | |
tGMU0gYqZ4yD9c7qB9iaah7s5Aq7KkzrCWA5zspi2C5u | |
-----END CERTIFICATE----- | |
# Issuer: CN=GeoTrust Primary Certification Authority O=GeoTrust Inc. | |
# Subject: CN=GeoTrust Primary Certification Authority O=GeoTrust Inc. | |
# Label: "GeoTrust Primary Certification Authority" | |
# Serial: 32798226551256963324313806436981982369 | |
# MD5 Fingerprint: 02:26:c3:01:5e:08:30:37:43:a9:d0:7d:cf:37:e6:bf | |
# SHA1 Fingerprint: 32:3c:11:8e:1b:f7:b8:b6:52:54:e2:e2:10:0d:d6:02:90:37:f0:96 | |
# SHA256 Fingerprint: 37:d5:10:06:c5:12:ea:ab:62:64:21:f1:ec:8c:92:01:3f:c5:f8:2a:e9:8e:e5:33:eb:46:19:b8:de:b4:d0:6c | |
-----BEGIN CERTIFICATE----- | |
MIIDfDCCAmSgAwIBAgIQGKy1av1pthU6Y2yv2vrEoTANBgkqhkiG9w0BAQUFADBY | |
MQswCQYDVQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjExMC8GA1UEAxMo | |
R2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEx | |
MjcwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMFgxCzAJBgNVBAYTAlVTMRYwFAYDVQQK | |
Ew1HZW9UcnVzdCBJbmMuMTEwLwYDVQQDEyhHZW9UcnVzdCBQcmltYXJ5IENlcnRp | |
ZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC | |
AQEAvrgVe//UfH1nrYNke8hCUy3f9oQIIGHWAVlqnEQRr+92/ZV+zmEwu3qDXwK9 | |
AWbK7hWNb6EwnL2hhZ6UOvNWiAAxz9juapYC2e0DjPt1befquFUWBRaa9OBesYjA | |
ZIVcFU2Ix7e64HXprQU9nceJSOC7KMgD4TCTZF5SwFlwIjVXiIrxlQqD17wxcwE0 | |
7e9GceBrAqg1cmuXm2bgyxx5X9gaBGgeRwLmnWDiNpcB3841kt++Z8dtd1k7j53W | |
kBWUvEI0EME5+bEnPn7WinXFsq+W06Lem+SYvn3h6YGttm/81w7a4DSwDRp35+MI | |
mO9Y+pyEtzavwt+s0vQQBnBxNQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4G | |
A1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQULNVQQZcVi/CPNmFbSvtr2ZnJM5IwDQYJ | |
KoZIhvcNAQEFBQADggEBAFpwfyzdtzRP9YZRqSa+S7iq8XEN3GHHoOo0Hnp3DwQ1 | |
6CePbJC/kRYkRj5KTs4rFtULUh38H2eiAkUxT87z+gOneZ1TatnaYzr4gNfTmeGl | |
4b7UVXGYNTq+k+qurUKykG/g/CFNNWMziUnWm07Kx+dOCQD32sfvmWKZd7aVIl6K | |
oKv0uHiYyjgZmclynnjNS6yvGaBzEi38wkG6gZHaFloxt/m0cYASSJlyc1pZU8Fj | |
UjPtp8nSOQJw+uCxQmYpqptR7TBUIhRf2asdweSU8Pj1K/fqynhG1riR/aYNKxoU | |
AT6A8EKglQdebc3MS6RFjasS6LPeWuWgfOgPIh1a6Vk= | |
-----END CERTIFICATE----- | |
# Issuer: CN=thawte Primary Root CA O=thawte, Inc. OU=Certification Services Division/(c) 2006 thawte, Inc. - For authorized use only | |
# Subject: CN=thawte Primary Root CA O=thawte, Inc. OU=Certification Services Division/(c) 2006 thawte, Inc. - For authorized use only | |
# Label: "thawte Primary Root CA" | |
# Serial: 69529181992039203566298953787712940909 | |
# MD5 Fingerprint: 8c:ca:dc:0b:22:ce:f5:be:72:ac:41:1a:11:a8:d8:12 | |
# SHA1 Fingerprint: 91:c6:d6:ee:3e:8a:c8:63:84:e5:48:c2:99:29:5c:75:6c:81:7b:81 | |
# SHA256 Fingerprint: 8d:72:2f:81:a9:c1:13:c0:79:1d:f1:36:a2:96:6d:b2:6c:95:0a:97:1d:b4:6b:41:99:f4:ea:54:b7:8b:fb:9f | |
-----BEGIN CERTIFICATE----- | |
MIIEIDCCAwigAwIBAgIQNE7VVyDV7exJ9C/ON9srbTANBgkqhkiG9w0BAQUFADCB | |
qTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMf | |
Q2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIw | |
MDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxHzAdBgNV | |
BAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwHhcNMDYxMTE3MDAwMDAwWhcNMzYw | |
NzE2MjM1OTU5WjCBqTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5j | |
LjEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYG | |
A1UECxMvKGMpIDIwMDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNl | |
IG9ubHkxHzAdBgNVBAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwggEiMA0GCSqG | |
SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCsoPD7gFnUnMekz52hWXMJEEUMDSxuaPFs | |
W0hoSVk3/AszGcJ3f8wQLZU0HObrTQmnHNK4yZc2AreJ1CRfBsDMRJSUjQJib+ta | |
3RGNKJpchJAQeg29dGYvajig4tVUROsdB58Hum/u6f1OCyn1PoSgAfGcq/gcfomk | |
6KHYcWUNo1F77rzSImANuVud37r8UVsLr5iy6S7pBOhih94ryNdOwUxkHt3Ph1i6 | |
Sk/KaAcdHJ1KxtUvkcx8cXIcxcBn6zL9yZJclNqFwJu/U30rCfSMnZEfl2pSy94J | |
NqR32HuHUETVPm4pafs5SSYeCaWAe0At6+gnhcn+Yf1+5nyXHdWdAgMBAAGjQjBA | |
MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBR7W0XP | |
r87Lev0xkhpqtvNG61dIUDANBgkqhkiG9w0BAQUFAAOCAQEAeRHAS7ORtvzw6WfU | |
DW5FvlXok9LOAz/t2iWwHVfLHjp2oEzsUHboZHIMpKnxuIvW1oeEuzLlQRHAd9mz | |
YJ3rG9XRbkREqaYB7FViHXe4XI5ISXycO1cRrK1zN44veFyQaEfZYGDm/Ac9IiAX | |
xPcW6cTYcvnIc3zfFi8VqT79aie2oetaupgf1eNNZAqdE8hhuvU5HIe6uL17In/2 | |
/qxAeeWsEG89jxt5dovEN7MhGITlNgDrYyCZuen+MwS7QcjBAvlEYyCegc5C09Y/ | |
LHbTY5xZ3Y+m4Q6gLkH3LpVHz7z9M/P2C2F+fpErgUfCJzDupxBdN49cOSvkBPB7 | |
jVaMaA== | |
-----END CERTIFICATE----- | |
# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G5 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2006 VeriSign, Inc. - For authorized use only | |
# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G5 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2006 VeriSign, Inc. - For authorized use only | |
# Label: "VeriSign Class 3 Public Primary Certification Authority - G5" | |
# Serial: 33037644167568058970164719475676101450 | |
# MD5 Fingerprint: cb:17:e4:31:67:3e:e2:09:fe:45:57:93:f3:0a:fa:1c | |
# SHA1 Fingerprint: 4e:b6:d5:78:49:9b:1c:cf:5f:58:1e:ad:56:be:3d:9b:67:44:a5:e5 | |
# SHA256 Fingerprint: 9a:cf:ab:7e:43:c8:d8:80:d0:6b:26:2a:94:de:ee:e4:b4:65:99:89:c3:d0:ca:f1:9b:af:64:05:e4:1a:b7:df | |
-----BEGIN CERTIFICATE----- | |
MIIE0zCCA7ugAwIBAgIQGNrRniZ96LtKIVjNzGs7SjANBgkqhkiG9w0BAQUFADCB | |
yjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQL | |
ExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJp | |
U2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxW | |
ZXJpU2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0 | |
aG9yaXR5IC0gRzUwHhcNMDYxMTA4MDAwMDAwWhcNMzYwNzE2MjM1OTU5WjCByjEL | |
MAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZW | |
ZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJpU2ln | |
biwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJp | |
U2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9y | |
aXR5IC0gRzUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvJAgIKXo1 | |
nmAMqudLO07cfLw8RRy7K+D+KQL5VwijZIUVJ/XxrcgxiV0i6CqqpkKzj/i5Vbex | |
t0uz/o9+B1fs70PbZmIVYc9gDaTY3vjgw2IIPVQT60nKWVSFJuUrjxuf6/WhkcIz | |
SdhDY2pSS9KP6HBRTdGJaXvHcPaz3BJ023tdS1bTlr8Vd6Gw9KIl8q8ckmcY5fQG | |
BO+QueQA5N06tRn/Arr0PO7gi+s3i+z016zy9vA9r911kTMZHRxAy3QkGSGT2RT+ | |
rCpSx4/VBEnkjWNHiDxpg8v+R70rfk/Fla4OndTRQ8Bnc+MUCH7lP59zuDMKz10/ | |
NIeWiu5T6CUVAgMBAAGjgbIwga8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8E | |
BAMCAQYwbQYIKwYBBQUHAQwEYTBfoV2gWzBZMFcwVRYJaW1hZ2UvZ2lmMCEwHzAH | |
BgUrDgMCGgQUj+XTGoasjY5rw8+AatRIGCx7GS4wJRYjaHR0cDovL2xvZ28udmVy | |
aXNpZ24uY29tL3ZzbG9nby5naWYwHQYDVR0OBBYEFH/TZafC3ey78DAJ80M5+gKv | |
MzEzMA0GCSqGSIb3DQEBBQUAA4IBAQCTJEowX2LP2BqYLz3q3JktvXf2pXkiOOzE | |
p6B4Eq1iDkVwZMXnl2YtmAl+X6/WzChl8gGqCBpH3vn5fJJaCGkgDdk+bW48DW7Y | |
5gaRQBi5+MHt39tBquCWIMnNZBU4gcmU7qKEKQsTb47bDN0lAtukixlE0kF6BWlK | |
WE9gyn6CagsCqiUXObXbf+eEZSqVir2G3l6BFoMtEMze/aiCKm0oHw0LxOXnGiYZ | |
4fQRbxC1lfznQgUy286dUV4otp6F01vvpX1FQHKOtw5rDgb7MzVIcbidJ4vEZV8N | |
hnacRHr2lVz2XTIIM6RUthg/aFzyQkqFOFSDX9HoLPKsEdao7WNq | |
-----END CERTIFICATE----- | |
# Issuer: CN=SecureTrust CA O=SecureTrust Corporation | |
# Subject: CN=SecureTrust CA O=SecureTrust Corporation | |
# Label: "SecureTrust CA" | |
# Serial: 17199774589125277788362757014266862032 | |
# MD5 Fingerprint: dc:32:c3:a7:6d:25:57:c7:68:09:9d:ea:2d:a9:a2:d1 | |
# SHA1 Fingerprint: 87:82:c6:c3:04:35:3b:cf:d2:96:92:d2:59:3e:7d:44:d9:34:ff:11 | |
# SHA256 Fingerprint: f1:c1:b5:0a:e5:a2:0d:d8:03:0e:c9:f6:bc:24:82:3d:d3:67:b5:25:57:59:b4:e7:1b:61:fc:e9:f7:37:5d:73 | |
-----BEGIN CERTIFICATE----- | |
MIIDuDCCAqCgAwIBAgIQDPCOXAgWpa1Cf/DrJxhZ0DANBgkqhkiG9w0BAQUFADBI | |
MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x | |
FzAVBgNVBAMTDlNlY3VyZVRydXN0IENBMB4XDTA2MTEwNzE5MzExOFoXDTI5MTIz | |
MTE5NDA1NVowSDELMAkGA1UEBhMCVVMxIDAeBgNVBAoTF1NlY3VyZVRydXN0IENv | |
cnBvcmF0aW9uMRcwFQYDVQQDEw5TZWN1cmVUcnVzdCBDQTCCASIwDQYJKoZIhvcN | |
AQEBBQADggEPADCCAQoCggEBAKukgeWVzfX2FI7CT8rU4niVWJxB4Q2ZQCQXOZEz | |
Zum+4YOvYlyJ0fwkW2Gz4BERQRwdbvC4u/jep4G6pkjGnx29vo6pQT64lO0pGtSO | |
0gMdA+9tDWccV9cGrcrI9f4Or2YlSASWC12juhbDCE/RRvgUXPLIXgGZbf2IzIao | |
wW8xQmxSPmjL8xk037uHGFaAJsTQ3MBv396gwpEWoGQRS0S8Hvbn+mPeZqx2pHGj | |
7DaUaHp3pLHnDi+BeuK1cobvomuL8A/b01k/unK8RCSc43Oz969XL0Imnal0ugBS | |
8kvNU3xHCzaFDmapCJcWNFfBZveA4+1wVMeT4C4oFVmHursCAwEAAaOBnTCBmjAT | |
BgkrBgEEAYI3FAIEBh4EAEMAQTALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB | |
/zAdBgNVHQ4EFgQUQjK2FvoE/f5dS3rD/fdMQB1aQ68wNAYDVR0fBC0wKzApoCeg | |
JYYjaHR0cDovL2NybC5zZWN1cmV0cnVzdC5jb20vU1RDQS5jcmwwEAYJKwYBBAGC | |
NxUBBAMCAQAwDQYJKoZIhvcNAQEFBQADggEBADDtT0rhWDpSclu1pqNlGKa7UTt3 | |
6Z3q059c4EVlew3KW+JwULKUBRSuSceNQQcSc5R+DCMh/bwQf2AQWnL1mA6s7Ll/ | |
3XpvXdMc9P+IBWlCqQVxyLesJugutIxq/3HcuLHfmbx8IVQr5Fiiu1cprp6poxkm | |
D5kuCLDv/WnPmRoJjeOnnyvJNjR7JLN4TJUXpAYmHrZkUjZfYGfZnMUFdAvnZyPS | |
CPyI6a6Lf+Ew9Dd+/cYy2i2eRDAwbO4H3tI0/NL/QPZL9GZGBlSm8jIKYyYwa5vR | |
3ItHuuG51WLQoqD0ZwV4KWMabwTW+MZMo5qxN7SN5ShLHZ4swrhovO0C7jE= | |
-----END CERTIFICATE----- | |
# Issuer: CN=Secure Global CA O=SecureTrust Corporation | |
# Subject: CN=Secure Global CA O=SecureTrust Corporation | |
# Label: "Secure Global CA" | |
# Serial: 9751836167731051554232119481456978597 | |
# MD5 Fingerprint: cf:f4:27:0d:d4:ed:dc:65:16:49:6d:3d:da:bf:6e:de | |
# SHA1 Fingerprint: 3a:44:73:5a:e5:81:90:1f:24:86:61:46:1e:3b:9c:c4:5f:f5:3a:1b | |
# SHA256 Fingerprint: 42:00:f5:04:3a:c8:59:0e:bb:52:7d:20:9e:d1:50:30:29:fb:cb:d4:1c:a1:b5:06:ec:27:f1:5a:de:7d:ac:69 | |
-----BEGIN CERTIFICATE----- | |
MIIDvDCCAqSgAwIBAgIQB1YipOjUiolN9BPI8PjqpTANBgkqhkiG9w0BAQUFADBK | |
MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x | |
GTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwHhcNMDYxMTA3MTk0MjI4WhcNMjkx | |
MjMxMTk1MjA2WjBKMQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3Qg | |
Q29ycG9yYXRpb24xGTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwggEiMA0GCSqG | |
SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvNS7YrGxVaQZx5RNoJLNP2MwhR/jxYDiJ | |
iQPpvepeRlMJ3Fz1Wuj3RSoC6zFh1ykzTM7HfAo3fg+6MpjhHZevj8fcyTiW89sa | |
/FHtaMbQbqR8JNGuQsiWUGMu4P51/pinX0kuleM5M2SOHqRfkNJnPLLZ/kG5VacJ | |
jnIFHovdRIWCQtBJwB1g8NEXLJXr9qXBkqPFwqcIYA1gBBCWeZ4WNOaptvolRTnI | |
HmX5k/Wq8VLcmZg9pYYaDDUz+kulBAYVHDGA76oYa8J719rO+TMg1fW9ajMtgQT7 | |
sFzUnKPiXB3jqUJ1XnvUd+85VLrJChgbEplJL4hL/VBi0XPnj3pDAgMBAAGjgZ0w | |
gZowEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0PBAQDAgGGMA8GA1UdEwEB/wQF | |
MAMBAf8wHQYDVR0OBBYEFK9EBMJBfkiD2045AuzshHrmzsmkMDQGA1UdHwQtMCsw | |
KaAnoCWGI2h0dHA6Ly9jcmwuc2VjdXJldHJ1c3QuY29tL1NHQ0EuY3JsMBAGCSsG | |
AQQBgjcVAQQDAgEAMA0GCSqGSIb3DQEBBQUAA4IBAQBjGghAfaReUw132HquHw0L | |
URYD7xh8yOOvaliTFGCRsoTciE6+OYo68+aCiV0BN7OrJKQVDpI1WkpEXk5X+nXO | |
H0jOZvQ8QCaSmGwb7iRGDBezUqXbpZGRzzfTb+cnCDpOGR86p1hcF895P4vkp9Mm | |
I50mD1hp/Ed+stCNi5O/KU9DaXR2Z0vPB4zmAve14bRDtUstFJ/53CYNv6ZHdAbY | |
iNE6KTCEztI5gGIbqMdXSbxqVVFnFUq+NQfk1XWYN3kwFNspnWzFacxHVaIw98xc | |
f8LDmBxrThaA63p4ZUWiABqvDA1VZDRIuJK58bRQKfJPIx/abKwfROHdI3hRW8cW | |
-----END CERTIFICATE----- | |
# Issuer: CN=COMODO Certification Authority O=COMODO CA Limited | |
# Subject: CN=COMODO Certification Authority O=COMODO CA Limited | |
# Label: "COMODO Certification Authority" | |
# Serial: 104350513648249232941998508985834464573 | |
# MD5 Fingerprint: 5c:48:dc:f7:42:72:ec:56:94:6d:1c:cc:71:35:80:75 | |
# SHA1 Fingerprint: 66:31:bf:9e:f7:4f:9e:b6:c9:d5:a6:0c:ba:6a:be:d1:f7:bd:ef:7b | |
# SHA256 Fingerprint: 0c:2c:d6:3d:f7:80:6f:a3:99:ed:e8:09:11:6b:57:5b:f8:79:89:f0:65:18:f9:80:8c:86:05:03:17:8b:af:66 | |
-----BEGIN CERTIFICATE----- | |
MIIEHTCCAwWgAwIBAgIQToEtioJl4AsC7j41AkblPTANBgkqhkiG9w0BAQUFADCB | |
gTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G | |
A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxJzAlBgNV | |
BAMTHkNPTU9ETyBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEyMDEwMDAw | |
MDBaFw0yOTEyMzEyMzU5NTlaMIGBMQswCQYDVQQGEwJHQjEbMBkGA1UECBMSR3Jl | |
YXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHEwdTYWxmb3JkMRowGAYDVQQKExFDT01P | |
RE8gQ0EgTGltaXRlZDEnMCUGA1UEAxMeQ09NT0RPIENlcnRpZmljYXRpb24gQXV0 | |
aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0ECLi3LjkRv3 | |
UcEbVASY06m/weaKXTuH+7uIzg3jLz8GlvCiKVCZrts7oVewdFFxze1CkU1B/qnI | |
2GqGd0S7WWaXUF601CxwRM/aN5VCaTwwxHGzUvAhTaHYujl8HJ6jJJ3ygxaYqhZ8 | |
Q5sVW7euNJH+1GImGEaaP+vB+fGQV+useg2L23IwambV4EajcNxo2f8ESIl33rXp | |
+2dtQem8Ob0y2WIC8bGoPW43nOIv4tOiJovGuFVDiOEjPqXSJDlqR6sA1KGzqSX+ | |
DT+nHbrTUcELpNqsOO9VUCQFZUaTNE8tja3G1CEZ0o7KBWFxB3NH5YoZEr0ETc5O | |
nKVIrLsm9wIDAQABo4GOMIGLMB0GA1UdDgQWBBQLWOWLxkwVN6RAqTCpIb5HNlpW | |
/zAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zBJBgNVHR8EQjBAMD6g | |
PKA6hjhodHRwOi8vY3JsLmNvbW9kb2NhLmNvbS9DT01PRE9DZXJ0aWZpY2F0aW9u | |
QXV0aG9yaXR5LmNybDANBgkqhkiG9w0BAQUFAAOCAQEAPpiem/Yb6dc5t3iuHXIY | |
SdOH5EOC6z/JqvWote9VfCFSZfnVDeFs9D6Mk3ORLgLETgdxb8CPOGEIqB6BCsAv | |
IC9Bi5HcSEW88cbeunZrM8gALTFGTO3nnc+IlP8zwFboJIYmuNg4ON8qa90SzMc/ | |
RxdMosIGlgnW2/4/PEZB31jiVg88O8EckzXZOFKs7sjsLjBOlDW0JB9LeGna8gI4 | |
zJVSk/BwJVmcIGfE7vmLV2H0knZ9P4SNVbfo5azV8fUZVqZa+5Acr5Pr5RzUZ5dd | |
BA6+C4OmF4O5MBKgxTMVBbkN+8cFduPYSo38NBejxiEovjBFMR7HeL5YYTisO+IB | |
ZQ== | |
-----END CERTIFICATE----- | |
# Issuer: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C. | |
# Subject: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C. | |
# Label: "Network Solutions Certificate Authority" | |
# Serial: 116697915152937497490437556386812487904 | |
# MD5 Fingerprint: d3:f3:a6:16:c0:fa:6b:1d:59:b1:2d:96:4d:0e:11:2e | |
# SHA1 Fingerprint: 74:f8:a3:c3:ef:e7:b3:90:06:4b:83:90:3c:21:64:60:20:e5:df:ce | |
# SHA256 Fingerprint: 15:f0:ba:00:a3:ac:7a:f3:ac:88:4c:07:2b:10:11:a0:77:bd:77:c0:97:f4:01:64:b2:f8:59:8a:bd:83:86:0c | |
-----BEGIN CERTIFICATE----- | |
MIID5jCCAs6gAwIBAgIQV8szb8JcFuZHFhfjkDFo4DANBgkqhkiG9w0BAQUFADBi | |
MQswCQYDVQQGEwJVUzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMu | |
MTAwLgYDVQQDEydOZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3Jp | |
dHkwHhcNMDYxMjAxMDAwMDAwWhcNMjkxMjMxMjM1OTU5WjBiMQswCQYDVQQGEwJV | |
UzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMuMTAwLgYDVQQDEydO | |
ZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkwggEiMA0GCSqG | |
SIb3DQEBAQUAA4IBDwAwggEKAoIBAQDkvH6SMG3G2I4rC7xGzuAnlt7e+foS0zwz | |
c7MEL7xxjOWftiJgPl9dzgn/ggwbmlFQGiaJ3dVhXRncEg8tCqJDXRfQNJIg6nPP | |
OCwGJgl6cvf6UDL4wpPTaaIjzkGxzOTVHzbRijr4jGPiFFlp7Q3Tf2vouAPlT2rl | |
mGNpSAW+Lv8ztumXWWn4Zxmuk2GWRBXTcrA/vGp97Eh/jcOrqnErU2lBUzS1sLnF | |
BgrEsEX1QV1uiUV7PTsmjHTC5dLRfbIR1PtYMiKagMnc/Qzpf14Dl847ABSHJ3A4 | |
qY5usyd2mFHgBeMhqxrVhSI8KbWaFsWAqPS7azCPL0YCorEMIuDTAgMBAAGjgZcw | |
gZQwHQYDVR0OBBYEFCEwyfsA106Y2oeqKtCnLrFAMadMMA4GA1UdDwEB/wQEAwIB | |
BjAPBgNVHRMBAf8EBTADAQH/MFIGA1UdHwRLMEkwR6BFoEOGQWh0dHA6Ly9jcmwu | |
bmV0c29sc3NsLmNvbS9OZXR3b3JrU29sdXRpb25zQ2VydGlmaWNhdGVBdXRob3Jp | |
dHkuY3JsMA0GCSqGSIb3DQEBBQUAA4IBAQC7rkvnt1frf6ott3NHhWrB5KUd5Oc8 | |
6fRZZXe1eltajSU24HqXLjjAV2CDmAaDn7l2em5Q4LqILPxFzBiwmZVRDuwduIj/ | |
h1AcgsLj4DKAv6ALR8jDMe+ZZzKATxcheQxpXN5eNK4CtSbqUN9/GGUsyfJj4akH | |
/nxxH2szJGoeBfcFaMBqEssuXmHLrijTfsK0ZpEmXzwuJF/LWA/rKOyvEZbz3Htv | |
wKeI8lN3s2Berq4o2jUsbzRF0ybh3uxbTydrFny9RAQYgrOJeRcQcT16ohZO9QHN | |
pGxlaKFJdlxDydi8NmdspZS11My5vWo1ViHe2MPr+8ukYEywVaCge1ey | |
-----END CERTIFICATE----- | |
# Issuer: CN=COMODO ECC Certification Authority O=COMODO CA Limited | |
# Subject: CN=COMODO ECC Certification Authority O=COMODO CA Limited | |
# Label: "COMODO ECC Certification Authority" | |
# Serial: 41578283867086692638256921589707938090 | |
# MD5 Fingerprint: 7c:62:ff:74:9d:31:53:5e:68:4a:d5:78:aa:1e:bf:23 | |
# SHA1 Fingerprint: 9f:74:4e:9f:2b:4d:ba:ec:0f:31:2c:50:b6:56:3b:8e:2d:93:c3:11 | |
# SHA256 Fingerprint: 17:93:92:7a:06:14:54:97:89:ad:ce:2f:8f:34:f7:f0:b6:6d:0f:3a:e3:a3:b8:4d:21:ec:15:db:ba:4f:ad:c7 | |
-----BEGIN CERTIFICATE----- | |
MIICiTCCAg+gAwIBAgIQH0evqmIAcFBUTAGem2OZKjAKBggqhkjOPQQDAzCBhTEL | |
MAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE | |
BxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMT | |
IkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwMzA2MDAw | |
MDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdy | |
ZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09N | |
T0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlv | |
biBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQDR3svdcmCFYX7deSR | |
FtSrYpn1PlILBs5BAH+X4QokPB0BBO490o0JlwzgdeT6+3eKKvUDYEs2ixYjFq0J | |
cfRK9ChQtP6IHG4/bC8vCVlbpVsLM5niwz2J+Wos77LTBumjQjBAMB0GA1UdDgQW | |
BBR1cacZSBm8nZ3qQUfflMRId5nTeTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/ | |
BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjEA7wNbeqy3eApyt4jf/7VGFAkK+qDm | |
fQjGGoe9GKhzvSbKYAydzpmfz1wPMOG+FDHqAjAU9JM8SaczepBGR7NjfRObTrdv | |
GDeAU/7dIOA1mjbRxwG55tzd8/8dLDoWV9mSOdY= | |
-----END CERTIFICATE----- | |
# Issuer: CN=OISTE WISeKey Global Root GA CA O=WISeKey OU=Copyright (c) 2005/OISTE Foundation Endorsed | |
# Subject: CN=OISTE WISeKey Global Root GA CA O=WISeKey OU=Copyright (c) 2005/OISTE Foundation Endorsed | |
# Label: "OISTE WISeKey Global Root GA CA" | |
# Serial: 86718877871133159090080555911823548314 | |
# MD5 Fingerprint: bc:6c:51:33:a7:e9:d3:66:63:54:15:72:1b:21:92:93 | |
# SHA1 Fingerprint: 59:22:a1:e1:5a:ea:16:35:21:f8:98:39:6a:46:46:b0:44:1b:0f:a9 | |
# SHA256 Fingerprint: 41:c9:23:86:6a:b4:ca:d6:b7:ad:57:80:81:58:2e:02:07:97:a6:cb:df:4f:ff:78:ce:83:96:b3:89:37:d7:f5 | |
-----BEGIN CERTIFICATE----- | |
MIID8TCCAtmgAwIBAgIQQT1yx/RrH4FDffHSKFTfmjANBgkqhkiG9w0BAQUFADCB | |
ijELMAkGA1UEBhMCQ0gxEDAOBgNVBAoTB1dJU2VLZXkxGzAZBgNVBAsTEkNvcHly | |
aWdodCAoYykgMjAwNTEiMCAGA1UECxMZT0lTVEUgRm91bmRhdGlvbiBFbmRvcnNl | |
ZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9iYWwgUm9vdCBHQSBDQTAeFw0w | |
NTEyMTExNjAzNDRaFw0zNzEyMTExNjA5NTFaMIGKMQswCQYDVQQGEwJDSDEQMA4G | |
A1UEChMHV0lTZUtleTEbMBkGA1UECxMSQ29weXJpZ2h0IChjKSAyMDA1MSIwIAYD | |
VQQLExlPSVNURSBGb3VuZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBX | |
SVNlS2V5IEdsb2JhbCBSb290IEdBIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A | |
MIIBCgKCAQEAy0+zAJs9Nt350UlqaxBJH+zYK7LG+DKBKUOVTJoZIyEVRd7jyBxR | |
VVuuk+g3/ytr6dTqvirdqFEr12bDYVxgAsj1znJ7O7jyTmUIms2kahnBAbtzptf2 | |
w93NvKSLtZlhuAGio9RN1AU9ka34tAhxZK9w8RxrfvbDd50kc3vkDIzh2TbhmYsF | |
mQvtRTEJysIA2/dyoJaqlYfQjse2YXMNdmaM3Bu0Y6Kff5MTMPGhJ9vZ/yxViJGg | |
4E8HsChWjBgbl0SOid3gF27nKu+POQoxhILYQBRJLnpB5Kf+42TMwVlxSywhp1t9 | |
4B3RLoGbw9ho972WG6xwsRYUC9tguSYBBQIDAQABo1EwTzALBgNVHQ8EBAMCAYYw | |
DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUswN+rja8sHnR3JQmthG+IbJphpQw | |
EAYJKwYBBAGCNxUBBAMCAQAwDQYJKoZIhvcNAQEFBQADggEBAEuh/wuHbrP5wUOx | |
SPMowB0uyQlB+pQAHKSkq0lPjz0e701vvbyk9vImMMkQyh2I+3QZH4VFvbBsUfk2 | |
ftv1TDI6QU9bR8/oCy22xBmddMVHxjtqD6wU2zz0c5ypBd8A3HR4+vg1YFkCExh8 | |
vPtNsCBtQ7tgMHpnM1zFmdH4LTlSc/uMqpclXHLZCB6rTjzjgTGfA6b7wP4piFXa | |
hNVQA7bihKOmNqoROgHhGEvWRGizPflTdISzRpFGlgC3gCy24eMQ4tui5yiPAZZi | |
Fj4A4xylNoEYokxSdsARo27mHbrjWr42U8U+dY+GaSlYU7Wcu2+fXMUY7N0v4ZjJ | |
/L7fCg0= | |
-----END CERTIFICATE----- | |
# Issuer: CN=Certigna O=Dhimyotis | |
# Subject: CN=Certigna O=Dhimyotis | |
# Label: "Certigna" | |
# Serial: 18364802974209362175 | |
# MD5 Fingerprint: ab:57:a6:5b:7d:42:82:19:b5:d8:58:26:28:5e:fd:ff | |
# SHA1 Fingerprint: b1:2e:13:63:45:86:a4:6f:1a:b2:60:68:37:58:2d:c4:ac:fd:94:97 | |
# SHA256 Fingerprint: e3:b6:a2:db:2e:d7:ce:48:84:2f:7a:c5:32:41:c7:b7:1d:54:14:4b:fb:40:c1:1f:3f:1d:0b:42:f5:ee:a1:2d | |
-----BEGIN CERTIFICATE----- | |
MIIDqDCCApCgAwIBAgIJAP7c4wEPyUj/MA0GCSqGSIb3DQEBBQUAMDQxCzAJBgNV | |
BAYTAkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hMB4X | |
DTA3MDYyOTE1MTMwNVoXDTI3MDYyOTE1MTMwNVowNDELMAkGA1UEBhMCRlIxEjAQ | |
BgNVBAoMCURoaW15b3RpczERMA8GA1UEAwwIQ2VydGlnbmEwggEiMA0GCSqGSIb3 | |
DQEBAQUAA4IBDwAwggEKAoIBAQDIaPHJ1tazNHUmgh7stL7qXOEm7RFHYeGifBZ4 | |
QCHkYJ5ayGPhxLGWkv8YbWkj4Sti993iNi+RB7lIzw7sebYs5zRLcAglozyHGxny | |
gQcPOJAZ0xH+hrTy0V4eHpbNgGzOOzGTtvKg0KmVEn2lmsxryIRWijOp5yIVUxbw | |
zBfsV1/pogqYCd7jX5xv3EjjhQsVWqa6n6xI4wmy9/Qy3l40vhx4XUJbzg4ij02Q | |
130yGLMLLGq/jj8UEYkgDncUtT2UCIf3JR7VsmAA7G8qKCVuKj4YYxclPz5EIBb2 | |
JsglrgVKtOdjLPOMFlN+XPsRGgjBRmKfIrjxwo1p3Po6WAbfAgMBAAGjgbwwgbkw | |
DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUGu3+QTmQtCRZvgHyUtVF9lo53BEw | |
ZAYDVR0jBF0wW4AUGu3+QTmQtCRZvgHyUtVF9lo53BGhOKQ2MDQxCzAJBgNVBAYT | |
AkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hggkA/tzj | |
AQ/JSP8wDgYDVR0PAQH/BAQDAgEGMBEGCWCGSAGG+EIBAQQEAwIABzANBgkqhkiG | |
9w0BAQUFAAOCAQEAhQMeknH2Qq/ho2Ge6/PAD/Kl1NqV5ta+aDY9fm4fTIrv0Q8h | |
bV6lUmPOEvjvKtpv6zf+EwLHyzs+ImvaYS5/1HI93TDhHkxAGYwP15zRgzB7mFnc | |
fca5DClMoTOi62c6ZYTTluLtdkVwj7Ur3vkj1kluPBS1xp81HlDQwY9qcEQCYsuu | |
HWhBp6pX6FOqB9IG9tUUBguRA3UsbHK1YZWaDYu5Def131TN3ubY1gkIl2PlwS6w | |
t0QmwCbAr1UwnjvVNioZBPRcHv/PLLf/0P2HQBHVESO7SMAhqaQoLf0V+LBOK/Qw | |
WyH8EZE0vkHve52Xdf+XlcCWWC/qu0bXu+TZLg== | |
-----END CERTIFICATE----- | |
# Issuer: CN=Deutsche Telekom Root CA 2 O=Deutsche Telekom AG OU=T-TeleSec Trust Center | |
# Subject: CN=Deutsche Telekom Root CA 2 O=Deutsche Telekom AG OU=T-TeleSec Trust Center | |
# Label: "Deutsche Telekom Root CA 2" | |
# Serial: 38 | |
# MD5 Fingerprint: 74:01:4a:91:b1:08:c4:58:ce:47:cd:f0:dd:11:53:08 | |
# SHA1 Fingerprint: 85:a4:08:c0:9c:19:3e:5d:51:58:7d:cd:d6:13:30:fd:8c:de:37:bf | |
# SHA256 Fingerprint: b6:19:1a:50:d0:c3:97:7f:7d:a9:9b:cd:aa:c8:6a:22:7d:ae:b9:67:9e:c7:0b:a3:b0:c9:d9:22:71:c1:70:d3 | |
-----BEGIN CERTIFICATE----- | |
MIIDnzCCAoegAwIBAgIBJjANBgkqhkiG9w0BAQUFADBxMQswCQYDVQQGEwJERTEc | |
MBoGA1UEChMTRGV1dHNjaGUgVGVsZWtvbSBBRzEfMB0GA1UECxMWVC1UZWxlU2Vj | |
IFRydXN0IENlbnRlcjEjMCEGA1UEAxMaRGV1dHNjaGUgVGVsZWtvbSBSb290IENB | |
IDIwHhcNOTkwNzA5MTIxMTAwWhcNMTkwNzA5MjM1OTAwWjBxMQswCQYDVQQGEwJE | |
RTEcMBoGA1UEChMTRGV1dHNjaGUgVGVsZWtvbSBBRzEfMB0GA1UECxMWVC1UZWxl | |
U2VjIFRydXN0IENlbnRlcjEjMCEGA1UEAxMaRGV1dHNjaGUgVGVsZWtvbSBSb290 | |
IENBIDIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCrC6M14IspFLEU | |
ha88EOQ5bzVdSq7d6mGNlUn0b2SjGmBmpKlAIoTZ1KXleJMOaAGtuU1cOs7TuKhC | |
QN/Po7qCWWqSG6wcmtoIKyUn+WkjR/Hg6yx6m/UTAtB+NHzCnjwAWav12gz1Mjwr | |
rFDa1sPeg5TKqAyZMg4ISFZbavva4VhYAUlfckE8FQYBjl2tqriTtM2e66foai1S | |
NNs671x1Udrb8zH57nGYMsRUFUQM+ZtV7a3fGAigo4aKSe5TBY8ZTNXeWHmb0moc | |
QqvF1afPaA+W5OFhmHZhyJF81j4A4pFQh+GdCuatl9Idxjp9y7zaAzTVjlsB9WoH | |
txa2bkp/AgMBAAGjQjBAMB0GA1UdDgQWBBQxw3kbuvVT1xfgiXotF2wKsyudMzAP | |
BgNVHRMECDAGAQH/AgEFMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQUFAAOC | |
AQEAlGRZrTlk5ynrE/5aw4sTV8gEJPB0d8Bg42f76Ymmg7+Wgnxu1MM9756Abrsp | |
tJh6sTtU6zkXR34ajgv8HzFZMQSyzhfzLMdiNlXiItiJVbSYSKpk+tYcNthEeFpa | |
IzpXl/V6ME+un2pMSyuOoAPjPuCp1NJ70rOo4nI8rZ7/gFnkm0W09juwzTkZmDLl | |
6iFhkOQxIY40sfcvNUqFENrnijchvllj4PKFiDFT1FQUhXB59C4Gdyd1Lx+4ivn+ | |
xbrYNuSD7Odlt79jWvNGr4GUN9RBjNYj1h7P9WgbRGOiWrqnNVmh5XAFmw4jV5mU | |
Cm26OWMohpLzGITY+9HPBVZkVw== | |
-----END CERTIFICATE----- | |
# Issuer: CN=Cybertrust Global Root O=Cybertrust, Inc | |
# Subject: CN=Cybertrust Global Root O=Cybertrust, Inc | |
# Label: "Cybertrust Global Root" | |
# Serial: 4835703278459682877484360 | |
# MD5 Fingerprint: 72:e4:4a:87:e3:69:40:80:77:ea:bc:e3:f4:ff:f0:e1 | |
# SHA1 Fingerprint: 5f:43:e5:b1:bf:f8:78:8c:ac:1c:c7:ca:4a:9a:c6:22:2b:cc:34:c6 | |
# SHA256 Fingerprint: 96:0a:df:00:63:e9:63:56:75:0c:29:65:dd:0a:08:67:da:0b:9c:bd:6e:77:71:4a:ea:fb:23:49:ab:39:3d:a3 | |
-----BEGIN CERTIFICATE----- | |
MIIDoTCCAomgAwIBAgILBAAAAAABD4WqLUgwDQYJKoZIhvcNAQEFBQAwOzEYMBYG | |
A1UEChMPQ3liZXJ0cnVzdCwgSW5jMR8wHQYDVQQDExZDeWJlcnRydXN0IEdsb2Jh | |
bCBSb290MB4XDTA2MTIxNTA4MDAwMFoXDTIxMTIxNTA4MDAwMFowOzEYMBYGA1UE | |
ChMPQ3liZXJ0cnVzdCwgSW5jMR8wHQYDVQQDExZDeWJlcnRydXN0IEdsb2JhbCBS | |
b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA+Mi8vRRQZhP/8NN5 | |
7CPytxrHjoXxEnOmGaoQ25yiZXRadz5RfVb23CO21O1fWLE3TdVJDm71aofW0ozS | |
J8bi/zafmGWgE07GKmSb1ZASzxQG9Dvj1Ci+6A74q05IlG2OlTEQXO2iLb3VOm2y | |
HLtgwEZLAfVJrn5GitB0jaEMAs7u/OePuGtm839EAL9mJRQr3RAwHQeWP032a7iP | |
t3sMpTjr3kfb1V05/Iin89cqdPHoWqI7n1C6poxFNcJQZZXcY4Lv3b93TZxiyWNz | |
FtApD0mpSPCzqrdsxacwOUBdrsTiXSZT8M4cIwhhqJQZugRiQOwfOHB3EgZxpzAY | |
XSUnpQIDAQABo4GlMIGiMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/ | |
MB0GA1UdDgQWBBS2CHsNesysIEyGVjJez6tuhS1wVzA/BgNVHR8EODA2MDSgMqAw | |
hi5odHRwOi8vd3d3Mi5wdWJsaWMtdHJ1c3QuY29tL2NybC9jdC9jdHJvb3QuY3Js | |
MB8GA1UdIwQYMBaAFLYIew16zKwgTIZWMl7Pq26FLXBXMA0GCSqGSIb3DQEBBQUA | |
A4IBAQBW7wojoFROlZfJ+InaRcHUowAl9B8Tq7ejhVhpwjCt2BWKLePJzYFa+HMj | |
Wqd8BfP9IjsO0QbE2zZMcwSO5bAi5MXzLqXZI+O4Tkogp24CJJ8iYGd7ix1yCcUx | |
XOl5n4BHPa2hCwcUPUf/A2kaDAtE52Mlp3+yybh2hO0j9n0Hq0V+09+zv+mKts2o | |
omcrUtW3ZfA5TGOgkXmTUg9U3YO7n9GPp1Nzw8v/MOx8BLjYRB+TX3EJIrduPuoc | |
A06dGiBh+4E37F78CkWr1+cXVdCg6mCbpvbjjFspwgZgFJ0tl0ypkxWdYcQBX0jW | |
WL1WMRJOEcgh4LMRkWXbtKaIOM5V | |
-----END CERTIFICATE----- | |
# Issuer: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority | |
# Subject: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority | |
# Label: "ePKI Root Certification Authority" | |
# Serial: 28956088682735189655030529057352760477 | |
# MD5 Fingerprint: 1b:2e:00:ca:26:06:90:3d:ad:fe:6f:15:68:d3:6b:b3 | |
# SHA1 Fingerprint: 67:65:0d:f1:7e:8e:7e:5b:82:40:a4:f4:56:4b:cf:e2:3d:69:c6:f0 | |
# SHA256 Fingerprint: c0:a6:f4:dc:63:a2:4b:fd:cf:54:ef:2a:6a:08:2a:0a:72:de:35:80:3e:2f:f5:ff:52:7a:e5:d8:72:06:df:d5 | |
-----BEGIN CERTIFICATE----- | |
MIIFsDCCA5igAwIBAgIQFci9ZUdcr7iXAF7kBtK8nTANBgkqhkiG9w0BAQUFADBe | |
MQswCQYDVQQGEwJUVzEjMCEGA1UECgwaQ2h1bmdod2EgVGVsZWNvbSBDby4sIEx0 | |
ZC4xKjAoBgNVBAsMIWVQS0kgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAe | |
Fw0wNDEyMjAwMjMxMjdaFw0zNDEyMjAwMjMxMjdaMF4xCzAJBgNVBAYTAlRXMSMw | |
IQYDVQQKDBpDaHVuZ2h3YSBUZWxlY29tIENvLiwgTHRkLjEqMCgGA1UECwwhZVBL | |
SSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIICIjANBgkqhkiG9w0BAQEF | |
AAOCAg8AMIICCgKCAgEA4SUP7o3biDN1Z82tH306Tm2d0y8U82N0ywEhajfqhFAH | |
SyZbCUNsIZ5qyNUD9WBpj8zwIuQf5/dqIjG3LBXy4P4AakP/h2XGtRrBp0xtInAh | |
ijHyl3SJCRImHJ7K2RKilTza6We/CKBk49ZCt0Xvl/T29de1ShUCWH2YWEtgvM3X | |
DZoTM1PRYfl61dd4s5oz9wCGzh1NlDivqOx4UXCKXBCDUSH3ET00hl7lSM2XgYI1 | |
TBnsZfZrxQWh7kcT1rMhJ5QQCtkkO7q+RBNGMD+XPNjX12ruOzjjK9SXDrkb5wdJ | |
fzcq+Xd4z1TtW0ado4AOkUPB1ltfFLqfpo0kR0BZv3I4sjZsN/+Z0V0OWQqraffA | |
sgRFelQArr5T9rXn4fg8ozHSqf4hUmTFpmfwdQcGlBSBVcYn5AGPF8Fqcde+S/uU | |
WH1+ETOxQvdibBjWzwloPn9s9h6PYq2lY9sJpx8iQkEeb5mKPtf5P0B6ebClAZLS | |
nT0IFaUQAS2zMnaolQ2zepr7BxB4EW/hj8e6DyUadCrlHJhBmd8hh+iVBmoKs2pH | |
dmX2Os+PYhcZewoozRrSgx4hxyy/vv9haLdnG7t4TY3OZ+XkwY63I2binZB1NJip | |
NiuKmpS5nezMirH4JYlcWrYvjB9teSSnUmjDhDXiZo1jDiVN1Rmy5nk3pyKdVDEC | |
AwEAAaNqMGgwHQYDVR0OBBYEFB4M97Zn8uGSJglFwFU5Lnc/QkqiMAwGA1UdEwQF | |
MAMBAf8wOQYEZyoHAAQxMC8wLQIBADAJBgUrDgMCGgUAMAcGBWcqAwAABBRFsMLH | |
ClZ87lt4DJX5GFPBphzYEDANBgkqhkiG9w0BAQUFAAOCAgEACbODU1kBPpVJufGB | |
uvl2ICO1J2B01GqZNF5sAFPZn/KmsSQHRGoqxqWOeBLoR9lYGxMqXnmbnwoqZ6Yl | |
PwZpVnPDimZI+ymBV3QGypzqKOg4ZyYr8dW1P2WT+DZdjo2NQCCHGervJ8A9tDkP | |
JXtoUHRVnAxZfVo9QZQlUgjgRywVMRnVvwdVxrsStZf0X4OFunHB2WyBEXYKCrC/ | |
gpf36j36+uwtqSiUO1bd0lEursC9CBWMd1I0ltabrNMdjmEPNXubrjlpC2JgQCA2 | |
j6/7Nu4tCEoduL+bXPjqpRugc6bY+G7gMwRfaKonh+3ZwZCc7b3jajWvY9+rGNm6 | |
5ulK6lCKD2GTHuItGeIwlDWSXQ62B68ZgI9HkFFLLk3dheLSClIKF5r8GrBQAuUB | |
o2M3IUxExJtRmREOc5wGj1QupyheRDmHVi03vYVElOEMSyycw5KFNGHLD7ibSkNS | |
/jQ6fbjpKdx2qcgw+BRxgMYeNkh0IkFch4LoGHGLQYlE535YW6i4jRPpp2zDR+2z | |
Gp1iro2C6pSe3VkQw63d4k3jMdXH7OjysP6SHhYKGvzZ8/gntsm+HbRsZJB/9OTE | |
W9c3rkIO3aQab3yIVMUWbuF6aC74Or8NpDyJO3inTmODBCEIZ43ygknQW/2xzQ+D | |
hNQ+IIX3Sj0rnP0qCglN6oH4EZw= | |
-----END CERTIFICATE----- | |
# Issuer: O=certSIGN OU=certSIGN ROOT CA | |
# Subject: O=certSIGN OU=certSIGN ROOT CA | |
# Label: "certSIGN ROOT CA" | |
# Serial: 35210227249154 | |
# MD5 Fingerprint: 18:98:c0:d6:e9:3a:fc:f9:b0:f5:0c:f7:4b:01:44:17 | |
# SHA1 Fingerprint: fa:b7:ee:36:97:26:62:fb:2d:b0:2a:f6:bf:03:fd:e8:7c:4b:2f:9b | |
# SHA256 Fingerprint: ea:a9:62:c4:fa:4a:6b:af:eb:e4:15:19:6d:35:1c:cd:88:8d:4f:53:f3:fa:8a:e6:d7:c4:66:a9:4e:60:42:bb | |
-----BEGIN CERTIFICATE----- | |
MIIDODCCAiCgAwIBAgIGIAYFFnACMA0GCSqGSIb3DQEBBQUAMDsxCzAJBgNVBAYT | |
AlJPMREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBD | |
QTAeFw0wNjA3MDQxNzIwMDRaFw0zMTA3MDQxNzIwMDRaMDsxCzAJBgNVBAYTAlJP | |
MREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBDQTCC | |
ASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALczuX7IJUqOtdu0KBuqV5Do | |
0SLTZLrTk+jUrIZhQGpgV2hUhE28alQCBf/fm5oqrl0Hj0rDKH/v+yv6efHHrfAQ | |
UySQi2bJqIirr1qjAOm+ukbuW3N7LBeCgV5iLKECZbO9xSsAfsT8AzNXDe3i+s5d | |
RdY4zTW2ssHQnIFKquSyAVwdj1+ZxLGt24gh65AIgoDzMKND5pCCrlUoSe1b16kQ | |
OA7+j0xbm0bqQfWwCHTD0IgztnzXdN/chNFDDnU5oSVAKOp4yw4sLjmdjItuFhwv | |
JoIQ4uNllAoEwF73XVv4EOLQunpL+943AAAaWyjj0pxzPjKHmKHJUS/X3qwzs08C | |
AwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAcYwHQYDVR0O | |
BBYEFOCMm9slSbPxfIbWskKHC9BroNnkMA0GCSqGSIb3DQEBBQUAA4IBAQA+0hyJ | |
LjX8+HXd5n9liPRyTMks1zJO890ZeUe9jjtbkw9QSSQTaxQGcu8J06Gh40CEyecY | |
MnQ8SG4Pn0vU9x7Tk4ZkVJdjclDVVc/6IJMCopvDI5NOFlV2oHB5bc0hH88vLbwZ | |
44gx+FkagQnIl6Z0x2DEW8xXjrJ1/RsCCdtZb3KTafcxQdaIOL+Hsr0Wefmq5L6I | |
Jd1hJyMctTEHBDa0GpC9oHRxUIltvBTjD4au8as+x6AJzKNI0eDbZOeStc+vckNw | |
i/nDhDwTqn6Sm1dTk/pwwpEOMfmbZ13pljheX7NzTogVZ96edhBiIL5VaZVDADlN | |
9u6wWk5JRFRYX0KD | |
-----END CERTIFICATE----- | |
# Issuer: CN=GeoTrust Primary Certification Authority - G3 O=GeoTrust Inc. OU=(c) 2008 GeoTrust Inc. - For authorized use only | |
# Subject: CN=GeoTrust Primary Certification Authority - G3 O=GeoTrust Inc. OU=(c) 2008 GeoTrust Inc. - For authorized use only | |
# Label: "GeoTrust Primary Certification Authority - G3" | |
# Serial: 28809105769928564313984085209975885599 | |
# MD5 Fingerprint: b5:e8:34:36:c9:10:44:58:48:70:6d:2e:83:d4:b8:05 | |
# SHA1 Fingerprint: 03:9e:ed:b8:0b:e7:a0:3c:69:53:89:3b:20:d2:d9:32:3a:4c:2a:fd | |
# SHA256 Fingerprint: b4:78:b8:12:25:0d:f8:78:63:5c:2a:a7:ec:7d:15:5e:aa:62:5e:e8:29:16:e2:cd:29:43:61:88:6c:d1:fb:d4 | |
-----BEGIN CERTIFICATE----- | |
MIID/jCCAuagAwIBAgIQFaxulBmyeUtB9iepwxgPHzANBgkqhkiG9w0BAQsFADCB | |
mDELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xOTA3BgNVBAsT | |
MChjKSAyMDA4IEdlb1RydXN0IEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25s | |
eTE2MDQGA1UEAxMtR2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhv | |
cml0eSAtIEczMB4XDTA4MDQwMjAwMDAwMFoXDTM3MTIwMTIzNTk1OVowgZgxCzAJ | |
BgNVBAYTAlVTMRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMTkwNwYDVQQLEzAoYykg | |
MjAwOCBHZW9UcnVzdCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxNjA0 | |
BgNVBAMTLUdlb1RydXN0IFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg | |
LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANziXmJYHTNXOTIz | |
+uvLh4yn1ErdBojqZI4xmKU4kB6Yzy5jK/BGvESyiaHAKAxJcCGVn2TAppMSAmUm | |
hsalifD614SgcK9PGpc/BkTVyetyEH3kMSj7HGHmKAdEc5IiaacDiGydY8hS2pgn | |
5whMcD60yRLBxWeDXTPzAxHsatBT4tG6NmCUgLthY2xbF37fQJQeqw3CIShwiP/W | |
JmxsYAQlTlV+fe+/lEjetx3dcI0FX4ilm/LC7urRQEFtYjgdVgbFA0dRIBn8exAL | |
DmKudlW/X3e+PkkBUz2YJQN2JFodtNuJ6nnltrM7P7pMKEF/BqxqjsHQ9gUdfeZC | |
huOl1UcCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYw | |
HQYDVR0OBBYEFMR5yo6hTgMdHNxr2zFblD4/MH8tMA0GCSqGSIb3DQEBCwUAA4IB | |
AQAtxRPPVoB7eni9n64smefv2t+UXglpp+duaIy9cr5HqQ6XErhK8WTTOd8lNNTB | |
zU6B8A8ExCSzNJbGpqow32hhc9f5joWJ7w5elShKKiePEI4ufIbEAp7aDHdlDkQN | |
kv39sxY2+hENHYwOB4lqKVb3cvTdFZx3NWZXqxNT2I7BQMXXExZacse3aQHEerGD | |
AWh9jUGhlBjBJVz88P6DAod8DQ3PLghcSkANPuyBYeYk28rgDi0Hsj5W3I31QYUH | |
SJsMC8tJP33st/3LjWeJGqvtux6jAAgIFyqCXDFdRootD4abdNlF+9RAsXqqaC2G | |
spki4cErx5z481+oghLrGREt | |
-----END CERTIFICATE----- | |
# Issuer: CN=thawte Primary Root CA - G2 O=thawte, Inc. OU=(c) 2007 thawte, Inc. - For authorized use only | |
# Subject: CN=thawte Primary Root CA - G2 O=thawte, Inc. OU=(c) 2007 thawte, Inc. - For authorized use only | |
# Label: "thawte Primary Root CA - G2" | |
# Serial: 71758320672825410020661621085256472406 | |
# MD5 Fingerprint: 74:9d:ea:60:24:c4:fd:22:53:3e:cc:3a:72:d9:29:4f | |
# SHA1 Fingerprint: aa:db:bc:22:23:8f:c4:01:a1:27:bb:38:dd:f4:1d:db:08:9e:f0:12 | |
# SHA256 Fingerprint: a4:31:0d:50:af:18:a6:44:71:90:37:2a:86:af:af:8b:95:1f:fb:43:1d:83:7f:1e:56:88:b4:59:71:ed:15:57 | |
-----BEGIN CERTIFICATE----- | |
MIICiDCCAg2gAwIBAgIQNfwmXNmET8k9Jj1Xm67XVjAKBggqhkjOPQQDAzCBhDEL | |
MAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjE4MDYGA1UECxMvKGMp | |
IDIwMDcgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxJDAi | |
BgNVBAMTG3RoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EgLSBHMjAeFw0wNzExMDUwMDAw | |
MDBaFw0zODAxMTgyMzU5NTlaMIGEMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMdGhh | |
d3RlLCBJbmMuMTgwNgYDVQQLEy8oYykgMjAwNyB0aGF3dGUsIEluYy4gLSBGb3Ig | |
YXV0aG9yaXplZCB1c2Ugb25seTEkMCIGA1UEAxMbdGhhd3RlIFByaW1hcnkgUm9v | |
dCBDQSAtIEcyMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEotWcgnuVnfFSeIf+iha/ | |
BebfowJPDQfGAFG6DAJSLSKkQjnE/o/qycG+1E3/n3qe4rF8mq2nhglzh9HnmuN6 | |
papu+7qzcMBniKI11KOasf2twu8x+qi58/sIxpHR+ymVo0IwQDAPBgNVHRMBAf8E | |
BTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUmtgAMADna3+FGO6Lts6K | |
DPgR4bswCgYIKoZIzj0EAwMDaQAwZgIxAN344FdHW6fmCsO99YCKlzUNG4k8VIZ3 | |
KMqh9HneteY4sPBlcIx/AlTCv//YoT7ZzwIxAMSNlPzcU9LcnXgWHxUzI1NS41ox | |
XZ3Krr0TKUQNJ1uo52icEvdYPy5yAlejj6EULg== | |
-----END CERTIFICATE----- | |
# Issuer: CN=thawte Primary Root CA - G3 O=thawte, Inc. OU=Certification Services Division/(c) 2008 thawte, Inc. - For authorized use only | |
# Subject: CN=thawte Primary Root CA - G3 O=thawte, Inc. OU=Certification Services Division/(c) 2008 thawte, Inc. - For authorized use only | |
# Label: "thawte Primary Root CA - G3" | |
# Serial: 127614157056681299805556476275995414779 | |
# MD5 Fingerprint: fb:1b:5d:43:8a:94:cd:44:c6:76:f2:43:4b:47:e7:31 | |
# SHA1 Fingerprint: f1:8b:53:8d:1b:e9:03:b6:a6:f0:56:43:5b:17:15:89:ca:f3:6b:f2 | |
# SHA256 Fingerprint: 4b:03:f4:58:07:ad:70:f2:1b:fc:2c:ae:71:c9:fd:e4:60:4c:06:4c:f5:ff:b6:86:ba:e5:db:aa:d7:fd:d3:4c | |
-----BEGIN CERTIFICATE----- | |
MIIEKjCCAxKgAwIBAgIQYAGXt0an6rS0mtZLL/eQ+zANBgkqhkiG9w0BAQsFADCB | |
rjELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMf | |
Q2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIw | |
MDggdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxJDAiBgNV | |
BAMTG3RoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EgLSBHMzAeFw0wODA0MDIwMDAwMDBa | |
Fw0zNzEyMDEyMzU5NTlaMIGuMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMdGhhd3Rl | |
LCBJbmMuMSgwJgYDVQQLEx9DZXJ0aWZpY2F0aW9uIFNlcnZpY2VzIERpdmlzaW9u | |
MTgwNgYDVQQLEy8oYykgMjAwOCB0aGF3dGUsIEluYy4gLSBGb3IgYXV0aG9yaXpl | |
ZCB1c2Ugb25seTEkMCIGA1UEAxMbdGhhd3RlIFByaW1hcnkgUm9vdCBDQSAtIEcz | |
MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsr8nLPvb2FvdeHsbnndm | |
gcs+vHyu86YnmjSjaDFxODNi5PNxZnmxqWWjpYvVj2AtP0LMqmsywCPLLEHd5N/8 | |
YZzic7IilRFDGF/Eth9XbAoFWCLINkw6fKXRz4aviKdEAhN0cXMKQlkC+BsUa0Lf | |
b1+6a4KinVvnSr0eAXLbS3ToO39/fR8EtCab4LRarEc9VbjXsCZSKAExQGbY2SS9 | |
9irY7CFJXJv2eul/VTV+lmuNk5Mny5K76qxAwJ/C+IDPXfRa3M50hqY+bAtTyr2S | |
zhkGcuYMXDhpxwTWvGzOW/b3aJzcJRVIiKHpqfiYnODz1TEoYRFsZ5aNOZnLwkUk | |
OQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNV | |
HQ4EFgQUrWyqlGCc7eT/+j4KdCtjA/e2Wb8wDQYJKoZIhvcNAQELBQADggEBABpA | |
2JVlrAmSicY59BDlqQ5mU1143vokkbvnRFHfxhY0Cu9qRFHqKweKA3rD6z8KLFIW | |
oCtDuSWQP3CpMyVtRRooOyfPqsMpQhvfO0zAMzRbQYi/aytlryjvsvXDqmbOe1bu | |
t8jLZ8HJnBoYuMTDSQPxYA5QzUbF83d597YV4Djbxy8ooAw/dyZ02SUS2jHaGh7c | |
KUGRIjxpp7sC8rZcJwOJ9Abqm+RyguOhCcHpABnTPtRwa7pxpqpYrvS76Wy274fM | |
m7v/OeZWYdMKp8RcTGB7BXcmer/YB1IsYvdwY9k5vG8cwnncdimvzsUsZAReiDZu | |
MdRAGmI0Nj81Aa6sY6A= | |
-----END CERTIFICATE----- | |
# Issuer: CN=GeoTrust Primary Certification Authority - G2 O=GeoTrust Inc. OU=(c) 2007 GeoTrust Inc. - For authorized use only | |
# Subject: CN=GeoTrust Primary Certification Authority - G2 O=GeoTrust Inc. OU=(c) 2007 GeoTrust Inc. - For authorized use only | |
# Label: "GeoTrust Primary Certification Authority - G2" | |
# Serial: 80682863203381065782177908751794619243 | |
# MD5 Fingerprint: 01:5e:d8:6b:bd:6f:3d:8e:a1:31:f8:12:e0:98:73:6a | |
# SHA1 Fingerprint: 8d:17:84:d5:37:f3:03:7d:ec:70:fe:57:8b:51:9a:99:e6:10:d7:b0 | |
# SHA256 Fingerprint: 5e:db:7a:c4:3b:82:a0:6a:87:61:e8:d7:be:49:79:eb:f2:61:1f:7d:d7:9b:f9:1c:1c:6b:56:6a:21:9e:d7:66 | |
-----BEGIN CERTIFICATE----- | |
MIICrjCCAjWgAwIBAgIQPLL0SAoA4v7rJDteYD7DazAKBggqhkjOPQQDAzCBmDEL | |
MAkGA1UEBhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xOTA3BgNVBAsTMChj | |
KSAyMDA3IEdlb1RydXN0IEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25seTE2 | |
MDQGA1UEAxMtR2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0 | |
eSAtIEcyMB4XDTA3MTEwNTAwMDAwMFoXDTM4MDExODIzNTk1OVowgZgxCzAJBgNV | |
BAYTAlVTMRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMTkwNwYDVQQLEzAoYykgMjAw | |
NyBHZW9UcnVzdCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxNjA0BgNV | |
BAMTLUdlb1RydXN0IFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgLSBH | |
MjB2MBAGByqGSM49AgEGBSuBBAAiA2IABBWx6P0DFUPlrOuHNxFi79KDNlJ9RVcL | |
So17VDs6bl8VAsBQps8lL33KSLjHUGMcKiEIfJo22Av+0SbFWDEwKCXzXV2juLal | |
tJLtbCyf691DiaI8S0iRHVDsJt/WYC69IaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO | |
BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFBVfNVdRVfslsq0DafwBo/q+EVXVMAoG | |
CCqGSM49BAMDA2cAMGQCMGSWWaboCd6LuvpaiIjwH5HTRqjySkwCY/tsXzjbLkGT | |
qQ7mndwxHLKgpxgceeHHNgIwOlavmnRs9vuD4DPTCF+hnMJbn0bWtsuRBmOiBucz | |
rD6ogRLQy7rQkgu2npaqBA+K | |
-----END CERTIFICATE----- | |
# Issuer: CN=VeriSign Universal Root Certification Authority O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2008 VeriSign, Inc. - For authorized use only | |
# Subject: CN=VeriSign Universal Root Certification Authority O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2008 VeriSign, Inc. - For authorized use only | |
# Label: "VeriSign Universal Root Certification Authority" | |
# Serial: 85209574734084581917763752644031726877 | |
# MD5 Fingerprint: 8e:ad:b5:01:aa:4d:81:e4:8c:1d:d1:e1:14:00:95:19 | |
# SHA1 Fingerprint: 36:79:ca:35:66:87:72:30:4d:30:a5:fb:87:3b:0f:a7:7b:b7:0d:54 | |
# SHA256 Fingerprint: 23:99:56:11:27:a5:71:25:de:8c:ef:ea:61:0d:df:2f:a0:78:b5:c8:06:7f:4e:82:82:90:bf:b8:60:e8:4b:3c | |
-----BEGIN CERTIFICATE----- | |
MIIEuTCCA6GgAwIBAgIQQBrEZCGzEyEDDrvkEhrFHTANBgkqhkiG9w0BAQsFADCB | |
vTELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQL | |
ExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwOCBWZXJp | |
U2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MTgwNgYDVQQDEy9W | |
ZXJpU2lnbiBVbml2ZXJzYWwgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAe | |
Fw0wODA0MDIwMDAwMDBaFw0zNzEyMDEyMzU5NTlaMIG9MQswCQYDVQQGEwJVUzEX | |
MBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlTaWduIFRydXN0 | |
IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAyMDA4IFZlcmlTaWduLCBJbmMuIC0gRm9y | |
IGF1dGhvcml6ZWQgdXNlIG9ubHkxODA2BgNVBAMTL1ZlcmlTaWduIFVuaXZlcnNh | |
bCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEF | |
AAOCAQ8AMIIBCgKCAQEAx2E3XrEBNNti1xWb/1hajCMj1mCOkdeQmIN65lgZOIzF | |
9uVkhbSicfvtvbnazU0AtMgtc6XHaXGVHzk8skQHnOgO+k1KxCHfKWGPMiJhgsWH | |
H26MfF8WIFFE0XBPV+rjHOPMee5Y2A7Cs0WTwCznmhcrewA3ekEzeOEz4vMQGn+H | |
LL729fdC4uW/h2KJXwBL38Xd5HVEMkE6HnFuacsLdUYI0crSK5XQz/u5QGtkjFdN | |
/BMReYTtXlT2NJ8IAfMQJQYXStrxHXpma5hgZqTZ79IugvHw7wnqRMkVauIDbjPT | |
rJ9VAMf2CGqUuV/c4DPxhGD5WycRtPwW8rtWaoAljQIDAQABo4GyMIGvMA8GA1Ud | |
EwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMG0GCCsGAQUFBwEMBGEwX6FdoFsw | |
WTBXMFUWCWltYWdlL2dpZjAhMB8wBwYFKw4DAhoEFI/l0xqGrI2Oa8PPgGrUSBgs | |
exkuMCUWI2h0dHA6Ly9sb2dvLnZlcmlzaWduLmNvbS92c2xvZ28uZ2lmMB0GA1Ud | |
DgQWBBS2d/ppSEefUxLVwuoHMnYH0ZcHGTANBgkqhkiG9w0BAQsFAAOCAQEASvj4 | |
sAPmLGd75JR3Y8xuTPl9Dg3cyLk1uXBPY/ok+myDjEedO2Pzmvl2MpWRsXe8rJq+ | |
seQxIcaBlVZaDrHC1LGmWazxY8u4TB1ZkErvkBYoH1quEPuBUDgMbMzxPcP1Y+Oz | |
4yHJJDnp/RVmRvQbEdBNc6N9Rvk97ahfYtTxP/jgdFcrGJ2BtMQo2pSXpXDrrB2+ | |
BxHw1dvd5Yzw1TKwg+ZX4o+/vqGqvz0dtdQ46tewXDpPaj+PwGZsY6rp2aQW9IHR | |
lRQOfc2VNNnSj3BzgXucfr2YYdhFh5iQxeuGMMY1v/D/w1WIg0vvBZIGcfK4mJO3 | |
7M2CYfE45k+XmCpajQ== | |
-----END CERTIFICATE----- | |
# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G4 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2007 VeriSign, Inc. - For authorized use only | |
# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G4 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2007 VeriSign, Inc. - For authorized use only | |
# Label: "VeriSign Class 3 Public Primary Certification Authority - G4" | |
# Serial: 63143484348153506665311985501458640051 | |
# MD5 Fingerprint: 3a:52:e1:e7:fd:6f:3a:e3:6f:f3:6f:99:1b:f9:22:41 | |
# SHA1 Fingerprint: 22:d5:d8:df:8f:02:31:d1:8d:f7:9d:b7:cf:8a:2d:64:c9:3f:6c:3a | |
# SHA256 Fingerprint: 69:dd:d7:ea:90:bb:57:c9:3e:13:5d:c8:5e:a6:fc:d5:48:0b:60:32:39:bd:c4:54:fc:75:8b:2a:26:cf:7f:79 | |
-----BEGIN CERTIFICATE----- | |
MIIDhDCCAwqgAwIBAgIQL4D+I4wOIg9IZxIokYesszAKBggqhkjOPQQDAzCByjEL | |
MAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZW | |
ZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNyBWZXJpU2ln | |
biwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJp | |
U2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9y | |
aXR5IC0gRzQwHhcNMDcxMTA1MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCByjELMAkG | |
A1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZWZXJp | |
U2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNyBWZXJpU2lnbiwg | |
SW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJpU2ln | |
biBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5 | |
IC0gRzQwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAASnVnp8Utpkmw4tXNherJI9/gHm | |
GUo9FANL+mAnINmDiWn6VMaaGF5VKmTeBvaNSjutEDxlPZCIBIngMGGzrl0Bp3ve | |
fLK+ymVhAIau2o970ImtTR1ZmkGxvEeA3J5iw/mjgbIwga8wDwYDVR0TAQH/BAUw | |
AwEB/zAOBgNVHQ8BAf8EBAMCAQYwbQYIKwYBBQUHAQwEYTBfoV2gWzBZMFcwVRYJ | |
aW1hZ2UvZ2lmMCEwHzAHBgUrDgMCGgQUj+XTGoasjY5rw8+AatRIGCx7GS4wJRYj | |
aHR0cDovL2xvZ28udmVyaXNpZ24uY29tL3ZzbG9nby5naWYwHQYDVR0OBBYEFLMW | |
kf3upm7ktS5Jj4d4gYDs5bG1MAoGCCqGSM49BAMDA2gAMGUCMGYhDBgmYFo4e1ZC | |
4Kf8NoRRkSAsdk1DPcQdhCPQrNZ8NQbOzWm9kA3bbEhCHQ6qQgIxAJw9SDkjOVga | |
FRJZap7v1VmyHVIsmXHNxynfGyphe3HR3vPA5Q06Sqotp9iGKt0uEA== | |
-----END CERTIFICATE----- | |
# Issuer: CN=NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny O=NetLock Kft. OU=Tan\xfas\xedtv\xe1nykiad\xf3k (Certification Services) | |
# Subject: CN=NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny O=NetLock Kft. OU=Tan\xfas\xedtv\xe1nykiad\xf3k (Certification Services) | |
# Label: "NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny" | |
# Serial: 80544274841616 | |
# MD5 Fingerprint: c5:a1:b7:ff:73:dd:d6:d7:34:32:18:df:fc:3c:ad:88 | |
# SHA1 Fingerprint: 06:08:3f:59:3f:15:a1:04:a0:69:a4:6b:a9:03:d0:06:b7:97:09:91 | |
# SHA256 Fingerprint: 6c:61:da:c3:a2:de:f0:31:50:6b:e0:36:d2:a6:fe:40:19:94:fb:d1:3d:f9:c8:d4:66:59:92:74:c4:46:ec:98 | |
-----BEGIN CERTIFICATE----- | |
MIIEFTCCAv2gAwIBAgIGSUEs5AAQMA0GCSqGSIb3DQEBCwUAMIGnMQswCQYDVQQG | |
EwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFTATBgNVBAoMDE5ldExvY2sgS2Z0LjE3 | |
MDUGA1UECwwuVGFuw7pzw610dsOhbnlraWFkw7NrIChDZXJ0aWZpY2F0aW9uIFNl | |
cnZpY2VzKTE1MDMGA1UEAwwsTmV0TG9jayBBcmFueSAoQ2xhc3MgR29sZCkgRsWR | |
dGFuw7pzw610dsOhbnkwHhcNMDgxMjExMTUwODIxWhcNMjgxMjA2MTUwODIxWjCB | |
pzELMAkGA1UEBhMCSFUxETAPBgNVBAcMCEJ1ZGFwZXN0MRUwEwYDVQQKDAxOZXRM | |
b2NrIEtmdC4xNzA1BgNVBAsMLlRhbsO6c8OtdHbDoW55a2lhZMOzayAoQ2VydGlm | |
aWNhdGlvbiBTZXJ2aWNlcykxNTAzBgNVBAMMLE5ldExvY2sgQXJhbnkgKENsYXNz | |
IEdvbGQpIEbFkXRhbsO6c8OtdHbDoW55MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A | |
MIIBCgKCAQEAxCRec75LbRTDofTjl5Bu0jBFHjzuZ9lk4BqKf8owyoPjIMHj9DrT | |
lF8afFttvzBPhCf2nx9JvMaZCpDyD/V/Q4Q3Y1GLeqVw/HpYzY6b7cNGbIRwXdrz | |
AZAj/E4wqX7hJ2Pn7WQ8oLjJM2P+FpD/sLj916jAwJRDC7bVWaaeVtAkH3B5r9s5 | |
VA1lddkVQZQBr17s9o3x/61k/iCa11zr/qYfCGSji3ZVrR47KGAuhyXoqq8fxmRG | |
ILdwfzzeSNuWU7c5d+Qa4scWhHaXWy+7GRWF+GmF9ZmnqfI0p6m2pgP8b4Y9VHx2 | |
BJtr+UBdADTHLpl1neWIA6pN+APSQnbAGwIDAKiLo0UwQzASBgNVHRMBAf8ECDAG | |
AQH/AgEEMA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUzPpnk/C2uNClwB7zU/2M | |
U9+D15YwDQYJKoZIhvcNAQELBQADggEBAKt/7hwWqZw8UQCgwBEIBaeZ5m8BiFRh | |
bvG5GK1Krf6BQCOUL/t1fC8oS2IkgYIL9WHxHG64YTjrgfpioTtaYtOUZcTh5m2C | |
+C8lcLIhJsFyUR+MLMOEkMNaj7rP9KdlpeuY0fsFskZ1FSNqb4VjMIDw1Z4fKRzC | |
bLBQWV2QWzuoDTDPv31/zvGdg73JRm4gpvlhUbohL3u+pRVjodSVh/GeufOJ8z2F | |
uLjbvrW5KfnaNwUASZQDhETnv0Mxz3WLJdH0pmT1kvarBes96aULNmLazAZfNou2 | |
XjG4Kvte9nHfRCaexOYNkbQudZWAUWpLMKawYqGT8ZvYzsRjdT9ZR7E= | |
-----END CERTIFICATE----- | |
# Issuer: CN=Staat der Nederlanden Root CA - G2 O=Staat der Nederlanden | |
# Subject: CN=Staat der Nederlanden Root CA - G2 O=Staat der Nederlanden | |
# Label: "Staat der Nederlanden Root CA - G2" | |
# Serial: 10000012 | |
# MD5 Fingerprint: 7c:a5:0f:f8:5b:9a:7d:6d:30:ae:54:5a:e3:42:a2:8a | |
# SHA1 Fingerprint: 59:af:82:79:91:86:c7:b4:75:07:cb:cf:03:57:46:eb:04:dd:b7:16 | |
# SHA256 Fingerprint: 66:8c:83:94:7d:a6:3b:72:4b:ec:e1:74:3c:31:a0:e6:ae:d0:db:8e:c5:b3:1b:e3:77:bb:78:4f:91:b6:71:6f | |
-----BEGIN CERTIFICATE----- | |
MIIFyjCCA7KgAwIBAgIEAJiWjDANBgkqhkiG9w0BAQsFADBaMQswCQYDVQQGEwJO | |
TDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSswKQYDVQQDDCJTdGFh | |
dCBkZXIgTmVkZXJsYW5kZW4gUm9vdCBDQSAtIEcyMB4XDTA4MDMyNjExMTgxN1oX | |
DTIwMDMyNTExMDMxMFowWjELMAkGA1UEBhMCTkwxHjAcBgNVBAoMFVN0YWF0IGRl | |
ciBOZWRlcmxhbmRlbjErMCkGA1UEAwwiU3RhYXQgZGVyIE5lZGVybGFuZGVuIFJv | |
b3QgQ0EgLSBHMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMVZ5291 | |
qj5LnLW4rJ4L5PnZyqtdj7U5EILXr1HgO+EASGrP2uEGQxGZqhQlEq0i6ABtQ8Sp | |
uOUfiUtnvWFI7/3S4GCI5bkYYCjDdyutsDeqN95kWSpGV+RLufg3fNU254DBtvPU | |
Z5uW6M7XxgpT0GtJlvOjCwV3SPcl5XCsMBQgJeN/dVrlSPhOewMHBPqCYYdu8DvE | |
pMfQ9XQ+pV0aCPKbJdL2rAQmPlU6Yiile7Iwr/g3wtG61jj99O9JMDeZJiFIhQGp | |
5Rbn3JBV3w/oOM2ZNyFPXfUib2rFEhZgF1XyZWampzCROME4HYYEhLoaJXhena/M | |
UGDWE4dS7WMfbWV9whUYdMrhfmQpjHLYFhN9C0lK8SgbIHRrxT3dsKpICT0ugpTN | |
GmXZK4iambwYfp/ufWZ8Pr2UuIHOzZgweMFvZ9C+X+Bo7d7iscksWXiSqt8rYGPy | |
5V6548r6f1CGPqI0GAwJaCgRHOThuVw+R7oyPxjMW4T182t0xHJ04eOLoEq9jWYv | |
6q012iDTiIJh8BIitrzQ1aTsr1SIJSQ8p22xcik/Plemf1WvbibG/ufMQFxRRIEK | |
eN5KzlW/HdXZt1bv8Hb/C3m1r737qWmRRpdogBQ2HbN/uymYNqUg+oJgYjOk7Na6 | |
B6duxc8UpufWkjTYgfX8HV2qXB72o007uPc5AgMBAAGjgZcwgZQwDwYDVR0TAQH/ | |
BAUwAwEB/zBSBgNVHSAESzBJMEcGBFUdIAAwPzA9BggrBgEFBQcCARYxaHR0cDov | |
L3d3dy5wa2lvdmVyaGVpZC5ubC9wb2xpY2llcy9yb290LXBvbGljeS1HMjAOBgNV | |
HQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJFoMocVHYnitfGsNig0jQt8YojrMA0GCSqG | |
SIb3DQEBCwUAA4ICAQCoQUpnKpKBglBu4dfYszk78wIVCVBR7y29JHuIhjv5tLyS | |
CZa59sCrI2AGeYwRTlHSeYAz+51IvuxBQ4EffkdAHOV6CMqqi3WtFMTC6GY8ggen | |
5ieCWxjmD27ZUD6KQhgpxrRW/FYQoAUXvQwjf/ST7ZwaUb7dRUG/kSS0H4zpX897 | |
IZmflZ85OkYcbPnNe5yQzSipx6lVu6xiNGI1E0sUOlWDuYaNkqbG9AclVMwWVxJK | |
gnjIFNkXgiYtXSAfea7+1HAWFpWD2DU5/1JddRwWxRNVz0fMdWVSSt7wsKfkCpYL | |
+63C4iWEst3kvX5ZbJvw8NjnyvLplzh+ib7M+zkXYT9y2zqR2GUBGR2tUKRXCnxL | |
vJxxcypFURmFzI79R6d0lR2o0a9OF7FpJsKqeFdbxU2n5Z4FF5TKsl+gSRiNNOkm | |
bEgeqmiSBeGCc1qb3AdbCG19ndeNIdn8FCCqwkXfP+cAslHkwvgFuXkajDTznlvk | |
N1trSt8sV4pAWja63XVECDdCcAz+3F4hoKOKwJCcaNpQ5kUQR3i2TtJlycM33+FC | |
Y7BXN0Ute4qcvwXqZVUz9zkQxSgqIXobisQk+T8VyJoVIPVVYpbtbZNQvOSqeK3Z | |
ywplh6ZmwcSBo3c6WB4L7oOLnR7SUqTMHW+wmG2UMbX4cQrcufx9MmDm66+KAQ== | |
-----END CERTIFICATE----- | |
# Issuer: CN=Hongkong Post Root CA 1 O=Hongkong Post | |
# Subject: CN=Hongkong Post Root CA 1 O=Hongkong Post | |
# Label: "Hongkong Post Root CA 1" | |
# Serial: 1000 | |
# MD5 Fingerprint: a8:0d:6f:39:78:b9:43:6d:77:42:6d:98:5a:cc:23:ca | |
# SHA1 Fingerprint: d6:da:a8:20:8d:09:d2:15:4d:24:b5:2f:cb:34:6e:b2:58:b2:8a:58 | |
# SHA256 Fingerprint: f9:e6:7d:33:6c:51:00:2a:c0:54:c6:32:02:2d:66:dd:a2:e7:e3:ff:f1:0a:d0:61:ed:31:d8:bb:b4:10:cf:b2 | |
-----BEGIN CERTIFICATE----- | |
MIIDMDCCAhigAwIBAgICA+gwDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UEBhMCSEsx | |
FjAUBgNVBAoTDUhvbmdrb25nIFBvc3QxIDAeBgNVBAMTF0hvbmdrb25nIFBvc3Qg | |
Um9vdCBDQSAxMB4XDTAzMDUxNTA1MTMxNFoXDTIzMDUxNTA0NTIyOVowRzELMAkG | |
A1UEBhMCSEsxFjAUBgNVBAoTDUhvbmdrb25nIFBvc3QxIDAeBgNVBAMTF0hvbmdr | |
b25nIFBvc3QgUm9vdCBDQSAxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC | |
AQEArP84tulmAknjorThkPlAj3n54r15/gK97iSSHSL22oVyaf7XPwnU3ZG1ApzQ | |
jVrhVcNQhrkpJsLj2aDxaQMoIIBFIi1WpztUlVYiWR8o3x8gPW2iNr4joLFutbEn | |
PzlTCeqrauh0ssJlXI6/fMN4hM2eFvz1Lk8gKgifd/PFHsSaUmYeSF7jEAaPIpjh | |
ZY4bXSNmO7ilMlHIhqqhqZ5/dpTCpmy3QfDVyAY45tQM4vM7TG1QjMSDJ8EThFk9 | |
nnV0ttgCXjqQesBCNnLsak3c78QA3xMYV18meMjWCnl3v/evt3a5pQuEF10Q6m/h | |
q5URX208o1xNg1vysxmKgIsLhwIDAQABoyYwJDASBgNVHRMBAf8ECDAGAQH/AgED | |
MA4GA1UdDwEB/wQEAwIBxjANBgkqhkiG9w0BAQUFAAOCAQEADkbVPK7ih9legYsC | |
mEEIjEy82tvuJxuC52pF7BaLT4Wg87JwvVqWuspube5Gi27nKi6Wsxkz67SfqLI3 | |
7piol7Yutmcn1KZJ/RyTZXaeQi/cImyaT/JaFTmxcdcrUehtHJjA2Sr0oYJ71clB | |
oiMBdDhViw+5LmeiIAQ32pwL0xch4I+XeTRvhEgCIDMb5jREn5Fw9IBehEPCKdJs | |
EhTkYY2sEJCehFC78JZvRZ+K88psT/oROhUVRsPNH4NbLUES7VBnQRM9IauUiqpO | |
fMGx+6fWtScvl6tu4B3i0RwsH0Ti/L6RoZz71ilTc4afU9hDDl3WY4JxHYB0yvbi | |
AmvZWg== | |
-----END CERTIFICATE----- | |
# Issuer: CN=SecureSign RootCA11 O=Japan Certification Services, Inc. | |
# Subject: CN=SecureSign RootCA11 O=Japan Certification Services, Inc. | |
# Label: "SecureSign RootCA11" | |
# Serial: 1 | |
# MD5 Fingerprint: b7:52:74:e2:92:b4:80:93:f2:75:e4:cc:d7:f2:ea:26 | |
# SHA1 Fingerprint: 3b:c4:9f:48:f8:f3:73:a0:9c:1e:bd:f8:5b:b1:c3:65:c7:d8:11:b3 | |
# SHA256 Fingerprint: bf:0f:ee:fb:9e:3a:58:1a:d5:f9:e9:db:75:89:98:57:43:d2:61:08:5c:4d:31:4f:6f:5d:72:59:aa:42:16:12 | |
-----BEGIN CERTIFICATE----- | |
MIIDbTCCAlWgAwIBAgIBATANBgkqhkiG9w0BAQUFADBYMQswCQYDVQQGEwJKUDEr | |
MCkGA1UEChMiSmFwYW4gQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcywgSW5jLjEcMBoG | |
A1UEAxMTU2VjdXJlU2lnbiBSb290Q0ExMTAeFw0wOTA0MDgwNDU2NDdaFw0yOTA0 | |
MDgwNDU2NDdaMFgxCzAJBgNVBAYTAkpQMSswKQYDVQQKEyJKYXBhbiBDZXJ0aWZp | |
Y2F0aW9uIFNlcnZpY2VzLCBJbmMuMRwwGgYDVQQDExNTZWN1cmVTaWduIFJvb3RD | |
QTExMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA/XeqpRyQBTvLTJsz | |
i1oURaTnkBbR31fSIRCkF/3frNYfp+TbfPfs37gD2pRY/V1yfIw/XwFndBWW4wI8 | |
h9uuywGOwvNmxoVF9ALGOrVisq/6nL+k5tSAMJjzDbaTj6nU2DbysPyKyiyhFTOV | |
MdrAG/LuYpmGYz+/3ZMqg6h2uRMft85OQoWPIucuGvKVCbIFtUROd6EgvanyTgp9 | |
UK31BQ1FT0Zx/Sg+U/sE2C3XZR1KG/rPO7AxmjVuyIsG0wCR8pQIZUyxNAYAeoni | |
8McDWc/V1uinMrPmmECGxc0nEovMe863ETxiYAcjPitAbpSACW22s293bzUIUPsC | |
h8U+iQIDAQABo0IwQDAdBgNVHQ4EFgQUW/hNT7KlhtQ60vFjmqC+CfZXt94wDgYD | |
VR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEB | |
AKChOBZmLqdWHyGcBvod7bkixTgm2E5P7KN/ed5GIaGHd48HCJqypMWvDzKYC3xm | |
KbabfSVSSUOrTC4rbnpwrxYO4wJs+0LmGJ1F2FXI6Dvd5+H0LgscNFxsWEr7jIhQ | |
X5Ucv+2rIrVls4W6ng+4reV6G4pQOh29Dbx7VFALuUKvVaAYga1lme++5Jy/xIWr | |
QbJUb9wlze144o4MjQlJ3WN7WmmWAiGovVJZ6X01y8hSyn+B/tlr0/cR7SXf+Of5 | |
pPpyl4RTDaXQMhhRdlkUbA/r7F+AjHVDg8OFmP9Mni0N5HeDk061lgeLKBObjBmN | |
QSdJQO7e5iNEOdyhIta6A/I= | |
-----END CERTIFICATE----- | |
# Issuer: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd. | |
# Subject: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd. | |
# Label: "Microsec e-Szigno Root CA 2009" | |
# Serial: 14014712776195784473 | |
# MD5 Fingerprint: f8:49:f4:03:bc:44:2d:83:be:48:69:7d:29:64:fc:b1 | |
# SHA1 Fingerprint: 89:df:74:fe:5c:f4:0f:4a:80:f9:e3:37:7d:54:da:91:e1:01:31:8e | |
# SHA256 Fingerprint: 3c:5f:81:fe:a5:fa:b8:2c:64:bf:a2:ea:ec:af:cd:e8:e0:77:fc:86:20:a7:ca:e5:37:16:3d:f3:6e:db:f3:78 | |
-----BEGIN CERTIFICATE----- | |
MIIECjCCAvKgAwIBAgIJAMJ+QwRORz8ZMA0GCSqGSIb3DQEBCwUAMIGCMQswCQYD | |
VQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFjAUBgNVBAoMDU1pY3Jvc2VjIEx0 | |
ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3ppZ25vIFJvb3QgQ0EgMjAwOTEfMB0G | |
CSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5odTAeFw0wOTA2MTYxMTMwMThaFw0y | |
OTEyMzAxMTMwMThaMIGCMQswCQYDVQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3Qx | |
FjAUBgNVBAoMDU1pY3Jvc2VjIEx0ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3pp | |
Z25vIFJvb3QgQ0EgMjAwOTEfMB0GCSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5o | |
dTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAOn4j/NjrdqG2KfgQvvP | |
kd6mJviZpWNwrZuuyjNAfW2WbqEORO7hE52UQlKavXWFdCyoDh2Tthi3jCyoz/tc | |
cbna7P7ofo/kLx2yqHWH2Leh5TvPmUpG0IMZfcChEhyVbUr02MelTTMuhTlAdX4U | |
fIASmFDHQWe4oIBhVKZsTh/gnQ4H6cm6M+f+wFUoLAKApxn1ntxVUwOXewdI/5n7 | |
N4okxFnMUBBjjqqpGrCEGob5X7uxUG6k0QrM1XF+H6cbfPVTbiJfyyvm1HxdrtbC | |
xkzlBQHZ7Vf8wSN5/PrIJIOV87VqUQHQd9bpEqH5GoP7ghu5sJf0dgYzQ0mg/wu1 | |
+rUCAwEAAaOBgDB+MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0G | |
A1UdDgQWBBTLD8bfQkPMPcu1SCOhGnqmKrs0aDAfBgNVHSMEGDAWgBTLD8bfQkPM | |
Pcu1SCOhGnqmKrs0aDAbBgNVHREEFDASgRBpbmZvQGUtc3ppZ25vLmh1MA0GCSqG | |
SIb3DQEBCwUAA4IBAQDJ0Q5eLtXMs3w+y/w9/w0olZMEyL/azXm4Q5DwpL7v8u8h | |
mLzU1F0G9u5C7DBsoKqpyvGvivo/C3NqPuouQH4frlRheesuCDfXI/OMn74dseGk | |
ddug4lQUsbocKaQY9hK6ohQU4zE1yED/t+AFdlfBHFny+L/k7SViXITwfn4fs775 | |
tyERzAMBVnCnEJIeGzSBHq2cGsMEPO0CYdYeBvNfOofyK/FFh+U9rNHHV4S9a67c | |
2Pm2G2JwCz02yULyMtd6YebS2z3PyKnJm9zbWETXbzivf3jTo60adbocwTZ8jx5t | |
HMN1Rq41Bab2XD0h7lbwyYIiLXpUq3DDfSJlgnCW | |
-----END CERTIFICATE----- | |
# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3 | |
# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3 | |
# Label: "GlobalSign Root CA - R3" | |
# Serial: 4835703278459759426209954 | |
# MD5 Fingerprint: c5:df:b8:49:ca:05:13:55:ee:2d:ba:1a:c3:3e:b0:28 | |
# SHA1 Fingerprint: d6:9b:56:11:48:f0:1c:77:c5:45:78:c1:09:26:df:5b:85:69:76:ad | |
# SHA256 Fingerprint: cb:b5:22:d7:b7:f1:27:ad:6a:01:13:86:5b:df:1c:d4:10:2e:7d:07:59:af:63:5a:7c:f4:72:0d:c9:63:c5:3b | |
-----BEGIN CERTIFICATE----- | |
MIIDXzCCAkegAwIBAgILBAAAAAABIVhTCKIwDQYJKoZIhvcNAQELBQAwTDEgMB4G | |
A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjMxEzARBgNVBAoTCkdsb2JhbFNp | |
Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDkwMzE4MTAwMDAwWhcNMjkwMzE4 | |
MTAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMzETMBEG | |
A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI | |
hvcNAQEBBQADggEPADCCAQoCggEBAMwldpB5BngiFvXAg7aEyiie/QV2EcWtiHL8 | |
RgJDx7KKnQRfJMsuS+FggkbhUqsMgUdwbN1k0ev1LKMPgj0MK66X17YUhhB5uzsT | |
gHeMCOFJ0mpiLx9e+pZo34knlTifBtc+ycsmWQ1z3rDI6SYOgxXG71uL0gRgykmm | |
KPZpO/bLyCiR5Z2KYVc3rHQU3HTgOu5yLy6c+9C7v/U9AOEGM+iCK65TpjoWc4zd | |
QQ4gOsC0p6Hpsk+QLjJg6VfLuQSSaGjlOCZgdbKfd/+RFO+uIEn8rUAVSNECMWEZ | |
XriX7613t2Saer9fwRPvm2L7DWzgVGkWqQPabumDk3F2xmmFghcCAwEAAaNCMEAw | |
DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI/wS3+o | |
LkUkrk1Q+mOai97i3Ru8MA0GCSqGSIb3DQEBCwUAA4IBAQBLQNvAUKr+yAzv95ZU | |
RUm7lgAJQayzE4aGKAczymvmdLm6AC2upArT9fHxD4q/c2dKg8dEe3jgr25sbwMp | |
jjM5RcOO5LlXbKr8EpbsU8Yt5CRsuZRj+9xTaGdWPoO4zzUhw8lo/s7awlOqzJCK | |
6fBdRoyV3XpYKBovHd7NADdBj+1EbddTKJd+82cEHhXXipa0095MJ6RMG3NzdvQX | |
mcIfeg7jLQitChws/zyrVQ4PkX4268NXSb7hLi18YIvDQVETI53O9zJrlAGomecs | |
Mx86OyXShkDOOyyGeMlhLxS67ttVb9+E7gUJTb0o2HLO02JQZR7rkpeDMdmztcpH | |
WD9f | |
-----END CERTIFICATE----- | |
# Issuer: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068 | |
# Subject: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068 | |
# Label: "Autoridad de Certificacion Firmaprofesional CIF A62634068" | |
# Serial: 6047274297262753887 | |
# MD5 Fingerprint: 73:3a:74:7a:ec:bb:a3:96:a6:c2:e4:e2:c8:9b:c0:c3 | |
# SHA1 Fingerprint: ae:c5:fb:3f:c8:e1:bf:c4:e5:4f:03:07:5a:9a:e8:00:b7:f7:b6:fa | |
# SHA256 Fingerprint: 04:04:80:28:bf:1f:28:64:d4:8f:9a:d4:d8:32:94:36:6a:82:88:56:55:3f:3b:14:30:3f:90:14:7f:5d:40:ef | |
-----BEGIN CERTIFICATE----- | |
MIIGFDCCA/ygAwIBAgIIU+w77vuySF8wDQYJKoZIhvcNAQEFBQAwUTELMAkGA1UE | |
BhMCRVMxQjBABgNVBAMMOUF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uIEZpcm1h | |
cHJvZmVzaW9uYWwgQ0lGIEE2MjYzNDA2ODAeFw0wOTA1MjAwODM4MTVaFw0zMDEy | |
MzEwODM4MTVaMFExCzAJBgNVBAYTAkVTMUIwQAYDVQQDDDlBdXRvcmlkYWQgZGUg | |
Q2VydGlmaWNhY2lvbiBGaXJtYXByb2Zlc2lvbmFsIENJRiBBNjI2MzQwNjgwggIi | |
MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKlmuO6vj78aI14H9M2uDDUtd9 | |
thDIAl6zQyrET2qyyhxdKJp4ERppWVevtSBC5IsP5t9bpgOSL/UR5GLXMnE42QQM | |
cas9UX4PB99jBVzpv5RvwSmCwLTaUbDBPLutN0pcyvFLNg4kq7/DhHf9qFD0sefG | |
L9ItWY16Ck6WaVICqjaY7Pz6FIMMNx/Jkjd/14Et5cS54D40/mf0PmbR0/RAz15i | |
NA9wBj4gGFrO93IbJWyTdBSTo3OxDqqHECNZXyAFGUftaI6SEspd/NYrspI8IM/h | |
X68gvqB2f3bl7BqGYTM+53u0P6APjqK5am+5hyZvQWyIplD9amML9ZMWGxmPsu2b | |
m8mQ9QEM3xk9Dz44I8kvjwzRAv4bVdZO0I08r0+k8/6vKtMFnXkIoctXMbScyJCy | |
Z/QYFpM6/EfY0XiWMR+6KwxfXZmtY4laJCB22N/9q06mIqqdXuYnin1oKaPnirja | |
EbsXLZmdEyRG98Xi2J+Of8ePdG1asuhy9azuJBCtLxTa/y2aRnFHvkLfuwHb9H/T | |
KI8xWVvTyQKmtFLKbpf7Q8UIJm+K9Lv9nyiqDdVF8xM6HdjAeI9BZzwelGSuewvF | |
6NkBiDkal4ZkQdU7hwxu+g/GvUgUvzlN1J5Bto+WHWOWk9mVBngxaJ43BjuAiUVh | |
OSPHG0SjFeUc+JIwuwIDAQABo4HvMIHsMBIGA1UdEwEB/wQIMAYBAf8CAQEwDgYD | |
VR0PAQH/BAQDAgEGMB0GA1UdDgQWBBRlzeurNR4APn7VdMActHNHDhpkLzCBpgYD | |
VR0gBIGeMIGbMIGYBgRVHSAAMIGPMC8GCCsGAQUFBwIBFiNodHRwOi8vd3d3LmZp | |
cm1hcHJvZmVzaW9uYWwuY29tL2NwczBcBggrBgEFBQcCAjBQHk4AUABhAHMAZQBv | |
ACAAZABlACAAbABhACAAQgBvAG4AYQBuAG8AdgBhACAANAA3ACAAQgBhAHIAYwBl | |
AGwAbwBuAGEAIAAwADgAMAAxADcwDQYJKoZIhvcNAQEFBQADggIBABd9oPm03cXF | |
661LJLWhAqvdpYhKsg9VSytXjDvlMd3+xDLx51tkljYyGOylMnfX40S2wBEqgLk9 | |
am58m9Ot/MPWo+ZkKXzR4Tgegiv/J2Wv+xYVxC5xhOW1//qkR71kMrv2JYSiJ0L1 | |
ILDCExARzRAVukKQKtJE4ZYm6zFIEv0q2skGz3QeqUvVhyj5eTSSPi5E6PaPT481 | |
PyWzOdxjKpBrIF/EUhJOlywqrJ2X3kjyo2bbwtKDlaZmp54lD+kLM5FlClrD2VQS | |
3a/DTg4fJl4N3LON7NWBcN7STyQF82xO9UxJZo3R/9ILJUFI/lGExkKvgATP0H5k | |
SeTy36LssUzAKh3ntLFlosS88Zj0qnAHY7S42jtM+kAiMFsRpvAFDsYCA0irhpuF | |
3dvd6qJ2gHN99ZwExEWN57kci57q13XRcrHedUTnQn3iV2t93Jm8PYMo6oCTjcVM | |
ZcFwgbg4/EMxsvYDNEeyrPsiBsse3RdHHF9mudMaotoRsaS8I8nkvof/uZS2+F0g | |
StRf571oe2XyFR7SOqkt6dhrJKyXWERHrVkY8SFlcN7ONGCoQPHzPKTDKCOM/icz | |
Q0CgFzzr6juwcqajuUpLXhZI9LK8yIySxZ2frHI2vDSANGupi5LAuBft7HZT9SQB | |
jLMi6Et8Vcad+qMUu2WFbm5PEn4KPJ2V | |
-----END CERTIFICATE----- | |
# Issuer: CN=Izenpe.com O=IZENPE S.A. | |
# Subject: CN=Izenpe.com O=IZENPE S.A. | |
# Label: "Izenpe.com" | |
# Serial: 917563065490389241595536686991402621 | |
# MD5 Fingerprint: a6:b0:cd:85:80:da:5c:50:34:a3:39:90:2f:55:67:73 | |
# SHA1 Fingerprint: 2f:78:3d:25:52:18:a7:4a:65:39:71:b5:2c:a2:9c:45:15:6f:e9:19 | |
# SHA256 Fingerprint: 25:30:cc:8e:98:32:15:02:ba:d9:6f:9b:1f:ba:1b:09:9e:2d:29:9e:0f:45:48:bb:91:4f:36:3b:c0:d4:53:1f | |
-----BEGIN CERTIFICATE----- | |
MIIF8TCCA9mgAwIBAgIQALC3WhZIX7/hy/WL1xnmfTANBgkqhkiG9w0BAQsFADA4 | |
MQswCQYDVQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6 | |
ZW5wZS5jb20wHhcNMDcxMjEzMTMwODI4WhcNMzcxMjEzMDgyNzI1WjA4MQswCQYD | |
VQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6ZW5wZS5j | |
b20wggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDJ03rKDx6sp4boFmVq | |
scIbRTJxldn+EFvMr+eleQGPicPK8lVx93e+d5TzcqQsRNiekpsUOqHnJJAKClaO | |
xdgmlOHZSOEtPtoKct2jmRXagaKH9HtuJneJWK3W6wyyQXpzbm3benhB6QiIEn6H | |
LmYRY2xU+zydcsC8Lv/Ct90NduM61/e0aL6i9eOBbsFGb12N4E3GVFWJGjMxCrFX | |
uaOKmMPsOzTFlUFpfnXCPCDFYbpRR6AgkJOhkEvzTnyFRVSa0QUmQbC1TR0zvsQD | |
yCV8wXDbO/QJLVQnSKwv4cSsPsjLkkxTOTcj7NMB+eAJRE1NZMDhDVqHIrytG6P+ | |
JrUV86f8hBnp7KGItERphIPzidF0BqnMC9bC3ieFUCbKF7jJeodWLBoBHmy+E60Q | |
rLUk9TiRodZL2vG70t5HtfG8gfZZa88ZU+mNFctKy6lvROUbQc/hhqfK0GqfvEyN | |
BjNaooXlkDWgYlwWTvDjovoDGrQscbNYLN57C9saD+veIR8GdwYDsMnvmfzAuU8L | |
hij+0rnq49qlw0dpEuDb8PYZi+17cNcC1u2HGCgsBCRMd+RIihrGO5rUD8r6ddIB | |
QFqNeb+Lz0vPqhbBleStTIo+F5HUsWLlguWABKQDfo2/2n+iD5dPDNMN+9fR5XJ+ | |
HMh3/1uaD7euBUbl8agW7EekFwIDAQABo4H2MIHzMIGwBgNVHREEgagwgaWBD2lu | |
Zm9AaXplbnBlLmNvbaSBkTCBjjFHMEUGA1UECgw+SVpFTlBFIFMuQS4gLSBDSUYg | |
QTAxMzM3MjYwLVJNZXJjLlZpdG9yaWEtR2FzdGVpeiBUMTA1NSBGNjIgUzgxQzBB | |
BgNVBAkMOkF2ZGEgZGVsIE1lZGl0ZXJyYW5lbyBFdG9yYmlkZWEgMTQgLSAwMTAx | |
MCBWaXRvcmlhLUdhc3RlaXowDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC | |
AQYwHQYDVR0OBBYEFB0cZQ6o8iV7tJHP5LGx5r1VdGwFMA0GCSqGSIb3DQEBCwUA | |
A4ICAQB4pgwWSp9MiDrAyw6lFn2fuUhfGI8NYjb2zRlrrKvV9pF9rnHzP7MOeIWb | |
laQnIUdCSnxIOvVFfLMMjlF4rJUT3sb9fbgakEyrkgPH7UIBzg/YsfqikuFgba56 | |
awmqxinuaElnMIAkejEWOVt+8Rwu3WwJrfIxwYJOubv5vr8qhT/AQKM6WfxZSzwo | |
JNu0FXWuDYi6LnPAvViH5ULy617uHjAimcs30cQhbIHsvm0m5hzkQiCeR7Csg1lw | |
LDXWrzY0tM07+DKo7+N4ifuNRSzanLh+QBxh5z6ikixL8s36mLYp//Pye6kfLqCT | |
VyvehQP5aTfLnnhqBbTFMXiJ7HqnheG5ezzevh55hM6fcA5ZwjUukCox2eRFekGk | |
LhObNA5me0mrZJfQRsN5nXJQY6aYWwa9SG3YOYNw6DXwBdGqvOPbyALqfP2C2sJb | |
UjWumDqtujWTI6cfSN01RpiyEGjkpTHCClguGYEQyVB1/OpaFs4R1+7vUIgtYf8/ | |
QnMFlEPVjjxOAToZpR9GTnfQXeWBIiGH/pR9hNiTrdZoQ0iy2+tzJOeRf1SktoA+ | |
naM8THLCV8Sg1Mw4J87VBp6iSNnpn86CcDaTmjvfliHjWbcM2pE38P1ZWrOZyGls | |
QyYBNWNgVYkDOnXYukrZVP/u3oDYLdE41V4tC5h9Pmzb/CaIxw== | |
-----END CERTIFICATE----- | |
# Issuer: CN=Chambers of Commerce Root - 2008 O=AC Camerfirma S.A. | |
# Subject: CN=Chambers of Commerce Root - 2008 O=AC Camerfirma S.A. | |
# Label: "Chambers of Commerce Root - 2008" | |
# Serial: 11806822484801597146 | |
# MD5 Fingerprint: 5e:80:9e:84:5a:0e:65:0b:17:02:f3:55:18:2a:3e:d7 | |
# SHA1 Fingerprint: 78:6a:74:ac:76:ab:14:7f:9c:6a:30:50:ba:9e:a8:7e:fe:9a:ce:3c | |
# SHA256 Fingerprint: 06:3e:4a:fa:c4:91:df:d3:32:f3:08:9b:85:42:e9:46:17:d8:93:d7:fe:94:4e:10:a7:93:7e:e2:9d:96:93:c0 | |
-----BEGIN CERTIFICATE----- | |
MIIHTzCCBTegAwIBAgIJAKPaQn6ksa7aMA0GCSqGSIb3DQEBBQUAMIGuMQswCQYD | |
VQQGEwJFVTFDMEEGA1UEBxM6TWFkcmlkIChzZWUgY3VycmVudCBhZGRyZXNzIGF0 | |
IHd3dy5jYW1lcmZpcm1hLmNvbS9hZGRyZXNzKTESMBAGA1UEBRMJQTgyNzQzMjg3 | |
MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMuQS4xKTAnBgNVBAMTIENoYW1iZXJz | |
IG9mIENvbW1lcmNlIFJvb3QgLSAyMDA4MB4XDTA4MDgwMTEyMjk1MFoXDTM4MDcz | |
MTEyMjk1MFowga4xCzAJBgNVBAYTAkVVMUMwQQYDVQQHEzpNYWRyaWQgKHNlZSBj | |
dXJyZW50IGFkZHJlc3MgYXQgd3d3LmNhbWVyZmlybWEuY29tL2FkZHJlc3MpMRIw | |
EAYDVQQFEwlBODI3NDMyODcxGzAZBgNVBAoTEkFDIENhbWVyZmlybWEgUy5BLjEp | |
MCcGA1UEAxMgQ2hhbWJlcnMgb2YgQ29tbWVyY2UgUm9vdCAtIDIwMDgwggIiMA0G | |
CSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCvAMtwNyuAWko6bHiUfaN/Gh/2NdW9 | |
28sNRHI+JrKQUrpjOyhYb6WzbZSm891kDFX29ufyIiKAXuFixrYp4YFs8r/lfTJq | |
VKAyGVn+H4vXPWCGhSRv4xGzdz4gljUha7MI2XAuZPeEklPWDrCQiorjh40G072Q | |
DuKZoRuGDtqaCrsLYVAGUvGef3bsyw/QHg3PmTA9HMRFEFis1tPo1+XqxQEHd9ZR | |
5gN/ikilTWh1uem8nk4ZcfUyS5xtYBkL+8ydddy/Js2Pk3g5eXNeJQ7KXOt3EgfL | |
ZEFHcpOrUMPrCXZkNNI5t3YRCQ12RcSprj1qr7V9ZS+UWBDsXHyvfuK2GNnQm05a | |
Sd+pZgvMPMZ4fKecHePOjlO+Bd5gD2vlGts/4+EhySnB8esHnFIbAURRPHsl18Tl | |
UlRdJQfKFiC4reRB7noI/plvg6aRArBsNlVq5331lubKgdaX8ZSD6e2wsWsSaR6s | |
+12pxZjptFtYer49okQ6Y1nUCyXeG0+95QGezdIp1Z8XGQpvvwyQ0wlf2eOKNcx5 | |
Wk0ZN5K3xMGtr/R5JJqyAQuxr1yW84Ay+1w9mPGgP0revq+ULtlVmhduYJ1jbLhj | |
ya6BXBg14JC7vjxPNyK5fuvPnnchpj04gftI2jE9K+OJ9dC1vX7gUMQSibMjmhAx | |
hduub+84Mxh2EQIDAQABo4IBbDCCAWgwEgYDVR0TAQH/BAgwBgEB/wIBDDAdBgNV | |
HQ4EFgQU+SSsD7K1+HnA+mCIG8TZTQKeFxkwgeMGA1UdIwSB2zCB2IAU+SSsD7K1 | |
+HnA+mCIG8TZTQKeFxmhgbSkgbEwga4xCzAJBgNVBAYTAkVVMUMwQQYDVQQHEzpN | |
YWRyaWQgKHNlZSBjdXJyZW50IGFkZHJlc3MgYXQgd3d3LmNhbWVyZmlybWEuY29t | |
L2FkZHJlc3MpMRIwEAYDVQQFEwlBODI3NDMyODcxGzAZBgNVBAoTEkFDIENhbWVy | |
ZmlybWEgUy5BLjEpMCcGA1UEAxMgQ2hhbWJlcnMgb2YgQ29tbWVyY2UgUm9vdCAt | |
IDIwMDiCCQCj2kJ+pLGu2jAOBgNVHQ8BAf8EBAMCAQYwPQYDVR0gBDYwNDAyBgRV | |
HSAAMCowKAYIKwYBBQUHAgEWHGh0dHA6Ly9wb2xpY3kuY2FtZXJmaXJtYS5jb20w | |
DQYJKoZIhvcNAQEFBQADggIBAJASryI1wqM58C7e6bXpeHxIvj99RZJe6dqxGfwW | |
PJ+0W2aeaufDuV2I6A+tzyMP3iU6XsxPpcG1Lawk0lgH3qLPaYRgM+gQDROpI9CF | |
5Y57pp49chNyM/WqfcZjHwj0/gF/JM8rLFQJ3uIrbZLGOU8W6jx+ekbURWpGqOt1 | |
glanq6B8aBMz9p0w8G8nOSQjKpD9kCk18pPfNKXG9/jvjA9iSnyu0/VU+I22mlaH | |
FoI6M6taIgj3grrqLuBHmrS1RaMFO9ncLkVAO+rcf+g769HsJtg1pDDFOqxXnrN2 | |
pSB7+R5KBWIBpih1YJeSDW4+TTdDDZIVnBgizVGZoCkaPF+KMjNbMMeJL0eYD6MD | |
xvbxrN8y8NmBGuScvfaAFPDRLLmF9dijscilIeUcE5fuDr3fKanvNFNb0+RqE4QG | |
tjICxFKuItLcsiFCGtpA8CnJ7AoMXOLQusxI0zcKzBIKinmwPQN/aUv0NCB9szTq | |
jktk9T79syNnFQ0EuPAtwQlRPLJsFfClI9eDdOTlLsn+mCdCxqvGnrDQWzilm1De | |
fhiYtUU79nm06PcaewaD+9CL2rvHvRirCG88gGtAPxkZumWK5r7VXNM21+9AUiRg | |
OGcEMeyP84LG3rlV8zsxkVrctQgVrXYlCg17LofiDKYGvCYQbTed7N14jHyAxfDZ | |
d0jQ | |
-----END CERTIFICATE----- | |
# Issuer: CN=Global Chambersign Root - 2008 O=AC Camerfirma S.A. | |
# Subject: CN=Global Chambersign Root - 2008 O=AC Camerfirma S.A. | |
# Label: "Global Chambersign Root - 2008" | |
# Serial: 14541511773111788494 | |
# MD5 Fingerprint: 9e:80:ff:78:01:0c:2e:c1:36:bd:fe:96:90:6e:08:f3 | |
# SHA1 Fingerprint: 4a:bd:ee:ec:95:0d:35:9c:89:ae:c7:52:a1:2c:5b:29:f6:d6:aa:0c | |
# SHA256 Fingerprint: 13:63:35:43:93:34:a7:69:80:16:a0:d3:24:de:72:28:4e:07:9d:7b:52:20:bb:8f:bd:74:78:16:ee:be:ba:ca | |
-----BEGIN CERTIFICATE----- | |
MIIHSTCCBTGgAwIBAgIJAMnN0+nVfSPOMA0GCSqGSIb3DQEBBQUAMIGsMQswCQYD | |
VQQGEwJFVTFDMEEGA1UEBxM6TWFkcmlkIChzZWUgY3VycmVudCBhZGRyZXNzIGF0 | |
IHd3dy5jYW1lcmZpcm1hLmNvbS9hZGRyZXNzKTESMBAGA1UEBRMJQTgyNzQzMjg3 | |
MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMuQS4xJzAlBgNVBAMTHkdsb2JhbCBD | |
aGFtYmVyc2lnbiBSb290IC0gMjAwODAeFw0wODA4MDExMjMxNDBaFw0zODA3MzEx | |
MjMxNDBaMIGsMQswCQYDVQQGEwJFVTFDMEEGA1UEBxM6TWFkcmlkIChzZWUgY3Vy | |
cmVudCBhZGRyZXNzIGF0IHd3dy5jYW1lcmZpcm1hLmNvbS9hZGRyZXNzKTESMBAG | |
A1UEBRMJQTgyNzQzMjg3MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMuQS4xJzAl | |
BgNVBAMTHkdsb2JhbCBDaGFtYmVyc2lnbiBSb290IC0gMjAwODCCAiIwDQYJKoZI | |
hvcNAQEBBQADggIPADCCAgoCggIBAMDfVtPkOpt2RbQT2//BthmLN0EYlVJH6xed | |
KYiONWwGMi5HYvNJBL99RDaxccy9Wglz1dmFRP+RVyXfXjaOcNFccUMd2drvXNL7 | |
G706tcuto8xEpw2uIRU/uXpbknXYpBI4iRmKt4DS4jJvVpyR1ogQC7N0ZJJ0YPP2 | |
zxhPYLIj0Mc7zmFLmY/CDNBAspjcDahOo7kKrmCgrUVSY7pmvWjg+b4aqIG7HkF4 | |
ddPB/gBVsIdU6CeQNR1MM62X/JcumIS/LMmjv9GYERTtY/jKmIhYF5ntRQOXfjyG | |
HoiMvvKRhI9lNNgATH23MRdaKXoKGCQwoze1eqkBfSbW+Q6OWfH9GzO1KTsXO0G2 | |
Id3UwD2ln58fQ1DJu7xsepeY7s2MH/ucUa6LcL0nn3HAa6x9kGbo1106DbDVwo3V | |
yJ2dwW3Q0L9R5OP4wzg2rtandeavhENdk5IMagfeOx2YItaswTXbo6Al/3K1dh3e | |
beksZixShNBFks4c5eUzHdwHU1SjqoI7mjcv3N2gZOnm3b2u/GSFHTynyQbehP9r | |
6GsaPMWis0L7iwk+XwhSx2LE1AVxv8Rk5Pihg+g+EpuoHtQ2TS9x9o0o9oOpE9Jh | |
wZG7SMA0j0GMS0zbaRL/UJScIINZc+18ofLx/d33SdNDWKBWY8o9PeU1VlnpDsog | |
zCtLkykPAgMBAAGjggFqMIIBZjASBgNVHRMBAf8ECDAGAQH/AgEMMB0GA1UdDgQW | |
BBS5CcqcHtvTbDprru1U8VuTBjUuXjCB4QYDVR0jBIHZMIHWgBS5CcqcHtvTbDpr | |
ru1U8VuTBjUuXqGBsqSBrzCBrDELMAkGA1UEBhMCRVUxQzBBBgNVBAcTOk1hZHJp | |
ZCAoc2VlIGN1cnJlbnQgYWRkcmVzcyBhdCB3d3cuY2FtZXJmaXJtYS5jb20vYWRk | |
cmVzcykxEjAQBgNVBAUTCUE4Mjc0MzI4NzEbMBkGA1UEChMSQUMgQ2FtZXJmaXJt | |
YSBTLkEuMScwJQYDVQQDEx5HbG9iYWwgQ2hhbWJlcnNpZ24gUm9vdCAtIDIwMDiC | |
CQDJzdPp1X0jzjAOBgNVHQ8BAf8EBAMCAQYwPQYDVR0gBDYwNDAyBgRVHSAAMCow | |
KAYIKwYBBQUHAgEWHGh0dHA6Ly9wb2xpY3kuY2FtZXJmaXJtYS5jb20wDQYJKoZI | |
hvcNAQEFBQADggIBAICIf3DekijZBZRG/5BXqfEv3xoNa/p8DhxJJHkn2EaqbylZ | |
UohwEurdPfWbU1Rv4WCiqAm57OtZfMY18dwY6fFn5a+6ReAJ3spED8IXDneRRXoz | |
X1+WLGiLwUePmJs9wOzL9dWCkoQ10b42OFZyMVtHLaoXpGNR6woBrX/sdZ7LoR/x | |
fxKxueRkf2fWIyr0uDldmOghp+G9PUIadJpwr2hsUF1Jz//7Dl3mLEfXgTpZALVz | |
a2Mg9jFFCDkO9HB+QHBaP9BrQql0PSgvAm11cpUJjUhjxsYjV5KTXjXBjfkK9yyd | |
Yhz2rXzdpjEetrHHfoUm+qRqtdpjMNHvkzeyZi99Bffnt0uYlDXA2TopwZ2yUDMd | |
SqlapskD7+3056huirRXhOukP9DuqqqHW2Pok+JrqNS4cnhrG+055F3Lm6qH1U9O | |
AP7Zap88MQ8oAgF9mOinsKJknnn4SPIVqczmyETrP3iZ8ntxPjzxmKfFGBI/5rso | |
M0LpRQp8bfKGeS/Fghl9CYl8slR2iK7ewfPM4W7bMdaTrpmg7yVqc5iJWzouE4ge | |
v8CSlDQb4ye3ix5vQv/n6TebUB0tovkC7stYWDpxvGjjqsGvHCgfotwjZT+B6q6Z | |
09gwzxMNTxXJhLynSC34MCN32EZLeW32jO06f2ARePTpm67VVMB0gNELQp/B | |
-----END CERTIFICATE----- | |
# Issuer: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc. | |
# Subject: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc. | |
# Label: "Go Daddy Root Certificate Authority - G2" | |
# Serial: 0 | |
# MD5 Fingerprint: 80:3a:bc:22:c1:e6:fb:8d:9b:3b:27:4a:32:1b:9a:01 | |
# SHA1 Fingerprint: 47:be:ab:c9:22:ea:e8:0e:78:78:34:62:a7:9f:45:c2:54:fd:e6:8b | |
# SHA256 Fingerprint: 45:14:0b:32:47:eb:9c:c8:c5:b4:f0:d7:b5:30:91:f7:32:92:08:9e:6e:5a:63:e2:74:9d:d3:ac:a9:19:8e:da | |
-----BEGIN CERTIFICATE----- | |
MIIDxTCCAq2gAwIBAgIBADANBgkqhkiG9w0BAQsFADCBgzELMAkGA1UEBhMCVVMx | |
EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxGjAYBgNVBAoT | |
EUdvRGFkZHkuY29tLCBJbmMuMTEwLwYDVQQDEyhHbyBEYWRkeSBSb290IENlcnRp | |
ZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAwMFoXDTM3MTIzMTIz | |
NTk1OVowgYMxCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6b25hMRMwEQYDVQQH | |
EwpTY290dHNkYWxlMRowGAYDVQQKExFHb0RhZGR5LmNvbSwgSW5jLjExMC8GA1UE | |
AxMoR28gRGFkZHkgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIw | |
DQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAL9xYgjx+lk09xvJGKP3gElY6SKD | |
E6bFIEMBO4Tx5oVJnyfq9oQbTqC023CYxzIBsQU+B07u9PpPL1kwIuerGVZr4oAH | |
/PMWdYA5UXvl+TW2dE6pjYIT5LY/qQOD+qK+ihVqf94Lw7YZFAXK6sOoBJQ7Rnwy | |
DfMAZiLIjWltNowRGLfTshxgtDj6AozO091GB94KPutdfMh8+7ArU6SSYmlRJQVh | |
GkSBjCypQ5Yj36w6gZoOKcUcqeldHraenjAKOc7xiID7S13MMuyFYkMlNAJWJwGR | |
tDtwKj9useiciAF9n9T521NtYJ2/LOdYq7hfRvzOxBsDPAnrSTFcaUaz4EcCAwEA | |
AaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE | |
FDqahQcQZyi27/a9BUFuIMGU2g/eMA0GCSqGSIb3DQEBCwUAA4IBAQCZ21151fmX | |
WWcDYfF+OwYxdS2hII5PZYe096acvNjpL9DbWu7PdIxztDhC2gV7+AJ1uP2lsdeu | |
9tfeE8tTEH6KRtGX+rcuKxGrkLAngPnon1rpN5+r5N9ss4UXnT3ZJE95kTXWXwTr | |
gIOrmgIttRD02JDHBHNA7XIloKmf7J6raBKZV8aPEjoJpL1E/QYVN8Gb5DKj7Tjo | |
2GTzLH4U/ALqn83/B2gX2yKQOC16jdFU8WnjXzPKej17CuPKf1855eJ1usV2GDPO | |
LPAvTK33sefOT6jEm0pUBsV/fdUID+Ic/n4XuKxe9tQWskMJDE32p2u0mYRlynqI | |
4uJEvlz36hz1 | |
-----END CERTIFICATE----- | |
# Issuer: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc. | |
# Subject: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc. | |
# Label: "Starfield Root Certificate Authority - G2" | |
# Serial: 0 | |
# MD5 Fingerprint: d6:39:81:c6:52:7e:96:69:fc:fc:ca:66:ed:05:f2:96 | |
# SHA1 Fingerprint: b5:1c:06:7c:ee:2b:0c:3d:f8:55:ab:2d:92:f4:fe:39:d4:e7:0f:0e | |
# SHA256 Fingerprint: 2c:e1:cb:0b:f9:d2:f9:e1:02:99:3f:be:21:51:52:c3:b2:dd:0c:ab:de:1c:68:e5:31:9b:83:91:54:db:b7:f5 | |
-----BEGIN CERTIFICATE----- | |
MIID3TCCAsWgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBjzELMAkGA1UEBhMCVVMx | |
EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT | |
HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAMTKVN0YXJmaWVs | |
ZCBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAw | |
MFoXDTM3MTIzMTIzNTk1OVowgY8xCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6 | |
b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFyZmllbGQgVGVj | |
aG5vbG9naWVzLCBJbmMuMTIwMAYDVQQDEylTdGFyZmllbGQgUm9vdCBDZXJ0aWZp | |
Y2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC | |
ggEBAL3twQP89o/8ArFvW59I2Z154qK3A2FWGMNHttfKPTUuiUP3oWmb3ooa/RMg | |
nLRJdzIpVv257IzdIvpy3Cdhl+72WoTsbhm5iSzchFvVdPtrX8WJpRBSiUZV9Lh1 | |
HOZ/5FSuS/hVclcCGfgXcVnrHigHdMWdSL5stPSksPNkN3mSwOxGXn/hbVNMYq/N | |
Hwtjuzqd+/x5AJhhdM8mgkBj87JyahkNmcrUDnXMN/uLicFZ8WJ/X7NfZTD4p7dN | |
dloedl40wOiWVpmKs/B/pM293DIxfJHP4F8R+GuqSVzRmZTRouNjWwl2tVZi4Ut0 | |
HZbUJtQIBFnQmA4O5t78w+wfkPECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO | |
BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFHwMMh+n2TB/xH1oo2Kooc6rB1snMA0G | |
CSqGSIb3DQEBCwUAA4IBAQARWfolTwNvlJk7mh+ChTnUdgWUXuEok21iXQnCoKjU | |
sHU48TRqneSfioYmUeYs0cYtbpUgSpIB7LiKZ3sx4mcujJUDJi5DnUox9g61DLu3 | |
4jd/IroAow57UvtruzvE03lRTs2Q9GcHGcg8RnoNAX3FWOdt5oUwF5okxBDgBPfg | |
8n/Uqgr/Qh037ZTlZFkSIHc40zI+OIF1lnP6aI+xy84fxez6nH7PfrHxBy22/L/K | |
pL/QlwVKvOoYKAKQvVR4CSFx09F9HdkWsKlhPdAKACL8x3vLCWRFCztAgfd9fDL1 | |
mMpYjn0q7pBZc2T5NnReJaH1ZgUufzkVqSr7UIuOhWn0 | |
-----END CERTIFICATE----- | |
# Issuer: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc. | |
# Subject: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc. | |
# Label: "Starfield Services Root Certificate Authority - G2" | |
# Serial: 0 | |
# MD5 Fingerprint: 17:35:74:af:7b:61:1c:eb:f4:f9:3c:e2:ee:40:f9:a2 | |
# SHA1 Fingerprint: 92:5a:8f:8d:2c:6d:04:e0:66:5f:59:6a:ff:22:d8:63:e8:25:6f:3f | |
# SHA256 Fingerprint: 56:8d:69:05:a2:c8:87:08:a4:b3:02:51:90:ed:cf:ed:b1:97:4a:60:6a:13:c6:e5:29:0f:cb:2a:e6:3e:da:b5 | |
-----BEGIN CERTIFICATE----- | |
MIID7zCCAtegAwIBAgIBADANBgkqhkiG9w0BAQsFADCBmDELMAkGA1UEBhMCVVMx | |
EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT | |
HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xOzA5BgNVBAMTMlN0YXJmaWVs | |
ZCBTZXJ2aWNlcyBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5 | |
MDkwMTAwMDAwMFoXDTM3MTIzMTIzNTk1OVowgZgxCzAJBgNVBAYTAlVTMRAwDgYD | |
VQQIEwdBcml6b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFy | |
ZmllbGQgVGVjaG5vbG9naWVzLCBJbmMuMTswOQYDVQQDEzJTdGFyZmllbGQgU2Vy | |
dmljZXMgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZI | |
hvcNAQEBBQADggEPADCCAQoCggEBANUMOsQq+U7i9b4Zl1+OiFOxHz/Lz58gE20p | |
OsgPfTz3a3Y4Y9k2YKibXlwAgLIvWX/2h/klQ4bnaRtSmpDhcePYLQ1Ob/bISdm2 | |
8xpWriu2dBTrz/sm4xq6HZYuajtYlIlHVv8loJNwU4PahHQUw2eeBGg6345AWh1K | |
Ts9DkTvnVtYAcMtS7nt9rjrnvDH5RfbCYM8TWQIrgMw0R9+53pBlbQLPLJGmpufe | |
hRhJfGZOozptqbXuNC66DQO4M99H67FrjSXZm86B0UVGMpZwh94CDklDhbZsc7tk | |
6mFBrMnUVN+HL8cisibMn1lUaJ/8viovxFUcdUBgF4UCVTmLfwUCAwEAAaNCMEAw | |
DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJxfAN+q | |
AdcwKziIorhtSpzyEZGDMA0GCSqGSIb3DQEBCwUAA4IBAQBLNqaEd2ndOxmfZyMI | |
bw5hyf2E3F/YNoHN2BtBLZ9g3ccaaNnRbobhiCPPE95Dz+I0swSdHynVv/heyNXB | |
ve6SbzJ08pGCL72CQnqtKrcgfU28elUSwhXqvfdqlS5sdJ/PHLTyxQGjhdByPq1z | |
qwubdQxtRbeOlKyWN7Wg0I8VRw7j6IPdj/3vQQF3zCepYoUz8jcI73HPdwbeyBkd | |
iEDPfUYd/x7H4c7/I9vG+o1VTqkC50cRRj70/b17KSa7qWFiNyi2LSr2EIZkyXCn | |
0q23KXB56jzaYyWf/Wi3MOxw+3WKt21gZ7IeyLnp2KhvAotnDU0mV3HaIPzBSlCN | |
sSi6 | |
-----END CERTIFICATE----- | |
# Issuer: CN=AffirmTrust Commercial O=AffirmTrust | |
# Subject: CN=AffirmTrust Commercial O=AffirmTrust | |
# Label: "AffirmTrust Commercial" | |
# Serial: 8608355977964138876 | |
# MD5 Fingerprint: 82:92:ba:5b:ef:cd:8a:6f:a6:3d:55:f9:84:f6:d6:b7 | |
# SHA1 Fingerprint: f9:b5:b6:32:45:5f:9c:be:ec:57:5f:80:dc:e9:6e:2c:c7:b2:78:b7 | |
# SHA256 Fingerprint: 03:76:ab:1d:54:c5:f9:80:3c:e4:b2:e2:01:a0:ee:7e:ef:7b:57:b6:36:e8:a9:3c:9b:8d:48:60:c9:6f:5f:a7 | |
-----BEGIN CERTIFICATE----- | |
MIIDTDCCAjSgAwIBAgIId3cGJyapsXwwDQYJKoZIhvcNAQELBQAwRDELMAkGA1UE | |
BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz | |
dCBDb21tZXJjaWFsMB4XDTEwMDEyOTE0MDYwNloXDTMwMTIzMTE0MDYwNlowRDEL | |
MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp | |
cm1UcnVzdCBDb21tZXJjaWFsMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC | |
AQEA9htPZwcroRX1BiLLHwGy43NFBkRJLLtJJRTWzsO3qyxPxkEylFf6EqdbDuKP | |
Hx6GGaeqtS25Xw2Kwq+FNXkyLbscYjfysVtKPcrNcV/pQr6U6Mje+SJIZMblq8Yr | |
ba0F8PrVC8+a5fBQpIs7R6UjW3p6+DM/uO+Zl+MgwdYoic+U+7lF7eNAFxHUdPAL | |
MeIrJmqbTFeurCA+ukV6BfO9m2kVrn1OIGPENXY6BwLJN/3HR+7o8XYdcxXyl6S1 | |
yHp52UKqK39c/s4mT6NmgTWvRLpUHhwwMmWd5jyTXlBOeuM61G7MGvv50jeuJCqr | |
VwMiKA1JdX+3KNp1v47j3A55MQIDAQABo0IwQDAdBgNVHQ4EFgQUnZPGU4teyq8/ | |
nx4P5ZmVvCT2lI8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ | |
KoZIhvcNAQELBQADggEBAFis9AQOzcAN/wr91LoWXym9e2iZWEnStB03TX8nfUYG | |
XUPGhi4+c7ImfU+TqbbEKpqrIZcUsd6M06uJFdhrJNTxFq7YpFzUf1GO7RgBsZNj | |
vbz4YYCanrHOQnDiqX0GJX0nof5v7LMeJNrjS1UaADs1tDvZ110w/YETifLCBivt | |
Z8SOyUOyXGsViQK8YvxO8rUzqrJv0wqiUOP2O+guRMLbZjipM1ZI8W0bM40NjD9g | |
N53Tym1+NH4Nn3J2ixufcv1SNUFFApYvHLKac0khsUlHRUe072o0EclNmsxZt9YC | |
nlpOZbWUrhvfKbAW8b8Angc6F2S1BLUjIZkKlTuXfO8= | |
-----END CERTIFICATE----- | |
# Issuer: CN=AffirmTrust Networking O=AffirmTrust | |
# Subject: CN=AffirmTrust Networking O=AffirmTrust | |
# Label: "AffirmTrust Networking" | |
# Serial: 8957382827206547757 | |
# MD5 Fingerprint: 42:65:ca:be:01:9a:9a:4c:a9:8c:41:49:cd:c0:d5:7f | |
# SHA1 Fingerprint: 29:36:21:02:8b:20:ed:02:f5:66:c5:32:d1:d6:ed:90:9f:45:00:2f | |
# SHA256 Fingerprint: 0a:81:ec:5a:92:97:77:f1:45:90:4a:f3:8d:5d:50:9f:66:b5:e2:c5:8f:cd:b5:31:05:8b:0e:17:f3:f0:b4:1b | |
-----BEGIN CERTIFICATE----- | |
MIIDTDCCAjSgAwIBAgIIfE8EORzUmS0wDQYJKoZIhvcNAQEFBQAwRDELMAkGA1UE | |
BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz | |
dCBOZXR3b3JraW5nMB4XDTEwMDEyOTE0MDgyNFoXDTMwMTIzMTE0MDgyNFowRDEL | |
MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp | |
cm1UcnVzdCBOZXR3b3JraW5nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC | |
AQEAtITMMxcua5Rsa2FSoOujz3mUTOWUgJnLVWREZY9nZOIG41w3SfYvm4SEHi3y | |
YJ0wTsyEheIszx6e/jarM3c1RNg1lho9Nuh6DtjVR6FqaYvZ/Ls6rnla1fTWcbua | |
kCNrmreIdIcMHl+5ni36q1Mr3Lt2PpNMCAiMHqIjHNRqrSK6mQEubWXLviRmVSRL | |
QESxG9fhwoXA3hA/Pe24/PHxI1Pcv2WXb9n5QHGNfb2V1M6+oF4nI979ptAmDgAp | |
6zxG8D1gvz9Q0twmQVGeFDdCBKNwV6gbh+0t+nvujArjqWaJGctB+d1ENmHP4ndG | |
yH329JKBNv3bNPFyfvMMFr20FQIDAQABo0IwQDAdBgNVHQ4EFgQUBx/S55zawm6i | |
QLSwelAQUHTEyL0wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ | |
KoZIhvcNAQEFBQADggEBAIlXshZ6qML91tmbmzTCnLQyFE2npN/svqe++EPbkTfO | |
tDIuUFUaNU52Q3Eg75N3ThVwLofDwR1t3Mu1J9QsVtFSUzpE0nPIxBsFZVpikpzu | |
QY0x2+c06lkh1QF612S4ZDnNye2v7UsDSKegmQGA3GWjNq5lWUhPgkvIZfFXHeVZ | |
Lgo/bNjR9eUJtGxUAArgFU2HdW23WJZa3W3SAKD0m0i+wzekujbgfIeFlxoVot4u | |
olu9rxj5kFDNcFn4J2dHy8egBzp90SxdbBk6ZrV9/ZFvgrG+CJPbFEfxojfHRZ48 | |
x3evZKiT3/Zpg4Jg8klCNO1aAFSFHBY2kgxc+qatv9s= | |
-----END CERTIFICATE----- | |
# Issuer: CN=AffirmTrust Premium O=AffirmTrust | |
# Subject: CN=AffirmTrust Premium O=AffirmTrust | |
# Label: "AffirmTrust Premium" | |
# Serial: 7893706540734352110 | |
# MD5 Fingerprint: c4:5d:0e:48:b6:ac:28:30:4e:0a:bc:f9:38:16:87:57 | |
# SHA1 Fingerprint: d8:a6:33:2c:e0:03:6f:b1:85:f6:63:4f:7d:6a:06:65:26:32:28:27 | |
# SHA256 Fingerprint: 70:a7:3f:7f:37:6b:60:07:42:48:90:45:34:b1:14:82:d5:bf:0e:69:8e:cc:49:8d:f5:25:77:eb:f2:e9:3b:9a | |
-----BEGIN CERTIFICATE----- | |
MIIFRjCCAy6gAwIBAgIIbYwURrGmCu4wDQYJKoZIhvcNAQEMBQAwQTELMAkGA1UE | |
BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1UcnVz | |
dCBQcmVtaXVtMB4XDTEwMDEyOTE0MTAzNloXDTQwMTIzMTE0MTAzNlowQTELMAkG | |
A1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1U | |
cnVzdCBQcmVtaXVtMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxBLf | |
qV/+Qd3d9Z+K4/as4Tx4mrzY8H96oDMq3I0gW64tb+eT2TZwamjPjlGjhVtnBKAQ | |
JG9dKILBl1fYSCkTtuG+kU3fhQxTGJoeJKJPj/CihQvL9Cl/0qRY7iZNyaqoe5rZ | |
+jjeRFcV5fiMyNlI4g0WJx0eyIOFJbe6qlVBzAMiSy2RjYvmia9mx+n/K+k8rNrS | |
s8PhaJyJ+HoAVt70VZVs+7pk3WKL3wt3MutizCaam7uqYoNMtAZ6MMgpv+0GTZe5 | |
HMQxK9VfvFMSF5yZVylmd2EhMQcuJUmdGPLu8ytxjLW6OQdJd/zvLpKQBY0tL3d7 | |
70O/Nbua2Plzpyzy0FfuKE4mX4+QaAkvuPjcBukumj5Rp9EixAqnOEhss/n/fauG | |
V+O61oV4d7pD6kh/9ti+I20ev9E2bFhc8e6kGVQa9QPSdubhjL08s9NIS+LI+H+S | |
qHZGnEJlPqQewQcDWkYtuJfzt9WyVSHvutxMAJf7FJUnM7/oQ0dG0giZFmA7mn7S | |
5u046uwBHjxIVkkJx0w3AJ6IDsBz4W9m6XJHMD4Q5QsDyZpCAGzFlH5hxIrff4Ia | |
C1nEWTJ3s7xgaVY5/bQGeyzWZDbZvUjthB9+pSKPKrhC9IK31FOQeE4tGv2Bb0TX | |
OwF0lkLgAOIua+rF7nKsu7/+6qqo+Nz2snmKtmcCAwEAAaNCMEAwHQYDVR0OBBYE | |
FJ3AZ6YMItkm9UWrpmVSESfYRaxjMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/ | |
BAQDAgEGMA0GCSqGSIb3DQEBDAUAA4ICAQCzV00QYk465KzquByvMiPIs0laUZx2 | |
KI15qldGF9X1Uva3ROgIRL8YhNILgM3FEv0AVQVhh0HctSSePMTYyPtwni94loMg | |
Nt58D2kTiKV1NpgIpsbfrM7jWNa3Pt668+s0QNiigfV4Py/VpfzZotReBA4Xrf5B | |
8OWycvpEgjNC6C1Y91aMYj+6QrCcDFx+LmUmXFNPALJ4fqENmS2NuB2OosSw/WDQ | |
MKSOyARiqcTtNd56l+0OOF6SL5Nwpamcb6d9Ex1+xghIsV5n61EIJenmJWtSKZGc | |
0jlzCFfemQa0W50QBuHCAKi4HEoCChTQwUHK+4w1IX2COPKpVJEZNZOUbWo6xbLQ | |
u4mGk+ibyQ86p3q4ofB4Rvr8Ny/lioTz3/4E2aFooC8k4gmVBtWVyuEklut89pMF | |
u+1z6S3RdTnX5yTb2E5fQ4+e0BQ5v1VwSJlXMbSc7kqYA5YwH2AG7hsj/oFgIxpH | |
YoWlzBk0gG+zrBrjn/B7SK3VAdlntqlyk+otZrWyuOQ9PLLvTIzq6we/qzWaVYa8 | |
GKa1qF60g2xraUDTn9zxw2lrueFtCfTxqlB2Cnp9ehehVZZCmTEJ3WARjQUwfuaO | |
RtGdFNrHF+QFlozEJLUbzxQHskD4o55BhrwE0GuWyCqANP2/7waj3VjFhT0+j/6e | |
KeC2uAloGRwYQw== | |
-----END CERTIFICATE----- | |
# Issuer: CN=AffirmTrust Premium ECC O=AffirmTrust | |
# Subject: CN=AffirmTrust Premium ECC O=AffirmTrust | |
# Label: "AffirmTrust Premium ECC" | |
# Serial: 8401224907861490260 | |
# MD5 Fingerprint: 64:b0:09:55:cf:b1:d5:99:e2:be:13:ab:a6:5d:ea:4d | |
# SHA1 Fingerprint: b8:23:6b:00:2f:1d:16:86:53:01:55:6c:11:a4:37:ca:eb:ff:c3:bb | |
# SHA256 Fingerprint: bd:71:fd:f6:da:97:e4:cf:62:d1:64:7a:dd:25:81:b0:7d:79:ad:f8:39:7e:b4:ec:ba:9c:5e:84:88:82:14:23 | |
-----BEGIN CERTIFICATE----- | |
MIIB/jCCAYWgAwIBAgIIdJclisc/elQwCgYIKoZIzj0EAwMwRTELMAkGA1UEBhMC | |
VVMxFDASBgNVBAoMC0FmZmlybVRydXN0MSAwHgYDVQQDDBdBZmZpcm1UcnVzdCBQ | |
cmVtaXVtIEVDQzAeFw0xMDAxMjkxNDIwMjRaFw00MDEyMzExNDIwMjRaMEUxCzAJ | |
BgNVBAYTAlVTMRQwEgYDVQQKDAtBZmZpcm1UcnVzdDEgMB4GA1UEAwwXQWZmaXJt | |
VHJ1c3QgUHJlbWl1bSBFQ0MwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQNMF4bFZ0D | |
0KF5Nbc6PJJ6yhUczWLznCZcBz3lVPqj1swS6vQUX+iOGasvLkjmrBhDeKzQN8O9 | |
ss0s5kfiGuZjuD0uL3jET9v0D6RoTFVya5UdThhClXjMNzyR4ptlKymjQjBAMB0G | |
A1UdDgQWBBSaryl6wBE1NSZRMADDav5A1a7WPDAPBgNVHRMBAf8EBTADAQH/MA4G | |
A1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNnADBkAjAXCfOHiFBar8jAQr9HX/Vs | |
aobgxCd05DhT1wV/GzTjxi+zygk8N53X57hG8f2h4nECMEJZh0PUUd+60wkyWs6I | |
flc9nF9Ca/UHLbXwgpP5WW+uZPpY5Yse42O+tYHNbwKMeQ== | |
-----END CERTIFICATE----- | |
# Issuer: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority | |
# Subject: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority | |
# Label: "Certum Trusted Network CA" | |
# Serial: 279744 | |
# MD5 Fingerprint: d5:e9:81:40:c5:18:69:fc:46:2c:89:75:62:0f:aa:78 | |
# SHA1 Fingerprint: 07:e0:32:e0:20:b7:2c:3f:19:2f:06:28:a2:59:3a:19:a7:0f:06:9e | |
# SHA256 Fingerprint: 5c:58:46:8d:55:f5:8e:49:7e:74:39:82:d2:b5:00:10:b6:d1:65:37:4a:cf:83:a7:d4:a3:2d:b7:68:c4:40:8e | |
-----BEGIN CERTIFICATE----- | |
MIIDuzCCAqOgAwIBAgIDBETAMA0GCSqGSIb3DQEBBQUAMH4xCzAJBgNVBAYTAlBM | |
MSIwIAYDVQQKExlVbml6ZXRvIFRlY2hub2xvZ2llcyBTLkEuMScwJQYDVQQLEx5D | |
ZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxIjAgBgNVBAMTGUNlcnR1bSBU | |
cnVzdGVkIE5ldHdvcmsgQ0EwHhcNMDgxMDIyMTIwNzM3WhcNMjkxMjMxMTIwNzM3 | |
WjB+MQswCQYDVQQGEwJQTDEiMCAGA1UEChMZVW5pemV0byBUZWNobm9sb2dpZXMg | |
Uy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MSIw | |
IAYDVQQDExlDZXJ0dW0gVHJ1c3RlZCBOZXR3b3JrIENBMIIBIjANBgkqhkiG9w0B | |
AQEFAAOCAQ8AMIIBCgKCAQEA4/t9o3K6wvDJFIf1awFO4W5AB7ptJ11/91sts1rH | |
UV+rpDKmYYe2bg+G0jACl/jXaVehGDldamR5xgFZrDwxSjh80gTSSyjoIF87B6LM | |
TXPb865Px1bVWqeWifrzq2jUI4ZZJ88JJ7ysbnKDHDBy3+Ci6dLhdHUZvSqeexVU | |
BBvXQzmtVSjF4hq79MDkrjhJM8x2hZ85RdKknvISjFH4fOQtf/WsX+sWn7Et0brM | |
kUJ3TCXJkDhv2/DM+44el1k+1WBO5gUo7Ul5E0u6SNsv+XLTOcr+H9g0cvW0QM8x | |
AcPs3hEtF10fuFDRXhmnad4HMyjKUJX5p1TLVIZQRan5SQIDAQABo0IwQDAPBgNV | |
HRMBAf8EBTADAQH/MB0GA1UdDgQWBBQIds3LB/8k9sXN7buQvOKEN0Z19zAOBgNV | |
HQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQEFBQADggEBAKaorSLOAT2mo/9i0Eidi15y | |
sHhE49wcrwn9I0j6vSrEuVUEtRCjjSfeC4Jj0O7eDDd5QVsisrCaQVymcODU0HfL | |
I9MA4GxWL+FpDQ3Zqr8hgVDZBqWo/5U30Kr+4rP1mS1FhIrlQgnXdAIv94nYmem8 | |
J9RHjboNRhx3zxSkHLmkMcScKHQDNP8zGSal6Q10tz6XxnboJ5ajZt3hrvJBW8qY | |
VoNzcOSGGtIxQbovvi0TWnZvTuhOgQ4/WwMioBK+ZlgRSssDxLQqKi2WF+A5VLxI | |
03YnnZotBqbJ7DnSq9ufmgsnAjUpsUCV5/nonFWIGUbWtzT1fs45mtk48VH3Tyw= | |
-----END CERTIFICATE----- | |
# Issuer: CN=TWCA Root Certification Authority O=TAIWAN-CA OU=Root CA | |
# Subject: CN=TWCA Root Certification Authority O=TAIWAN-CA OU=Root CA | |
# Label: "TWCA Root Certification Authority" | |
# Serial: 1 | |
# MD5 Fingerprint: aa:08:8f:f6:f9:7b:b7:f2:b1:a7:1e:9b:ea:ea:bd:79 | |
# SHA1 Fingerprint: cf:9e:87:6d:d3:eb:fc:42:26:97:a3:b5:a3:7a:a0:76:a9:06:23:48 | |
# SHA256 Fingerprint: bf:d8:8f:e1:10:1c:41:ae:3e:80:1b:f8:be:56:35:0e:e9:ba:d1:a6:b9:bd:51:5e:dc:5c:6d:5b:87:11:ac:44 | |
-----BEGIN CERTIFICATE----- | |
MIIDezCCAmOgAwIBAgIBATANBgkqhkiG9w0BAQUFADBfMQswCQYDVQQGEwJUVzES | |
MBAGA1UECgwJVEFJV0FOLUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFU | |
V0NBIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwODI4MDcyNDMz | |
WhcNMzAxMjMxMTU1OTU5WjBfMQswCQYDVQQGEwJUVzESMBAGA1UECgwJVEFJV0FO | |
LUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFUV0NBIFJvb3QgQ2VydGlm | |
aWNhdGlvbiBBdXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB | |
AQCwfnK4pAOU5qfeCTiRShFAh6d8WWQUe7UREN3+v9XAu1bihSX0NXIP+FPQQeFE | |
AcK0HMMxQhZHhTMidrIKbw/lJVBPhYa+v5guEGcevhEFhgWQxFnQfHgQsIBct+HH | |
K3XLfJ+utdGdIzdjp9xCoi2SBBtQwXu4PhvJVgSLL1KbralW6cH/ralYhzC2gfeX | |
RfwZVzsrb+RH9JlF/h3x+JejiB03HFyP4HYlmlD4oFT/RJB2I9IyxsOrBr/8+7/z | |
rX2SYgJbKdM1o5OaQ2RgXbL6Mv87BK9NQGr5x+PvI/1ry+UPizgN7gr8/g+YnzAx | |
3WxSZfmLgb4i4RxYA7qRG4kHAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV | |
HRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqOFsmjd6LWvJPelSDGRjjCDWmujANBgkq | |
hkiG9w0BAQUFAAOCAQEAPNV3PdrfibqHDAhUaiBQkr6wQT25JmSDCi/oQMCXKCeC | |
MErJk/9q56YAf4lCmtYR5VPOL8zy2gXE/uJQxDqGfczafhAJO5I1KlOy/usrBdls | |
XebQ79NqZp4VKIV66IIArB6nCWlWQtNoURi+VJq/REG6Sb4gumlc7rh3zc5sH62D | |
lhh9DrUUOYTxKOkto557HnpyWoOzeW/vtPzQCqVYT0bf+215WfKEIlKuD8z7fDvn | |
aspHYcN6+NOSBB+4IIThNlQWx0DeO4pz3N/GCUzf7Nr/1FNCocnyYh0igzyXxfkZ | |
YiesZSLX0zzG5Y6yU8xJzrww/nsOM5D77dIUkR8Hrw== | |
-----END CERTIFICATE----- | |
# Issuer: O=SECOM Trust Systems CO.,LTD. OU=Security Communication RootCA2 | |
# Subject: O=SECOM Trust Systems CO.,LTD. OU=Security Communication RootCA2 | |
# Label: "Security Communication RootCA2" | |
# Serial: 0 | |
# MD5 Fingerprint: 6c:39:7d:a4:0e:55:59:b2:3f:d6:41:b1:12:50:de:43 | |
# SHA1 Fingerprint: 5f:3b:8c:f2:f8:10:b3:7d:78:b4:ce:ec:19:19:c3:73:34:b9:c7:74 | |
# SHA256 Fingerprint: 51:3b:2c:ec:b8:10:d4:cd:e5:dd:85:39:1a:df:c6:c2:dd:60:d8:7b:b7:36:d2:b5:21:48:4a:a4:7a:0e:be:f6 | |
-----BEGIN CERTIFICATE----- | |
MIIDdzCCAl+gAwIBAgIBADANBgkqhkiG9w0BAQsFADBdMQswCQYDVQQGEwJKUDEl | |
MCMGA1UEChMcU0VDT00gVHJ1c3QgU3lzdGVtcyBDTy4sTFRELjEnMCUGA1UECxMe | |
U2VjdXJpdHkgQ29tbXVuaWNhdGlvbiBSb290Q0EyMB4XDTA5MDUyOTA1MDAzOVoX | |
DTI5MDUyOTA1MDAzOVowXTELMAkGA1UEBhMCSlAxJTAjBgNVBAoTHFNFQ09NIFRy | |
dXN0IFN5c3RlbXMgQ08uLExURC4xJzAlBgNVBAsTHlNlY3VyaXR5IENvbW11bmlj | |
YXRpb24gUm9vdENBMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANAV | |
OVKxUrO6xVmCxF1SrjpDZYBLx/KWvNs2l9amZIyoXvDjChz335c9S672XewhtUGr | |
zbl+dp+++T42NKA7wfYxEUV0kz1XgMX5iZnK5atq1LXaQZAQwdbWQonCv/Q4EpVM | |
VAX3NuRFg3sUZdbcDE3R3n4MqzvEFb46VqZab3ZpUql6ucjrappdUtAtCms1FgkQ | |
hNBqyjoGADdH5H5XTz+L62e4iKrFvlNVspHEfbmwhRkGeC7bYRr6hfVKkaHnFtWO | |
ojnflLhwHyg/i/xAXmODPIMqGplrz95Zajv8bxbXH/1KEOtOghY6rCcMU/Gt1SSw | |
awNQwS08Ft1ENCcadfsCAwEAAaNCMEAwHQYDVR0OBBYEFAqFqXdlBZh8QIH4D5cs | |
OPEK7DzPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3 | |
DQEBCwUAA4IBAQBMOqNErLlFsceTfsgLCkLfZOoc7llsCLqJX2rKSpWeeo8HxdpF | |
coJxDjrSzG+ntKEju/Ykn8sX/oymzsLS28yN/HH8AynBbF0zX2S2ZTuJbxh2ePXc | |
okgfGT+Ok+vx+hfuzU7jBBJV1uXk3fs+BXziHV7Gp7yXT2g69ekuCkO2r1dcYmh8 | |
t/2jioSgrGK+KwmHNPBqAbubKVY8/gA3zyNs8U6qtnRGEmyR7jTV7JqR50S+kDFy | |
1UkC9gLl9B/rfNmWVan/7Ir5mUf/NVoCqgTLiluHcSmRvaS0eg29mvVXIwAHIRc/ | |
SjnRBUkLp7Y3gaVdjKozXoEofKd9J+sAro03 | |
-----END CERTIFICATE----- | |
# Issuer: CN=Hellenic Academic and Research Institutions RootCA 2011 O=Hellenic Academic and Research Institutions Cert. Authority | |
# Subject: CN=Hellenic Academic and Research Institutions RootCA 2011 O=Hellenic Academic and Research Institutions Cert. Authority | |
# Label: "Hellenic Academic and Research Institutions RootCA 2011" | |
# Serial: 0 | |
# MD5 Fingerprint: 73:9f:4c:4b:73:5b:79:e9:fa:ba:1c:ef:6e:cb:d5:c9 | |
# SHA1 Fingerprint: fe:45:65:9b:79:03:5b:98:a1:61:b5:51:2e:ac:da:58:09:48:22:4d | |
# SHA256 Fingerprint: bc:10:4f:15:a4:8b:e7:09:dc:a5:42:a7:e1:d4:b9:df:6f:05:45:27:e8:02:ea:a9:2d:59:54:44:25:8a:fe:71 | |
-----BEGIN CERTIFICATE----- | |
MIIEMTCCAxmgAwIBAgIBADANBgkqhkiG9w0BAQUFADCBlTELMAkGA1UEBhMCR1Ix | |
RDBCBgNVBAoTO0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1 | |
dGlvbnMgQ2VydC4gQXV0aG9yaXR5MUAwPgYDVQQDEzdIZWxsZW5pYyBBY2FkZW1p | |
YyBhbmQgUmVzZWFyY2ggSW5zdGl0dXRpb25zIFJvb3RDQSAyMDExMB4XDTExMTIw | |
NjEzNDk1MloXDTMxMTIwMTEzNDk1MlowgZUxCzAJBgNVBAYTAkdSMUQwQgYDVQQK | |
EztIZWxsZW5pYyBBY2FkZW1pYyBhbmQgUmVzZWFyY2ggSW5zdGl0dXRpb25zIENl | |
cnQuIEF1dGhvcml0eTFAMD4GA1UEAxM3SGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl | |
c2VhcmNoIEluc3RpdHV0aW9ucyBSb290Q0EgMjAxMTCCASIwDQYJKoZIhvcNAQEB | |
BQADggEPADCCAQoCggEBAKlTAOMupvaO+mDYLZU++CwqVE7NuYRhlFhPjz2L5EPz | |
dYmNUeTDN9KKiE15HrcS3UN4SoqS5tdI1Q+kOilENbgH9mgdVc04UfCMJDGFr4PJ | |
fel3r+0ae50X+bOdOFAPplp5kYCvN66m0zH7tSYJnTxa71HFK9+WXesyHgLacEns | |
bgzImjeN9/E2YEsmLIKe0HjzDQ9jpFEw4fkrJxIH2Oq9GGKYsFk3fb7u8yBRQlqD | |
75O6aRXxYp2fmTmCobd0LovUxQt7L/DICto9eQqakxylKHJzkUOap9FNhYS5qXSP | |
FEDH3N6sQWRstBmbAmNtJGSPRLIl6s5ddAxjMlyNh+UCAwEAAaOBiTCBhjAPBgNV | |
HRMBAf8EBTADAQH/MAsGA1UdDwQEAwIBBjAdBgNVHQ4EFgQUppFC/RNhSiOeCKQp | |
5dgTBCPuQSUwRwYDVR0eBEAwPqA8MAWCAy5ncjAFggMuZXUwBoIELmVkdTAGggQu | |
b3JnMAWBAy5ncjAFgQMuZXUwBoEELmVkdTAGgQQub3JnMA0GCSqGSIb3DQEBBQUA | |
A4IBAQAf73lB4XtuP7KMhjdCSk4cNx6NZrokgclPEg8hwAOXhiVtXdMiKahsog2p | |
6z0GW5k6x8zDmjR/qw7IThzh+uTczQ2+vyT+bOdrwg3IBp5OjWEopmr95fZi6hg8 | |
TqBTnbI6nOulnJEWtk2C4AwFSKls9cz4y51JtPACpf1wA+2KIaWuE4ZJwzNzvoc7 | |
dIsXRSZMFpGD/md9zU1jZ/rzAxKWeAaNsWftjj++n08C9bMJL/NMh98qy5V8Acys | |
Nnq/onN694/BtZqhFLKPM58N7yLcZnuEvUUXBj08yrl3NI/K6s8/MT7jiOOASSXI | |
l7WdmplNsDz4SgCbZN2fOUvRJ9e4 | |
-----END CERTIFICATE----- | |
# Issuer: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967 | |
# Subject: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967 | |
# Label: "Actalis Authentication Root CA" | |
# Serial: 6271844772424770508 | |
# MD5 Fingerprint: 69:c1:0d:4f:07:a3:1b:c3:fe:56:3d:04:bc:11:f6:a6 | |
# SHA1 Fingerprint: f3:73:b3:87:06:5a:28:84:8a:f2:f3:4a:ce:19:2b:dd:c7:8e:9c:ac | |
# SHA256 Fingerprint: 55:92:60:84:ec:96:3a:64:b9:6e:2a:be:01:ce:0b:a8:6a:64:fb:fe:bc:c7:aa:b5:af:c1:55:b3:7f:d7:60:66 | |
-----BEGIN CERTIFICATE----- | |
MIIFuzCCA6OgAwIBAgIIVwoRl0LE48wwDQYJKoZIhvcNAQELBQAwazELMAkGA1UE | |
BhMCSVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8w | |
MzM1ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290 | |
IENBMB4XDTExMDkyMjExMjIwMloXDTMwMDkyMjExMjIwMlowazELMAkGA1UEBhMC | |
SVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8wMzM1 | |
ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290IENB | |
MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAp8bEpSmkLO/lGMWwUKNv | |
UTufClrJwkg4CsIcoBh/kbWHuUA/3R1oHwiD1S0eiKD4j1aPbZkCkpAW1V8IbInX | |
4ay8IMKx4INRimlNAJZaby/ARH6jDuSRzVju3PvHHkVH3Se5CAGfpiEd9UEtL0z9 | |
KK3giq0itFZljoZUj5NDKd45RnijMCO6zfB9E1fAXdKDa0hMxKufgFpbOr3JpyI/ | |
gCczWw63igxdBzcIy2zSekciRDXFzMwujt0q7bd9Zg1fYVEiVRvjRuPjPdA1Yprb | |
rxTIW6HMiRvhMCb8oJsfgadHHwTrozmSBp+Z07/T6k9QnBn+locePGX2oxgkg4YQ | |
51Q+qDp2JE+BIcXjDwL4k5RHILv+1A7TaLndxHqEguNTVHnd25zS8gebLra8Pu2F | |
be8lEfKXGkJh90qX6IuxEAf6ZYGyojnP9zz/GPvG8VqLWeICrHuS0E4UT1lF9gxe | |
KF+w6D9Fz8+vm2/7hNN3WpVvrJSEnu68wEqPSpP4RCHiMUVhUE4Q2OM1fEwZtN4F | |
v6MGn8i1zeQf1xcGDXqVdFUNaBr8EBtiZJ1t4JWgw5QHVw0U5r0F+7if5t+L4sbn | |
fpb2U8WANFAoWPASUHEXMLrmeGO89LKtmyuy/uE5jF66CyCU3nuDuP/jVo23Eek7 | |
jPKxwV2dpAtMK9myGPW1n0sCAwEAAaNjMGEwHQYDVR0OBBYEFFLYiDrIn3hm7Ynz | |
ezhwlMkCAjbQMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUUtiIOsifeGbt | |
ifN7OHCUyQICNtAwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAL | |
e3KHwGCmSUyIWOYdiPcUZEim2FgKDk8TNd81HdTtBjHIgT5q1d07GjLukD0R0i70 | |
jsNjLiNmsGe+b7bAEzlgqqI0JZN1Ut6nna0Oh4lScWoWPBkdg/iaKWW+9D+a2fDz | |
WochcYBNy+A4mz+7+uAwTc+G02UQGRjRlwKxK3JCaKygvU5a2hi/a5iB0P2avl4V | |
SM0RFbnAKVy06Ij3Pjaut2L9HmLecHgQHEhb2rykOLpn7VU+Xlff1ANATIGk0k9j | |
pwlCCRT8AKnCgHNPLsBA2RF7SOp6AsDT6ygBJlh0wcBzIm2Tlf05fbsq4/aC4yyX | |
X04fkZT6/iyj2HYauE2yOE+b+h1IYHkm4vP9qdCa6HCPSXrW5b0KDtst842/6+Ok | |
fcvHlXHo2qN8xcL4dJIEG4aspCJTQLas/kx2z/uUMsA1n3Y/buWQbqCmJqK4LL7R | |
K4X9p2jIugErsWx0Hbhzlefut8cl8ABMALJ+tguLHPPAUJ4lueAI3jZm/zel0btU | |
ZCzJJ7VLkn5l/9Mt4blOvH+kQSGQQXemOR/qnuOf0GZvBeyqdn6/axag67XH/JJU | |
LysRJyU3eExRarDzzFhdFPFqSBX/wge2sY0PjlxQRrM9vwGYT7JZVEc+NHt4bVaT | |
LnPqZih4zR0Uv6CPLy64Lo7yFIrM6bV8+2ydDKXhlg== | |
-----END CERTIFICATE----- | |
# Issuer: O=Trustis Limited OU=Trustis FPS Root CA | |
# Subject: O=Trustis Limited OU=Trustis FPS Root CA | |
# Label: "Trustis FPS Root CA" | |
# Serial: 36053640375399034304724988975563710553 | |
# MD5 Fingerprint: 30:c9:e7:1e:6b:e6:14:eb:65:b2:16:69:20:31:67:4d | |
# SHA1 Fingerprint: 3b:c0:38:0b:33:c3:f6:a6:0c:86:15:22:93:d9:df:f5:4b:81:c0:04 | |
# SHA256 Fingerprint: c1:b4:82:99:ab:a5:20:8f:e9:63:0a:ce:55:ca:68:a0:3e:da:5a:51:9c:88:02:a0:d3:a6:73:be:8f:8e:55:7d | |
-----BEGIN CERTIFICATE----- | |
MIIDZzCCAk+gAwIBAgIQGx+ttiD5JNM2a/fH8YygWTANBgkqhkiG9w0BAQUFADBF | |
MQswCQYDVQQGEwJHQjEYMBYGA1UEChMPVHJ1c3RpcyBMaW1pdGVkMRwwGgYDVQQL | |
ExNUcnVzdGlzIEZQUyBSb290IENBMB4XDTAzMTIyMzEyMTQwNloXDTI0MDEyMTEx | |
MzY1NFowRTELMAkGA1UEBhMCR0IxGDAWBgNVBAoTD1RydXN0aXMgTGltaXRlZDEc | |
MBoGA1UECxMTVHJ1c3RpcyBGUFMgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQAD | |
ggEPADCCAQoCggEBAMVQe547NdDfxIzNjpvto8A2mfRC6qc+gIMPpqdZh8mQRUN+ | |
AOqGeSoDvT03mYlmt+WKVoaTnGhLaASMk5MCPjDSNzoiYYkchU59j9WvezX2fihH | |
iTHcDnlkH5nSW7r+f2C/revnPDgpai/lkQtV/+xvWNUtyd5MZnGPDNcE2gfmHhjj | |
vSkCqPoc4Vu5g6hBSLwacY3nYuUtsuvffM/bq1rKMfFMIvMFE/eC+XN5DL7XSxzA | |
0RU8k0Fk0ea+IxciAIleH2ulrG6nS4zto3Lmr2NNL4XSFDWaLk6M6jKYKIahkQlB | |
OrTh4/L68MkKokHdqeMDx4gVOxzUGpTXn2RZEm0CAwEAAaNTMFEwDwYDVR0TAQH/ | |
BAUwAwEB/zAfBgNVHSMEGDAWgBS6+nEleYtXQSUhhgtx67JkDoshZzAdBgNVHQ4E | |
FgQUuvpxJXmLV0ElIYYLceuyZA6LIWcwDQYJKoZIhvcNAQEFBQADggEBAH5Y//01 | |
GX2cGE+esCu8jowU/yyg2kdbw++BLa8F6nRIW/M+TgfHbcWzk88iNVy2P3UnXwmW | |
zaD+vkAMXBJV+JOCyinpXj9WV4s4NvdFGkwozZ5BuO1WTISkQMi4sKUraXAEasP4 | |
1BIy+Q7DsdwyhEQsb8tGD+pmQQ9P8Vilpg0ND2HepZ5dfWWhPBfnqFVO76DH7cZE | |
f1T1o+CP8HxVIo8ptoGj4W1OLBuAZ+ytIJ8MYmHVl/9D7S3B2l0pKoU/rGXuhg8F | |
jZBf3+6f9L/uHfuY5H+QK4R4EA5sSVPvFVtlRkpdr7r7OnIdzfYliB6XzCGcKQEN | |
ZetX2fNXlrtIzYE= | |
-----END CERTIFICATE----- | |
# Issuer: CN=Buypass Class 2 Root CA O=Buypass AS-983163327 | |
# Subject: CN=Buypass Class 2 Root CA O=Buypass AS-983163327 | |
# Label: "Buypass Class 2 Root CA" | |
# Serial: 2 | |
# MD5 Fingerprint: 46:a7:d2:fe:45:fb:64:5a:a8:59:90:9b:78:44:9b:29 | |
# SHA1 Fingerprint: 49:0a:75:74:de:87:0a:47:fe:58:ee:f6:c7:6b:eb:c6:0b:12:40:99 | |
# SHA256 Fingerprint: 9a:11:40:25:19:7c:5b:b9:5d:94:e6:3d:55:cd:43:79:08:47:b6:46:b2:3c:df:11:ad:a4:a0:0e:ff:15:fb:48 | |
-----BEGIN CERTIFICATE----- | |
MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd | |
MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg | |
Q2xhc3MgMiBSb290IENBMB4XDTEwMTAyNjA4MzgwM1oXDTQwMTAyNjA4MzgwM1ow | |
TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw | |
HgYDVQQDDBdCdXlwYXNzIENsYXNzIDIgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB | |
BQADggIPADCCAgoCggIBANfHXvfBB9R3+0Mh9PT1aeTuMgHbo4Yf5FkNuud1g1Lr | |
6hxhFUi7HQfKjK6w3Jad6sNgkoaCKHOcVgb/S2TwDCo3SbXlzwx87vFKu3MwZfPV | |
L4O2fuPn9Z6rYPnT8Z2SdIrkHJasW4DptfQxh6NR/Md+oW+OU3fUl8FVM5I+GC91 | |
1K2GScuVr1QGbNgGE41b/+EmGVnAJLqBcXmQRFBoJJRfuLMR8SlBYaNByyM21cHx | |
MlAQTn/0hpPshNOOvEu/XAFOBz3cFIqUCqTqc/sLUegTBxj6DvEr0VQVfTzh97QZ | |
QmdiXnfgolXsttlpF9U6r0TtSsWe5HonfOV116rLJeffawrbD02TTqigzXsu8lkB | |
arcNuAeBfos4GzjmCleZPe4h6KP1DBbdi+w0jpwqHAAVF41og9JwnxgIzRFo1clr | |
Us3ERo/ctfPYV3Me6ZQ5BL/T3jjetFPsaRyifsSP5BtwrfKi+fv3FmRmaZ9JUaLi | |
FRhnBkp/1Wy1TbMz4GHrXb7pmA8y1x1LPC5aAVKRCfLf6o3YBkBjqhHk/sM3nhRS | |
P/TizPJhk9H9Z2vXUq6/aKtAQ6BXNVN48FP4YUIHZMbXb5tMOA1jrGKvNouicwoN | |
9SG9dKpN6nIDSdvHXx1iY8f93ZHsM+71bbRuMGjeyNYmsHVee7QHIJihdjK4TWxP | |
AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMmAd+BikoL1Rpzz | |
uvdMw964o605MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAU18h | |
9bqwOlI5LJKwbADJ784g7wbylp7ppHR/ehb8t/W2+xUbP6umwHJdELFx7rxP462s | |
A20ucS6vxOOto70MEae0/0qyexAQH6dXQbLArvQsWdZHEIjzIVEpMMpghq9Gqx3t | |
OluwlN5E40EIosHsHdb9T7bWR9AUC8rmyrV7d35BH16Dx7aMOZawP5aBQW9gkOLo | |
+fsicdl9sz1Gv7SEr5AcD48Saq/v7h56rgJKihcrdv6sVIkkLE8/trKnToyokZf7 | |
KcZ7XC25y2a2t6hbElGFtQl+Ynhw/qlqYLYdDnkM/crqJIByw5c/8nerQyIKx+u2 | |
DISCLIBrQYoIwOula9+ZEsuK1V6ADJHgJgg2SMX6OBE1/yWDLfJ6v9r9jv6ly0Us | |
H8SIU653DtmadsWOLB2jutXsMq7Aqqz30XpN69QH4kj3Io6wpJ9qzo6ysmD0oyLQ | |
I+uUWnpp3Q+/QFesa1lQ2aOZ4W7+jQF5JyMV3pKdewlNWudLSDBaGOYKbeaP4NK7 | |
5t98biGCwWg5TbSYWGZizEqQXsP6JwSxeRV0mcy+rSDeJmAc61ZRpqPq5KM/p/9h | |
3PFaTWwyI0PurKju7koSCTxdccK+efrCh2gdC/1cacwG0Jp9VJkqyTkaGa9LKkPz | |
Y11aWOIv4x3kqdbQCtCev9eBCfHJxyYNrJgWVqA= | |
-----END CERTIFICATE----- | |
# Issuer: CN=Buypass Class 3 Root CA O=Buypass AS-983163327 | |
# Subject: CN=Buypass Class 3 Root CA O=Buypass AS-983163327 | |
# Label: "Buypass Class 3 Root CA" | |
# Serial: 2 | |
# MD5 Fingerprint: 3d:3b:18:9e:2c:64:5a:e8:d5:88:ce:0e:f9:37:c2:ec | |
# SHA1 Fingerprint: da:fa:f7:fa:66:84:ec:06:8f:14:50:bd:c7:c2:81:a5:bc:a9:64:57 | |
# SHA256 Fingerprint: ed:f7:eb:bc:a2:7a:2a:38:4d:38:7b:7d:40:10:c6:66:e2:ed:b4:84:3e:4c:29:b4:ae:1d:5b:93:32:e6:b2:4d | |
-----BEGIN CERTIFICATE----- | |
MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd | |
MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg | |
Q2xhc3MgMyBSb290IENBMB4XDTEwMTAyNjA4Mjg1OFoXDTQwMTAyNjA4Mjg1OFow | |
TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw | |
HgYDVQQDDBdCdXlwYXNzIENsYXNzIDMgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB | |
BQADggIPADCCAgoCggIBAKXaCpUWUOOV8l6ddjEGMnqb8RB2uACatVI2zSRHsJ8Y | |
ZLya9vrVediQYkwiL944PdbgqOkcLNt4EemOaFEVcsfzM4fkoF0LXOBXByow9c3E | |
N3coTRiR5r/VUv1xLXA+58bEiuPwKAv0dpihi4dVsjoT/Lc+JzeOIuOoTyrvYLs9 | |
tznDDgFHmV0ST9tD+leh7fmdvhFHJlsTmKtdFoqwNxxXnUX/iJY2v7vKB3tvh2PX | |
0DJq1l1sDPGzbjniazEuOQAnFN44wOwZZoYS6J1yFhNkUsepNxz9gjDthBgd9K5c | |
/3ATAOux9TN6S9ZV+AWNS2mw9bMoNlwUxFFzTWsL8TQH2xc519woe2v1n/MuwU8X | |
KhDzzMro6/1rqy6any2CbgTUUgGTLT2G/H783+9CHaZr77kgxve9oKeV/afmiSTY | |
zIw0bOIjL9kSGiG5VZFvC5F5GQytQIgLcOJ60g7YaEi7ghM5EFjp2CoHxhLbWNvS | |
O1UQRwUVZ2J+GGOmRj8JDlQyXr8NYnon74Do29lLBlo3WiXQCBJ31G8JUJc9yB3D | |
34xFMFbG02SrZvPAXpacw8Tvw3xrizp5f7NJzz3iiZ+gMEuFuZyUJHmPfWupRWgP | |
K9Dx2hzLabjKSWJtyNBjYt1gD1iqj6G8BaVmos8bdrKEZLFMOVLAMLrwjEsCsLa3 | |
AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFEe4zf/lb+74suwv | |
Tg75JbCOPGvDMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAACAj | |
QTUEkMJAYmDv4jVM1z+s4jSQuKFvdvoWFqRINyzpkMLyPPgKn9iB5btb2iUspKdV | |
cSQy9sgL8rxq+JOssgfCX5/bzMiKqr5qb+FJEMwx14C7u8jYog5kV+qi9cKpMRXS | |
IGrs/CIBKM+GuIAeqcwRpTzyFrNHnfzSgCHEy9BHcEGhyoMZCCxt8l13nIoUE9Q2 | |
HJLw5QY33KbmkJs4j1xrG0aGQ0JfPgEHU1RdZX33inOhmlRaHylDFCfChQ+1iHsa | |
O5S3HWCntZznKWlXWpuTekMwGwPXYshApqr8ZORK15FTAaggiG6cX0S5y2CBNOxv | |
033aSF/rtJC8LakcC6wc1aJoIIAE1vyxjy+7SjENSoYc6+I2KSb12tjE8nVhz36u | |
dmNKekBlk4f4HoCMhuWG1o8O/FMsYOgWYRqiPkN7zTlgVGr18okmAWiDSKIz6MkE | |
kbIRNBE+6tBDGR8Dk5AM/1E9V/RBbuHLoL7ryWPNbczk+DaqaJ3tvV2XcEQNtg41 | |
3OEMXbugUZTLfhbrES+jkkXITHHZvMmZUldGL1DPvTVp9D0VzgalLA8+9oG6lLvD | |
u79leNKGef9JOxqDDPDeeOzI8k1MGt6CKfjBWtrt7uYnXuhF0J0cUahoq0Tj0Itq | |
4/g7u9xN12TyUb7mqqta6THuBrxzvxNiCp/HuZc= | |
-----END CERTIFICATE----- | |
# Issuer: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center | |
# Subject: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center | |
# Label: "T-TeleSec GlobalRoot Class 3" | |
# Serial: 1 | |
# MD5 Fingerprint: ca:fb:40:a8:4e:39:92:8a:1d:fe:8e:2f:c4:27:ea:ef | |
# SHA1 Fingerprint: 55:a6:72:3e:cb:f2:ec:cd:c3:23:74:70:19:9d:2a:be:11:e3:81:d1 | |
# SHA256 Fingerprint: fd:73:da:d3:1c:64:4f:f1:b4:3b:ef:0c:cd:da:96:71:0b:9c:d9:87:5e:ca:7e:31:70:7a:f3:e9:6d:52:2b:bd | |
-----BEGIN CERTIFICATE----- | |
MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx | |
KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd | |
BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl | |
YyBHbG9iYWxSb290IENsYXNzIDMwHhcNMDgxMDAxMTAyOTU2WhcNMzMxMDAxMjM1 | |
OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy | |
aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50 | |
ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDMwggEiMA0G | |
CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC9dZPwYiJvJK7genasfb3ZJNW4t/zN | |
8ELg63iIVl6bmlQdTQyK9tPPcPRStdiTBONGhnFBSivwKixVA9ZIw+A5OO3yXDw/ | |
RLyTPWGrTs0NvvAgJ1gORH8EGoel15YUNpDQSXuhdfsaa3Ox+M6pCSzyU9XDFES4 | |
hqX2iys52qMzVNn6chr3IhUciJFrf2blw2qAsCTz34ZFiP0Zf3WHHx+xGwpzJFu5 | |
ZeAsVMhg02YXP+HMVDNzkQI6pn97djmiH5a2OK61yJN0HZ65tOVgnS9W0eDrXltM | |
EnAMbEQgqxHY9Bn20pxSN+f6tsIxO0rUFJmtxxr1XV/6B7h8DR/Wgx6zAgMBAAGj | |
QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS1 | |
A/d2O2GCahKqGFPrAyGUv/7OyjANBgkqhkiG9w0BAQsFAAOCAQEAVj3vlNW92nOy | |
WL6ukK2YJ5f+AbGwUgC4TeQbIXQbfsDuXmkqJa9c1h3a0nnJ85cp4IaH3gRZD/FZ | |
1GSFS5mvJQQeyUapl96Cshtwn5z2r3Ex3XsFpSzTucpH9sry9uetuUg/vBa3wW30 | |
6gmv7PO15wWeph6KU1HWk4HMdJP2udqmJQV0eVp+QD6CSyYRMG7hP0HHRwA11fXT | |
91Q+gT3aSWqas+8QPebrb9HIIkfLzM8BMZLZGOMivgkeGj5asuRrDFR6fUNOuIml | |
e9eiPZaGzPImNC1qkp2aGtAw4l1OBLBfiyB+d8E9lYLRRpo7PHi4b6HQDWSieB4p | |
TpPDpFQUWw== | |
-----END CERTIFICATE----- | |
# Issuer: CN=EE Certification Centre Root CA O=AS Sertifitseerimiskeskus | |
# Subject: CN=EE Certification Centre Root CA O=AS Sertifitseerimiskeskus | |
# Label: "EE Certification Centre Root CA" | |
# Serial: 112324828676200291871926431888494945866 | |
# MD5 Fingerprint: 43:5e:88:d4:7d:1a:4a:7e:fd:84:2e:52:eb:01:d4:6f | |
# SHA1 Fingerprint: c9:a8:b9:e7:55:80:5e:58:e3:53:77:a7:25:eb:af:c3:7b:27:cc:d7 | |
# SHA256 Fingerprint: 3e:84:ba:43:42:90:85:16:e7:75:73:c0:99:2f:09:79:ca:08:4e:46:85:68:1f:f1:95:cc:ba:8a:22:9b:8a:76 | |
-----BEGIN CERTIFICATE----- | |
MIIEAzCCAuugAwIBAgIQVID5oHPtPwBMyonY43HmSjANBgkqhkiG9w0BAQUFADB1 | |
MQswCQYDVQQGEwJFRTEiMCAGA1UECgwZQVMgU2VydGlmaXRzZWVyaW1pc2tlc2t1 | |
czEoMCYGA1UEAwwfRUUgQ2VydGlmaWNhdGlvbiBDZW50cmUgUm9vdCBDQTEYMBYG | |
CSqGSIb3DQEJARYJcGtpQHNrLmVlMCIYDzIwMTAxMDMwMTAxMDMwWhgPMjAzMDEy | |
MTcyMzU5NTlaMHUxCzAJBgNVBAYTAkVFMSIwIAYDVQQKDBlBUyBTZXJ0aWZpdHNl | |
ZXJpbWlza2Vza3VzMSgwJgYDVQQDDB9FRSBDZXJ0aWZpY2F0aW9uIENlbnRyZSBS | |
b290IENBMRgwFgYJKoZIhvcNAQkBFglwa2lAc2suZWUwggEiMA0GCSqGSIb3DQEB | |
AQUAA4IBDwAwggEKAoIBAQDIIMDs4MVLqwd4lfNE7vsLDP90jmG7sWLqI9iroWUy | |
euuOF0+W2Ap7kaJjbMeMTC55v6kF/GlclY1i+blw7cNRfdCT5mzrMEvhvH2/UpvO | |
bntl8jixwKIy72KyaOBhU8E2lf/slLo2rpwcpzIP5Xy0xm90/XsY6KxX7QYgSzIw | |
WFv9zajmofxwvI6Sc9uXp3whrj3B9UiHbCe9nyV0gVWw93X2PaRka9ZP585ArQ/d | |
MtO8ihJTmMmJ+xAdTX7Nfh9WDSFwhfYggx/2uh8Ej+p3iDXE/+pOoYtNP2MbRMNE | |
1CV2yreN1x5KZmTNXMWcg+HCCIia7E6j8T4cLNlsHaFLAgMBAAGjgYowgYcwDwYD | |
VR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFBLyWj7qVhy/ | |
zQas8fElyalL1BSZMEUGA1UdJQQ+MDwGCCsGAQUFBwMCBggrBgEFBQcDAQYIKwYB | |
BQUHAwMGCCsGAQUFBwMEBggrBgEFBQcDCAYIKwYBBQUHAwkwDQYJKoZIhvcNAQEF | |
BQADggEBAHv25MANqhlHt01Xo/6tu7Fq1Q+e2+RjxY6hUFaTlrg4wCQiZrxTFGGV | |
v9DHKpY5P30osxBAIWrEr7BSdxjhlthWXePdNl4dp1BUoMUq5KqMlIpPnTX/dqQG | |
E5Gion0ARD9V04I8GtVbvFZMIi5GQ4okQC3zErg7cBqklrkar4dBGmoYDQZPxz5u | |
uSlNDUmJEYcyW+ZLBMjkXOZ0c5RdFpgTlf7727FE5TpwrDdr5rMzcijJs1eg9gIW | |
iAYLtqZLICjU3j2LrTcFU3T+bsy8QxdxXvnFzBqpYe73dgzzcvRyrc9yAjYHR8/v | |
GVCJYMzpJJUPwssd8m92kMfMdcGWxZ0= | |
-----END CERTIFICATE----- | |
# Issuer: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH | |
# Subject: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH | |
# Label: "D-TRUST Root Class 3 CA 2 2009" | |
# Serial: 623603 | |
# MD5 Fingerprint: cd:e0:25:69:8d:47:ac:9c:89:35:90:f7:fd:51:3d:2f | |
# SHA1 Fingerprint: 58:e8:ab:b0:36:15:33:fb:80:f7:9b:1b:6d:29:d3:ff:8d:5f:00:f0 | |
# SHA256 Fingerprint: 49:e7:a4:42:ac:f0:ea:62:87:05:00:54:b5:25:64:b6:50:e4:f4:9e:42:e3:48:d6:aa:38:e0:39:e9:57:b1:c1 | |
-----BEGIN CERTIFICATE----- | |
MIIEMzCCAxugAwIBAgIDCYPzMA0GCSqGSIb3DQEBCwUAME0xCzAJBgNVBAYTAkRF | |
MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMMHkQtVFJVU1QgUm9vdCBD | |
bGFzcyAzIENBIDIgMjAwOTAeFw0wOTExMDUwODM1NThaFw0yOTExMDUwODM1NTha | |
ME0xCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMM | |
HkQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgMjAwOTCCASIwDQYJKoZIhvcNAQEB | |
BQADggEPADCCAQoCggEBANOySs96R+91myP6Oi/WUEWJNTrGa9v+2wBoqOADER03 | |
UAifTUpolDWzU9GUY6cgVq/eUXjsKj3zSEhQPgrfRlWLJ23DEE0NkVJD2IfgXU42 | |
tSHKXzlABF9bfsyjxiupQB7ZNoTWSPOSHjRGICTBpFGOShrvUD9pXRl/RcPHAY9R | |
ySPocq60vFYJfxLLHLGvKZAKyVXMD9O0Gu1HNVpK7ZxzBCHQqr0ME7UAyiZsxGsM | |
lFqVlNpQmvH/pStmMaTJOKDfHR+4CS7zp+hnUquVH+BGPtikw8paxTGA6Eian5Rp | |
/hnd2HN8gcqW3o7tszIFZYQ05ub9VxC1X3a/L7AQDcUCAwEAAaOCARowggEWMA8G | |
A1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFP3aFMSfMN4hvR5COfyrYyNJ4PGEMA4G | |
A1UdDwEB/wQEAwIBBjCB0wYDVR0fBIHLMIHIMIGAoH6gfIZ6bGRhcDovL2RpcmVj | |
dG9yeS5kLXRydXN0Lm5ldC9DTj1ELVRSVVNUJTIwUm9vdCUyMENsYXNzJTIwMyUy | |
MENBJTIwMiUyMDIwMDksTz1ELVRydXN0JTIwR21iSCxDPURFP2NlcnRpZmljYXRl | |
cmV2b2NhdGlvbmxpc3QwQ6BBoD+GPWh0dHA6Ly93d3cuZC10cnVzdC5uZXQvY3Js | |
L2QtdHJ1c3Rfcm9vdF9jbGFzc18zX2NhXzJfMjAwOS5jcmwwDQYJKoZIhvcNAQEL | |
BQADggEBAH+X2zDI36ScfSF6gHDOFBJpiBSVYEQBrLLpME+bUMJm2H6NMLVwMeni | |
acfzcNsgFYbQDfC+rAF1hM5+n02/t2A7nPPKHeJeaNijnZflQGDSNiH+0LS4F9p0 | |
o3/U37CYAqxva2ssJSRyoWXuJVrl5jLn8t+rSfrzkGkj2wTZ51xY/GXUl77M/C4K | |
zCUqNQT4YJEVdT1B/yMfGchs64JTBKbkTCJNjYy6zltz7GRUUG3RnFX7acM2w4y8 | |
PIWmawomDeCTmGCufsYkl4phX5GOZpIJhzbNi5stPvZR1FDUWSi9g/LMKHtThm3Y | |
Johw1+qRzT65ysCQblrGXnRl11z+o+I= | |
-----END CERTIFICATE----- | |
# Issuer: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH | |
# Subject: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH | |
# Label: "D-TRUST Root Class 3 CA 2 EV 2009" | |
# Serial: 623604 | |
# MD5 Fingerprint: aa:c6:43:2c:5e:2d:cd:c4:34:c0:50:4f:11:02:4f:b6 | |
# SHA1 Fingerprint: 96:c9:1b:0b:95:b4:10:98:42:fa:d0:d8:22:79:fe:60:fa:b9:16:83 | |
# SHA256 Fingerprint: ee:c5:49:6b:98:8c:e9:86:25:b9:34:09:2e:ec:29:08:be:d0:b0:f3:16:c2:d4:73:0c:84:ea:f1:f3:d3:48:81 | |
-----BEGIN CERTIFICATE----- | |
MIIEQzCCAyugAwIBAgIDCYP0MA0GCSqGSIb3DQEBCwUAMFAxCzAJBgNVBAYTAkRF | |
MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNVBAMMIUQtVFJVU1QgUm9vdCBD | |
bGFzcyAzIENBIDIgRVYgMjAwOTAeFw0wOTExMDUwODUwNDZaFw0yOTExMDUwODUw | |
NDZaMFAxCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNV | |
BAMMIUQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgRVYgMjAwOTCCASIwDQYJKoZI | |
hvcNAQEBBQADggEPADCCAQoCggEBAJnxhDRwui+3MKCOvXwEz75ivJn9gpfSegpn | |
ljgJ9hBOlSJzmY3aFS3nBfwZcyK3jpgAvDw9rKFs+9Z5JUut8Mxk2og+KbgPCdM0 | |
3TP1YtHhzRnp7hhPTFiu4h7WDFsVWtg6uMQYZB7jM7K1iXdODL/ZlGsTl28So/6Z | |
qQTMFexgaDbtCHu39b+T7WYxg4zGcTSHThfqr4uRjRxWQa4iN1438h3Z0S0NL2lR | |
p75mpoo6Kr3HGrHhFPC+Oh25z1uxav60sUYgovseO3Dvk5h9jHOW8sXvhXCtKSb8 | |
HgQ+HKDYD8tSg2J87otTlZCpV6LqYQXY+U3EJ/pure3511H3a6UCAwEAAaOCASQw | |
ggEgMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFNOUikxiEyoZLsyvcop9Ntea | |
HNxnMA4GA1UdDwEB/wQEAwIBBjCB3QYDVR0fBIHVMIHSMIGHoIGEoIGBhn9sZGFw | |
Oi8vZGlyZWN0b3J5LmQtdHJ1c3QubmV0L0NOPUQtVFJVU1QlMjBSb290JTIwQ2xh | |
c3MlMjAzJTIwQ0ElMjAyJTIwRVYlMjAyMDA5LE89RC1UcnVzdCUyMEdtYkgsQz1E | |
RT9jZXJ0aWZpY2F0ZXJldm9jYXRpb25saXN0MEagRKBChkBodHRwOi8vd3d3LmQt | |
dHJ1c3QubmV0L2NybC9kLXRydXN0X3Jvb3RfY2xhc3NfM19jYV8yX2V2XzIwMDku | |
Y3JsMA0GCSqGSIb3DQEBCwUAA4IBAQA07XtaPKSUiO8aEXUHL7P+PPoeUSbrh/Yp | |
3uDx1MYkCenBz1UbtDDZzhr+BlGmFaQt77JLvyAoJUnRpjZ3NOhk31KxEcdzes05 | |
nsKtjHEh8lprr988TlWvsoRlFIm5d8sqMb7Po23Pb0iUMkZv53GMoKaEGTcH8gNF | |
CSuGdXzfX2lXANtu2KZyIktQ1HWYVt+3GP9DQ1CuekR78HlR10M9p9OB0/DJT7na | |
xpeG0ILD5EJt/rDiZE4OJudANCa1CInXCGNjOCd1HjPqbqjdn5lPdE2BiYBL3ZqX | |
KVwvvoFBuYz/6n1gBp7N1z3TLqMVvKjmJuVvw9y4AyHqnxbxLFS1 | |
-----END CERTIFICATE----- | |
# Issuer: CN=CA Disig Root R2 O=Disig a.s. | |
# Subject: CN=CA Disig Root R2 O=Disig a.s. | |
# Label: "CA Disig Root R2" | |
# Serial: 10572350602393338211 | |
# MD5 Fingerprint: 26:01:fb:d8:27:a7:17:9a:45:54:38:1a:43:01:3b:03 | |
# SHA1 Fingerprint: b5:61:eb:ea:a4:de:e4:25:4b:69:1a:98:a5:57:47:c2:34:c7:d9:71 | |
# SHA256 Fingerprint: e2:3d:4a:03:6d:7b:70:e9:f5:95:b1:42:20:79:d2:b9:1e:df:bb:1f:b6:51:a0:63:3e:aa:8a:9d:c5:f8:07:03 | |
-----BEGIN CERTIFICATE----- | |
MIIFaTCCA1GgAwIBAgIJAJK4iNuwisFjMA0GCSqGSIb3DQEBCwUAMFIxCzAJBgNV | |
BAYTAlNLMRMwEQYDVQQHEwpCcmF0aXNsYXZhMRMwEQYDVQQKEwpEaXNpZyBhLnMu | |
MRkwFwYDVQQDExBDQSBEaXNpZyBSb290IFIyMB4XDTEyMDcxOTA5MTUzMFoXDTQy | |
MDcxOTA5MTUzMFowUjELMAkGA1UEBhMCU0sxEzARBgNVBAcTCkJyYXRpc2xhdmEx | |
EzARBgNVBAoTCkRpc2lnIGEucy4xGTAXBgNVBAMTEENBIERpc2lnIFJvb3QgUjIw | |
ggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCio8QACdaFXS1tFPbCw3Oe | |
NcJxVX6B+6tGUODBfEl45qt5WDza/3wcn9iXAng+a0EE6UG9vgMsRfYvZNSrXaNH | |
PWSb6WiaxswbP7q+sos0Ai6YVRn8jG+qX9pMzk0DIaPY0jSTVpbLTAwAFjxfGs3I | |
x2ymrdMxp7zo5eFm1tL7A7RBZckQrg4FY8aAamkw/dLukO8NJ9+flXP04SXabBbe | |
QTg06ov80egEFGEtQX6sx3dOy1FU+16SGBsEWmjGycT6txOgmLcRK7fWV8x8nhfR | |
yyX+hk4kLlYMeE2eARKmK6cBZW58Yh2EhN/qwGu1pSqVg8NTEQxzHQuyRpDRQjrO | |
QG6Vrf/GlK1ul4SOfW+eioANSW1z4nuSHsPzwfPrLgVv2RvPN3YEyLRa5Beny912 | |
H9AZdugsBbPWnDTYltxhh5EF5EQIM8HauQhl1K6yNg3ruji6DOWbnuuNZt2Zz9aJ | |
QfYEkoopKW1rOhzndX0CcQ7zwOe9yxndnWCywmZgtrEE7snmhrmaZkCo5xHtgUUD | |
i/ZnWejBBhG93c+AAk9lQHhcR1DIm+YfgXvkRKhbhZri3lrVx/k6RGZL5DJUfORs | |
nLMOPReisjQS1n6yqEm70XooQL6iFh/f5DcfEXP7kAplQ6INfPgGAVUzfbANuPT1 | |
rqVCV3w2EYx7XsQDnYx5nQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1Ud | |
DwEB/wQEAwIBBjAdBgNVHQ4EFgQUtZn4r7CU9eMg1gqtzk5WpC5uQu0wDQYJKoZI | |
hvcNAQELBQADggIBACYGXnDnZTPIgm7ZnBc6G3pmsgH2eDtpXi/q/075KMOYKmFM | |
tCQSin1tERT3nLXK5ryeJ45MGcipvXrA1zYObYVybqjGom32+nNjf7xueQgcnYqf | |
GopTpti72TVVsRHFqQOzVju5hJMiXn7B9hJSi+osZ7z+Nkz1uM/Rs0mSO9MpDpkb | |
lvdhuDvEK7Z4bLQjb/D907JedR+Zlais9trhxTF7+9FGs9K8Z7RiVLoJ92Owk6Ka | |
+elSLotgEqv89WBW7xBci8QaQtyDW2QOy7W81k/BfDxujRNt+3vrMNDcTa/F1bal | |
TFtxyegxvug4BkihGuLq0t4SOVga/4AOgnXmt8kHbA7v/zjxmHHEt38OFdAlab0i | |
nSvtBfZGR6ztwPDUO+Ls7pZbkBNOHlY667DvlruWIxG68kOGdGSVyCh13x01utI3 | |
gzhTODY7z2zp+WsO0PsE6E9312UBeIYMej4hYvF/Y3EMyZ9E26gnonW+boE+18Dr | |
G5gPcFw0sorMwIUY6256s/daoQe/qUKS82Ail+QUoQebTnbAjn39pCXHR+3/H3Os | |
zMOl6W8KjptlwlCFtaOgUxLMVYdh84GuEEZhvUQhuMI9dM9+JDX6HAcOmz0iyu8x | |
L4ysEr3vQCj8KWefshNPZiTEUxnpHikV7+ZtsH8tZ/3zbBt1RqPlShfppNcL | |
-----END CERTIFICATE----- | |
# Issuer: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV | |
# Subject: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV | |
# Label: "ACCVRAIZ1" | |
# Serial: 6828503384748696800 | |
# MD5 Fingerprint: d0:a0:5a:ee:05:b6:09:94:21:a1:7d:f1:b2:29:82:02 | |
# SHA1 Fingerprint: 93:05:7a:88:15:c6:4f:ce:88:2f:fa:91:16:52:28:78:bc:53:64:17 | |
# SHA256 Fingerprint: 9a:6e:c0:12:e1:a7:da:9d:be:34:19:4d:47:8a:d7:c0:db:18:22:fb:07:1d:f1:29:81:49:6e:d1:04:38:41:13 | |
-----BEGIN CERTIFICATE----- | |
MIIH0zCCBbugAwIBAgIIXsO3pkN/pOAwDQYJKoZIhvcNAQEFBQAwQjESMBAGA1UE | |
AwwJQUNDVlJBSVoxMRAwDgYDVQQLDAdQS0lBQ0NWMQ0wCwYDVQQKDARBQ0NWMQsw | |
CQYDVQQGEwJFUzAeFw0xMTA1MDUwOTM3MzdaFw0zMDEyMzEwOTM3MzdaMEIxEjAQ | |
BgNVBAMMCUFDQ1ZSQUlaMTEQMA4GA1UECwwHUEtJQUNDVjENMAsGA1UECgwEQUND | |
VjELMAkGA1UEBhMCRVMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCb | |
qau/YUqXry+XZpp0X9DZlv3P4uRm7x8fRzPCRKPfmt4ftVTdFXxpNRFvu8gMjmoY | |
HtiP2Ra8EEg2XPBjs5BaXCQ316PWywlxufEBcoSwfdtNgM3802/J+Nq2DoLSRYWo | |
G2ioPej0RGy9ocLLA76MPhMAhN9KSMDjIgro6TenGEyxCQ0jVn8ETdkXhBilyNpA | |
lHPrzg5XPAOBOp0KoVdDaaxXbXmQeOW1tDvYvEyNKKGno6e6Ak4l0Squ7a4DIrhr | |
IA8wKFSVf+DuzgpmndFALW4ir50awQUZ0m/A8p/4e7MCQvtQqR0tkw8jq8bBD5L/ | |
0KIV9VMJcRz/RROE5iZe+OCIHAr8Fraocwa48GOEAqDGWuzndN9wrqODJerWx5eH | |
k6fGioozl2A3ED6XPm4pFdahD9GILBKfb6qkxkLrQaLjlUPTAYVtjrs78yM2x/47 | |
4KElB0iryYl0/wiPgL/AlmXz7uxLaL2diMMxs0Dx6M/2OLuc5NF/1OVYm3z61PMO | |
m3WR5LpSLhl+0fXNWhn8ugb2+1KoS5kE3fj5tItQo05iifCHJPqDQsGH+tUtKSpa | |
cXpkatcnYGMN285J9Y0fkIkyF/hzQ7jSWpOGYdbhdQrqeWZ2iE9x6wQl1gpaepPl | |
uUsXQA+xtrn13k/c4LOsOxFwYIRKQ26ZIMApcQrAZQIDAQABo4ICyzCCAscwfQYI | |
KwYBBQUHAQEEcTBvMEwGCCsGAQUFBzAChkBodHRwOi8vd3d3LmFjY3YuZXMvZmls | |
ZWFkbWluL0FyY2hpdm9zL2NlcnRpZmljYWRvcy9yYWl6YWNjdjEuY3J0MB8GCCsG | |
AQUFBzABhhNodHRwOi8vb2NzcC5hY2N2LmVzMB0GA1UdDgQWBBTSh7Tj3zcnk1X2 | |
VuqB5TbMjB4/vTAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFNKHtOPfNyeT | |
VfZW6oHlNsyMHj+9MIIBcwYDVR0gBIIBajCCAWYwggFiBgRVHSAAMIIBWDCCASIG | |
CCsGAQUFBwICMIIBFB6CARAAQQB1AHQAbwByAGkAZABhAGQAIABkAGUAIABDAGUA | |
cgB0AGkAZgBpAGMAYQBjAGkA8wBuACAAUgBhAO0AegAgAGQAZQAgAGwAYQAgAEEA | |
QwBDAFYAIAAoAEEAZwBlAG4AYwBpAGEAIABkAGUAIABUAGUAYwBuAG8AbABvAGcA | |
7QBhACAAeQAgAEMAZQByAHQAaQBmAGkAYwBhAGMAaQDzAG4AIABFAGwAZQBjAHQA | |
cgDzAG4AaQBjAGEALAAgAEMASQBGACAAUQA0ADYAMAAxADEANQA2AEUAKQAuACAA | |
QwBQAFMAIABlAG4AIABoAHQAdABwADoALwAvAHcAdwB3AC4AYQBjAGMAdgAuAGUA | |
czAwBggrBgEFBQcCARYkaHR0cDovL3d3dy5hY2N2LmVzL2xlZ2lzbGFjaW9uX2Mu | |
aHRtMFUGA1UdHwROMEwwSqBIoEaGRGh0dHA6Ly93d3cuYWNjdi5lcy9maWxlYWRt | |
aW4vQXJjaGl2b3MvY2VydGlmaWNhZG9zL3JhaXphY2N2MV9kZXIuY3JsMA4GA1Ud | |
DwEB/wQEAwIBBjAXBgNVHREEEDAOgQxhY2N2QGFjY3YuZXMwDQYJKoZIhvcNAQEF | |
BQADggIBAJcxAp/n/UNnSEQU5CmH7UwoZtCPNdpNYbdKl02125DgBS4OxnnQ8pdp | |
D70ER9m+27Up2pvZrqmZ1dM8MJP1jaGo/AaNRPTKFpV8M9xii6g3+CfYCS0b78gU | |
JyCpZET/LtZ1qmxNYEAZSUNUY9rizLpm5U9EelvZaoErQNV/+QEnWCzI7UiRfD+m | |
AM/EKXMRNt6GGT6d7hmKG9Ww7Y49nCrADdg9ZuM8Db3VlFzi4qc1GwQA9j9ajepD | |
vV+JHanBsMyZ4k0ACtrJJ1vnE5Bc5PUzolVt3OAJTS+xJlsndQAJxGJ3KQhfnlms | |
tn6tn1QwIgPBHnFk/vk4CpYY3QIUrCPLBhwepH2NDd4nQeit2hW3sCPdK6jT2iWH | |
7ehVRE2I9DZ+hJp4rPcOVkkO1jMl1oRQQmwgEh0q1b688nCBpHBgvgW1m54ERL5h | |
I6zppSSMEYCUWqKiuUnSwdzRp+0xESyeGabu4VXhwOrPDYTkF7eifKXeVSUG7szA | |
h1xA2syVP1XgNce4hL60Xc16gwFy7ofmXx2utYXGJt/mwZrpHgJHnyqobalbz+xF | |
d3+YJ5oyXSrjhO7FmGYvliAd3djDJ9ew+f7Zfc3Qn48LFFhRny+Lwzgt3uiP1o2H | |
pPVWQxaZLPSkVrQ0uGE3ycJYgBugl6H8WY3pEfbRD0tVNEYqi4Y7 | |
-----END CERTIFICATE----- | |
# Issuer: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA | |
# Subject: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA | |
# Label: "TWCA Global Root CA" | |
# Serial: 3262 | |
# MD5 Fingerprint: f9:03:7e:cf:e6:9e:3c:73:7a:2a:90:07:69:ff:2b:96 | |
# SHA1 Fingerprint: 9c:bb:48:53:f6:a4:f6:d3:52:a4:e8:32:52:55:60:13:f5:ad:af:65 | |
# SHA256 Fingerprint: 59:76:90:07:f7:68:5d:0f:cd:50:87:2f:9f:95:d5:75:5a:5b:2b:45:7d:81:f3:69:2b:61:0a:98:67:2f:0e:1b | |
-----BEGIN CERTIFICATE----- | |
MIIFQTCCAymgAwIBAgICDL4wDQYJKoZIhvcNAQELBQAwUTELMAkGA1UEBhMCVFcx | |
EjAQBgNVBAoTCVRBSVdBTi1DQTEQMA4GA1UECxMHUm9vdCBDQTEcMBoGA1UEAxMT | |
VFdDQSBHbG9iYWwgUm9vdCBDQTAeFw0xMjA2MjcwNjI4MzNaFw0zMDEyMzExNTU5 | |
NTlaMFExCzAJBgNVBAYTAlRXMRIwEAYDVQQKEwlUQUlXQU4tQ0ExEDAOBgNVBAsT | |
B1Jvb3QgQ0ExHDAaBgNVBAMTE1RXQ0EgR2xvYmFsIFJvb3QgQ0EwggIiMA0GCSqG | |
SIb3DQEBAQUAA4ICDwAwggIKAoICAQCwBdvI64zEbooh745NnHEKH1Jw7W2CnJfF | |
10xORUnLQEK1EjRsGcJ0pDFfhQKX7EMzClPSnIyOt7h52yvVavKOZsTuKwEHktSz | |
0ALfUPZVr2YOy+BHYC8rMjk1Ujoog/h7FsYYuGLWRyWRzvAZEk2tY/XTP3VfKfCh | |
MBwqoJimFb3u/Rk28OKRQ4/6ytYQJ0lM793B8YVwm8rqqFpD/G2Gb3PpN0Wp8DbH | |
zIh1HrtsBv+baz4X7GGqcXzGHaL3SekVtTzWoWH1EfcFbx39Eb7QMAfCKbAJTibc | |
46KokWofwpFFiFzlmLhxpRUZyXx1EcxwdE8tmx2RRP1WKKD+u4ZqyPpcC1jcxkt2 | |
yKsi2XMPpfRaAok/T54igu6idFMqPVMnaR1sjjIsZAAmY2E2TqNGtz99sy2sbZCi | |
laLOz9qC5wc0GZbpuCGqKX6mOL6OKUohZnkfs8O1CWfe1tQHRvMq2uYiN2DLgbYP | |
oA/pyJV/v1WRBXrPPRXAb94JlAGD1zQbzECl8LibZ9WYkTunhHiVJqRaCPgrdLQA | |
BDzfuBSO6N+pjWxnkjMdwLfS7JLIvgm/LCkFbwJrnu+8vyq8W8BQj0FwcYeyTbcE | |
qYSjMq+u7msXi7Kx/mzhkIyIqJdIzshNy/MGz19qCkKxHh53L46g5pIOBvwFItIm | |
4TFRfTLcDwIDAQABoyMwITAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB | |
/zANBgkqhkiG9w0BAQsFAAOCAgEAXzSBdu+WHdXltdkCY4QWwa6gcFGn90xHNcgL | |
1yg9iXHZqjNB6hQbbCEAwGxCGX6faVsgQt+i0trEfJdLjbDorMjupWkEmQqSpqsn | |
LhpNgb+E1HAerUf+/UqdM+DyucRFCCEK2mlpc3INvjT+lIutwx4116KD7+U4x6WF | |
H6vPNOw/KP4M8VeGTslV9xzU2KV9Bnpv1d8Q34FOIWWxtuEXeZVFBs5fzNxGiWNo | |
RI2T9GRwoD2dKAXDOXC4Ynsg/eTb6QihuJ49CcdP+yz4k3ZB3lLg4VfSnQO8d57+ | |
nile98FRYB/e2guyLXW3Q0iT5/Z5xoRdgFlglPx4mI88k1HtQJAH32RjJMtOcQWh | |
15QaiDLxInQirqWm2BJpTGCjAu4r7NRjkgtevi92a6O2JryPA9gK8kxkRr05YuWW | |
6zRjESjMlfGt7+/cgFhI6Uu46mWs6fyAtbXIRfmswZ/ZuepiiI7E8UuDEq3mi4TW | |
nsLrgxifarsbJGAzcMzs9zLzXNl5fe+epP7JI8Mk7hWSsT2RTyaGvWZzJBPqpK5j | |
wa19hAM8EHiGG3njxPPyBJUgriOCxLM6AGK/5jYk4Ve6xx6QddVfP5VhK8E7zeWz | |
aGHQRiapIVJpLesux+t3zqY6tQMzT3bR51xUAV3LePTJDL/PEo4XLSNolOer/qmy | |
KwbQBM0= | |
-----END CERTIFICATE----- | |
# Issuer: CN=TeliaSonera Root CA v1 O=TeliaSonera | |
# Subject: CN=TeliaSonera Root CA v1 O=TeliaSonera | |
# Label: "TeliaSonera Root CA v1" | |
# Serial: 199041966741090107964904287217786801558 | |
# MD5 Fingerprint: 37:41:49:1b:18:56:9a:26:f5:ad:c2:66:fb:40:a5:4c | |
# SHA1 Fingerprint: 43:13:bb:96:f1:d5:86:9b:c1:4e:6a:92:f6:cf:f6:34:69:87:82:37 | |
# SHA256 Fingerprint: dd:69:36:fe:21:f8:f0:77:c1:23:a1:a5:21:c1:22:24:f7:22:55:b7:3e:03:a7:26:06:93:e8:a2:4b:0f:a3:89 | |
-----BEGIN CERTIFICATE----- | |
MIIFODCCAyCgAwIBAgIRAJW+FqD3LkbxezmCcvqLzZYwDQYJKoZIhvcNAQEFBQAw | |
NzEUMBIGA1UECgwLVGVsaWFTb25lcmExHzAdBgNVBAMMFlRlbGlhU29uZXJhIFJv | |
b3QgQ0EgdjEwHhcNMDcxMDE4MTIwMDUwWhcNMzIxMDE4MTIwMDUwWjA3MRQwEgYD | |
VQQKDAtUZWxpYVNvbmVyYTEfMB0GA1UEAwwWVGVsaWFTb25lcmEgUm9vdCBDQSB2 | |
MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMK+6yfwIaPzaSZVfp3F | |
VRaRXP3vIb9TgHot0pGMYzHw7CTww6XScnwQbfQ3t+XmfHnqjLWCi65ItqwA3GV1 | |
7CpNX8GH9SBlK4GoRz6JI5UwFpB/6FcHSOcZrr9FZ7E3GwYq/t75rH2D+1665I+X | |
Z75Ljo1kB1c4VWk0Nj0TSO9P4tNmHqTPGrdeNjPUtAa9GAH9d4RQAEX1jF3oI7x+ | |
/jXh7VB7qTCNGdMJjmhnXb88lxhTuylixcpecsHHltTbLaC0H2kD7OriUPEMPPCs | |
81Mt8Bz17Ww5OXOAFshSsCPN4D7c3TxHoLs1iuKYaIu+5b9y7tL6pe0S7fyYGKkm | |
dtwoSxAgHNN/Fnct7W+A90m7UwW7XWjH1Mh1Fj+JWov3F0fUTPHSiXk+TT2YqGHe | |
Oh7S+F4D4MHJHIzTjU3TlTazN19jY5szFPAtJmtTfImMMsJu7D0hADnJoWjiUIMu | |
sDor8zagrC/kb2HCUQk5PotTubtn2txTuXZZNp1D5SDgPTJghSJRt8czu90VL6R4 | |
pgd7gUY2BIbdeTXHlSw7sKMXNeVzH7RcWe/a6hBle3rQf5+ztCo3O3CLm1u5K7fs | |
slESl1MpWtTwEhDcTwK7EpIvYtQ/aUN8Ddb8WHUBiJ1YFkveupD/RwGJBmr2X7KQ | |
arMCpgKIv7NHfirZ1fpoeDVNAgMBAAGjPzA9MA8GA1UdEwEB/wQFMAMBAf8wCwYD | |
VR0PBAQDAgEGMB0GA1UdDgQWBBTwj1k4ALP1j5qWDNXr+nuqF+gTEjANBgkqhkiG | |
9w0BAQUFAAOCAgEAvuRcYk4k9AwI//DTDGjkk0kiP0Qnb7tt3oNmzqjMDfz1mgbl | |
dxSR651Be5kqhOX//CHBXfDkH1e3damhXwIm/9fH907eT/j3HEbAek9ALCI18Bmx | |
0GtnLLCo4MBANzX2hFxc469CeP6nyQ1Q6g2EdvZR74NTxnr/DlZJLo961gzmJ1Tj | |
TQpgcmLNkQfWpb/ImWvtxBnmq0wROMVvMeJuScg/doAmAyYp4Db29iBT4xdwNBed | |
Y2gea+zDTYa4EzAvXUYNR0PVG6pZDrlcjQZIrXSHX8f8MVRBE+LHIQ6e4B4N4cB7 | |
Q4WQxYpYxmUKeFfyxiMPAdkgS94P+5KFdSpcc41teyWRyu5FrgZLAMzTsVlQ2jqI | |
OylDRl6XK1TOU2+NSueW+r9xDkKLfP0ooNBIytrEgUy7onOTJsjrDNYmiLbAJM+7 | |
vVvrdX3pCI6GMyx5dwlppYn8s3CQh3aP0yK7Qs69cwsgJirQmz1wHiRszYd2qReW | |
t88NkvuOGKmYSdGe/mBEciG5Ge3C9THxOUiIkCR1VBatzvT4aRRkOfujuLpwQMcn | |
HL/EVlP6Y2XQ8xwOFvVrhlhNGNTkDY6lnVuR3HYkUD/GKvvZt5y11ubQ2egZixVx | |
SK236thZiNSQvxaz2emsWWFUyBy6ysHK4bkgTI86k4mloMy/0/Z1pHWWbVY= | |
-----END CERTIFICATE----- | |
# Issuer: CN=E-Tugra Certification Authority O=E-Tu\u011fra EBG Bili\u015fim Teknolojileri ve Hizmetleri A.\u015e. OU=E-Tugra Sertifikasyon Merkezi | |
# Subject: CN=E-Tugra Certification Authority O=E-Tu\u011fra EBG Bili\u015fim Teknolojileri ve Hizmetleri A.\u015e. OU=E-Tugra Sertifikasyon Merkezi | |
# Label: "E-Tugra Certification Authority" | |
# Serial: 7667447206703254355 | |
# MD5 Fingerprint: b8:a1:03:63:b0:bd:21:71:70:8a:6f:13:3a:bb:79:49 | |
# SHA1 Fingerprint: 51:c6:e7:08:49:06:6e:f3:92:d4:5c:a0:0d:6d:a3:62:8f:c3:52:39 | |
# SHA256 Fingerprint: b0:bf:d5:2b:b0:d7:d9:bd:92:bf:5d:4d:c1:3d:a2:55:c0:2c:54:2f:37:83:65:ea:89:39:11:f5:5e:55:f2:3c | |
-----BEGIN CERTIFICATE----- | |
MIIGSzCCBDOgAwIBAgIIamg+nFGby1MwDQYJKoZIhvcNAQELBQAwgbIxCzAJBgNV | |
BAYTAlRSMQ8wDQYDVQQHDAZBbmthcmExQDA+BgNVBAoMN0UtVHXEn3JhIEVCRyBC | |
aWxpxZ9pbSBUZWtub2xvamlsZXJpIHZlIEhpem1ldGxlcmkgQS7Fni4xJjAkBgNV | |
BAsMHUUtVHVncmEgU2VydGlmaWthc3lvbiBNZXJrZXppMSgwJgYDVQQDDB9FLVR1 | |
Z3JhIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTEzMDMwNTEyMDk0OFoXDTIz | |
MDMwMzEyMDk0OFowgbIxCzAJBgNVBAYTAlRSMQ8wDQYDVQQHDAZBbmthcmExQDA+ | |
BgNVBAoMN0UtVHXEn3JhIEVCRyBCaWxpxZ9pbSBUZWtub2xvamlsZXJpIHZlIEhp | |
em1ldGxlcmkgQS7Fni4xJjAkBgNVBAsMHUUtVHVncmEgU2VydGlmaWthc3lvbiBN | |
ZXJrZXppMSgwJgYDVQQDDB9FLVR1Z3JhIENlcnRpZmljYXRpb24gQXV0aG9yaXR5 | |
MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA4vU/kwVRHoViVF56C/UY | |
B4Oufq9899SKa6VjQzm5S/fDxmSJPZQuVIBSOTkHS0vdhQd2h8y/L5VMzH2nPbxH | |
D5hw+IyFHnSOkm0bQNGZDbt1bsipa5rAhDGvykPL6ys06I+XawGb1Q5KCKpbknSF | |
Q9OArqGIW66z6l7LFpp3RMih9lRozt6Plyu6W0ACDGQXwLWTzeHxE2bODHnv0ZEo | |
q1+gElIwcxmOj+GMB6LDu0rw6h8VqO4lzKRG+Bsi77MOQ7osJLjFLFzUHPhdZL3D | |
k14opz8n8Y4e0ypQBaNV2cvnOVPAmJ6MVGKLJrD3fY185MaeZkJVgkfnsliNZvcH | |
fC425lAcP9tDJMW/hkd5s3kc91r0E+xs+D/iWR+V7kI+ua2oMoVJl0b+SzGPWsut | |
dEcf6ZG33ygEIqDUD13ieU/qbIWGvaimzuT6w+Gzrt48Ue7LE3wBf4QOXVGUnhMM | |
ti6lTPk5cDZvlsouDERVxcr6XQKj39ZkjFqzAQqptQpHF//vkUAqjqFGOjGY5RH8 | |
zLtJVor8udBhmm9lbObDyz51Sf6Pp+KJxWfXnUYTTjF2OySznhFlhqt/7x3U+Lzn | |
rFpct1pHXFXOVbQicVtbC/DP3KBhZOqp12gKY6fgDT+gr9Oq0n7vUaDmUStVkhUX | |
U8u3Zg5mTPj5dUyQ5xJwx0UCAwEAAaNjMGEwHQYDVR0OBBYEFC7j27JJ0JxUeVz6 | |
Jyr+zE7S6E5UMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAULuPbsknQnFR5 | |
XPonKv7MTtLoTlQwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAF | |
Nzr0TbdF4kV1JI+2d1LoHNgQk2Xz8lkGpD4eKexd0dCrfOAKkEh47U6YA5n+KGCR | |
HTAduGN8qOY1tfrTYXbm1gdLymmasoR6d5NFFxWfJNCYExL/u6Au/U5Mh/jOXKqY | |
GwXgAEZKgoClM4so3O0409/lPun++1ndYYRP0lSWE2ETPo+Aab6TR7U1Q9Jauz1c | |
77NCR807VRMGsAnb/WP2OogKmW9+4c4bU2pEZiNRCHu8W1Ki/QY3OEBhj0qWuJA3 | |
+GbHeJAAFS6LrVE1Uweoa2iu+U48BybNCAVwzDk/dr2l02cmAYamU9JgO3xDf1WK | |
vJUawSg5TB9D0pH0clmKuVb8P7Sd2nCcdlqMQ1DujjByTd//SffGqWfZbawCEeI6 | |
FiWnWAjLb1NBnEg4R2gz0dfHj9R0IdTDBZB6/86WiLEVKV0jq9BgoRJP3vQXzTLl | |
yb/IQ639Lo7xr+L0mPoSHyDYwKcMhcWQ9DstliaxLL5Mq+ux0orJ23gTDx4JnW2P | |
AJ8C2sH6H3p6CcRK5ogql5+Ji/03X186zjhZhkuvcQu02PJwT58yE+Owp1fl2tpD | |
y4Q08ijE6m30Ku/Ba3ba+367hTzSU8JNvnHhRdH9I2cNE3X7z2VnIp2usAnRCf8d | |
NL/+I5c30jn6PQ0GC7TbO6Orb1wdtn7os4I07QZcJA== | |
-----END CERTIFICATE----- | |
# Issuer: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center | |
# Subject: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center | |
# Label: "T-TeleSec GlobalRoot Class 2" | |
# Serial: 1 | |
# MD5 Fingerprint: 2b:9b:9e:e4:7b:6c:1f:00:72:1a:cc:c1:77:79:df:6a | |
# SHA1 Fingerprint: 59:0d:2d:7d:88:4f:40:2e:61:7e:a5:62:32:17:65:cf:17:d8:94:e9 | |
# SHA256 Fingerprint: 91:e2:f5:78:8d:58:10:eb:a7:ba:58:73:7d:e1:54:8a:8e:ca:cd:01:45:98:bc:0b:14:3e:04:1b:17:05:25:52 | |
-----BEGIN CERTIFICATE----- | |
MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx | |
KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd | |
BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl | |
YyBHbG9iYWxSb290IENsYXNzIDIwHhcNMDgxMDAxMTA0MDE0WhcNMzMxMDAxMjM1 | |
OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy | |
aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50 | |
ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDIwggEiMA0G | |
CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCqX9obX+hzkeXaXPSi5kfl82hVYAUd | |
AqSzm1nzHoqvNK38DcLZSBnuaY/JIPwhqgcZ7bBcrGXHX+0CfHt8LRvWurmAwhiC | |
FoT6ZrAIxlQjgeTNuUk/9k9uN0goOA/FvudocP05l03Sx5iRUKrERLMjfTlH6VJi | |
1hKTXrcxlkIF+3anHqP1wvzpesVsqXFP6st4vGCvx9702cu+fjOlbpSD8DT6Iavq | |
jnKgP6TeMFvvhk1qlVtDRKgQFRzlAVfFmPHmBiiRqiDFt1MmUUOyCxGVWOHAD3bZ | |
wI18gfNycJ5v/hqO2V81xrJvNHy+SE/iWjnX2J14np+GPgNeGYtEotXHAgMBAAGj | |
QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS/ | |
WSA2AHmgoCJrjNXyYdK4LMuCSjANBgkqhkiG9w0BAQsFAAOCAQEAMQOiYQsfdOhy | |
NsZt+U2e+iKo4YFWz827n+qrkRk4r6p8FU3ztqONpfSO9kSpp+ghla0+AGIWiPAC | |
uvxhI+YzmzB6azZie60EI4RYZeLbK4rnJVM3YlNfvNoBYimipidx5joifsFvHZVw | |
IEoHNN/q/xWA5brXethbdXwFeilHfkCoMRN3zUA7tFFHei4R40cR3p1m0IvVVGb6 | |
g1XqfMIpiRvpb7PO4gWEyS8+eIVibslfwXhjdFjASBgMmTnrpMwatXlajRWc2BQN | |
9noHV8cigwUtPJslJj0Ys6lDfMjIq2SPDqO/nBudMNva0Bkuqjzx+zOAduTNrRlP | |
BSeOE6Fuwg== | |
-----END CERTIFICATE----- | |
# Issuer: CN=Atos TrustedRoot 2011 O=Atos | |
# Subject: CN=Atos TrustedRoot 2011 O=Atos | |
# Label: "Atos TrustedRoot 2011" | |
# Serial: 6643877497813316402 | |
# MD5 Fingerprint: ae:b9:c4:32:4b:ac:7f:5d:66:cc:77:94:bb:2a:77:56 | |
# SHA1 Fingerprint: 2b:b1:f5:3e:55:0c:1d:c5:f1:d4:e6:b7:6a:46:4b:55:06:02:ac:21 | |
# SHA256 Fingerprint: f3:56:be:a2:44:b7:a9:1e:b3:5d:53:ca:9a:d7:86:4a:ce:01:8e:2d:35:d5:f8:f9:6d:df:68:a6:f4:1a:a4:74 | |
-----BEGIN CERTIFICATE----- | |
MIIDdzCCAl+gAwIBAgIIXDPLYixfszIwDQYJKoZIhvcNAQELBQAwPDEeMBwGA1UE | |
AwwVQXRvcyBUcnVzdGVkUm9vdCAyMDExMQ0wCwYDVQQKDARBdG9zMQswCQYDVQQG | |
EwJERTAeFw0xMTA3MDcxNDU4MzBaFw0zMDEyMzEyMzU5NTlaMDwxHjAcBgNVBAMM | |
FUF0b3MgVHJ1c3RlZFJvb3QgMjAxMTENMAsGA1UECgwEQXRvczELMAkGA1UEBhMC | |
REUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCVhTuXbyo7LjvPpvMp | |
Nb7PGKw+qtn4TaA+Gke5vJrf8v7MPkfoepbCJI419KkM/IL9bcFyYie96mvr54rM | |
VD6QUM+A1JX76LWC1BTFtqlVJVfbsVD2sGBkWXppzwO3bw2+yj5vdHLqqjAqc2K+ | |
SZFhyBH+DgMq92og3AIVDV4VavzjgsG1xZ1kCWyjWZgHJ8cblithdHFsQ/H3NYkQ | |
4J7sVaE3IqKHBAUsR320HLliKWYoyrfhk/WklAOZuXCFteZI6o1Q/NnezG8HDt0L | |
cp2AMBYHlT8oDv3FdU9T1nSatCQujgKRz3bFmx5VdJx4IbHwLfELn8LVlhgf8FQi | |
eowHAgMBAAGjfTB7MB0GA1UdDgQWBBSnpQaxLKYJYO7Rl+lwrrw7GWzbITAPBgNV | |
HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFKelBrEspglg7tGX6XCuvDsZbNshMBgG | |
A1UdIAQRMA8wDQYLKwYBBAGwLQMEAQEwDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3 | |
DQEBCwUAA4IBAQAmdzTblEiGKkGdLD4GkGDEjKwLVLgfuXvTBznk+j57sj1O7Z8j | |
vZfza1zv7v1Apt+hk6EKhqzvINB5Ab149xnYJDE0BAGmuhWawyfc2E8PzBhj/5kP | |
DpFrdRbhIfzYJsdHt6bPWHJxfrrhTZVHO8mvbaG0weyJ9rQPOLXiZNwlz6bb65pc | |
maHFCN795trV1lpFDMS3wrUU77QR/w4VtfX128a961qn8FYiqTxlVMYVqL2Gns2D | |
lmh6cYGJ4Qvh6hEbaAjMaZ7snkGeRDImeuKHCnE96+RapNLbxc3G3mB/ufNPRJLv | |
KrcYPqcZ2Qt9sTdBQrC6YB3y/gkRsPCHe6ed | |
-----END CERTIFICATE----- | |
# Issuer: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited | |
# Subject: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited | |
# Label: "QuoVadis Root CA 1 G3" | |
# Serial: 687049649626669250736271037606554624078720034195 | |
# MD5 Fingerprint: a4:bc:5b:3f:fe:37:9a:fa:64:f0:e2:fa:05:3d:0b:ab | |
# SHA1 Fingerprint: 1b:8e:ea:57:96:29:1a:c9:39:ea:b8:0a:81:1a:73:73:c0:93:79:67 | |
# SHA256 Fingerprint: 8a:86:6f:d1:b2:76:b5:7e:57:8e:92:1c:65:82:8a:2b:ed:58:e9:f2:f2:88:05:41:34:b7:f1:f4:bf:c9:cc:74 | |
-----BEGIN CERTIFICATE----- | |
MIIFYDCCA0igAwIBAgIUeFhfLq0sGUvjNwc1NBMotZbUZZMwDQYJKoZIhvcNAQEL | |
BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc | |
BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMSBHMzAeFw0xMjAxMTIxNzI3NDRaFw00 | |
MjAxMTIxNzI3NDRaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM | |
aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDEgRzMwggIiMA0GCSqG | |
SIb3DQEBAQUAA4ICDwAwggIKAoICAQCgvlAQjunybEC0BJyFuTHK3C3kEakEPBtV | |
wedYMB0ktMPvhd6MLOHBPd+C5k+tR4ds7FtJwUrVu4/sh6x/gpqG7D0DmVIB0jWe | |
rNrwU8lmPNSsAgHaJNM7qAJGr6Qc4/hzWHa39g6QDbXwz8z6+cZM5cOGMAqNF341 | |
68Xfuw6cwI2H44g4hWf6Pser4BOcBRiYz5P1sZK0/CPTz9XEJ0ngnjybCKOLXSoh | |
4Pw5qlPafX7PGglTvF0FBM+hSo+LdoINofjSxxR3W5A2B4GbPgb6Ul5jxaYA/qXp | |
UhtStZI5cgMJYr2wYBZupt0lwgNm3fME0UDiTouG9G/lg6AnhF4EwfWQvTA9xO+o | |
abw4m6SkltFi2mnAAZauy8RRNOoMqv8hjlmPSlzkYZqn0ukqeI1RPToV7qJZjqlc | |
3sX5kCLliEVx3ZGZbHqfPT2YfF72vhZooF6uCyP8Wg+qInYtyaEQHeTTRCOQiJ/G | |
KubX9ZqzWB4vMIkIG1SitZgj7Ah3HJVdYdHLiZxfokqRmu8hqkkWCKi9YSgxyXSt | |
hfbZxbGL0eUQMk1fiyA6PEkfM4VZDdvLCXVDaXP7a3F98N/ETH3Goy7IlXnLc6KO | |
Tk0k+17kBL5yG6YnLUlamXrXXAkgt3+UuU/xDRxeiEIbEbfnkduebPRq34wGmAOt | |
zCjvpUfzUwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB | |
BjAdBgNVHQ4EFgQUo5fW816iEOGrRZ88F2Q87gFwnMwwDQYJKoZIhvcNAQELBQAD | |
ggIBABj6W3X8PnrHX3fHyt/PX8MSxEBd1DKquGrX1RUVRpgjpeaQWxiZTOOtQqOC | |
MTaIzen7xASWSIsBx40Bz1szBpZGZnQdT+3Btrm0DWHMY37XLneMlhwqI2hrhVd2 | |
cDMT/uFPpiN3GPoajOi9ZcnPP/TJF9zrx7zABC4tRi9pZsMbj/7sPtPKlL92CiUN | |
qXsCHKnQO18LwIE6PWThv6ctTr1NxNgpxiIY0MWscgKCP6o6ojoilzHdCGPDdRS5 | |
YCgtW2jgFqlmgiNR9etT2DGbe+m3nUvriBbP+V04ikkwj+3x6xn0dxoxGE1nVGwv | |
b2X52z3sIexe9PSLymBlVNFxZPT5pqOBMzYzcfCkeF9OrYMh3jRJjehZrJ3ydlo2 | |
8hP0r+AJx2EqbPfgna67hkooby7utHnNkDPDs3b69fBsnQGQ+p6Q9pxyz0fawx/k | |
NSBT8lTR32GDpgLiJTjehTItXnOQUl1CxM49S+H5GYQd1aJQzEH7QRTDvdbJWqNj | |
ZgKAvQU6O0ec7AAmTPWIUb+oI38YB7AL7YsmoWTTYUrrXJ/es69nA7Mf3W1daWhp | |
q1467HxpvMc7hU6eFbm0FU/DlXpY18ls6Wy58yljXrQs8C097Vpl4KlbQMJImYFt | |
nh8GKjwStIsPm6Ik8KaN1nrgS7ZklmOVhMJKzRwuJIczYOXD | |
-----END CERTIFICATE----- | |
# Issuer: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited | |
# Subject: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited | |
# Label: "QuoVadis Root CA 2 G3" | |
# Serial: 390156079458959257446133169266079962026824725800 | |
# MD5 Fingerprint: af:0c:86:6e:bf:40:2d:7f:0b:3e:12:50:ba:12:3d:06 | |
# SHA1 Fingerprint: 09:3c:61:f3:8b:8b:dc:7d:55:df:75:38:02:05:00:e1:25:f5:c8:36 | |
# SHA256 Fingerprint: 8f:e4:fb:0a:f9:3a:4d:0d:67:db:0b:eb:b2:3e:37:c7:1b:f3:25:dc:bc:dd:24:0e:a0:4d:af:58:b4:7e:18:40 | |
-----BEGIN CERTIFICATE----- | |
MIIFYDCCA0igAwIBAgIURFc0JFuBiZs18s64KztbpybwdSgwDQYJKoZIhvcNAQEL | |
BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc | |
BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMiBHMzAeFw0xMjAxMTIxODU5MzJaFw00 | |
MjAxMTIxODU5MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM | |
aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDIgRzMwggIiMA0GCSqG | |
SIb3DQEBAQUAA4ICDwAwggIKAoICAQChriWyARjcV4g/Ruv5r+LrI3HimtFhZiFf | |
qq8nUeVuGxbULX1QsFN3vXg6YOJkApt8hpvWGo6t/x8Vf9WVHhLL5hSEBMHfNrMW | |
n4rjyduYNM7YMxcoRvynyfDStNVNCXJJ+fKH46nafaF9a7I6JaltUkSs+L5u+9ym | |
c5GQYaYDFCDy54ejiK2toIz/pgslUiXnFgHVy7g1gQyjO/Dh4fxaXc6AcW34Sas+ | |
O7q414AB+6XrW7PFXmAqMaCvN+ggOp+oMiwMzAkd056OXbxMmO7FGmh77FOm6RQ1 | |
o9/NgJ8MSPsc9PG/Srj61YxxSscfrf5BmrODXfKEVu+lV0POKa2Mq1W/xPtbAd0j | |
IaFYAI7D0GoT7RPjEiuA3GfmlbLNHiJuKvhB1PLKFAeNilUSxmn1uIZoL1NesNKq | |
IcGY5jDjZ1XHm26sGahVpkUG0CM62+tlXSoREfA7T8pt9DTEceT/AFr2XK4jYIVz | |
8eQQsSWu1ZK7E8EM4DnatDlXtas1qnIhO4M15zHfeiFuuDIIfR0ykRVKYnLP43eh | |
vNURG3YBZwjgQQvD6xVu+KQZ2aKrr+InUlYrAoosFCT5v0ICvybIxo/gbjh9Uy3l | |
7ZizlWNof/k19N+IxWA1ksB8aRxhlRbQ694Lrz4EEEVlWFA4r0jyWbYW8jwNkALG | |
cC4BrTwV1wIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB | |
BjAdBgNVHQ4EFgQU7edvdlq/YOxJW8ald7tyFnGbxD0wDQYJKoZIhvcNAQELBQAD | |
ggIBAJHfgD9DCX5xwvfrs4iP4VGyvD11+ShdyLyZm3tdquXK4Qr36LLTn91nMX66 | |
AarHakE7kNQIXLJgapDwyM4DYvmL7ftuKtwGTTwpD4kWilhMSA/ohGHqPHKmd+RC | |
roijQ1h5fq7KpVMNqT1wvSAZYaRsOPxDMuHBR//47PERIjKWnML2W2mWeyAMQ0Ga | |
W/ZZGYjeVYg3UQt4XAoeo0L9x52ID8DyeAIkVJOviYeIyUqAHerQbj5hLja7NQ4n | |
lv1mNDthcnPxFlxHBlRJAHpYErAK74X9sbgzdWqTHBLmYF5vHX/JHyPLhGGfHoJE | |
+V+tYlUkmlKY7VHnoX6XOuYvHxHaU4AshZ6rNRDbIl9qxV6XU/IyAgkwo1jwDQHV | |
csaxfGl7w/U2Rcxhbl5MlMVerugOXou/983g7aEOGzPuVBj+D77vfoRrQ+NwmNtd | |
dbINWQeFFSM51vHfqSYP1kjHs6Yi9TM3WpVHn3u6GBVv/9YUZINJ0gpnIdsPNWNg | |
KCLjsZWDzYWm3S8P52dSbrsvhXz1SnPnxT7AvSESBT/8twNJAlvIJebiVDj1eYeM | |
HVOyToV7BjjHLPj4sHKNJeV3UvQDHEimUF+IIDBu8oJDqz2XhOdT+yHBTw8imoa4 | |
WSr2Rz0ZiC3oheGe7IUIarFsNMkd7EgrO3jtZsSOeWmD3n+M | |
-----END CERTIFICATE----- | |
# Issuer: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited | |
# Subject: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited | |
# Label: "QuoVadis Root CA 3 G3" | |
# Serial: 268090761170461462463995952157327242137089239581 | |
# MD5 Fingerprint: df:7d:b9:ad:54:6f:68:a1:df:89:57:03:97:43:b0:d7 | |
# SHA1 Fingerprint: 48:12:bd:92:3c:a8:c4:39:06:e7:30:6d:27:96:e6:a4:cf:22:2e:7d | |
# SHA256 Fingerprint: 88:ef:81:de:20:2e:b0:18:45:2e:43:f8:64:72:5c:ea:5f:bd:1f:c2:d9:d2:05:73:07:09:c5:d8:b8:69:0f:46 | |
-----BEGIN CERTIFICATE----- | |
MIIFYDCCA0igAwIBAgIULvWbAiin23r/1aOp7r0DoM8Sah0wDQYJKoZIhvcNAQEL | |
BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc | |
BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMyBHMzAeFw0xMjAxMTIyMDI2MzJaFw00 | |
MjAxMTIyMDI2MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM | |
aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDMgRzMwggIiMA0GCSqG | |
SIb3DQEBAQUAA4ICDwAwggIKAoICAQCzyw4QZ47qFJenMioKVjZ/aEzHs286IxSR | |
/xl/pcqs7rN2nXrpixurazHb+gtTTK/FpRp5PIpM/6zfJd5O2YIyC0TeytuMrKNu | |
FoM7pmRLMon7FhY4futD4tN0SsJiCnMK3UmzV9KwCoWdcTzeo8vAMvMBOSBDGzXR | |
U7Ox7sWTaYI+FrUoRqHe6okJ7UO4BUaKhvVZR74bbwEhELn9qdIoyhA5CcoTNs+c | |
ra1AdHkrAj80//ogaX3T7mH1urPnMNA3I4ZyYUUpSFlob3emLoG+B01vr87ERROR | |
FHAGjx+f+IdpsQ7vw4kZ6+ocYfx6bIrc1gMLnia6Et3UVDmrJqMz6nWB2i3ND0/k | |
A9HvFZcba5DFApCTZgIhsUfei5pKgLlVj7WiL8DWM2fafsSntARE60f75li59wzw | |
eyuxwHApw0BiLTtIadwjPEjrewl5qW3aqDCYz4ByA4imW0aucnl8CAMhZa634Ryl | |
sSqiMd5mBPfAdOhx3v89WcyWJhKLhZVXGqtrdQtEPREoPHtht+KPZ0/l7DxMYIBp | |
VzgeAVuNVejH38DMdyM0SXV89pgR6y3e7UEuFAUCf+D+IOs15xGsIs5XPd7JMG0Q | |
A4XN8f+MFrXBsj6IbGB/kE+V9/YtrQE5BwT6dYB9v0lQ7e/JxHwc64B+27bQ3RP+ | |
ydOc17KXqQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB | |
BjAdBgNVHQ4EFgQUxhfQvKjqAkPyGwaZXSuQILnXnOQwDQYJKoZIhvcNAQELBQAD | |
ggIBADRh2Va1EodVTd2jNTFGu6QHcrxfYWLopfsLN7E8trP6KZ1/AvWkyaiTt3px | |
KGmPc+FSkNrVvjrlt3ZqVoAh313m6Tqe5T72omnHKgqwGEfcIHB9UqM+WXzBusnI | |
FUBhynLWcKzSt/Ac5IYp8M7vaGPQtSCKFWGafoaYtMnCdvvMujAWzKNhxnQT5Wvv | |
oxXqA/4Ti2Tk08HS6IT7SdEQTXlm66r99I0xHnAUrdzeZxNMgRVhvLfZkXdxGYFg | |
u/BYpbWcC/ePIlUnwEsBbTuZDdQdm2NnL9DuDcpmvJRPpq3t/O5jrFc/ZSXPsoaP | |
0Aj/uHYUbt7lJ+yreLVTubY/6CD50qi+YUbKh4yE8/nxoGibIh6BJpsQBJFxwAYf | |
3KDTuVan45gtf4Od34wrnDKOMpTwATwiKp9Dwi7DmDkHOHv8XgBCH/MyJnmDhPbl | |
8MFREsALHgQjDFSlTC9JxUrRtm5gDWv8a4uFJGS3iQ6rJUdbPM9+Sb3H6QrG2vd+ | |
DhcI00iX0HGS8A85PjRqHH3Y8iKuu2n0M7SmSFXRDw4m6Oy2Cy2nhTXN/VnIn9HN | |
PlopNLk9hM6xZdRZkZFWdSHBd575euFgndOtBBj0fOtek49TSiIp+EgrPk2GrFt/ | |
ywaZWWDYWGWVjUTR939+J399roD1B0y2PpxxVJkES/1Y+Zj0 | |
-----END CERTIFICATE----- | |
# Issuer: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com | |
# Subject: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com | |
# Label: "DigiCert Assured ID Root G2" | |
# Serial: 15385348160840213938643033620894905419 | |
# MD5 Fingerprint: 92:38:b9:f8:63:24:82:65:2c:57:33:e6:fe:81:8f:9d | |
# SHA1 Fingerprint: a1:4b:48:d9:43:ee:0a:0e:40:90:4f:3c:e0:a4:c0:91:93:51:5d:3f | |
# SHA256 Fingerprint: 7d:05:eb:b6:82:33:9f:8c:94:51:ee:09:4e:eb:fe:fa:79:53:a1:14:ed:b2:f4:49:49:45:2f:ab:7d:2f:c1:85 | |
-----BEGIN CERTIFICATE----- | |
MIIDljCCAn6gAwIBAgIQC5McOtY5Z+pnI7/Dr5r0SzANBgkqhkiG9w0BAQsFADBl | |
MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 | |
d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv | |
b3QgRzIwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQG | |
EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl | |
cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzIwggEi | |
MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDZ5ygvUj82ckmIkzTz+GoeMVSA | |
n61UQbVH35ao1K+ALbkKz3X9iaV9JPrjIgwrvJUXCzO/GU1BBpAAvQxNEP4Htecc | |
biJVMWWXvdMX0h5i89vqbFCMP4QMls+3ywPgym2hFEwbid3tALBSfK+RbLE4E9Hp | |
EgjAALAcKxHad3A2m67OeYfcgnDmCXRwVWmvo2ifv922ebPynXApVfSr/5Vh88lA | |
bx3RvpO704gqu52/clpWcTs/1PPRCv4o76Pu2ZmvA9OPYLfykqGxvYmJHzDNw6Yu | |
YjOuFgJ3RFrngQo8p0Quebg/BLxcoIfhG69Rjs3sLPr4/m3wOnyqi+RnlTGNAgMB | |
AAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQW | |
BBTOw0q5mVXyuNtgv6l+vVa1lzan1jANBgkqhkiG9w0BAQsFAAOCAQEAyqVVjOPI | |
QW5pJ6d1Ee88hjZv0p3GeDgdaZaikmkuOGybfQTUiaWxMTeKySHMq2zNixya1r9I | |
0jJmwYrA8y8678Dj1JGG0VDjA9tzd29KOVPt3ibHtX2vK0LRdWLjSisCx1BL4Gni | |
lmwORGYQRI+tBev4eaymG+g3NJ1TyWGqolKvSnAWhsI6yLETcDbYz+70CjTVW0z9 | |
B5yiutkBclzzTcHdDrEcDcRjvq30FPuJ7KJBDkzMyFdA0G4Dqs0MjomZmWzwPDCv | |
ON9vvKO+KSAnq3T/EyJ43pdSVR6DtVQgA+6uwE9W3jfMw3+qBCe703e4YtsXfJwo | |
IhNzbM8m9Yop5w== | |
-----END CERTIFICATE----- | |
# Issuer: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com | |
# Subject: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com | |
# Label: "DigiCert Assured ID Root G3" | |
# Serial: 15459312981008553731928384953135426796 | |
# MD5 Fingerprint: 7c:7f:65:31:0c:81:df:8d:ba:3e:99:e2:5c:ad:6e:fb | |
# SHA1 Fingerprint: f5:17:a2:4f:9a:48:c6:c9:f8:a2:00:26:9f:dc:0f:48:2c:ab:30:89 | |
# SHA256 Fingerprint: 7e:37:cb:8b:4c:47:09:0c:ab:36:55:1b:a6:f4:5d:b8:40:68:0f:ba:16:6a:95:2d:b1:00:71:7f:43:05:3f:c2 | |
-----BEGIN CERTIFICATE----- | |
MIICRjCCAc2gAwIBAgIQC6Fa+h3foLVJRK/NJKBs7DAKBggqhkjOPQQDAzBlMQsw | |
CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu | |
ZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3Qg | |
RzMwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQGEwJV | |
UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu | |
Y29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzMwdjAQBgcq | |
hkjOPQIBBgUrgQQAIgNiAAQZ57ysRGXtzbg/WPuNsVepRC0FFfLvC/8QdJ+1YlJf | |
Zn4f5dwbRXkLzMZTCp2NXQLZqVneAlr2lSoOjThKiknGvMYDOAdfVdp+CW7if17Q | |
RSAPWXYQ1qAk8C3eNvJsKTmjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/ | |
BAQDAgGGMB0GA1UdDgQWBBTL0L2p4ZgFUaFNN6KDec6NHSrkhDAKBggqhkjOPQQD | |
AwNnADBkAjAlpIFFAmsSS3V0T8gj43DydXLefInwz5FyYZ5eEJJZVrmDxxDnOOlY | |
JjZ91eQ0hjkCMHw2U/Aw5WJjOpnitqM7mzT6HtoQknFekROn3aRukswy1vUhZscv | |
6pZjamVFkpUBtA== | |
-----END CERTIFICATE----- | |
# Issuer: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com | |
# Subject: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com | |
# Label: "DigiCert Global Root G2" | |
# Serial: 4293743540046975378534879503202253541 | |
# MD5 Fingerprint: e4:a6:8a:c8:54:ac:52:42:46:0a:fd:72:48:1b:2a:44 | |
# SHA1 Fingerprint: df:3c:24:f9:bf:d6:66:76:1b:26:80:73:fe:06:d1:cc:8d:4f:82:a4 | |
# SHA256 Fingerprint: cb:3c:cb:b7:60:31:e5:e0:13:8f:8d:d3:9a:23:f9:de:47:ff:c3:5e:43:c1:14:4c:ea:27:d4:6a:5a:b1:cb:5f | |
-----BEGIN CERTIFICATE----- | |
MIIDjjCCAnagAwIBAgIQAzrx5qcRqaC7KGSxHQn65TANBgkqhkiG9w0BAQsFADBh | |
MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 | |
d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBH | |
MjAeFw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVT | |
MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j | |
b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEcyMIIBIjANBgkqhkiG | |
9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuzfNNNx7a8myaJCtSnX/RrohCgiN9RlUyfuI | |
2/Ou8jqJkTx65qsGGmvPrC3oXgkkRLpimn7Wo6h+4FR1IAWsULecYxpsMNzaHxmx | |
1x7e/dfgy5SDN67sH0NO3Xss0r0upS/kqbitOtSZpLYl6ZtrAGCSYP9PIUkY92eQ | |
q2EGnI/yuum06ZIya7XzV+hdG82MHauVBJVJ8zUtluNJbd134/tJS7SsVQepj5Wz | |
tCO7TG1F8PapspUwtP1MVYwnSlcUfIKdzXOS0xZKBgyMUNGPHgm+F6HmIcr9g+UQ | |
vIOlCsRnKPZzFBQ9RnbDhxSJITRNrw9FDKZJobq7nMWxM4MphQIDAQABo0IwQDAP | |
BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAdBgNVHQ4EFgQUTiJUIBiV | |
5uNu5g/6+rkS7QYXjzkwDQYJKoZIhvcNAQELBQADggEBAGBnKJRvDkhj6zHd6mcY | |
1Yl9PMWLSn/pvtsrF9+wX3N3KjITOYFnQoQj8kVnNeyIv/iPsGEMNKSuIEyExtv4 | |
NeF22d+mQrvHRAiGfzZ0JFrabA0UWTW98kndth/Jsw1HKj2ZL7tcu7XUIOGZX1NG | |
Fdtom/DzMNU+MeKNhJ7jitralj41E6Vf8PlwUHBHQRFXGU7Aj64GxJUTFy8bJZ91 | |
8rGOmaFvE7FBcf6IKshPECBV1/MUReXgRPTqh5Uykw7+U0b6LJ3/iyK5S9kJRaTe | |
pLiaWN0bfVKfjllDiIGknibVb63dDcY3fe0Dkhvld1927jyNxF1WW6LZZm6zNTfl | |
MrY= | |
-----END CERTIFICATE----- | |
# Issuer: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com | |
# Subject: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com | |
# Label: "DigiCert Global Root G3" | |
# Serial: 7089244469030293291760083333884364146 | |
# MD5 Fingerprint: f5:5d:a4:50:a5:fb:28:7e:1e:0f:0d:cc:96:57:56:ca | |
# SHA1 Fingerprint: 7e:04:de:89:6a:3e:66:6d:00:e6:87:d3:3f:fa:d9:3b:e8:3d:34:9e | |
# SHA256 Fingerprint: 31:ad:66:48:f8:10:41:38:c7:38:f3:9e:a4:32:01:33:39:3e:3a:18:cc:02:29:6e:f9:7c:2a:c9:ef:67:31:d0 | |
-----BEGIN CERTIFICATE----- | |
MIICPzCCAcWgAwIBAgIQBVVWvPJepDU1w6QP1atFcjAKBggqhkjOPQQDAzBhMQsw | |
CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu | |
ZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBHMzAe | |
Fw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVTMRUw | |
EwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5jb20x | |
IDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEczMHYwEAYHKoZIzj0CAQYF | |
K4EEACIDYgAE3afZu4q4C/sLfyHS8L6+c/MzXRq8NOrexpu80JX28MzQC7phW1FG | |
fp4tn+6OYwwX7Adw9c+ELkCDnOg/QW07rdOkFFk2eJ0DQ+4QE2xy3q6Ip6FrtUPO | |
Z9wj/wMco+I+o0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAd | |
BgNVHQ4EFgQUs9tIpPmhxdiuNkHMEWNpYim8S8YwCgYIKoZIzj0EAwMDaAAwZQIx | |
AK288mw/EkrRLTnDCgmXc/SINoyIJ7vmiI1Qhadj+Z4y3maTD/HMsQmP3Wyr+mt/ | |
oAIwOWZbwmSNuJ5Q3KjVSaLtx9zRSX8XAbjIho9OjIgrqJqpisXRAL34VOKa5Vt8 | |
sycX | |
-----END CERTIFICATE----- | |
# Issuer: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com | |
# Subject: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com | |
# Label: "DigiCert Trusted Root G4" | |
# Serial: 7451500558977370777930084869016614236 | |
# MD5 Fingerprint: 78:f2:fc:aa:60:1f:2f:b4:eb:c9:37:ba:53:2e:75:49 | |
# SHA1 Fingerprint: dd:fb:16:cd:49:31:c9:73:a2:03:7d:3f:c8:3a:4d:7d:77:5d:05:e4 | |
# SHA256 Fingerprint: 55:2f:7b:dc:f1:a7:af:9e:6c:e6:72:01:7f:4f:12:ab:f7:72:40:c7:8e:76:1a:c2:03:d1:d9:d2:0a:c8:99:88 | |
-----BEGIN CERTIFICATE----- | |
MIIFkDCCA3igAwIBAgIQBZsbV56OITLiOQe9p3d1XDANBgkqhkiG9w0BAQwFADBi | |
MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 | |
d3cuZGlnaWNlcnQuY29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3Qg | |
RzQwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBiMQswCQYDVQQGEwJV | |
UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu | |
Y29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3QgRzQwggIiMA0GCSqG | |
SIb3DQEBAQUAA4ICDwAwggIKAoICAQC/5pBzaN675F1KPDAiMGkz7MKnJS7JIT3y | |
ithZwuEppz1Yq3aaza57G4QNxDAf8xukOBbrVsaXbR2rsnnyyhHS5F/WBTxSD1If | |
xp4VpX6+n6lXFllVcq9ok3DCsrp1mWpzMpTREEQQLt+C8weE5nQ7bXHiLQwb7iDV | |
ySAdYyktzuxeTsiT+CFhmzTrBcZe7FsavOvJz82sNEBfsXpm7nfISKhmV1efVFiO | |
DCu3T6cw2Vbuyntd463JT17lNecxy9qTXtyOj4DatpGYQJB5w3jHtrHEtWoYOAMQ | |
jdjUN6QuBX2I9YI+EJFwq1WCQTLX2wRzKm6RAXwhTNS8rhsDdV14Ztk6MUSaM0C/ | |
CNdaSaTC5qmgZ92kJ7yhTzm1EVgX9yRcRo9k98FpiHaYdj1ZXUJ2h4mXaXpI8OCi | |
EhtmmnTK3kse5w5jrubU75KSOp493ADkRSWJtppEGSt+wJS00mFt6zPZxd9LBADM | |
fRyVw4/3IbKyEbe7f/LVjHAsQWCqsWMYRJUadmJ+9oCw++hkpjPRiQfhvbfmQ6QY | |
uKZ3AeEPlAwhHbJUKSWJbOUOUlFHdL4mrLZBdd56rF+NP8m800ERElvlEFDrMcXK | |
chYiCd98THU/Y+whX8QgUWtvsauGi0/C1kVfnSD8oR7FwI+isX4KJpn15GkvmB0t | |
9dmpsh3lGwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB | |
hjAdBgNVHQ4EFgQU7NfjgtJxXWRM3y5nP+e6mK4cD08wDQYJKoZIhvcNAQEMBQAD | |
ggIBALth2X2pbL4XxJEbw6GiAI3jZGgPVs93rnD5/ZpKmbnJeFwMDF/k5hQpVgs2 | |
SV1EY+CtnJYYZhsjDT156W1r1lT40jzBQ0CuHVD1UvyQO7uYmWlrx8GnqGikJ9yd | |
+SeuMIW59mdNOj6PWTkiU0TryF0Dyu1Qen1iIQqAyHNm0aAFYF/opbSnr6j3bTWc | |
fFqK1qI4mfN4i/RN0iAL3gTujJtHgXINwBQy7zBZLq7gcfJW5GqXb5JQbZaNaHqa | |
sjYUegbyJLkJEVDXCLG4iXqEI2FCKeWjzaIgQdfRnGTZ6iahixTXTBmyUEFxPT9N | |
cCOGDErcgdLMMpSEDQgJlxxPwO5rIHQw0uA5NBCFIRUBCOhVMt5xSdkoF1BN5r5N | |
0XWs0Mr7QbhDparTwwVETyw2m+L64kW4I1NsBm9nVX9GtUw/bihaeSbSpKhil9Ie | |
4u1Ki7wb/UdKDd9nZn6yW0HQO+T0O/QEY+nvwlQAUaCKKsnOeMzV6ocEGLPOr0mI | |
r/OSmbaz5mEP0oUA51Aa5BuVnRmhuZyxm7EAHu/QD09CbMkKvO5D+jpxpchNJqU1 | |
/YldvIViHTLSoCtU7ZpXwdv6EM8Zt4tKG48BtieVU+i2iW1bvGjUI+iLUaJW+fCm | |
gKDWHrO8Dw9TdSmq6hN35N6MgSGtBxBHEa2HPQfRdbzP82Z+ | |
-----END CERTIFICATE----- | |
# Issuer: CN=COMODO RSA Certification Authority O=COMODO CA Limited | |
# Subject: CN=COMODO RSA Certification Authority O=COMODO CA Limited | |
# Label: "COMODO RSA Certification Authority" | |
# Serial: 101909084537582093308941363524873193117 | |
# MD5 Fingerprint: 1b:31:b0:71:40:36:cc:14:36:91:ad:c4:3e:fd:ec:18 | |
# SHA1 Fingerprint: af:e5:d2:44:a8:d1:19:42:30:ff:47:9f:e2:f8:97:bb:cd:7a:8c:b4 | |
# SHA256 Fingerprint: 52:f0:e1:c4:e5:8e:c6:29:29:1b:60:31:7f:07:46:71:b8:5d:7e:a8:0d:5b:07:27:34:63:53:4b:32:b4:02:34 | |
-----BEGIN CERTIFICATE----- | |
MIIF2DCCA8CgAwIBAgIQTKr5yttjb+Af907YWwOGnTANBgkqhkiG9w0BAQwFADCB | |
hTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G | |
A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNV | |
BAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMTE5 | |
MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgT | |
EkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMR | |
Q09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNh | |
dGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCR | |
6FSS0gpWsawNJN3Fz0RndJkrN6N9I3AAcbxT38T6KhKPS38QVr2fcHK3YX/JSw8X | |
pz3jsARh7v8Rl8f0hj4K+j5c+ZPmNHrZFGvnnLOFoIJ6dq9xkNfs/Q36nGz637CC | |
9BR++b7Epi9Pf5l/tfxnQ3K9DADWietrLNPtj5gcFKt+5eNu/Nio5JIk2kNrYrhV | |
/erBvGy2i/MOjZrkm2xpmfh4SDBF1a3hDTxFYPwyllEnvGfDyi62a+pGx8cgoLEf | |
Zd5ICLqkTqnyg0Y3hOvozIFIQ2dOciqbXL1MGyiKXCJ7tKuY2e7gUYPDCUZObT6Z | |
+pUX2nwzV0E8jVHtC7ZcryxjGt9XyD+86V3Em69FmeKjWiS0uqlWPc9vqv9JWL7w | |
qP/0uK3pN/u6uPQLOvnoQ0IeidiEyxPx2bvhiWC4jChWrBQdnArncevPDt09qZah | |
SL0896+1DSJMwBGB7FY79tOi4lu3sgQiUpWAk2nojkxl8ZEDLXB0AuqLZxUpaVIC | |
u9ffUGpVRr+goyhhf3DQw6KqLCGqR84onAZFdr+CGCe01a60y1Dma/RMhnEw6abf | |
Fobg2P9A3fvQQoh/ozM6LlweQRGBY84YcWsr7KaKtzFcOmpH4MN5WdYgGq/yapiq | |
crxXStJLnbsQ/LBMQeXtHT1eKJ2czL+zUdqnR+WEUwIDAQABo0IwQDAdBgNVHQ4E | |
FgQUu69+Aj36pvE8hI6t7jiY7NkyMtQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB | |
/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAArx1UaEt65Ru2yyTUEUAJNMnMvl | |
wFTPoCWOAvn9sKIN9SCYPBMtrFaisNZ+EZLpLrqeLppysb0ZRGxhNaKatBYSaVqM | |
4dc+pBroLwP0rmEdEBsqpIt6xf4FpuHA1sj+nq6PK7o9mfjYcwlYRm6mnPTXJ9OV | |
2jeDchzTc+CiR5kDOF3VSXkAKRzH7JsgHAckaVd4sjn8OoSgtZx8jb8uk2Intzna | |
FxiuvTwJaP+EmzzV1gsD41eeFPfR60/IvYcjt7ZJQ3mFXLrrkguhxuhoqEwWsRqZ | |
CuhTLJK7oQkYdQxlqHvLI7cawiiFwxv/0Cti76R7CZGYZ4wUAc1oBmpjIXUDgIiK | |
boHGhfKppC3n9KUkEEeDys30jXlYsQab5xoq2Z0B15R97QNKyvDb6KkBPvVWmcke | |
jkk9u+UJueBPSZI9FoJAzMxZxuY67RIuaTxslbH9qh17f4a+Hg4yRvv7E491f0yL | |
S0Zj/gA0QHDBw7mh3aZw4gSzQbzpgJHqZJx64SIDqZxubw5lT2yHh17zbqD5daWb | |
QOhTsiedSrnAdyGN/4fy3ryM7xfft0kL0fJuMAsaDk527RH89elWsn2/x20Kk4yl | |
0MC2Hb46TpSi125sC8KKfPog88Tk5c0NqMuRkrF8hey1FGlmDoLnzc7ILaZRfyHB | |
NVOFBkpdn627G190 | |
-----END CERTIFICATE----- | |
# Issuer: CN=USERTrust RSA Certification Authority O=The USERTRUST Network | |
# Subject: CN=USERTrust RSA Certification Authority O=The USERTRUST Network | |
# Label: "USERTrust RSA Certification Authority" | |
# Serial: 2645093764781058787591871645665788717 | |
# MD5 Fingerprint: 1b:fe:69:d1:91:b7:19:33:a3:72:a8:0f:e1:55:e5:b5 | |
# SHA1 Fingerprint: 2b:8f:1b:57:33:0d:bb:a2:d0:7a:6c:51:f7:0e:e9:0d:da:b9:ad:8e | |
# SHA256 Fingerprint: e7:93:c9:b0:2f:d8:aa:13:e2:1c:31:22:8a:cc:b0:81:19:64:3b:74:9c:89:89:64:b1:74:6d:46:c3:d4:cb:d2 | |
-----BEGIN CERTIFICATE----- | |
MIIF3jCCA8agAwIBAgIQAf1tMPyjylGoG7xkDjUDLTANBgkqhkiG9w0BAQwFADCB | |
iDELMAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0pl | |
cnNleSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNV | |
BAMTJVVTRVJUcnVzdCBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAw | |
MjAxMDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNV | |
BAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVU | |
aGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBSU0EgQ2Vy | |
dGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK | |
AoICAQCAEmUXNg7D2wiz0KxXDXbtzSfTTK1Qg2HiqiBNCS1kCdzOiZ/MPans9s/B | |
3PHTsdZ7NygRK0faOca8Ohm0X6a9fZ2jY0K2dvKpOyuR+OJv0OwWIJAJPuLodMkY | |
tJHUYmTbf6MG8YgYapAiPLz+E/CHFHv25B+O1ORRxhFnRghRy4YUVD+8M/5+bJz/ | |
Fp0YvVGONaanZshyZ9shZrHUm3gDwFA66Mzw3LyeTP6vBZY1H1dat//O+T23LLb2 | |
VN3I5xI6Ta5MirdcmrS3ID3KfyI0rn47aGYBROcBTkZTmzNg95S+UzeQc0PzMsNT | |
79uq/nROacdrjGCT3sTHDN/hMq7MkztReJVni+49Vv4M0GkPGw/zJSZrM233bkf6 | |
c0Plfg6lZrEpfDKEY1WJxA3Bk1QwGROs0303p+tdOmw1XNtB1xLaqUkL39iAigmT | |
Yo61Zs8liM2EuLE/pDkP2QKe6xJMlXzzawWpXhaDzLhn4ugTncxbgtNMs+1b/97l | |
c6wjOy0AvzVVdAlJ2ElYGn+SNuZRkg7zJn0cTRe8yexDJtC/QV9AqURE9JnnV4ee | |
UB9XVKg+/XRjL7FQZQnmWEIuQxpMtPAlR1n6BB6T1CZGSlCBst6+eLf8ZxXhyVeE | |
Hg9j1uliutZfVS7qXMYoCAQlObgOK6nyTJccBz8NUvXt7y+CDwIDAQABo0IwQDAd | |
BgNVHQ4EFgQUU3m/WqorSs9UgOHYm8Cd8rIDZsswDgYDVR0PAQH/BAQDAgEGMA8G | |
A1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAFzUfA3P9wF9QZllDHPF | |
Up/L+M+ZBn8b2kMVn54CVVeWFPFSPCeHlCjtHzoBN6J2/FNQwISbxmtOuowhT6KO | |
VWKR82kV2LyI48SqC/3vqOlLVSoGIG1VeCkZ7l8wXEskEVX/JJpuXior7gtNn3/3 | |
ATiUFJVDBwn7YKnuHKsSjKCaXqeYalltiz8I+8jRRa8YFWSQEg9zKC7F4iRO/Fjs | |
8PRF/iKz6y+O0tlFYQXBl2+odnKPi4w2r78NBc5xjeambx9spnFixdjQg3IM8WcR | |
iQycE0xyNN+81XHfqnHd4blsjDwSXWXavVcStkNr/+XeTWYRUc+ZruwXtuhxkYze | |
Sf7dNXGiFSeUHM9h4ya7b6NnJSFd5t0dCy5oGzuCr+yDZ4XUmFF0sbmZgIn/f3gZ | |
XHlKYC6SQK5MNyosycdiyA5d9zZbyuAlJQG03RoHnHcAP9Dc1ew91Pq7P8yF1m9/ | |
qS3fuQL39ZeatTXaw2ewh0qpKJ4jjv9cJ2vhsE/zB+4ALtRZh8tSQZXq9EfX7mRB | |
VXyNWQKV3WKdwrnuWih0hKWbt5DHDAff9Yk2dDLWKMGwsAvgnEzDHNb842m1R0aB | |
L6KCq9NjRHDEjf8tM7qtj3u1cIiuPhnPQCjY/MiQu12ZIvVS5ljFH4gxQ+6IHdfG | |
jjxDah2nGN59PRbxYvnKkKj9 | |
-----END CERTIFICATE----- | |
# Issuer: CN=USERTrust ECC Certification Authority O=The USERTRUST Network | |
# Subject: CN=USERTrust ECC Certification Authority O=The USERTRUST Network | |
# Label: "USERTrust ECC Certification Authority" | |
# Serial: 123013823720199481456569720443997572134 | |
# MD5 Fingerprint: fa:68:bc:d9:b5:7f:ad:fd:c9:1d:06:83:28:cc:24:c1 | |
# SHA1 Fingerprint: d1:cb:ca:5d:b2:d5:2a:7f:69:3b:67:4d:e5:f0:5a:1d:0c:95:7d:f0 | |
# SHA256 Fingerprint: 4f:f4:60:d5:4b:9c:86:da:bf:bc:fc:57:12:e0:40:0d:2b:ed:3f:bc:4d:4f:bd:aa:86:e0:6a:dc:d2:a9:ad:7a | |
-----BEGIN CERTIFICATE----- | |
MIICjzCCAhWgAwIBAgIQXIuZxVqUxdJxVt7NiYDMJjAKBggqhkjOPQQDAzCBiDEL | |
MAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNl | |
eSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMT | |
JVVTRVJUcnVzdCBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMjAx | |
MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNVBAgT | |
Ck5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVUaGUg | |
VVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBFQ0MgQ2VydGlm | |
aWNhdGlvbiBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQarFRaqflo | |
I+d61SRvU8Za2EurxtW20eZzca7dnNYMYf3boIkDuAUU7FfO7l0/4iGzzvfUinng | |
o4N+LZfQYcTxmdwlkWOrfzCjtHDix6EznPO/LlxTsV+zfTJ/ijTjeXmjQjBAMB0G | |
A1UdDgQWBBQ64QmG1M8ZwpZ2dEl23OA1xmNjmjAOBgNVHQ8BAf8EBAMCAQYwDwYD | |
VR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjA2Z6EWCNzklwBBHU6+4WMB | |
zzuqQhFkoJ2UOQIReVx7Hfpkue4WQrO/isIJxOzksU0CMQDpKmFHjFJKS04YcPbW | |
RNZu9YO6bVi9JNlWSOrvxKJGgYhqOkbRqZtNyWHa0V1Xahg= | |
-----END CERTIFICATE----- | |
# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4 | |
# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4 | |
# Label: "GlobalSign ECC Root CA - R4" | |
# Serial: 14367148294922964480859022125800977897474 | |
# MD5 Fingerprint: 20:f0:27:68:d1:7e:a0:9d:0e:e6:2a:ca:df:5c:89:8e | |
# SHA1 Fingerprint: 69:69:56:2e:40:80:f4:24:a1:e7:19:9f:14:ba:f3:ee:58:ab:6a:bb | |
# SHA256 Fingerprint: be:c9:49:11:c2:95:56:76:db:6c:0a:55:09:86:d7:6e:3b:a0:05:66:7c:44:2c:97:62:b4:fb:b7:73:de:22:8c | |
-----BEGIN CERTIFICATE----- | |
MIIB4TCCAYegAwIBAgIRKjikHJYKBN5CsiilC+g0mAIwCgYIKoZIzj0EAwIwUDEk | |
MCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI0MRMwEQYDVQQKEwpH | |
bG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWduMB4XDTEyMTExMzAwMDAwMFoX | |
DTM4MDExOTAzMTQwN1owUDEkMCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBD | |
QSAtIFI0MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWdu | |
MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEuMZ5049sJQ6fLjkZHAOkrprlOQcJ | |
FspjsbmG+IpXwVfOQvpzofdlQv8ewQCybnMO/8ch5RikqtlxP6jUuc6MHaNCMEAw | |
DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFFSwe61F | |
uOJAf/sKbvu+M8k8o4TVMAoGCCqGSM49BAMCA0gAMEUCIQDckqGgE6bPA7DmxCGX | |
kPoUVy0D7O48027KqGx2vKLeuwIgJ6iFJzWbVsaj8kfSt24bAgAXqmemFZHe+pTs | |
ewv4n4Q= | |
-----END CERTIFICATE----- | |
# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5 | |
# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5 | |
# Label: "GlobalSign ECC Root CA - R5" | |
# Serial: 32785792099990507226680698011560947931244 | |
# MD5 Fingerprint: 9f:ad:3b:1c:02:1e:8a:ba:17:74:38:81:0c:a2:bc:08 | |
# SHA1 Fingerprint: 1f:24:c6:30:cd:a4:18:ef:20:69:ff:ad:4f:dd:5f:46:3a:1b:69:aa | |
# SHA256 Fingerprint: 17:9f:bc:14:8a:3d:d0:0f:d2:4e:a1:34:58:cc:43:bf:a7:f5:9c:81:82:d7:83:a5:13:f6:eb:ec:10:0c:89:24 | |
-----BEGIN CERTIFICATE----- | |
MIICHjCCAaSgAwIBAgIRYFlJ4CYuu1X5CneKcflK2GwwCgYIKoZIzj0EAwMwUDEk | |
MCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI1MRMwEQYDVQQKEwpH | |
bG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWduMB4XDTEyMTExMzAwMDAwMFoX | |
DTM4MDExOTAzMTQwN1owUDEkMCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBD | |
QSAtIFI1MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWdu | |
MHYwEAYHKoZIzj0CAQYFK4EEACIDYgAER0UOlvt9Xb/pOdEh+J8LttV7HpI6SFkc | |
8GIxLcB6KP4ap1yztsyX50XUWPrRd21DosCHZTQKH3rd6zwzocWdTaRvQZU4f8ke | |
hOvRnkmSh5SHDDqFSmafnVmTTZdhBoZKo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYD | |
VR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUPeYpSJvqB8ohREom3m7e0oPQn1kwCgYI | |
KoZIzj0EAwMDaAAwZQIxAOVpEslu28YxuglB4Zf4+/2a4n0Sye18ZNPLBSWLVtmg | |
515dTguDnFt2KaAJJiFqYgIwcdK1j1zqO+F4CYWodZI7yFz9SO8NdCKoCOJuxUnO | |
xwy8p2Fp8fc74SrL+SvzZpA3 | |
-----END CERTIFICATE----- | |
# Issuer: CN=Staat der Nederlanden Root CA - G3 O=Staat der Nederlanden | |
# Subject: CN=Staat der Nederlanden Root CA - G3 O=Staat der Nederlanden | |
# Label: "Staat der Nederlanden Root CA - G3" | |
# Serial: 10003001 | |
# MD5 Fingerprint: 0b:46:67:07:db:10:2f:19:8c:35:50:60:d1:0b:f4:37 | |
# SHA1 Fingerprint: d8:eb:6b:41:51:92:59:e0:f3:e7:85:00:c0:3d:b6:88:97:c9:ee:fc | |
# SHA256 Fingerprint: 3c:4f:b0:b9:5a:b8:b3:00:32:f4:32:b8:6f:53:5f:e1:72:c1:85:d0:fd:39:86:58:37:cf:36:18:7f:a6:f4:28 | |
-----BEGIN CERTIFICATE----- | |
MIIFdDCCA1ygAwIBAgIEAJiiOTANBgkqhkiG9w0BAQsFADBaMQswCQYDVQQGEwJO | |
TDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSswKQYDVQQDDCJTdGFh | |
dCBkZXIgTmVkZXJsYW5kZW4gUm9vdCBDQSAtIEczMB4XDTEzMTExNDExMjg0MloX | |
DTI4MTExMzIzMDAwMFowWjELMAkGA1UEBhMCTkwxHjAcBgNVBAoMFVN0YWF0IGRl | |
ciBOZWRlcmxhbmRlbjErMCkGA1UEAwwiU3RhYXQgZGVyIE5lZGVybGFuZGVuIFJv | |
b3QgQ0EgLSBHMzCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAL4yolQP | |
cPssXFnrbMSkUeiFKrPMSjTysF/zDsccPVMeiAho2G89rcKezIJnByeHaHE6n3WW | |
IkYFsO2tx1ueKt6c/DrGlaf1F2cY5y9JCAxcz+bMNO14+1Cx3Gsy8KL+tjzk7FqX | |
xz8ecAgwoNzFs21v0IJyEavSgWhZghe3eJJg+szeP4TrjTgzkApyI/o1zCZxMdFy | |
KJLZWyNtZrVtB0LrpjPOktvA9mxjeM3KTj215VKb8b475lRgsGYeCasH/lSJEULR | |
9yS6YHgamPfJEf0WwTUaVHXvQ9Plrk7O53vDxk5hUUurmkVLoR9BvUhTFXFkC4az | |
5S6+zqQbwSmEorXLCCN2QyIkHxcE1G6cxvx/K2Ya7Irl1s9N9WMJtxU51nus6+N8 | |
6U78dULI7ViVDAZCopz35HCz33JvWjdAidiFpNfxC95DGdRKWCyMijmev4SH8RY7 | |
Ngzp07TKbBlBUgmhHbBqv4LvcFEhMtwFdozL92TkA1CvjJFnq8Xy7ljY3r735zHP | |
bMk7ccHViLVlvMDoFxcHErVc0qsgk7TmgoNwNsXNo42ti+yjwUOH5kPiNL6VizXt | |
BznaqB16nzaeErAMZRKQFWDZJkBE41ZgpRDUajz9QdwOWke275dhdU/Z/seyHdTt | |
XUmzqWrLZoQT1Vyg3N9udwbRcXXIV2+vD3dbAgMBAAGjQjBAMA8GA1UdEwEB/wQF | |
MAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBRUrfrHkleuyjWcLhL75Lpd | |
INyUVzANBgkqhkiG9w0BAQsFAAOCAgEAMJmdBTLIXg47mAE6iqTnB/d6+Oea31BD | |
U5cqPco8R5gu4RV78ZLzYdqQJRZlwJ9UXQ4DO1t3ApyEtg2YXzTdO2PCwyiBwpwp | |
LiniyMMB8jPqKqrMCQj3ZWfGzd/TtiunvczRDnBfuCPRy5FOCvTIeuXZYzbB1N/8 | |
Ipf3YF3qKS9Ysr1YvY2WTxB1v0h7PVGHoTx0IsL8B3+A3MSs/mrBcDCw6Y5p4ixp | |
gZQJut3+TcCDjJRYwEYgr5wfAvg1VUkvRtTA8KCWAg8zxXHzniN9lLf9OtMJgwYh | |
/WA9rjLA0u6NpvDntIJ8CsxwyXmA+P5M9zWEGYox+wrZ13+b8KKaa8MFSu1BYBQw | |
0aoRQm7TIwIEC8Zl3d1Sd9qBa7Ko+gE4uZbqKmxnl4mUnrzhVNXkanjvSr0rmj1A | |
fsbAddJu+2gw7OyLnflJNZoaLNmzlTnVHpL3prllL+U9bTpITAjc5CgSKL59NVzq | |
4BZ+Extq1z7XnvwtdbLBFNUjA9tbbws+eC8N3jONFrdI54OagQ97wUNNVQQXOEpR | |
1VmiiXTTn74eS9fGbbeIJG9gkaSChVtWQbzQRKtqE77RLFi3EjNYsjdj3BP1lB0/ | |
QFH1T/U67cjF68IeHRaVesd+QnGTbksVtzDfqu1XhUisHWrdOWnk4Xl4vs4Fv6EM | |
94B7IWcnMFk= | |
-----END CERTIFICATE----- | |
# Issuer: CN=Staat der Nederlanden EV Root CA O=Staat der Nederlanden | |
# Subject: CN=Staat der Nederlanden EV Root CA O=Staat der Nederlanden | |
# Label: "Staat der Nederlanden EV Root CA" | |
# Serial: 10000013 | |
# MD5 Fingerprint: fc:06:af:7b:e8:1a:f1:9a:b4:e8:d2:70:1f:c0:f5:ba | |
# SHA1 Fingerprint: 76:e2:7e:c1:4f:db:82:c1:c0:a6:75:b5:05:be:3d:29:b4:ed:db:bb | |
# SHA256 Fingerprint: 4d:24:91:41:4c:fe:95:67:46:ec:4c:ef:a6:cf:6f:72:e2:8a:13:29:43:2f:9d:8a:90:7a:c4:cb:5d:ad:c1:5a | |
-----BEGIN CERTIFICATE----- | |
MIIFcDCCA1igAwIBAgIEAJiWjTANBgkqhkiG9w0BAQsFADBYMQswCQYDVQQGEwJO | |
TDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSkwJwYDVQQDDCBTdGFh | |
dCBkZXIgTmVkZXJsYW5kZW4gRVYgUm9vdCBDQTAeFw0xMDEyMDgxMTE5MjlaFw0y | |
MjEyMDgxMTEwMjhaMFgxCzAJBgNVBAYTAk5MMR4wHAYDVQQKDBVTdGFhdCBkZXIg | |
TmVkZXJsYW5kZW4xKTAnBgNVBAMMIFN0YWF0IGRlciBOZWRlcmxhbmRlbiBFViBS | |
b290IENBMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA48d+ifkkSzrS | |
M4M1LGns3Amk41GoJSt5uAg94JG6hIXGhaTK5skuU6TJJB79VWZxXSzFYGgEt9nC | |
UiY4iKTWO0Cmws0/zZiTs1QUWJZV1VD+hq2kY39ch/aO5ieSZxeSAgMs3NZmdO3d | |
Z//BYY1jTw+bbRcwJu+r0h8QoPnFfxZpgQNH7R5ojXKhTbImxrpsX23Wr9GxE46p | |
rfNeaXUmGD5BKyF/7otdBwadQ8QpCiv8Kj6GyzyDOvnJDdrFmeK8eEEzduG/L13l | |
pJhQDBXd4Pqcfzho0LKmeqfRMb1+ilgnQ7O6M5HTp5gVXJrm0w912fxBmJc+qiXb | |
j5IusHsMX/FjqTf5m3VpTCgmJdrV8hJwRVXj33NeN/UhbJCONVrJ0yPr08C+eKxC | |
KFhmpUZtcALXEPlLVPxdhkqHz3/KRawRWrUgUY0viEeXOcDPusBCAUCZSCELa6fS | |
/ZbV0b5GnUngC6agIk440ME8MLxwjyx1zNDFjFE7PZQIZCZhfbnDZY8UnCHQqv0X | |
cgOPvZuM5l5Tnrmd74K74bzickFbIZTTRTeU0d8JOV3nI6qaHcptqAqGhYqCvkIH | |
1vI4gnPah1vlPNOePqc7nvQDs/nxfRN0Av+7oeX6AHkcpmZBiFxgV6YuCcS6/ZrP | |
px9Aw7vMWgpVSzs4dlG4Y4uElBbmVvMCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB | |
/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFP6rAJCYniT8qcwaivsnuL8wbqg7 | |
MA0GCSqGSIb3DQEBCwUAA4ICAQDPdyxuVr5Os7aEAJSrR8kN0nbHhp8dB9O2tLsI | |
eK9p0gtJ3jPFrK3CiAJ9Brc1AsFgyb/E6JTe1NOpEyVa/m6irn0F3H3zbPB+po3u | |
2dfOWBfoqSmuc0iH55vKbimhZF8ZE/euBhD/UcabTVUlT5OZEAFTdfETzsemQUHS | |
v4ilf0X8rLiltTMMgsT7B/Zq5SWEXwbKwYY5EdtYzXc7LMJMD16a4/CrPmEbUCTC | |
wPTxGfARKbalGAKb12NMcIxHowNDXLldRqANb/9Zjr7dn3LDWyvfjFvO5QxGbJKy | |
CqNMVEIYFRIYvdr8unRu/8G2oGTYqV9Vrp9canaW2HNnh/tNf1zuacpzEPuKqf2e | |
vTY4SUmH9A4U8OmHuD+nT3pajnnUk+S7aFKErGzp85hwVXIy+TSrK0m1zSBi5Dp6 | |
Z2Orltxtrpfs/J92VoguZs9btsmksNcFuuEnL5O7Jiqik7Ab846+HUCjuTaPPoIa | |
Gl6I6lD4WeKDRikL40Rc4ZW2aZCaFG+XroHPaO+Zmr615+F/+PoTRxZMzG0IQOeL | |
eG9QgkRQP2YGiqtDhFZKDyAthg710tvSeopLzaXoTvFeJiUBWSOgftL2fiFX1ye8 | |
FVdMpEbB4IMeDExNH08GGeL5qPQ6gqGyeUN51q1veieQA6TqJIc/2b3Z6fJfUEkc | |
7uzXLg== | |
-----END CERTIFICATE----- | |
# Issuer: CN=IdenTrust Commercial Root CA 1 O=IdenTrust | |
# Subject: CN=IdenTrust Commercial Root CA 1 O=IdenTrust | |
# Label: "IdenTrust Commercial Root CA 1" | |
# Serial: 13298821034946342390520003877796839426 | |
# MD5 Fingerprint: b3:3e:77:73:75:ee:a0:d3:e3:7e:49:63:49:59:bb:c7 | |
# SHA1 Fingerprint: df:71:7e:aa:4a:d9:4e:c9:55:84:99:60:2d:48:de:5f:bc:f0:3a:25 | |
# SHA256 Fingerprint: 5d:56:49:9b:e4:d2:e0:8b:cf:ca:d0:8a:3e:38:72:3d:50:50:3b:de:70:69:48:e4:2f:55:60:30:19:e5:28:ae | |
-----BEGIN CERTIFICATE----- | |
MIIFYDCCA0igAwIBAgIQCgFCgAAAAUUjyES1AAAAAjANBgkqhkiG9w0BAQsFADBK | |
MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScwJQYDVQQDEx5JZGVu | |
VHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwHhcNMTQwMTE2MTgxMjIzWhcNMzQw | |
MTE2MTgxMjIzWjBKMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScw | |
JQYDVQQDEx5JZGVuVHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwggIiMA0GCSqG | |
SIb3DQEBAQUAA4ICDwAwggIKAoICAQCnUBneP5k91DNG8W9RYYKyqU+PZ4ldhNlT | |
3Qwo2dfw/66VQ3KZ+bVdfIrBQuExUHTRgQ18zZshq0PirK1ehm7zCYofWjK9ouuU | |
+ehcCuz/mNKvcbO0U59Oh++SvL3sTzIwiEsXXlfEU8L2ApeN2WIrvyQfYo3fw7gp | |
S0l4PJNgiCL8mdo2yMKi1CxUAGc1bnO/AljwpN3lsKImesrgNqUZFvX9t++uP0D1 | |
bVoE/c40yiTcdCMbXTMTEl3EASX2MN0CXZ/g1Ue9tOsbobtJSdifWwLziuQkkORi | |
T0/Br4sOdBeo0XKIanoBScy0RnnGF7HamB4HWfp1IYVl3ZBWzvurpWCdxJ35UrCL | |
vYf5jysjCiN2O/cz4ckA82n5S6LgTrx+kzmEB/dEcH7+B1rlsazRGMzyNeVJSQjK | |
Vsk9+w8YfYs7wRPCTY/JTw436R+hDmrfYi7LNQZReSzIJTj0+kuniVyc0uMNOYZK | |
dHzVWYfCP04MXFL0PfdSgvHqo6z9STQaKPNBiDoT7uje/5kdX7rL6B7yuVBgwDHT | |
c+XvvqDtMwt0viAgxGds8AgDelWAf0ZOlqf0Hj7h9tgJ4TNkK2PXMl6f+cB7D3hv | |
l7yTmvmcEpB4eoCHFddydJxVdHixuuFucAS6T6C6aMN7/zHwcz09lCqxC0EOoP5N | |
iGVreTO01wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB | |
/zAdBgNVHQ4EFgQU7UQZwNPwBovupHu+QucmVMiONnYwDQYJKoZIhvcNAQELBQAD | |
ggIBAA2ukDL2pkt8RHYZYR4nKM1eVO8lvOMIkPkp165oCOGUAFjvLi5+U1KMtlwH | |
6oi6mYtQlNeCgN9hCQCTrQ0U5s7B8jeUeLBfnLOic7iPBZM4zY0+sLj7wM+x8uwt | |
LRvM7Kqas6pgghstO8OEPVeKlh6cdbjTMM1gCIOQ045U8U1mwF10A0Cj7oV+wh93 | |
nAbowacYXVKV7cndJZ5t+qntozo00Fl72u1Q8zW/7esUTTHHYPTa8Yec4kjixsU3 | |
+wYQ+nVZZjFHKdp2mhzpgq7vmrlR94gjmmmVYjzlVYA211QC//G5Xc7UI2/YRYRK | |
W2XviQzdFKcgyxilJbQN+QHwotL0AMh0jqEqSI5l2xPE4iUXfeu+h1sXIFRRk0pT | |
AwvsXcoz7WL9RccvW9xYoIA55vrX/hMUpu09lEpCdNTDd1lzzY9GvlU47/rokTLq | |
l1gEIt44w8y8bckzOmoKaT+gyOpyj4xjhiO9bTyWnpXgSUyqorkqG5w2gXjtw+hG | |
4iZZRHUe2XWJUc0QhJ1hYMtd+ZciTY6Y5uN/9lu7rs3KSoFrXgvzUeF0K+l+J6fZ | |
mUlO+KWA2yUPHGNiiskzZ2s8EIPGrd6ozRaOjfAHN3Gf8qv8QfXBi+wAN10J5U6A | |
7/qxXDgGpRtK4dw4LTzcqx+QGtVKnO7RcGzM7vRX+Bi6hG6H | |
-----END CERTIFICATE----- | |
# Issuer: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust | |
# Subject: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust | |
# Label: "IdenTrust Public Sector Root CA 1" | |
# Serial: 13298821034946342390521976156843933698 | |
# MD5 Fingerprint: 37:06:a5:b0:fc:89:9d:ba:f4:6b:8c:1a:64:cd:d5:ba | |
# SHA1 Fingerprint: ba:29:41:60:77:98:3f:f4:f3:ef:f2:31:05:3b:2e:ea:6d:4d:45:fd | |
# SHA256 Fingerprint: 30:d0:89:5a:9a:44:8a:26:20:91:63:55:22:d1:f5:20:10:b5:86:7a:ca:e1:2c:78:ef:95:8f:d4:f4:38:9f:2f | |
-----BEGIN CERTIFICATE----- | |
MIIFZjCCA06gAwIBAgIQCgFCgAAAAUUjz0Z8AAAAAjANBgkqhkiG9w0BAQsFADBN | |
MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MSowKAYDVQQDEyFJZGVu | |
VHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwHhcNMTQwMTE2MTc1MzMyWhcN | |
MzQwMTE2MTc1MzMyWjBNMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0 | |
MSowKAYDVQQDEyFJZGVuVHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwggIi | |
MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC2IpT8pEiv6EdrCvsnduTyP4o7 | |
ekosMSqMjbCpwzFrqHd2hCa2rIFCDQjrVVi7evi8ZX3yoG2LqEfpYnYeEe4IFNGy | |
RBb06tD6Hi9e28tzQa68ALBKK0CyrOE7S8ItneShm+waOh7wCLPQ5CQ1B5+ctMlS | |
bdsHyo+1W/CD80/HLaXIrcuVIKQxKFdYWuSNG5qrng0M8gozOSI5Cpcu81N3uURF | |
/YTLNiCBWS2ab21ISGHKTN9T0a9SvESfqy9rg3LvdYDaBjMbXcjaY8ZNzaxmMc3R | |
3j6HEDbhuaR672BQssvKplbgN6+rNBM5Jeg5ZuSYeqoSmJxZZoY+rfGwyj4GD3vw | |
EUs3oERte8uojHH01bWRNszwFcYr3lEXsZdMUD2xlVl8BX0tIdUAvwFnol57plzy | |
9yLxkA2T26pEUWbMfXYD62qoKjgZl3YNa4ph+bz27nb9cCvdKTz4Ch5bQhyLVi9V | |
GxyhLrXHFub4qjySjmm2AcG1hp2JDws4lFTo6tyePSW8Uybt1as5qsVATFSrsrTZ | |
2fjXctscvG29ZV/viDUqZi/u9rNl8DONfJhBaUYPQxxp+pu10GFqzcpL2UyQRqsV | |
WaFHVCkugyhfHMKiq3IXAAaOReyL4jM9f9oZRORicsPfIsbyVtTdX5Vy7W1f90gD | |
W/3FKqD2cyOEEBsB5wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/ | |
BAUwAwEB/zAdBgNVHQ4EFgQU43HgntinQtnbcZFrlJPrw6PRFKMwDQYJKoZIhvcN | |
AQELBQADggIBAEf63QqwEZE4rU1d9+UOl1QZgkiHVIyqZJnYWv6IAcVYpZmxI1Qj | |
t2odIFflAWJBF9MJ23XLblSQdf4an4EKwt3X9wnQW3IV5B4Jaj0z8yGa5hV+rVHV | |
DRDtfULAj+7AmgjVQdZcDiFpboBhDhXAuM/FSRJSzL46zNQuOAXeNf0fb7iAaJg9 | |
TaDKQGXSc3z1i9kKlT/YPyNtGtEqJBnZhbMX73huqVjRI9PHE+1yJX9dsXNw0H8G | |
lwmEKYBhHfpe/3OsoOOJuBxxFcbeMX8S3OFtm6/n6J91eEyrRjuazr8FGF1NFTwW | |
mhlQBJqymm9li1JfPFgEKCXAZmExfrngdbkaqIHWchezxQMxNRF4eKLg6TCMf4Df | |
WN88uieW4oA0beOY02QnrEh+KHdcxiVhJfiFDGX6xDIvpZgF5PgLZxYWxoK4Mhn5 | |
+bl53B/N66+rDt0b20XkeucC4pVd/GnwU2lhlXV5C15V5jgclKlZM57IcXR5f1GJ | |
tshquDDIajjDbp7hNxbqBWJMWxJH7ae0s1hWx0nzfxJoCTFx8G34Tkf71oXuxVhA | |
GaQdp/lLQzfcaFpPz+vCZHTetBXZ9FRUGi8c15dxVJCO2SCdUyt/q4/i6jC8UDfv | |
8Ue1fXwsBOxonbRJRBD0ckscZOf85muQ3Wl9af0AVqW3rLatt8o+Ae+c | |
-----END CERTIFICATE----- | |
# Issuer: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. - for authorized use only | |
# Subject: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. - for authorized use only | |
# Label: "Entrust Root Certification Authority - G2" | |
# Serial: 1246989352 | |
# MD5 Fingerprint: 4b:e2:c9:91:96:65:0c:f4:0e:5a:93:92:a0:0a:fe:b2 | |
# SHA1 Fingerprint: 8c:f4:27:fd:79:0c:3a:d1:66:06:8d:e8:1e:57:ef:bb:93:22:72:d4 | |
# SHA256 Fingerprint: 43:df:57:74:b0:3e:7f:ef:5f:e4:0d:93:1a:7b:ed:f1:bb:2e:6b:42:73:8c:4e:6d:38:41:10:3d:3a:a7:f3:39 | |
-----BEGIN CERTIFICATE----- | |
MIIEPjCCAyagAwIBAgIESlOMKDANBgkqhkiG9w0BAQsFADCBvjELMAkGA1UEBhMC | |
VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50 | |
cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3Qs | |
IEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVz | |
dCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRzIwHhcNMDkwNzA3MTcy | |
NTU0WhcNMzAxMjA3MTc1NTU0WjCBvjELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUVu | |
dHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50cnVzdC5uZXQvbGVnYWwt | |
dGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3QsIEluYy4gLSBmb3IgYXV0 | |
aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVzdCBSb290IENlcnRpZmlj | |
YXRpb24gQXV0aG9yaXR5IC0gRzIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEK | |
AoIBAQC6hLZy254Ma+KZ6TABp3bqMriVQRrJ2mFOWHLP/vaCeb9zYQYKpSfYs1/T | |
RU4cctZOMvJyig/3gxnQaoCAAEUesMfnmr8SVycco2gvCoe9amsOXmXzHHfV1IWN | |
cCG0szLni6LVhjkCsbjSR87kyUnEO6fe+1R9V77w6G7CebI6C1XiUJgWMhNcL3hW | |
wcKUs/Ja5CeanyTXxuzQmyWC48zCxEXFjJd6BmsqEZ+pCm5IO2/b1BEZQvePB7/1 | |
U1+cPvQXLOZprE4yTGJ36rfo5bs0vBmLrpxR57d+tVOxMyLlbc9wPBr64ptntoP0 | |
jaWvYkxN4FisZDQSA/i2jZRjJKRxAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAP | |
BgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqciZ60B7vfec7aVHUbI2fkBJmqzAN | |
BgkqhkiG9w0BAQsFAAOCAQEAeZ8dlsa2eT8ijYfThwMEYGprmi5ZiXMRrEPR9RP/ | |
jTkrwPK9T3CMqS/qF8QLVJ7UG5aYMzyorWKiAHarWWluBh1+xLlEjZivEtRh2woZ | |
Rkfz6/djwUAFQKXSt/S1mja/qYh2iARVBCuch38aNzx+LaUa2NSJXsq9rD1s2G2v | |
1fN2D807iDginWyTmsQ9v4IbZT+mD12q/OWyFcq1rca8PdCE6OoGcrBNOTJ4vz4R | |
nAuknZoh8/CbCzB428Hch0P+vGOaysXCHMnHjf87ElgI5rY97HosTvuDls4MPGmH | |
VHOkc8KT/1EQrBVUAdj8BbGJoX90g5pJ19xOe4pIb4tF9g== | |
-----END CERTIFICATE----- | |
# Issuer: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. - for authorized use only | |
# Subject: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. - for authorized use only | |
# Label: "Entrust Root Certification Authority - EC1" | |
# Serial: 51543124481930649114116133369 | |
# MD5 Fingerprint: b6:7e:1d:f0:58:c5:49:6c:24:3b:3d:ed:98:18:ed:bc | |
# SHA1 Fingerprint: 20:d8:06:40:df:9b:25:f5:12:25:3a:11:ea:f7:59:8a:eb:14:b5:47 | |
# SHA256 Fingerprint: 02:ed:0e:b2:8c:14:da:45:16:5c:56:67:91:70:0d:64:51:d7:fb:56:f0:b2:ab:1d:3b:8e:b0:70:e5:6e:df:f5 | |
-----BEGIN CERTIFICATE----- | |
MIIC+TCCAoCgAwIBAgINAKaLeSkAAAAAUNCR+TAKBggqhkjOPQQDAzCBvzELMAkG | |
A1UEBhMCVVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3 | |
d3cuZW50cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDEyIEVu | |
dHJ1c3QsIEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEzMDEGA1UEAxMq | |
RW50cnVzdCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRUMxMB4XDTEy | |
MTIxODE1MjUzNloXDTM3MTIxODE1NTUzNlowgb8xCzAJBgNVBAYTAlVTMRYwFAYD | |
VQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQLEx9TZWUgd3d3LmVudHJ1c3QubmV0 | |
L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykgMjAxMiBFbnRydXN0LCBJbmMuIC0g | |
Zm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMzAxBgNVBAMTKkVudHJ1c3QgUm9vdCBD | |
ZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEVDMTB2MBAGByqGSM49AgEGBSuBBAAi | |
A2IABIQTydC6bUF74mzQ61VfZgIaJPRbiWlH47jCffHyAsWfoPZb1YsGGYZPUxBt | |
ByQnoaD41UcZYUx9ypMn6nQM72+WCf5j7HBdNq1nd67JnXxVRDqiY1Ef9eNi1KlH | |
Bz7MIKNCMEAwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0O | |
BBYEFLdj5xrdjekIplWDpOBqUEFlEUJJMAoGCCqGSM49BAMDA2cAMGQCMGF52OVC | |
R98crlOZF7ZvHH3hvxGU0QOIdeSNiaSKd0bebWHvAvX7td/M/k7//qnmpwIwW5nX | |
hTcGtXsI/esni0qU+eH6p44mCOh8kmhtc9hvJqwhAriZtyZBWyVgrtBIGu4G | |
-----END CERTIFICATE----- | |
# Issuer: CN=CFCA EV ROOT O=China Financial Certification Authority | |
# Subject: CN=CFCA EV ROOT O=China Financial Certification Authority | |
# Label: "CFCA EV ROOT" | |
# Serial: 407555286 | |
# MD5 Fingerprint: 74:e1:b6:ed:26:7a:7a:44:30:33:94:ab:7b:27:81:30 | |
# SHA1 Fingerprint: e2:b8:29:4b:55:84:ab:6b:58:c2:90:46:6c:ac:3f:b8:39:8f:84:83 | |
# SHA256 Fingerprint: 5c:c3:d7:8e:4e:1d:5e:45:54:7a:04:e6:87:3e:64:f9:0c:f9:53:6d:1c:cc:2e:f8:00:f3:55:c4:c5:fd:70:fd | |
-----BEGIN CERTIFICATE----- | |
MIIFjTCCA3WgAwIBAgIEGErM1jANBgkqhkiG9w0BAQsFADBWMQswCQYDVQQGEwJD | |
TjEwMC4GA1UECgwnQ2hpbmEgRmluYW5jaWFsIENlcnRpZmljYXRpb24gQXV0aG9y | |
aXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJPT1QwHhcNMTIwODA4MDMwNzAxWhcNMjkx | |
MjMxMDMwNzAxWjBWMQswCQYDVQQGEwJDTjEwMC4GA1UECgwnQ2hpbmEgRmluYW5j | |
aWFsIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJP | |
T1QwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDXXWvNED8fBVnVBU03 | |
sQ7smCuOFR36k0sXgiFxEFLXUWRwFsJVaU2OFW2fvwwbwuCjZ9YMrM8irq93VCpL | |
TIpTUnrD7i7es3ElweldPe6hL6P3KjzJIx1qqx2hp/Hz7KDVRM8Vz3IvHWOX6Jn5 | |
/ZOkVIBMUtRSqy5J35DNuF++P96hyk0g1CXohClTt7GIH//62pCfCqktQT+x8Rgp | |
7hZZLDRJGqgG16iI0gNyejLi6mhNbiyWZXvKWfry4t3uMCz7zEasxGPrb382KzRz | |
EpR/38wmnvFyXVBlWY9ps4deMm/DGIq1lY+wejfeWkU7xzbh72fROdOXW3NiGUgt | |
hxwG+3SYIElz8AXSG7Ggo7cbcNOIabla1jj0Ytwli3i/+Oh+uFzJlU9fpy25IGvP | |
a931DfSCt/SyZi4QKPaXWnuWFo8BGS1sbn85WAZkgwGDg8NNkt0yxoekN+kWzqot | |
aK8KgWU6cMGbrU1tVMoqLUuFG7OA5nBFDWteNfB/O7ic5ARwiRIlk9oKmSJgamNg | |
TnYGmE69g60dWIolhdLHZR4tjsbftsbhf4oEIRUpdPA+nJCdDC7xij5aqgwJHsfV | |
PKPtl8MeNPo4+QgO48BdK4PRVmrJtqhUUy54Mmc9gn900PvhtgVguXDbjgv5E1hv | |
cWAQUhC5wUEJ73IfZzF4/5YFjQIDAQABo2MwYTAfBgNVHSMEGDAWgBTj/i39KNAL | |
tbq2osS/BqoFjJP7LzAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAd | |
BgNVHQ4EFgQU4/4t/SjQC7W6tqLEvwaqBYyT+y8wDQYJKoZIhvcNAQELBQADggIB | |
ACXGumvrh8vegjmWPfBEp2uEcwPenStPuiB/vHiyz5ewG5zz13ku9Ui20vsXiObT | |
ej/tUxPQ4i9qecsAIyjmHjdXNYmEwnZPNDatZ8POQQaIxffu2Bq41gt/UP+TqhdL | |
jOztUmCypAbqTuv0axn96/Ua4CUqmtzHQTb3yHQFhDmVOdYLO6Qn+gjYXB74BGBS | |
ESgoA//vU2YApUo0FmZ8/Qmkrp5nGm9BC2sGE5uPhnEFtC+NiWYzKXZUmhH4J/qy | |
P5Hgzg0b8zAarb8iXRvTvyUFTeGSGn+ZnzxEk8rUQElsgIfXBDrDMlI1Dlb4pd19 | |
xIsNER9Tyx6yF7Zod1rg1MvIB671Oi6ON7fQAUtDKXeMOZePglr4UeWJoBjnaH9d | |
Ci77o0cOPaYjesYBx4/IXr9tgFa+iiS6M+qf4TIRnvHST4D2G0CvOJ4RUHlzEhLN | |
5mydLIhyPDCBBpEi6lmt2hkuIsKNuYyH4Ga8cyNfIWRjgEj1oDwYPZTISEEdQLpe | |
/v5WOaHIz16eGWRGENoXkbcFgKyLmZJ956LYBws2J+dIeWCKw9cTXPhyQN9Ky8+Z | |
AAoACxGV2lZFA4gKn2fQ1XmxqI1AbQ3CekD6819kR5LLU7m7Wc5P/dAVUwHY3+vZ | |
5nbv0CO7O6l5s9UCKc2Jo5YPSjXnTkLAdc0Hz+Ys63su | |
-----END CERTIFICATE----- | |
# Issuer: CN=T\xdcRKTRUST Elektronik Sertifika Hizmet Sa\u011flay\u0131c\u0131s\u0131 H5 O=T\xdcRKTRUST Bilgi \u0130leti\u015fim ve Bili\u015fim G\xfcvenli\u011fi Hizmetleri A.\u015e. | |
# Subject: CN=T\xdcRKTRUST Elektronik Sertifika Hizmet Sa\u011flay\u0131c\u0131s\u0131 H5 O=T\xdcRKTRUST Bilgi \u0130leti\u015fim ve Bili\u015fim G\xfcvenli\u011fi Hizmetleri A.\u015e. | |
# Label: "T\xdcRKTRUST Elektronik Sertifika Hizmet Sa\u011flay\u0131c\u0131s\u0131 H5" | |
# Serial: 156233699172481 | |
# MD5 Fingerprint: da:70:8e:f0:22:df:93:26:f6:5f:9f:d3:15:06:52:4e | |
# SHA1 Fingerprint: c4:18:f6:4d:46:d1:df:00:3d:27:30:13:72:43:a9:12:11:c6:75:fb | |
# SHA256 Fingerprint: 49:35:1b:90:34:44:c1:85:cc:dc:5c:69:3d:24:d8:55:5c:b2:08:d6:a8:14:13:07:69:9f:4a:f0:63:19:9d:78 | |
-----BEGIN CERTIFICATE----- | |
MIIEJzCCAw+gAwIBAgIHAI4X/iQggTANBgkqhkiG9w0BAQsFADCBsTELMAkGA1UE | |
BhMCVFIxDzANBgNVBAcMBkFua2FyYTFNMEsGA1UECgxEVMOcUktUUlVTVCBCaWxn | |
aSDEsGxldGnFn2ltIHZlIEJpbGnFn2ltIEfDvHZlbmxpxJ9pIEhpem1ldGxlcmkg | |
QS7Fni4xQjBABgNVBAMMOVTDnFJLVFJVU1QgRWxla3Ryb25payBTZXJ0aWZpa2Eg | |
SGl6bWV0IFNhxJ9sYXnEsWPEsXPEsSBINTAeFw0xMzA0MzAwODA3MDFaFw0yMzA0 | |
MjgwODA3MDFaMIGxMQswCQYDVQQGEwJUUjEPMA0GA1UEBwwGQW5rYXJhMU0wSwYD | |
VQQKDERUw5xSS1RSVVNUIEJpbGdpIMSwbGV0acWfaW0gdmUgQmlsacWfaW0gR8O8 | |
dmVubGnEn2kgSGl6bWV0bGVyaSBBLsWeLjFCMEAGA1UEAww5VMOcUktUUlVTVCBF | |
bGVrdHJvbmlrIFNlcnRpZmlrYSBIaXptZXQgU2HEn2xhecSxY8Sxc8SxIEg1MIIB | |
IjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEApCUZ4WWe60ghUEoI5RHwWrom | |
/4NZzkQqL/7hzmAD/I0Dpe3/a6i6zDQGn1k19uwsu537jVJp45wnEFPzpALFp/kR | |
Gml1bsMdi9GYjZOHp3GXDSHHmflS0yxjXVW86B8BSLlg/kJK9siArs1mep5Fimh3 | |
4khon6La8eHBEJ/rPCmBp+EyCNSgBbGM+42WAA4+Jd9ThiI7/PS98wl+d+yG6w8z | |
5UNP9FR1bSmZLmZaQ9/LXMrI5Tjxfjs1nQ/0xVqhzPMggCTTV+wVunUlm+hkS7M0 | |
hO8EuPbJbKoCPrZV4jI3X/xml1/N1p7HIL9Nxqw/dV8c7TKcfGkAaZHjIxhT6QID | |
AQABo0IwQDAdBgNVHQ4EFgQUVpkHHtOsDGlktAxQR95DLL4gwPswDgYDVR0PAQH/ | |
BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQELBQADggEBAJ5FdnsX | |
SDLyOIspve6WSk6BGLFRRyDN0GSxDsnZAdkJzsiZ3GglE9Rc8qPoBP5yCccLqh0l | |
VX6Wmle3usURehnmp349hQ71+S4pL+f5bFgWV1Al9j4uPqrtd3GqqpmWRgqujuwq | |
URawXs3qZwQcWDD1YIq9pr1N5Za0/EKJAWv2cMhQOQwt1WbZyNKzMrcbGW3LM/nf | |
peYVhDfwwvJllpKQd/Ct9JDpEXjXk4nAPQu6KfTomZ1yju2dL+6SfaHx/126M2CF | |
Yv4HAqGEVka+lgqaE9chTLd8B59OTj+RdPsnnRHM3eaxynFNExc5JsUpISuTKWqW | |
+qtB4Uu2NQvAmxU= | |
-----END CERTIFICATE----- | |
# Issuer: CN=Certinomis - Root CA O=Certinomis OU=0002 433998903 | |
# Subject: CN=Certinomis - Root CA O=Certinomis OU=0002 433998903 | |
# Label: "Certinomis - Root CA" | |
# Serial: 1 | |
# MD5 Fingerprint: 14:0a:fd:8d:a8:28:b5:38:69:db:56:7e:61:22:03:3f | |
# SHA1 Fingerprint: 9d:70:bb:01:a5:a4:a0:18:11:2e:f7:1c:01:b9:32:c5:34:e7:88:a8 | |
# SHA256 Fingerprint: 2a:99:f5:bc:11:74:b7:3c:bb:1d:62:08:84:e0:1c:34:e5:1c:cb:39:78:da:12:5f:0e:33:26:88:83:bf:41:58 | |
-----BEGIN CERTIFICATE----- | |
MIIFkjCCA3qgAwIBAgIBATANBgkqhkiG9w0BAQsFADBaMQswCQYDVQQGEwJGUjET | |
MBEGA1UEChMKQ2VydGlub21pczEXMBUGA1UECxMOMDAwMiA0MzM5OTg5MDMxHTAb | |
BgNVBAMTFENlcnRpbm9taXMgLSBSb290IENBMB4XDTEzMTAyMTA5MTcxOFoXDTMz | |
MTAyMTA5MTcxOFowWjELMAkGA1UEBhMCRlIxEzARBgNVBAoTCkNlcnRpbm9taXMx | |
FzAVBgNVBAsTDjAwMDIgNDMzOTk4OTAzMR0wGwYDVQQDExRDZXJ0aW5vbWlzIC0g | |
Um9vdCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANTMCQosP5L2 | |
fxSeC5yaah1AMGT9qt8OHgZbn1CF6s2Nq0Nn3rD6foCWnoR4kkjW4znuzuRZWJfl | |
LieY6pOod5tK8O90gC3rMB+12ceAnGInkYjwSond3IjmFPnVAy//ldu9n+ws+hQV | |
WZUKxkd8aRi5pwP5ynapz8dvtF4F/u7BUrJ1Mofs7SlmO/NKFoL21prbcpjp3vDF | |
TKWrteoB4owuZH9kb/2jJZOLyKIOSY008B/sWEUuNKqEUL3nskoTuLAPrjhdsKkb | |
5nPJWqHZZkCqqU2mNAKthH6yI8H7KsZn9DS2sJVqM09xRLWtwHkziOC/7aOgFLSc | |
CbAK42C++PhmiM1b8XcF4LVzbsF9Ri6OSyemzTUK/eVNfaoqoynHWmgE6OXWk6Ri | |
wsXm9E/G+Z8ajYJJGYrKWUM66A0ywfRMEwNvbqY/kXPLynNvEiCL7sCCeN5LLsJJ | |
wx3tFvYk9CcbXFcx3FXuqB5vbKziRcxXV4p1VxngtViZSTYxPDMBbRZKzbgqg4SG | |
m/lg0h9tkQPTYKbVPZrdd5A9NaSfD171UkRpucC63M9933zZxKyGIjK8e2uR73r4 | |
F2iw4lNVYC2vPsKD2NkJK/DAZNuHi5HMkesE/Xa0lZrmFAYb1TQdvtj/dBxThZng | |
WVJKYe2InmtJiUZ+IFrZ50rlau7SZRFDAgMBAAGjYzBhMA4GA1UdDwEB/wQEAwIB | |
BjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTvkUz1pcMw6C8I6tNxIqSSaHh0 | |
2TAfBgNVHSMEGDAWgBTvkUz1pcMw6C8I6tNxIqSSaHh02TANBgkqhkiG9w0BAQsF | |
AAOCAgEAfj1U2iJdGlg+O1QnurrMyOMaauo++RLrVl89UM7g6kgmJs95Vn6RHJk/ | |
0KGRHCwPT5iVWVO90CLYiF2cN/z7ZMF4jIuaYAnq1fohX9B0ZedQxb8uuQsLrbWw | |
F6YSjNRieOpWauwK0kDDPAUwPk2Ut59KA9N9J0u2/kTO+hkzGm2kQtHdzMjI1xZS | |
g081lLMSVX3l4kLr5JyTCcBMWwerx20RoFAXlCOotQqSD7J6wWAsOMwaplv/8gzj | |
qh8c3LigkyfeY+N/IZ865Z764BNqdeuWXGKRlI5nU7aJ+BIJy29SWwNyhlCVCNSN | |
h4YVH5Uk2KRvms6knZtt0rJ2BobGVgjF6wnaNsIbW0G+YSrjcOa4pvi2WsS9Iff/ | |
ql+hbHY5ZtbqTFXhADObE5hjyW/QASAJN1LnDE8+zbz1X5YnpyACleAu6AdBBR8V | |
btaw5BngDwKTACdyxYvRVB9dSsNAl35VpnzBMwQUAR1JIGkLGZOdblgi90AMRgwj | |
Y/M50n92Uaf0yKHxDHYiI0ZSKS3io0EHVmmY0gUJvGnHWmHNj4FgFU2A3ZDifcRQ | |
8ow7bkrHxuaAKzyBvBGAFhAn1/DNP3nMcyrDflOR1m749fPH0FFNjkulW+YZFzvW | |
gQncItzujrnEj1PhZ7szuIgVRs/taTX/dQ1G885x4cVrhkIGuUE= | |
-----END CERTIFICATE----- | |
# Issuer: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed | |
# Subject: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed | |
# Label: "OISTE WISeKey Global Root GB CA" | |
# Serial: 157768595616588414422159278966750757568 | |
# MD5 Fingerprint: a4:eb:b9:61:28:2e:b7:2f:98:b0:35:26:90:99:51:1d | |
# SHA1 Fingerprint: 0f:f9:40:76:18:d3:d7:6a:4b:98:f0:a8:35:9e:0c:fd:27:ac:cc:ed | |
# SHA256 Fingerprint: 6b:9c:08:e8:6e:b0:f7:67:cf:ad:65:cd:98:b6:21:49:e5:49:4a:67:f5:84:5e:7b:d1:ed:01:9f:27:b8:6b:d6 | |
-----BEGIN CERTIFICATE----- | |
MIIDtTCCAp2gAwIBAgIQdrEgUnTwhYdGs/gjGvbCwDANBgkqhkiG9w0BAQsFADBt | |
MQswCQYDVQQGEwJDSDEQMA4GA1UEChMHV0lTZUtleTEiMCAGA1UECxMZT0lTVEUg | |
Rm91bmRhdGlvbiBFbmRvcnNlZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9i | |
YWwgUm9vdCBHQiBDQTAeFw0xNDEyMDExNTAwMzJaFw0zOTEyMDExNTEwMzFaMG0x | |
CzAJBgNVBAYTAkNIMRAwDgYDVQQKEwdXSVNlS2V5MSIwIAYDVQQLExlPSVNURSBG | |
b3VuZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBXSVNlS2V5IEdsb2Jh | |
bCBSb290IEdCIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2Be3 | |
HEokKtaXscriHvt9OO+Y9bI5mE4nuBFde9IllIiCFSZqGzG7qFshISvYD06fWvGx | |
WuR51jIjK+FTzJlFXHtPrby/h0oLS5daqPZI7H17Dc0hBt+eFf1Biki3IPShehtX | |
1F1Q/7pn2COZH8g/497/b1t3sWtuuMlk9+HKQUYOKXHQuSP8yYFfTvdv37+ErXNk | |
u7dCjmn21HYdfp2nuFeKUWdy19SouJVUQHMD9ur06/4oQnc/nSMbsrY9gBQHTC5P | |
99UKFg29ZkM3fiNDecNAhvVMKdqOmq0NpQSHiB6F4+lT1ZvIiwNjeOvgGUpuuy9r | |
M2RYk61pv48b74JIxwIDAQABo1EwTzALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUw | |
AwEB/zAdBgNVHQ4EFgQUNQ/INmNe4qPs+TtmFc5RUuORmj0wEAYJKwYBBAGCNxUB | |
BAMCAQAwDQYJKoZIhvcNAQELBQADggEBAEBM+4eymYGQfp3FsLAmzYh7KzKNbrgh | |
cViXfa43FK8+5/ea4n32cZiZBKpDdHij40lhPnOMTZTg+XHEthYOU3gf1qKHLwI5 | |
gSk8rxWYITD+KJAAjNHhy/peyP34EEY7onhCkRd0VQreUGdNZtGn//3ZwLWoo4rO | |
ZvUPQ82nK1d7Y0Zqqi5S2PTt4W2tKZB4SLrhI6qjiey1q5bAtEuiHZeeevJuQHHf | |
aPFlTc58Bd9TZaml8LGXBHAVRgOY1NK/VLSgWH1Sb9pWJmLU2NuJMW8c8CLC02Ic | |
Nc1MaRVUGpCY3useX8p3x8uOPUNpnJpY0CQ73xtAln41rYHHTnG6iBM= | |
-----END CERTIFICATE----- | |
# Issuer: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A. | |
# Subject: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A. | |
# Label: "SZAFIR ROOT CA2" | |
# Serial: 357043034767186914217277344587386743377558296292 | |
# MD5 Fingerprint: 11:64:c1:89:b0:24:b1:8c:b1:07:7e:89:9e:51:9e:99 | |
# SHA1 Fingerprint: e2:52:fa:95:3f:ed:db:24:60:bd:6e:28:f3:9c:cc:cf:5e:b3:3f:de | |
# SHA256 Fingerprint: a1:33:9d:33:28:1a:0b:56:e5:57:d3:d3:2b:1c:e7:f9:36:7e:b0:94:bd:5f:a7:2a:7e:50:04:c8:de:d7:ca:fe | |
-----BEGIN CERTIFICATE----- | |
MIIDcjCCAlqgAwIBAgIUPopdB+xV0jLVt+O2XwHrLdzk1uQwDQYJKoZIhvcNAQEL | |
BQAwUTELMAkGA1UEBhMCUEwxKDAmBgNVBAoMH0tyYWpvd2EgSXpiYSBSb3psaWN6 | |
ZW5pb3dhIFMuQS4xGDAWBgNVBAMMD1NaQUZJUiBST09UIENBMjAeFw0xNTEwMTkw | |
NzQzMzBaFw0zNTEwMTkwNzQzMzBaMFExCzAJBgNVBAYTAlBMMSgwJgYDVQQKDB9L | |
cmFqb3dhIEl6YmEgUm96bGljemVuaW93YSBTLkEuMRgwFgYDVQQDDA9TWkFGSVIg | |
Uk9PVCBDQTIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC3vD5QqEvN | |
QLXOYeeWyrSh2gwisPq1e3YAd4wLz32ohswmUeQgPYUM1ljj5/QqGJ3a0a4m7utT | |
3PSQ1hNKDJA8w/Ta0o4NkjrcsbH/ON7Dui1fgLkCvUqdGw+0w8LBZwPd3BucPbOw | |
3gAeqDRHu5rr/gsUvTaE2g0gv/pby6kWIK05YO4vdbbnl5z5Pv1+TW9NL++IDWr6 | |
3fE9biCloBK0TXC5ztdyO4mTp4CEHCdJckm1/zuVnsHMyAHs6A6KCpbns6aH5db5 | |
BSsNl0BwPLqsdVqc1U2dAgrSS5tmS0YHF2Wtn2yIANwiieDhZNRnvDF5YTy7ykHN | |
XGoAyDw4jlivAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD | |
AgEGMB0GA1UdDgQWBBQuFqlKGLXLzPVvUPMjX/hd56zwyDANBgkqhkiG9w0BAQsF | |
AAOCAQEAtXP4A9xZWx126aMqe5Aosk3AM0+qmrHUuOQn/6mWmc5G4G18TKI4pAZw | |
8PRBEew/R40/cof5O/2kbytTAOD/OblqBw7rHRz2onKQy4I9EYKL0rufKq8h5mOG | |
nXkZ7/e7DDWQw4rtTw/1zBLZpD67oPwglV9PJi8RI4NOdQcPv5vRtB3pEAT+ymCP | |
oky4rc/hkA/NrgrHXXu3UNLUYfrVFdvXn4dRVOul4+vJhaAlIDf7js4MNIThPIGy | |
d05DpYhfhmehPea0XGG2Ptv+tyjFogeutcrKjSoS75ftwjCkySp6+/NNIxuZMzSg | |
LvWpCz/UXeHPhJ/iGcJfitYgHuNztw== | |
-----END CERTIFICATE----- | |
# Issuer: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. OU=Certum Certification Authority | |
# Subject: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. OU=Certum Certification Authority | |
# Label: "Certum Trusted Network CA 2" | |
# Serial: 44979900017204383099463764357512596969 | |
# MD5 Fingerprint: 6d:46:9e:d9:25:6d:08:23:5b:5e:74:7d:1e:27:db:f2 | |
# SHA1 Fingerprint: d3:dd:48:3e:2b:bf:4c:05:e8:af:10:f5:fa:76:26:cf:d3:dc:30:92 | |
# SHA256 Fingerprint: b6:76:f2:ed:da:e8:77:5c:d3:6c:b0:f6:3c:d1:d4:60:39:61:f4:9e:62:65:ba:01:3a:2f:03:07:b6:d0:b8:04 | |
-----BEGIN CERTIFICATE----- | |
MIIF0jCCA7qgAwIBAgIQIdbQSk8lD8kyN/yqXhKN6TANBgkqhkiG9w0BAQ0FADCB | |
gDELMAkGA1UEBhMCUEwxIjAgBgNVBAoTGVVuaXpldG8gVGVjaG5vbG9naWVzIFMu | |
QS4xJzAlBgNVBAsTHkNlcnR1bSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTEkMCIG | |
A1UEAxMbQ2VydHVtIFRydXN0ZWQgTmV0d29yayBDQSAyMCIYDzIwMTExMDA2MDgz | |
OTU2WhgPMjA0NjEwMDYwODM5NTZaMIGAMQswCQYDVQQGEwJQTDEiMCAGA1UEChMZ | |
VW5pemV0byBUZWNobm9sb2dpZXMgUy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRp | |
ZmljYXRpb24gQXV0aG9yaXR5MSQwIgYDVQQDExtDZXJ0dW0gVHJ1c3RlZCBOZXR3 | |
b3JrIENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC9+Xj45tWA | |
DGSdhhuWZGc/IjoedQF97/tcZ4zJzFxrqZHmuULlIEub2pt7uZld2ZuAS9eEQCsn | |
0+i6MLs+CRqnSZXvK0AkwpfHp+6bJe+oCgCXhVqqndwpyeI1B+twTUrWwbNWuKFB | |
OJvR+zF/j+Bf4bE/D44WSWDXBo0Y+aomEKsq09DRZ40bRr5HMNUuctHFY9rnY3lE | |
fktjJImGLjQ/KUxSiyqnwOKRKIm5wFv5HdnnJ63/mgKXwcZQkpsCLL2puTRZCr+E | |
Sv/f/rOf69me4Jgj7KZrdxYq28ytOxykh9xGc14ZYmhFV+SQgkK7QtbwYeDBoz1m | |
o130GO6IyY0XRSmZMnUCMe4pJshrAua1YkV/NxVaI2iJ1D7eTiew8EAMvE0Xy02i | |
sx7QBlrd9pPPV3WZ9fqGGmd4s7+W/jTcvedSVuWz5XV710GRBdxdaeOVDUO5/IOW | |
OZV7bIBaTxNyxtd9KXpEulKkKtVBRgkg/iKgtlswjbyJDNXXcPiHUv3a76xRLgez | |
Tv7QCdpw75j6VuZt27VXS9zlLCUVyJ4ueE742pyehizKV/Ma5ciSixqClnrDvFAS | |
adgOWkaLOusm+iPJtrCBvkIApPjW/jAux9JG9uWOdf3yzLnQh1vMBhBgu4M1t15n | |
3kfsmUjxpKEV/q2MYo45VU85FrmxY53/twIDAQABo0IwQDAPBgNVHRMBAf8EBTAD | |
AQH/MB0GA1UdDgQWBBS2oVQ5AsOgP46KvPrU+Bym0ToO/TAOBgNVHQ8BAf8EBAMC | |
AQYwDQYJKoZIhvcNAQENBQADggIBAHGlDs7k6b8/ONWJWsQCYftMxRQXLYtPU2sQ | |
F/xlhMcQSZDe28cmk4gmb3DWAl45oPePq5a1pRNcgRRtDoGCERuKTsZPpd1iHkTf | |
CVn0W3cLN+mLIMb4Ck4uWBzrM9DPhmDJ2vuAL55MYIR4PSFk1vtBHxgP58l1cb29 | |
XN40hz5BsA72udY/CROWFC/emh1auVbONTqwX3BNXuMp8SMoclm2q8KMZiYcdywm | |
djWLKKdpoPk79SPdhRB0yZADVpHnr7pH1BKXESLjokmUbOe3lEu6LaTaM4tMpkT/ | |
WjzGHWTYtTHkpjx6qFcL2+1hGsvxznN3Y6SHb0xRONbkX8eftoEq5IVIeVheO/jb | |
AoJnwTnbw3RLPTYe+SmTiGhbqEQZIfCn6IENLOiTNrQ3ssqwGyZ6miUfmpqAnksq | |
P/ujmv5zMnHCnsZy4YpoJ/HkD7TETKVhk/iXEAcqMCWpuchxuO9ozC1+9eB+D4Ko | |
b7a6bINDd82Kkhehnlt4Fj1F4jNy3eFmypnTycUm/Q1oBEauttmbjL4ZvrHG8hnj | |
XALKLNhvSgfZyTXaQHXyxKcZb55CEJh15pWLYLztxRLXis7VmFxWlgPF7ncGNf/P | |
5O4/E2Hu29othfDNrp2yGAlFw5Khchf8R7agCyzxxN5DaAhqXzvwdmP7zAYspsbi | |
DrW5viSP | |
-----END CERTIFICATE----- | |
# Issuer: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority | |
# Subject: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority | |
# Label: "Hellenic Academic and Research Institutions RootCA 2015" | |
# Serial: 0 | |
# MD5 Fingerprint: ca:ff:e2:db:03:d9:cb:4b:e9:0f:ad:84:fd:7b:18:ce | |
# SHA1 Fingerprint: 01:0c:06:95:a6:98:19:14:ff:bf:5f:c6:b0:b6:95:ea:29:e9:12:a6 | |
# SHA256 Fingerprint: a0:40:92:9a:02:ce:53:b4:ac:f4:f2:ff:c6:98:1c:e4:49:6f:75:5e:6d:45:fe:0b:2a:69:2b:cd:52:52:3f:36 | |
-----BEGIN CERTIFICATE----- | |
MIIGCzCCA/OgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBpjELMAkGA1UEBhMCR1Ix | |
DzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5k | |
IFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxQDA+BgNVBAMT | |
N0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgUm9v | |
dENBIDIwMTUwHhcNMTUwNzA3MTAxMTIxWhcNNDAwNjMwMTAxMTIxWjCBpjELMAkG | |
A1UEBhMCR1IxDzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNh | |
ZGVtaWMgYW5kIFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkx | |
QDA+BgNVBAMTN0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1 | |
dGlvbnMgUm9vdENBIDIwMTUwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoIC | |
AQDC+Kk/G4n8PDwEXT2QNrCROnk8ZlrvbTkBSRq0t89/TSNTt5AA4xMqKKYx8ZEA | |
4yjsriFBzh/a/X0SWwGDD7mwX5nh8hKDgE0GPt+sr+ehiGsxr/CL0BgzuNtFajT0 | |
AoAkKAoCFZVedioNmToUW/bLy1O8E00BiDeUJRtCvCLYjqOWXjrZMts+6PAQZe10 | |
4S+nfK8nNLspfZu2zwnI5dMK/IhlZXQK3HMcXM1AsRzUtoSMTFDPaI6oWa7CJ06C | |
ojXdFPQf/7J31Ycvqm59JCfnxssm5uX+Zwdj2EUN3TpZZTlYepKZcj2chF6IIbjV | |
9Cz82XBST3i4vTwri5WY9bPRaM8gFH5MXF/ni+X1NYEZN9cRCLdmvtNKzoNXADrD | |
gfgXy5I2XdGj2HUb4Ysn6npIQf1FGQatJ5lOwXBH3bWfgVMS5bGMSF0xQxfjjMZ6 | |
Y5ZLKTBOhE5iGV48zpeQpX8B653g+IuJ3SWYPZK2fu/Z8VFRfS0myGlZYeCsargq | |
NhEEelC9MoS+L9xy1dcdFkfkR2YgP/SWxa+OAXqlD3pk9Q0Yh9muiNX6hME6wGko | |
LfINaFGq46V3xqSQDqE3izEjR8EJCOtu93ib14L8hCCZSRm2Ekax+0VVFqmjZayc | |
Bw/qa9wfLgZy7IaIEuQt218FL+TwA9MmM+eAws1CoRc0CwIDAQABo0IwQDAPBgNV | |
HRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUcRVnyMjJvXVd | |
ctA4GGqd83EkVAswDQYJKoZIhvcNAQELBQADggIBAHW7bVRLqhBYRjTyYtcWNl0I | |
XtVsyIe9tC5G8jH4fOpCtZMWVdyhDBKg2mF+D1hYc2Ryx+hFjtyp8iY/xnmMsVMI | |
M4GwVhO+5lFc2JsKT0ucVlMC6U/2DWDqTUJV6HwbISHTGzrMd/K4kPFox/la/vot | |
9L/J9UUbzjgQKjeKeaO04wlshYaT/4mWJ3iBj2fjRnRUjtkNaeJK9E10A/+yd+2V | |
Z5fkscWrv2oj6NSU4kQoYsRL4vDY4ilrGnB+JGGTe08DMiUNRSQrlrRGar9KC/ea | |
j8GsGsVn82800vpzY4zvFrCopEYq+OsS7HK07/grfoxSwIuEVPkvPuNVqNxmsdnh | |
X9izjFk0WaSrT2y7HxjbdavYy5LNlDhhDgcGH0tGEPEVvo2FXDtKK4F5D7Rpn0lQ | |
l033DlZdwJVqwjbDG2jJ9SrcR5q+ss7FJej6A7na+RZukYT1HCjI/CbM1xyQVqdf | |
bzoEvM14iQuODy+jqk+iGxI9FghAD/FGTNeqewjBCvVtJ94Cj8rDtSvK6evIIVM4 | |
pcw72Hc3MKJP2W/R8kCtQXoXxdZKNYm3QdV8hn9VTYNKpXMgwDqvkPGaJI7ZjnHK | |
e7iG2rKPmT4dEw0SEe7Uq/DpFXYC5ODfqiAeW2GFZECpkJcNrVPSWh2HagCXZWK0 | |
vm9qp/UsQu0yrbYhnr68 | |
-----END CERTIFICATE----- | |
# Issuer: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority | |
# Subject: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority | |
# Label: "Hellenic Academic and Research Institutions ECC RootCA 2015" | |
# Serial: 0 | |
# MD5 Fingerprint: 81:e5:b4:17:eb:c2:f5:e1:4b:0d:41:7b:49:92:fe:ef | |
# SHA1 Fingerprint: 9f:f1:71:8d:92:d5:9a:f3:7d:74:97:b4:bc:6f:84:68:0b:ba:b6:66 | |
# SHA256 Fingerprint: 44:b5:45:aa:8a:25:e6:5a:73:ca:15:dc:27:fc:36:d2:4c:1c:b9:95:3a:06:65:39:b1:15:82:dc:48:7b:48:33 | |
-----BEGIN CERTIFICATE----- | |
MIICwzCCAkqgAwIBAgIBADAKBggqhkjOPQQDAjCBqjELMAkGA1UEBhMCR1IxDzAN | |
BgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl | |
c2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxRDBCBgNVBAMTO0hl | |
bGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgRUNDIFJv | |
b3RDQSAyMDE1MB4XDTE1MDcwNzEwMzcxMloXDTQwMDYzMDEwMzcxMlowgaoxCzAJ | |
BgNVBAYTAkdSMQ8wDQYDVQQHEwZBdGhlbnMxRDBCBgNVBAoTO0hlbGxlbmljIEFj | |
YWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgQ2VydC4gQXV0aG9yaXR5 | |
MUQwQgYDVQQDEztIZWxsZW5pYyBBY2FkZW1pYyBhbmQgUmVzZWFyY2ggSW5zdGl0 | |
dXRpb25zIEVDQyBSb290Q0EgMjAxNTB2MBAGByqGSM49AgEGBSuBBAAiA2IABJKg | |
QehLgoRc4vgxEZmGZE4JJS+dQS8KrjVPdJWyUWRrjWvmP3CV8AVER6ZyOFB2lQJa | |
jq4onvktTpnvLEhvTCUp6NFxW98dwXU3tNf6e3pCnGoKVlp8aQuqgAkkbH7BRqNC | |
MEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFLQi | |
C4KZJAEOnLvkDv2/+5cgk5kqMAoGCCqGSM49BAMCA2cAMGQCMGfOFmI4oqxiRaep | |
lSTAGiecMjvAwNW6qef4BENThe5SId6d9SWDPp5YSy/XZxMOIQIwBeF1Ad5o7Sof | |
TUwJCA3sS61kFyjndc5FZXIhF8siQQ6ME5g4mlRtm8rifOoCWCKR | |
-----END CERTIFICATE----- | |
# Issuer: CN=Certplus Root CA G1 O=Certplus | |
# Subject: CN=Certplus Root CA G1 O=Certplus | |
# Label: "Certplus Root CA G1" | |
# Serial: 1491911565779898356709731176965615564637713 | |
# MD5 Fingerprint: 7f:09:9c:f7:d9:b9:5c:69:69:56:d5:37:3e:14:0d:42 | |
# SHA1 Fingerprint: 22:fd:d0:b7:fd:a2:4e:0d:ac:49:2c:a0:ac:a6:7b:6a:1f:e3:f7:66 | |
# SHA256 Fingerprint: 15:2a:40:2b:fc:df:2c:d5:48:05:4d:22:75:b3:9c:7f:ca:3e:c0:97:80:78:b0:f0:ea:76:e5:61:a6:c7:43:3e | |
-----BEGIN CERTIFICATE----- | |
MIIFazCCA1OgAwIBAgISESBVg+QtPlRWhS2DN7cs3EYRMA0GCSqGSIb3DQEBDQUA | |
MD4xCzAJBgNVBAYTAkZSMREwDwYDVQQKDAhDZXJ0cGx1czEcMBoGA1UEAwwTQ2Vy | |
dHBsdXMgUm9vdCBDQSBHMTAeFw0xNDA1MjYwMDAwMDBaFw0zODAxMTUwMDAwMDBa | |
MD4xCzAJBgNVBAYTAkZSMREwDwYDVQQKDAhDZXJ0cGx1czEcMBoGA1UEAwwTQ2Vy | |
dHBsdXMgUm9vdCBDQSBHMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIB | |
ANpQh7bauKk+nWT6VjOaVj0W5QOVsjQcmm1iBdTYj+eJZJ+622SLZOZ5KmHNr49a | |
iZFluVj8tANfkT8tEBXgfs+8/H9DZ6itXjYj2JizTfNDnjl8KvzsiNWI7nC9hRYt | |
6kuJPKNxQv4c/dMcLRC4hlTqQ7jbxofaqK6AJc96Jh2qkbBIb6613p7Y1/oA/caP | |
0FG7Yn2ksYyy/yARujVjBYZHYEMzkPZHogNPlk2dT8Hq6pyi/jQu3rfKG3akt62f | |
6ajUeD94/vI4CTYd0hYCyOwqaK/1jpTvLRN6HkJKHRUxrgwEV/xhc/MxVoYxgKDE | |
EW4wduOU8F8ExKyHcomYxZ3MVwia9Az8fXoFOvpHgDm2z4QTd28n6v+WZxcIbekN | |
1iNQMLAVdBM+5S//Ds3EC0pd8NgAM0lm66EYfFkuPSi5YXHLtaW6uOrc4nBvCGrc | |
h2c0798wct3zyT8j/zXhviEpIDCB5BmlIOklynMxdCm+4kLV87ImZsdo/Rmz5yCT | |
mehd4F6H50boJZwKKSTUzViGUkAksnsPmBIgJPaQbEfIDbsYIC7Z/fyL8inqh3SV | |
4EJQeIQEQWGw9CEjjy3LKCHyamz0GqbFFLQ3ZU+V/YDI+HLlJWvEYLF7bY5KinPO | |
WftwenMGE9nTdDckQQoRb5fc5+R+ob0V8rqHDz1oihYHAgMBAAGjYzBhMA4GA1Ud | |
DwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBSowcCbkahDFXxd | |
Bie0KlHYlwuBsTAfBgNVHSMEGDAWgBSowcCbkahDFXxdBie0KlHYlwuBsTANBgkq | |
hkiG9w0BAQ0FAAOCAgEAnFZvAX7RvUz1isbwJh/k4DgYzDLDKTudQSk0YcbX8ACh | |
66Ryj5QXvBMsdbRX7gp8CXrc1cqh0DQT+Hern+X+2B50ioUHj3/MeXrKls3N/U/7 | |
/SMNkPX0XtPGYX2eEeAC7gkE2Qfdpoq3DIMku4NQkv5gdRE+2J2winq14J2by5BS | |
S7CTKtQ+FjPlnsZlFT5kOwQ/2wyPX1wdaR+v8+khjPPvl/aatxm2hHSco1S1cE5j | |
2FddUyGbQJJD+tZ3VTNPZNX70Cxqjm0lpu+F6ALEUz65noe8zDUa3qHpimOHZR4R | |
Kttjd5cUvpoUmRGywO6wT/gUITJDT5+rosuoD6o7BlXGEilXCNQ314cnrUlZp5Gr | |
RHpejXDbl85IULFzk/bwg2D5zfHhMf1bfHEhYxQUqq/F3pN+aLHsIqKqkHWetUNy | |
6mSjhEv9DKgma3GX7lZjZuhCVPnHHd/Qj1vfyDBviP4NxDMcU6ij/UgQ8uQKTuEV | |
V/xuZDDCVRHc6qnNSlSsKWNEz0pAoNZoWRsz+e86i9sgktxChL8Bq4fA1SCC28a5 | |
g4VCXA9DO2pJNdWY9BW/+mGBDAkgGNLQFwzLSABQ6XaCjGTXOqAHVcweMcDvOrRl | |
++O/QmueD6i9a5jc2NvLi6Td11n0bt3+qsOR0C5CB8AMTVPNJLFMWx5R9N/pkvo= | |
-----END CERTIFICATE----- | |
# Issuer: CN=Certplus Root CA G2 O=Certplus | |
# Subject: CN=Certplus Root CA G2 O=Certplus | |
# Label: "Certplus Root CA G2" | |
# Serial: 1492087096131536844209563509228951875861589 | |
# MD5 Fingerprint: a7:ee:c4:78:2d:1b:ee:2d:b9:29:ce:d6:a7:96:32:31 | |
# SHA1 Fingerprint: 4f:65:8e:1f:e9:06:d8:28:02:e9:54:47:41:c9:54:25:5d:69:cc:1a | |
# SHA256 Fingerprint: 6c:c0:50:41:e6:44:5e:74:69:6c:4c:fb:c9:f8:0f:54:3b:7e:ab:bb:44:b4:ce:6f:78:7c:6a:99:71:c4:2f:17 | |
-----BEGIN CERTIFICATE----- | |
MIICHDCCAaKgAwIBAgISESDZkc6uo+jF5//pAq/Pc7xVMAoGCCqGSM49BAMDMD4x | |
CzAJBgNVBAYTAkZSMREwDwYDVQQKDAhDZXJ0cGx1czEcMBoGA1UEAwwTQ2VydHBs | |
dXMgUm9vdCBDQSBHMjAeFw0xNDA1MjYwMDAwMDBaFw0zODAxMTUwMDAwMDBaMD4x | |
CzAJBgNVBAYTAkZSMREwDwYDVQQKDAhDZXJ0cGx1czEcMBoGA1UEAwwTQ2VydHBs | |
dXMgUm9vdCBDQSBHMjB2MBAGByqGSM49AgEGBSuBBAAiA2IABM0PW1aC3/BFGtat | |
93nwHcmsltaeTpwftEIRyoa/bfuFo8XlGVzX7qY/aWfYeOKmycTbLXku54uNAm8x | |
Ik0G42ByRZ0OQneezs/lf4WbGOT8zC5y0xaTTsqZY1yhBSpsBqNjMGEwDgYDVR0P | |
AQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFNqDYwJ5jtpMxjwj | |
FNiPwyCrKGBZMB8GA1UdIwQYMBaAFNqDYwJ5jtpMxjwjFNiPwyCrKGBZMAoGCCqG | |
SM49BAMDA2gAMGUCMHD+sAvZ94OX7PNVHdTcswYO/jOYnYs5kGuUIe22113WTNch | |
p+e/IQ8rzfcq3IUHnQIxAIYUFuXcsGXCwI4Un78kFmjlvPl5adytRSv3tjFzzAal | |
U5ORGpOucGpnutee5WEaXw== | |
-----END CERTIFICATE----- | |
# Issuer: CN=OpenTrust Root CA G1 O=OpenTrust | |
# Subject: CN=OpenTrust Root CA G1 O=OpenTrust | |
# Label: "OpenTrust Root CA G1" | |
# Serial: 1492036577811947013770400127034825178844775 | |
# MD5 Fingerprint: 76:00:cc:81:29:cd:55:5e:88:6a:7a:2e:f7:4d:39:da | |
# SHA1 Fingerprint: 79:91:e8:34:f7:e2:ee:dd:08:95:01:52:e9:55:2d:14:e9:58:d5:7e | |
# SHA256 Fingerprint: 56:c7:71:28:d9:8c:18:d9:1b:4c:fd:ff:bc:25:ee:91:03:d4:75:8e:a2:ab:ad:82:6a:90:f3:45:7d:46:0e:b4 | |
-----BEGIN CERTIFICATE----- | |
MIIFbzCCA1egAwIBAgISESCzkFU5fX82bWTCp59rY45nMA0GCSqGSIb3DQEBCwUA | |
MEAxCzAJBgNVBAYTAkZSMRIwEAYDVQQKDAlPcGVuVHJ1c3QxHTAbBgNVBAMMFE9w | |
ZW5UcnVzdCBSb290IENBIEcxMB4XDTE0MDUyNjA4NDU1MFoXDTM4MDExNTAwMDAw | |
MFowQDELMAkGA1UEBhMCRlIxEjAQBgNVBAoMCU9wZW5UcnVzdDEdMBsGA1UEAwwU | |
T3BlblRydXN0IFJvb3QgQ0EgRzEwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK | |
AoICAQD4eUbalsUwXopxAy1wpLuwxQjczeY1wICkES3d5oeuXT2R0odsN7faYp6b | |
wiTXj/HbpqbfRm9RpnHLPhsxZ2L3EVs0J9V5ToybWL0iEA1cJwzdMOWo010hOHQX | |
/uMftk87ay3bfWAfjH1MBcLrARYVmBSO0ZB3Ij/swjm4eTrwSSTilZHcYTSSjFR0 | |
77F9jAHiOH3BX2pfJLKOYheteSCtqx234LSWSE9mQxAGFiQD4eCcjsZGT44ameGP | |
uY4zbGneWK2gDqdkVBFpRGZPTBKnjix9xNRbxQA0MMHZmf4yzgeEtE7NCv82TWLx | |
p2NX5Ntqp66/K7nJ5rInieV+mhxNaMbBGN4zK1FGSxyO9z0M+Yo0FMT7MzUj8czx | |
Kselu7Cizv5Ta01BG2Yospb6p64KTrk5M0ScdMGTHPjgniQlQ/GbI4Kq3ywgsNw2 | |
TgOzfALU5nsaqocTvz6hdLubDuHAk5/XpGbKuxs74zD0M1mKB3IDVedzagMxbm+W | |
G+Oin6+Sx+31QrclTDsTBM8clq8cIqPQqwWyTBIjUtz9GVsnnB47ev1CI9sjgBPw | |
vFEVVJSmdz7QdFG9URQIOTfLHzSpMJ1ShC5VkLG631UAC9hWLbFJSXKAqWLXwPYY | |
EQRVzXR7z2FwefR7LFxckvzluFqrTJOVoSfupb7PcSNCupt2LQIDAQABo2MwYTAO | |
BgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUl0YhVyE1 | |
2jZVx/PxN3DlCPaTKbYwHwYDVR0jBBgwFoAUl0YhVyE12jZVx/PxN3DlCPaTKbYw | |
DQYJKoZIhvcNAQELBQADggIBAB3dAmB84DWn5ph76kTOZ0BP8pNuZtQ5iSas000E | |
PLuHIT839HEl2ku6q5aCgZG27dmxpGWX4m9kWaSW7mDKHyP7Rbr/jyTwyqkxf3kf | |
gLMtMrpkZ2CvuVnN35pJ06iCsfmYlIrM4LvgBBuZYLFGZdwIorJGnkSI6pN+VxbS | |
FXJfLkur1J1juONI5f6ELlgKn0Md/rcYkoZDSw6cMoYsYPXpSOqV7XAp8dUv/TW0 | |
V8/bhUiZucJvbI/NeJWsZCj9VrDDb8O+WVLhX4SPgPL0DTatdrOjteFkdjpY3H1P | |
XlZs5VVZV6Xf8YpmMIzUUmI4d7S+KNfKNsSbBfD4Fdvb8e80nR14SohWZ25g/4/I | |
i+GOvUKpMwpZQhISKvqxnUOOBZuZ2mKtVzazHbYNeS2WuOvyDEsMpZTGMKcmGS3t | |
TAZQMPH9WD25SxdfGbRqhFS0OE85og2WaMMolP3tLR9Ka0OWLpABEPs4poEL0L91 | |
09S5zvE/bw4cHjdx5RiHdRk/ULlepEU0rbDK5uUTdg8xFKmOLZTW1YVNcxVPS/Ky | |
Pu1svf0OnWZzsD2097+o4BGkxK51CUpjAEggpsadCwmKtODmzj7HPiY46SvepghJ | |
AwSQiumPv+i2tCqjI40cHLI5kqiPAlxAOXXUc0ECd97N4EOH1uS6SsNsEn/+KuYj | |
1oxx | |
-----END CERTIFICATE----- | |
# Issuer: CN=OpenTrust Root CA G2 O=OpenTrust | |
# Subject: CN=OpenTrust Root CA G2 O=OpenTrust | |
# Label: "OpenTrust Root CA G2" | |
# Serial: 1492012448042702096986875987676935573415441 | |
# MD5 Fingerprint: 57:24:b6:59:24:6b:ae:c8:fe:1c:0c:20:f2:c0:4e:eb | |
# SHA1 Fingerprint: 79:5f:88:60:c5:ab:7c:3d:92:e6:cb:f4:8d:e1:45:cd:11:ef:60:0b | |
# SHA256 Fingerprint: 27:99:58:29:fe:6a:75:15:c1:bf:e8:48:f9:c4:76:1d:b1:6c:22:59:29:25:7b:f4:0d:08:94:f2:9e:a8:ba:f2 | |
-----BEGIN CERTIFICATE----- | |
MIIFbzCCA1egAwIBAgISESChaRu/vbm9UpaPI+hIvyYRMA0GCSqGSIb3DQEBDQUA | |
MEAxCzAJBgNVBAYTAkZSMRIwEAYDVQQKDAlPcGVuVHJ1c3QxHTAbBgNVBAMMFE9w | |
ZW5UcnVzdCBSb290IENBIEcyMB4XDTE0MDUyNjAwMDAwMFoXDTM4MDExNTAwMDAw | |
MFowQDELMAkGA1UEBhMCRlIxEjAQBgNVBAoMCU9wZW5UcnVzdDEdMBsGA1UEAwwU | |
T3BlblRydXN0IFJvb3QgQ0EgRzIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK | |
AoICAQDMtlelM5QQgTJT32F+D3Y5z1zCU3UdSXqWON2ic2rxb95eolq5cSG+Ntmh | |
/LzubKh8NBpxGuga2F8ORAbtp+Dz0mEL4DKiltE48MLaARf85KxP6O6JHnSrT78e | |
CbY2albz4e6WiWYkBuTNQjpK3eCasMSCRbP+yatcfD7J6xcvDH1urqWPyKwlCm/6 | |
1UWY0jUJ9gNDlP7ZvyCVeYCYitmJNbtRG6Q3ffyZO6v/v6wNj0OxmXsWEH4db0fE | |
FY8ElggGQgT4hNYdvJGmQr5J1WqIP7wtUdGejeBSzFfdNTVY27SPJIjki9/ca1TS | |
gSuyzpJLHB9G+h3Ykst2Z7UJmQnlrBcUVXDGPKBWCgOz3GIZ38i1MH/1PCZ1Eb3X | |
G7OHngevZXHloM8apwkQHZOJZlvoPGIytbU6bumFAYueQ4xncyhZW+vj3CzMpSZy | |
YhK05pyDRPZRpOLAeiRXyg6lPzq1O4vldu5w5pLeFlwoW5cZJ5L+epJUzpM5ChaH | |
vGOz9bGTXOBut9Dq+WIyiET7vycotjCVXRIouZW+j1MY5aIYFuJWpLIsEPUdN6b4 | |
t/bQWVyJ98LVtZR00dX+G7bw5tYee9I8y6jj9RjzIR9u701oBnstXW5DiabA+aC/ | |
gh7PU3+06yzbXfZqfUAkBXKJOAGTy3HCOV0GEfZvePg3DTmEJwIDAQABo2MwYTAO | |
BgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUajn6QiL3 | |
5okATV59M4PLuG53hq8wHwYDVR0jBBgwFoAUajn6QiL35okATV59M4PLuG53hq8w | |
DQYJKoZIhvcNAQENBQADggIBAJjLq0A85TMCl38th6aP1F5Kr7ge57tx+4BkJamz | |
Gj5oXScmp7oq4fBXgwpkTx4idBvpkF/wrM//T2h6OKQQbA2xx6R3gBi2oihEdqc0 | |
nXGEL8pZ0keImUEiyTCYYW49qKgFbdEfwFFEVn8nNQLdXpgKQuswv42hm1GqO+qT | |
RmTFAHneIWv2V6CG1wZy7HBGS4tz3aAhdT7cHcCP009zHIXZ/n9iyJVvttN7jLpT | |
wm+bREx50B1ws9efAvSyB7DH5fitIw6mVskpEndI2S9G/Tvw/HRwkqWOOAgfZDC2 | |
t0v7NqwQjqBSM2OdAzVWxWm9xiNaJ5T2pBL4LTM8oValX9YZ6e18CL13zSdkzJTa | |
TkZQh+D5wVOAHrut+0dSixv9ovneDiK3PTNZbNTe9ZUGMg1RGUFcPk8G97krgCf2 | |
o6p6fAbhQ8MTOWIaNr3gKC6UAuQpLmBVrkA9sHSSXvAgZJY/X0VdiLWK2gKgW0VU | |
3jg9CcCoSmVGFvyqv1ROTVu+OEO3KMqLM6oaJbolXCkvW0pujOotnCr2BXbgd5eA | |
iN1nE28daCSLT7d0geX0YJ96Vdc+N9oWaz53rK4YcJUIeSkDiv7BO7M/Gg+kO14f | |
WKGVyasvc0rQLW6aWQ9VGHgtPFGml4vmu7JwqkwR3v98KzfUetF3NI/n+UL3PIEM | |
S1IK | |
-----END CERTIFICATE----- | |
# Issuer: CN=OpenTrust Root CA G3 O=OpenTrust | |
# Subject: CN=OpenTrust Root CA G3 O=OpenTrust | |
# Label: "OpenTrust Root CA G3" | |
# Serial: 1492104908271485653071219941864171170455615 | |
# MD5 Fingerprint: 21:37:b4:17:16:92:7b:67:46:70:a9:96:d7:a8:13:24 | |
# SHA1 Fingerprint: 6e:26:64:f3:56:bf:34:55:bf:d1:93:3f:7c:01:de:d8:13:da:8a:a6 | |
# SHA256 Fingerprint: b7:c3:62:31:70:6e:81:07:8c:36:7c:b8:96:19:8f:1e:32:08:dd:92:69:49:dd:8f:57:09:a4:10:f7:5b:62:92 | |
-----BEGIN CERTIFICATE----- | |
MIICITCCAaagAwIBAgISESDm+Ez8JLC+BUCs2oMbNGA/MAoGCCqGSM49BAMDMEAx | |
CzAJBgNVBAYTAkZSMRIwEAYDVQQKDAlPcGVuVHJ1c3QxHTAbBgNVBAMMFE9wZW5U | |
cnVzdCBSb290IENBIEczMB4XDTE0MDUyNjAwMDAwMFoXDTM4MDExNTAwMDAwMFow | |
QDELMAkGA1UEBhMCRlIxEjAQBgNVBAoMCU9wZW5UcnVzdDEdMBsGA1UEAwwUT3Bl | |
blRydXN0IFJvb3QgQ0EgRzMwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAARK7liuTcpm | |
3gY6oxH84Bjwbhy6LTAMidnW7ptzg6kjFYwvWYpa3RTqnVkrQ7cG7DK2uu5Bta1d | |
oYXM6h0UZqNnfkbilPPntlahFVmhTzeXuSIevRHr9LIfXsMUmuXZl5mjYzBhMA4G | |
A1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRHd8MUi2I5 | |
DMlv4VBN0BBY3JWIbTAfBgNVHSMEGDAWgBRHd8MUi2I5DMlv4VBN0BBY3JWIbTAK | |
BggqhkjOPQQDAwNpADBmAjEAj6jcnboMBBf6Fek9LykBl7+BFjNAk2z8+e2AcG+q | |
j9uEwov1NcoG3GRvaBbhj5G5AjEA2Euly8LQCGzpGPta3U1fJAuwACEl74+nBCZx | |
4nxp5V2a+EEfOzmTk51V6s2N8fvB | |
-----END CERTIFICATE----- | |
# Issuer: CN=ISRG Root X1 O=Internet Security Research Group | |
# Subject: CN=ISRG Root X1 O=Internet Security Research Group | |
# Label: "ISRG Root X1" | |
# Serial: 172886928669790476064670243504169061120 | |
# MD5 Fingerprint: 0c:d2:f9:e0:da:17:73:e9:ed:86:4d:a5:e3:70:e7:4e | |
# SHA1 Fingerprint: ca:bd:2a:79:a1:07:6a:31:f2:1d:25:36:35:cb:03:9d:43:29:a5:e8 | |
# SHA256 Fingerprint: 96:bc:ec:06:26:49:76:f3:74:60:77:9a:cf:28:c5:a7:cf:e8:a3:c0:aa:e1:1a:8f:fc:ee:05:c0:bd:df:08:c6 | |
-----BEGIN CERTIFICATE----- | |
MIIFazCCA1OgAwIBAgIRAIIQz7DSQONZRGPgu2OCiwAwDQYJKoZIhvcNAQELBQAw | |
TzELMAkGA1UEBhMCVVMxKTAnBgNVBAoTIEludGVybmV0IFNlY3VyaXR5IFJlc2Vh | |
cmNoIEdyb3VwMRUwEwYDVQQDEwxJU1JHIFJvb3QgWDEwHhcNMTUwNjA0MTEwNDM4 | |
WhcNMzUwNjA0MTEwNDM4WjBPMQswCQYDVQQGEwJVUzEpMCcGA1UEChMgSW50ZXJu | |
ZXQgU2VjdXJpdHkgUmVzZWFyY2ggR3JvdXAxFTATBgNVBAMTDElTUkcgUm9vdCBY | |
MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK3oJHP0FDfzm54rVygc | |
h77ct984kIxuPOZXoHj3dcKi/vVqbvYATyjb3miGbESTtrFj/RQSa78f0uoxmyF+ | |
0TM8ukj13Xnfs7j/EvEhmkvBioZxaUpmZmyPfjxwv60pIgbz5MDmgK7iS4+3mX6U | |
A5/TR5d8mUgjU+g4rk8Kb4Mu0UlXjIB0ttov0DiNewNwIRt18jA8+o+u3dpjq+sW | |
T8KOEUt+zwvo/7V3LvSye0rgTBIlDHCNAymg4VMk7BPZ7hm/ELNKjD+Jo2FR3qyH | |
B5T0Y3HsLuJvW5iB4YlcNHlsdu87kGJ55tukmi8mxdAQ4Q7e2RCOFvu396j3x+UC | |
B5iPNgiV5+I3lg02dZ77DnKxHZu8A/lJBdiB3QW0KtZB6awBdpUKD9jf1b0SHzUv | |
KBds0pjBqAlkd25HN7rOrFleaJ1/ctaJxQZBKT5ZPt0m9STJEadao0xAH0ahmbWn | |
OlFuhjuefXKnEgV4We0+UXgVCwOPjdAvBbI+e0ocS3MFEvzG6uBQE3xDk3SzynTn | |
jh8BCNAw1FtxNrQHusEwMFxIt4I7mKZ9YIqioymCzLq9gwQbooMDQaHWBfEbwrbw | |
qHyGO0aoSCqI3Haadr8faqU9GY/rOPNk3sgrDQoo//fb4hVC1CLQJ13hef4Y53CI | |
rU7m2Ys6xt0nUW7/vGT1M0NPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV | |
HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR5tFnme7bl5AFzgAiIyBpY9umbbjANBgkq | |
hkiG9w0BAQsFAAOCAgEAVR9YqbyyqFDQDLHYGmkgJykIrGF1XIpu+ILlaS/V9lZL | |
ubhzEFnTIZd+50xx+7LSYK05qAvqFyFWhfFQDlnrzuBZ6brJFe+GnY+EgPbk6ZGQ | |
3BebYhtF8GaV0nxvwuo77x/Py9auJ/GpsMiu/X1+mvoiBOv/2X/qkSsisRcOj/KK | |
NFtY2PwByVS5uCbMiogziUwthDyC3+6WVwW6LLv3xLfHTjuCvjHIInNzktHCgKQ5 | |
ORAzI4JMPJ+GslWYHb4phowim57iaztXOoJwTdwJx4nLCgdNbOhdjsnvzqvHu7Ur | |
TkXWStAmzOVyyghqpZXjFaH3pO3JLF+l+/+sKAIuvtd7u+Nxe5AW0wdeRlN8NwdC | |
jNPElpzVmbUq4JUagEiuTDkHzsxHpFKVK7q4+63SM1N95R1NbdWhscdCb+ZAJzVc | |
oyi3B43njTOQ5yOf+1CceWxG1bQVs5ZufpsMljq4Ui0/1lvh+wjChP4kqKOJ2qxq | |
4RgqsahDYVvTH9w7jXbyLeiNdd8XM2w9U/t7y0Ff/9yi0GE44Za4rF2LN9d11TPA | |
mRGunUHBcnWEvgJBQl9nJEiU0Zsnvgc/ubhPgXRR4Xq37Z0j4r7g1SgEEzwxA57d | |
emyPxgcYxn/eR44/KJ4EBs+lVDR3veyJm+kXQ99b21/+jh5Xos1AnX5iItreGCc= | |
-----END CERTIFICATE----- | |
# Issuer: O=FNMT-RCM OU=AC RAIZ FNMT-RCM | |
# Subject: O=FNMT-RCM OU=AC RAIZ FNMT-RCM | |
# Label: "AC RAIZ FNMT-RCM" | |
# Serial: 485876308206448804701554682760554759 | |
# MD5 Fingerprint: e2:09:04:b4:d3:bd:d1:a0:14:fd:1a:d2:47:c4:57:1d | |
# SHA1 Fingerprint: ec:50:35:07:b2:15:c4:95:62:19:e2:a8:9a:5b:42:99:2c:4c:2c:20 | |
# SHA256 Fingerprint: eb:c5:57:0c:29:01:8c:4d:67:b1:aa:12:7b:af:12:f7:03:b4:61:1e:bc:17:b7:da:b5:57:38:94:17:9b:93:fa | |
-----BEGIN CERTIFICATE----- | |
MIIFgzCCA2ugAwIBAgIPXZONMGc2yAYdGsdUhGkHMA0GCSqGSIb3DQEBCwUAMDsx | |
CzAJBgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJ | |
WiBGTk1ULVJDTTAeFw0wODEwMjkxNTU5NTZaFw0zMDAxMDEwMDAwMDBaMDsxCzAJ | |
BgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJWiBG | |
Tk1ULVJDTTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBALpxgHpMhm5/ | |
yBNtwMZ9HACXjywMI7sQmkCpGreHiPibVmr75nuOi5KOpyVdWRHbNi63URcfqQgf | |
BBckWKo3Shjf5TnUV/3XwSyRAZHiItQDwFj8d0fsjz50Q7qsNI1NOHZnjrDIbzAz | |
WHFctPVrbtQBULgTfmxKo0nRIBnuvMApGGWn3v7v3QqQIecaZ5JCEJhfTzC8PhxF | |
tBDXaEAUwED653cXeuYLj2VbPNmaUtu1vZ5Gzz3rkQUCwJaydkxNEJY7kvqcfw+Z | |
374jNUUeAlz+taibmSXaXvMiwzn15Cou08YfxGyqxRxqAQVKL9LFwag0Jl1mpdIC | |
IfkYtwb1TplvqKtMUejPUBjFd8g5CSxJkjKZqLsXF3mwWsXmo8RZZUc1g16p6DUL | |
mbvkzSDGm0oGObVo/CK67lWMK07q87Hj/LaZmtVC+nFNCM+HHmpxffnTtOmlcYF7 | |
wk5HlqX2doWjKI/pgG6BU6VtX7hI+cL5NqYuSf+4lsKMB7ObiFj86xsc3i1w4peS | |
MKGJ47xVqCfWS+2QrYv6YyVZLag13cqXM7zlzced0ezvXg5KkAYmY6252TUtB7p2 | |
ZSysV4999AeU14ECll2jB0nVetBX+RvnU0Z1qrB5QstocQjpYL05ac70r8NWQMet | |
UqIJ5G+GR4of6ygnXYMgrwTJbFaai0b1AgMBAAGjgYMwgYAwDwYDVR0TAQH/BAUw | |
AwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFPd9xf3E6Jobd2Sn9R2gzL+H | |
YJptMD4GA1UdIAQ3MDUwMwYEVR0gADArMCkGCCsGAQUFBwIBFh1odHRwOi8vd3d3 | |
LmNlcnQuZm5tdC5lcy9kcGNzLzANBgkqhkiG9w0BAQsFAAOCAgEAB5BK3/MjTvDD | |
nFFlm5wioooMhfNzKWtN/gHiqQxjAb8EZ6WdmF/9ARP67Jpi6Yb+tmLSbkyU+8B1 | |
RXxlDPiyN8+sD8+Nb/kZ94/sHvJwnvDKuO+3/3Y3dlv2bojzr2IyIpMNOmqOFGYM | |
LVN0V2Ue1bLdI4E7pWYjJ2cJj+F3qkPNZVEI7VFY/uY5+ctHhKQV8Xa7pO6kO8Rf | |
77IzlhEYt8llvhjho6Tc+hj507wTmzl6NLrTQfv6MooqtyuGC2mDOL7Nii4LcK2N | |
JpLuHvUBKwrZ1pebbuCoGRw6IYsMHkCtA+fdZn71uSANA+iW+YJF1DngoABd15jm | |
fZ5nc8OaKveri6E6FO80vFIOiZiaBECEHX5FaZNXzuvO+FB8TxxuBEOb+dY7Ixjp | |
6o7RTUaN8Tvkasq6+yO3m/qZASlaWFot4/nUbQ4mrcFuNLwy+AwF+mWj2zs3gyLp | |
1txyM/1d8iC9djwj2ij3+RvrWWTV3F9yfiD8zYm1kGdNYno/Tq0dwzn+evQoFt9B | |
9kiABdcPUXmsEKvU7ANm5mqwujGSQkBqvjrTcuFqN1W8rB2Vt2lh8kORdOag0wok | |
RqEIr9baRRmW1FMdW4R58MD3R++Lj8UGrp1MYp3/RgT408m2ECVAdf4WqslKYIYv | |
uu8wd+RU4riEmViAqhOLUTpPSPaLtrM= | |
-----END CERTIFICATE----- | |
# Issuer: CN=Amazon Root CA 1 O=Amazon | |
# Subject: CN=Amazon Root CA 1 O=Amazon | |
# Label: "Amazon Root CA 1" | |
# Serial: 143266978916655856878034712317230054538369994 | |
# MD5 Fingerprint: 43:c6:bf:ae:ec:fe:ad:2f:18:c6:88:68:30:fc:c8:e6 | |
# SHA1 Fingerprint: 8d:a7:f9:65:ec:5e:fc:37:91:0f:1c:6e:59:fd:c1:cc:6a:6e:de:16 | |
# SHA256 Fingerprint: 8e:cd:e6:88:4f:3d:87:b1:12:5b:a3:1a:c3:fc:b1:3d:70:16:de:7f:57:cc:90:4f:e1:cb:97:c6:ae:98:19:6e | |
-----BEGIN CERTIFICATE----- | |
MIIDQTCCAimgAwIBAgITBmyfz5m/jAo54vB4ikPmljZbyjANBgkqhkiG9w0BAQsF | |
ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6 | |
b24gUm9vdCBDQSAxMB4XDTE1MDUyNjAwMDAwMFoXDTM4MDExNzAwMDAwMFowOTEL | |
MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv | |
b3QgQ0EgMTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALJ4gHHKeNXj | |
ca9HgFB0fW7Y14h29Jlo91ghYPl0hAEvrAIthtOgQ3pOsqTQNroBvo3bSMgHFzZM | |
9O6II8c+6zf1tRn4SWiw3te5djgdYZ6k/oI2peVKVuRF4fn9tBb6dNqcmzU5L/qw | |
IFAGbHrQgLKm+a/sRxmPUDgH3KKHOVj4utWp+UhnMJbulHheb4mjUcAwhmahRWa6 | |
VOujw5H5SNz/0egwLX0tdHA114gk957EWW67c4cX8jJGKLhD+rcdqsq08p8kDi1L | |
93FcXmn/6pUCyziKrlA4b9v7LWIbxcceVOF34GfID5yHI9Y/QCB/IIDEgEw+OyQm | |
jgSubJrIqg0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC | |
AYYwHQYDVR0OBBYEFIQYzIU07LwMlJQuCFmcx7IQTgoIMA0GCSqGSIb3DQEBCwUA | |
A4IBAQCY8jdaQZChGsV2USggNiMOruYou6r4lK5IpDB/G/wkjUu0yKGX9rbxenDI | |
U5PMCCjjmCXPI6T53iHTfIUJrU6adTrCC2qJeHZERxhlbI1Bjjt/msv0tadQ1wUs | |
N+gDS63pYaACbvXy8MWy7Vu33PqUXHeeE6V/Uq2V8viTO96LXFvKWlJbYK8U90vv | |
o/ufQJVtMVT8QtPHRh8jrdkPSHCa2XV4cdFyQzR1bldZwgJcJmApzyMZFo6IQ6XU | |
5MsI+yMRQ+hDKXJioaldXgjUkK642M4UwtBV8ob2xJNDd2ZhwLnoQdeXeGADbkpy | |
rqXRfboQnoZsG4q5WTP468SQvvG5 | |
-----END CERTIFICATE----- | |
# Issuer: CN=Amazon Root CA 2 O=Amazon | |
# Subject: CN=Amazon Root CA 2 O=Amazon | |
# Label: "Amazon Root CA 2" | |
# Serial: 143266982885963551818349160658925006970653239 | |
# MD5 Fingerprint: c8:e5:8d:ce:a8:42:e2:7a:c0:2a:5c:7c:9e:26:bf:66 | |
# SHA1 Fingerprint: 5a:8c:ef:45:d7:a6:98:59:76:7a:8c:8b:44:96:b5:78:cf:47:4b:1a | |
# SHA256 Fingerprint: 1b:a5:b2:aa:8c:65:40:1a:82:96:01:18:f8:0b:ec:4f:62:30:4d:83:ce:c4:71:3a:19:c3:9c:01:1e:a4:6d:b4 | |
-----BEGIN CERTIFICATE----- | |
MIIFQTCCAymgAwIBAgITBmyf0pY1hp8KD+WGePhbJruKNzANBgkqhkiG9w0BAQwF | |
ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6 | |
b24gUm9vdCBDQSAyMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTEL | |
MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv | |
b3QgQ0EgMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK2Wny2cSkxK | |
gXlRmeyKy2tgURO8TW0G/LAIjd0ZEGrHJgw12MBvIITplLGbhQPDW9tK6Mj4kHbZ | |
W0/jTOgGNk3Mmqw9DJArktQGGWCsN0R5hYGCrVo34A3MnaZMUnbqQ523BNFQ9lXg | |
1dKmSYXpN+nKfq5clU1Imj+uIFptiJXZNLhSGkOQsL9sBbm2eLfq0OQ6PBJTYv9K | |
8nu+NQWpEjTj82R0Yiw9AElaKP4yRLuH3WUnAnE72kr3H9rN9yFVkE8P7K6C4Z9r | |
2UXTu/Bfh+08LDmG2j/e7HJV63mjrdvdfLC6HM783k81ds8P+HgfajZRRidhW+me | |
z/CiVX18JYpvL7TFz4QuK/0NURBs+18bvBt+xa47mAExkv8LV/SasrlX6avvDXbR | |
8O70zoan4G7ptGmh32n2M8ZpLpcTnqWHsFcQgTfJU7O7f/aS0ZzQGPSSbtqDT6Zj | |
mUyl+17vIWR6IF9sZIUVyzfpYgwLKhbcAS4y2j5L9Z469hdAlO+ekQiG+r5jqFoz | |
7Mt0Q5X5bGlSNscpb/xVA1wf+5+9R+vnSUeVC06JIglJ4PVhHvG/LopyboBZ/1c6 | |
+XUyo05f7O0oYtlNc/LMgRdg7c3r3NunysV+Ar3yVAhU/bQtCSwXVEqY0VThUWcI | |
0u1ufm8/0i2BWSlmy5A5lREedCf+3euvAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMB | |
Af8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSwDPBMMPQFWAJI/TPlUq9LhONm | |
UjANBgkqhkiG9w0BAQwFAAOCAgEAqqiAjw54o+Ci1M3m9Zh6O+oAA7CXDpO8Wqj2 | |
LIxyh6mx/H9z/WNxeKWHWc8w4Q0QshNabYL1auaAn6AFC2jkR2vHat+2/XcycuUY | |
+gn0oJMsXdKMdYV2ZZAMA3m3MSNjrXiDCYZohMr/+c8mmpJ5581LxedhpxfL86kS | |
k5Nrp+gvU5LEYFiwzAJRGFuFjWJZY7attN6a+yb3ACfAXVU3dJnJUH/jWS5E4ywl | |
7uxMMne0nxrpS10gxdr9HIcWxkPo1LsmmkVwXqkLN1PiRnsn/eBG8om3zEK2yygm | |
btmlyTrIQRNg91CMFa6ybRoVGld45pIq2WWQgj9sAq+uEjonljYE1x2igGOpm/Hl | |
urR8FLBOybEfdF849lHqm/osohHUqS0nGkWxr7JOcQ3AWEbWaQbLU8uz/mtBzUF+ | |
fUwPfHJ5elnNXkoOrJupmHN5fLT0zLm4BwyydFy4x2+IoZCn9Kr5v2c69BoVYh63 | |
n749sSmvZ6ES8lgQGVMDMBu4Gon2nL2XA46jCfMdiyHxtN/kHNGfZQIG6lzWE7OE | |
76KlXIx3KadowGuuQNKotOrN8I1LOJwZmhsoVLiJkO/KdYE+HvJkJMcYr07/R54H | |
9jVlpNMKVv/1F2Rs76giJUmTtt8AF9pYfl3uxRuw0dFfIRDH+fO6AgonB8Xx1sfT | |
4PsJYGw= | |
-----END CERTIFICATE----- | |
# Issuer: CN=Amazon Root CA 3 O=Amazon | |
# Subject: CN=Amazon Root CA 3 O=Amazon | |
# Label: "Amazon Root CA 3" | |
# Serial: 143266986699090766294700635381230934788665930 | |
# MD5 Fingerprint: a0:d4:ef:0b:f7:b5:d8:49:95:2a:ec:f5:c4:fc:81:87 | |
# SHA1 Fingerprint: 0d:44:dd:8c:3c:8c:1a:1a:58:75:64:81:e9:0f:2e:2a:ff:b3:d2:6e | |
# SHA256 Fingerprint: 18:ce:6c:fe:7b:f1:4e:60:b2:e3:47:b8:df:e8:68:cb:31:d0:2e:bb:3a:da:27:15:69:f5:03:43:b4:6d:b3:a4 | |
-----BEGIN CERTIFICATE----- | |
MIIBtjCCAVugAwIBAgITBmyf1XSXNmY/Owua2eiedgPySjAKBggqhkjOPQQDAjA5 | |
MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g | |
Um9vdCBDQSAzMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG | |
A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg | |
Q0EgMzBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABCmXp8ZBf8ANm+gBG1bG8lKl | |
ui2yEujSLtf6ycXYqm0fc4E7O5hrOXwzpcVOho6AF2hiRVd9RFgdszflZwjrZt6j | |
QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSr | |
ttvXBp43rDCGB5Fwx5zEGbF4wDAKBggqhkjOPQQDAgNJADBGAiEA4IWSoxe3jfkr | |
BqWTrBqYaGFy+uGh0PsceGCmQ5nFuMQCIQCcAu/xlJyzlvnrxir4tiz+OpAUFteM | |
YyRIHN8wfdVoOw== | |
-----END CERTIFICATE----- | |
# Issuer: CN=Amazon Root CA 4 O=Amazon | |
# Subject: CN=Amazon Root CA 4 O=Amazon | |
# Label: "Amazon Root CA 4" | |
# Serial: 143266989758080763974105200630763877849284878 | |
# MD5 Fingerprint: 89:bc:27:d5:eb:17:8d:06:6a:69:d5:fd:89:47:b4:cd | |
# SHA1 Fingerprint: f6:10:84:07:d6:f8:bb:67:98:0c:c2:e2:44:c2:eb:ae:1c:ef:63:be | |
# SHA256 Fingerprint: e3:5d:28:41:9e:d0:20:25:cf:a6:90:38:cd:62:39:62:45:8d:a5:c6:95:fb:de:a3:c2:2b:0b:fb:25:89:70:92 | |
-----BEGIN CERTIFICATE----- | |
MIIB8jCCAXigAwIBAgITBmyf18G7EEwpQ+Vxe3ssyBrBDjAKBggqhkjOPQQDAzA5 | |
MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g | |
Um9vdCBDQSA0MB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG | |
A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg | |
Q0EgNDB2MBAGByqGSM49AgEGBSuBBAAiA2IABNKrijdPo1MN/sGKe0uoe0ZLY7Bi | |
9i0b2whxIdIA6GO9mif78DluXeo9pcmBqqNbIJhFXRbb/egQbeOc4OO9X4Ri83Bk | |
M6DLJC9wuoihKqB1+IGuYgbEgds5bimwHvouXKNCMEAwDwYDVR0TAQH/BAUwAwEB | |
/zAOBgNVHQ8BAf8EBAMCAYYwHQYDVR0OBBYEFNPsxzplbszh2naaVvuc84ZtV+WB | |
MAoGCCqGSM49BAMDA2gAMGUCMDqLIfG9fhGt0O9Yli/W651+kI0rz2ZVwyzjKKlw | |
CkcO8DdZEv8tmZQoTipPNU0zWgIxAOp1AE47xDqUEpHJWEadIRNyp4iciuRMStuW | |
1KyLa2tJElMzrdfkviT8tQp21KW8EA== | |
-----END CERTIFICATE----- | |
# Issuer: CN=LuxTrust Global Root 2 O=LuxTrust S.A. | |
# Subject: CN=LuxTrust Global Root 2 O=LuxTrust S.A. | |
# Label: "LuxTrust Global Root 2" | |
# Serial: 59914338225734147123941058376788110305822489521 | |
# MD5 Fingerprint: b2:e1:09:00:61:af:f7:f1:91:6f:c4:ad:8d:5e:3b:7c | |
# SHA1 Fingerprint: 1e:0e:56:19:0a:d1:8b:25:98:b2:04:44:ff:66:8a:04:17:99:5f:3f | |
# SHA256 Fingerprint: 54:45:5f:71:29:c2:0b:14:47:c4:18:f9:97:16:8f:24:c5:8f:c5:02:3b:f5:da:5b:e2:eb:6e:1d:d8:90:2e:d5 | |
-----BEGIN CERTIFICATE----- | |
MIIFwzCCA6ugAwIBAgIUCn6m30tEntpqJIWe5rgV0xZ/u7EwDQYJKoZIhvcNAQEL | |
BQAwRjELMAkGA1UEBhMCTFUxFjAUBgNVBAoMDUx1eFRydXN0IFMuQS4xHzAdBgNV | |
BAMMFkx1eFRydXN0IEdsb2JhbCBSb290IDIwHhcNMTUwMzA1MTMyMTU3WhcNMzUw | |
MzA1MTMyMTU3WjBGMQswCQYDVQQGEwJMVTEWMBQGA1UECgwNTHV4VHJ1c3QgUy5B | |
LjEfMB0GA1UEAwwWTHV4VHJ1c3QgR2xvYmFsIFJvb3QgMjCCAiIwDQYJKoZIhvcN | |
AQEBBQADggIPADCCAgoCggIBANeFl78RmOnwYoNMPIf5U2o3C/IPPIfOb9wmKb3F | |
ibrJgz337spbxm1Jc7TJRqMbNBM/wYlFV/TZsfs2ZUv7COJIcRHIbjuend+JZTem | |
hfY7RBi2xjcwYkSSl2l9QjAk5A0MiWtj3sXh306pFGxT4GHO9hcvHTy95iJMHZP1 | |
EMShduxq3sVs35a0VkBCwGKSMKEtFZSg0iAGCW5qbeXrt77U8PEVfIvmTroTzEsn | |
Xpk8F12PgX8zPU/TPxvsXD/wPEx1bvKm1Z3aLQdjAsZy6ZS8TEmVT4hSyNvoaYL4 | |
zDRbIvCGp4m9SAptZoFtyMhk+wHh9OHe2Z7d21vUKpkmFRseTJIpgp7VkoGSQXAZ | |
96Tlk0u8d2cx3Rz9MXANF5kM+Qw5GSoXtTBxVdUPrljhPS80m8+f9niFwpN6cj5m | |
j5wWEWCPnolvZ77gR1o7DJpni89Gxq44o/KnvObWhWszJHAiS8sIm7vI+AIpHb4g | |
DEa/a4ebsypmQjVGbKq6rfmYe+lQVRQxv7HaLe2ArWgk+2mr2HETMOZns4dA/Yl+ | |
8kPREd8vZS9kzl8UubG/Mb2HeFpZZYiq/FkySIbWTLkpS5XTdvN3JW1CHDiDTf2j | |
X5t/Lax5Gw5CMZdjpPuKadUiDTSQMC6otOBttpSsvItO13D8xTiOZCXhTTmQzsmH | |
hFhxAgMBAAGjgagwgaUwDwYDVR0TAQH/BAUwAwEB/zBCBgNVHSAEOzA5MDcGByuB | |
KwEBAQowLDAqBggrBgEFBQcCARYeaHR0cHM6Ly9yZXBvc2l0b3J5Lmx1eHRydXN0 | |
Lmx1MA4GA1UdDwEB/wQEAwIBBjAfBgNVHSMEGDAWgBT/GCh2+UgFLKGu8SsbK7JT | |
+Et8szAdBgNVHQ4EFgQU/xgodvlIBSyhrvErGyuyU/hLfLMwDQYJKoZIhvcNAQEL | |
BQADggIBAGoZFO1uecEsh9QNcH7X9njJCwROxLHOk3D+sFTAMs2ZMGQXvw/l4jP9 | |
BzZAcg4atmpZ1gDlaCDdLnINH2pkMSCEfUmmWjfrRcmF9dTHF5kH5ptV5AzoqbTO | |
jFu1EVzPig4N1qx3gf4ynCSecs5U89BvolbW7MM3LGVYvlcAGvI1+ut7MV3CwRI9 | |
loGIlonBWVx65n9wNOeD4rHh4bhY79SV5GCc8JaXcozrhAIuZY+kt9J/Z93I055c | |
qqmkoCUUBpvsT34tC38ddfEz2O3OuHVtPlu5mB0xDVbYQw8wkbIEa91WvpWAVWe+ | |
2M2D2RjuLg+GLZKecBPs3lHJQ3gCpU3I+V/EkVhGFndadKpAvAefMLmx9xIX3eP/ | |
JEAdemrRTxgKqpAd60Ae36EeRJIQmvKN4dFLRp7oRUKX6kWZ8+xm1QL68qZKJKre | |
zrnK+T+Tb/mjuuqlPpmt/f97mfVl7vBZKGfXkJWkE4SphMHozs51k2MavDzq1WQf | |
LSoSOcbDWjLtR5EWDrw4wVDej8oqkDQc7kGUnF4ZLvhFSZl0kbAEb+MEWrGrKqv+ | |
x9CWttrhSmQGbmBNvUJO/3jaJMobtNeWOWyu8Q6qp31IiyBMz2TWuJdGsE7RKlY6 | |
oJO9r4Ak4Ap+58rVyuiFVdw2KuGUaJPHZnJED4AhMmwlxyOAgwrr | |
-----END CERTIFICATE----- | |
# Issuer: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM | |
# Subject: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM | |
# Label: "TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1" | |
# Serial: 1 | |
# MD5 Fingerprint: dc:00:81:dc:69:2f:3e:2f:b0:3b:f6:3d:5a:91:8e:49 | |
# SHA1 Fingerprint: 31:43:64:9b:ec:ce:27:ec:ed:3a:3f:0b:8f:0d:e4:e8:91:dd:ee:ca | |
# SHA256 Fingerprint: 46:ed:c3:68:90:46:d5:3a:45:3f:b3:10:4a:b8:0d:ca:ec:65:8b:26:60:ea:16:29:dd:7e:86:79:90:64:87:16 | |
-----BEGIN CERTIFICATE----- | |
MIIEYzCCA0ugAwIBAgIBATANBgkqhkiG9w0BAQsFADCB0jELMAkGA1UEBhMCVFIx | |
GDAWBgNVBAcTD0dlYnplIC0gS29jYWVsaTFCMEAGA1UEChM5VHVya2l5ZSBCaWxp | |
bXNlbCB2ZSBUZWtub2xvamlrIEFyYXN0aXJtYSBLdXJ1bXUgLSBUVUJJVEFLMS0w | |
KwYDVQQLEyRLYW11IFNlcnRpZmlrYXN5b24gTWVya2V6aSAtIEthbXUgU00xNjA0 | |
BgNVBAMTLVRVQklUQUsgS2FtdSBTTSBTU0wgS29rIFNlcnRpZmlrYXNpIC0gU3Vy | |
dW0gMTAeFw0xMzExMjUwODI1NTVaFw00MzEwMjUwODI1NTVaMIHSMQswCQYDVQQG | |
EwJUUjEYMBYGA1UEBxMPR2ViemUgLSBLb2NhZWxpMUIwQAYDVQQKEzlUdXJraXll | |
IEJpbGltc2VsIHZlIFRla25vbG9qaWsgQXJhc3Rpcm1hIEt1cnVtdSAtIFRVQklU | |
QUsxLTArBgNVBAsTJEthbXUgU2VydGlmaWthc3lvbiBNZXJrZXppIC0gS2FtdSBT | |
TTE2MDQGA1UEAxMtVFVCSVRBSyBLYW11IFNNIFNTTCBLb2sgU2VydGlmaWthc2kg | |
LSBTdXJ1bSAxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAr3UwM6q7 | |
a9OZLBI3hNmNe5eA027n/5tQlT6QlVZC1xl8JoSNkvoBHToP4mQ4t4y86Ij5iySr | |
LqP1N+RAjhgleYN1Hzv/bKjFxlb4tO2KRKOrbEz8HdDc72i9z+SqzvBV96I01INr | |
N3wcwv61A+xXzry0tcXtAA9TNypN9E8Mg/uGz8v+jE69h/mniyFXnHrfA2eJLJ2X | |
YacQuFWQfw4tJzh03+f92k4S400VIgLI4OD8D62K18lUUMw7D8oWgITQUVbDjlZ/ | |
iSIzL+aFCr2lqBs23tPcLG07xxO9WSMs5uWk99gL7eqQQESolbuT1dCANLZGeA4f | |
AJNG4e7p+exPFwIDAQABo0IwQDAdBgNVHQ4EFgQUZT/HiobGPN08VFw1+DrtUgxH | |
V8gwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEL | |
BQADggEBACo/4fEyjq7hmFxLXs9rHmoJ0iKpEsdeV31zVmSAhHqT5Am5EM2fKifh | |
AHe+SMg1qIGf5LgsyX8OsNJLN13qudULXjS99HMpw+0mFZx+CFOKWI3QSyjfwbPf | |
IPP54+M638yclNhOT8NrF7f3cuitZjO1JVOr4PhMqZ398g26rrnZqsZr+ZO7rqu4 | |
lzwDGrpDxpa5RXI4s6ehlj2Re37AIVNMh+3yC1SVUZPVIqUNivGTDj5UDrDYyU7c | |
8jEyVupk+eq1nRZmQnLzf9OxMUP8pI4X8W0jq5Rm+K37DwhuJi1/FwcJsoz7UMCf | |
lo3Ptv0AnVoUmr8CRPXBwp8iXqIPoeM= | |
-----END CERTIFICATE----- | |
# Issuer: CN=GDCA TrustAUTH R5 ROOT O=GUANG DONG CERTIFICATE AUTHORITY CO.,LTD. | |
# Subject: CN=GDCA TrustAUTH R5 ROOT O=GUANG DONG CERTIFICATE AUTHORITY CO.,LTD. | |
# Label: "GDCA TrustAUTH R5 ROOT" | |
# Serial: 9009899650740120186 | |
# MD5 Fingerprint: 63:cc:d9:3d:34:35:5c:6f:53:a3:e2:08:70:48:1f:b4 | |
# SHA1 Fingerprint: 0f:36:38:5b:81:1a:25:c3:9b:31:4e:83:ca:e9:34:66:70:cc:74:b4 | |
# SHA256 Fingerprint: bf:ff:8f:d0:44:33:48:7d:6a:8a:a6:0c:1a:29:76:7a:9f:c2:bb:b0:5e:42:0f:71:3a:13:b9:92:89:1d:38:93 | |
-----BEGIN CERTIFICATE----- | |
MIIFiDCCA3CgAwIBAgIIfQmX/vBH6nowDQYJKoZIhvcNAQELBQAwYjELMAkGA1UE | |
BhMCQ04xMjAwBgNVBAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZ | |
IENPLixMVEQuMR8wHQYDVQQDDBZHRENBIFRydXN0QVVUSCBSNSBST09UMB4XDTE0 | |
MTEyNjA1MTMxNVoXDTQwMTIzMTE1NTk1OVowYjELMAkGA1UEBhMCQ04xMjAwBgNV | |
BAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZIENPLixMVEQuMR8w | |
HQYDVQQDDBZHRENBIFRydXN0QVVUSCBSNSBST09UMIICIjANBgkqhkiG9w0BAQEF | |
AAOCAg8AMIICCgKCAgEA2aMW8Mh0dHeb7zMNOwZ+Vfy1YI92hhJCfVZmPoiC7XJj | |
Dp6L3TQsAlFRwxn9WVSEyfFrs0yw6ehGXTjGoqcuEVe6ghWinI9tsJlKCvLriXBj | |
TnnEt1u9ol2x8kECK62pOqPseQrsXzrj/e+APK00mxqriCZ7VqKChh/rNYmDf1+u | |
KU49tm7srsHwJ5uu4/Ts765/94Y9cnrrpftZTqfrlYwiOXnhLQiPzLyRuEH3FMEj | |
qcOtmkVEs7LXLM3GKeJQEK5cy4KOFxg2fZfmiJqwTTQJ9Cy5WmYqsBebnh52nUpm | |
MUHfP/vFBu8btn4aRjb3ZGM74zkYI+dndRTVdVeSN72+ahsmUPI2JgaQxXABZG12 | |
ZuGR224HwGGALrIuL4xwp9E7PLOR5G62xDtw8mySlwnNR30YwPO7ng/Wi64HtloP | |
zgsMR6flPri9fcebNaBhlzpBdRfMK5Z3KpIhHtmVdiBnaM8Nvd/WHwlqmuLMc3Gk | |
L30SgLdTMEZeS1SZD2fJpcjyIMGC7J0R38IC+xo70e0gmu9lZJIQDSri3nDxGGeC | |
jGHeuLzRL5z7D9Ar7Rt2ueQ5Vfj4oR24qoAATILnsn8JuLwwoC8N9VKejveSswoA | |
HQBUlwbgsQfZxw9cZX08bVlX5O2ljelAU58VS6Bx9hoh49pwBiFYFIeFd3mqgnkC | |
AwEAAaNCMEAwHQYDVR0OBBYEFOLJQJ9NzuiaoXzPDj9lxSmIahlRMA8GA1UdEwEB | |
/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQDRSVfg | |
p8xoWLoBDysZzY2wYUWsEe1jUGn4H3++Fo/9nesLqjJHdtJnJO29fDMylyrHBYZm | |
DRd9FBUb1Ov9H5r2XpdptxolpAqzkT9fNqyL7FeoPueBihhXOYV0GkLH6VsTX4/5 | |
COmSdI31R9KrO9b7eGZONn356ZLpBN79SWP8bfsUcZNnL0dKt7n/HipzcEYwv1ry | |
L3ml4Y0M2fmyYzeMN2WFcGpcWwlyua1jPLHd+PwyvzeG5LuOmCd+uh8W4XAR8gPf | |
JWIyJyYYMoSf/wA6E7qaTfRPuBRwIrHKK5DOKcFw9C+df/KQHtZa37dG/OaG+svg | |
IHZ6uqbL9XzeYqWxi+7egmaKTjowHz+Ay60nugxe19CxVsp3cbK1daFQqUBDF8Io | |
2c9Si1vIY9RCPqAzekYu9wogRlR+ak8x8YF+QnQ4ZXMn7sZ8uI7XpTrXmKGcjBBV | |
09tL7ECQ8s1uV9JiDnxXk7Gnbc2dg7sq5+W2O3FYrf3RRbxake5TFW/TRQl1brqQ | |
XR4EzzffHqhmsYzmIGrv/EhOdJhCrylvLmrH+33RZjEizIYAfmaDDEL0vTSSwxrq | |
T8p+ck0LcIymSLumoRT2+1hEmRSuqguTaaApJUqlyyvdimYHFngVV3Eb7PVHhPOe | |
MTd61X8kreS8/f3MboPoDKi3QWwH3b08hpcv0g== | |
-----END CERTIFICATE----- | |
# Issuer: CN=TrustCor RootCert CA-1 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority | |
# Subject: CN=TrustCor RootCert CA-1 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority | |
# Label: "TrustCor RootCert CA-1" | |
# Serial: 15752444095811006489 | |
# MD5 Fingerprint: 6e:85:f1:dc:1a:00:d3:22:d5:b2:b2:ac:6b:37:05:45 | |
# SHA1 Fingerprint: ff:bd:cd:e7:82:c8:43:5e:3c:6f:26:86:5c:ca:a8:3a:45:5b:c3:0a | |
# SHA256 Fingerprint: d4:0e:9c:86:cd:8f:e4:68:c1:77:69:59:f4:9e:a7:74:fa:54:86:84:b6:c4:06:f3:90:92:61:f4:dc:e2:57:5c | |
-----BEGIN CERTIFICATE----- | |
MIIEMDCCAxigAwIBAgIJANqb7HHzA7AZMA0GCSqGSIb3DQEBCwUAMIGkMQswCQYD | |
VQQGEwJQQTEPMA0GA1UECAwGUGFuYW1hMRQwEgYDVQQHDAtQYW5hbWEgQ2l0eTEk | |
MCIGA1UECgwbVHJ1c3RDb3IgU3lzdGVtcyBTLiBkZSBSLkwuMScwJQYDVQQLDB5U | |
cnVzdENvciBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxHzAdBgNVBAMMFlRydXN0Q29y | |
IFJvb3RDZXJ0IENBLTEwHhcNMTYwMjA0MTIzMjE2WhcNMjkxMjMxMTcyMzE2WjCB | |
pDELMAkGA1UEBhMCUEExDzANBgNVBAgMBlBhbmFtYTEUMBIGA1UEBwwLUGFuYW1h | |
IENpdHkxJDAiBgNVBAoMG1RydXN0Q29yIFN5c3RlbXMgUy4gZGUgUi5MLjEnMCUG | |
A1UECwweVHJ1c3RDb3IgQ2VydGlmaWNhdGUgQXV0aG9yaXR5MR8wHQYDVQQDDBZU | |
cnVzdENvciBSb290Q2VydCBDQS0xMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIB | |
CgKCAQEAv463leLCJhJrMxnHQFgKq1mqjQCj/IDHUHuO1CAmujIS2CNUSSUQIpid | |
RtLByZ5OGy4sDjjzGiVoHKZaBeYei0i/mJZ0PmnK6bV4pQa81QBeCQryJ3pS/C3V | |
seq0iWEk8xoT26nPUu0MJLq5nux+AHT6k61sKZKuUbS701e/s/OojZz0JEsq1pme | |
9J7+wH5COucLlVPat2gOkEz7cD+PSiyU8ybdY2mplNgQTsVHCJCZGxdNuWxu72CV | |
EY4hgLW9oHPY0LJ3xEXqWib7ZnZ2+AYfYW0PVcWDtxBWcgYHpfOxGgMFZA6dWorW | |
hnAbJN7+KIor0Gqw/Hqi3LJ5DotlDwIDAQABo2MwYTAdBgNVHQ4EFgQU7mtJPHo/ | |
DeOxCbeKyKsZn3MzUOcwHwYDVR0jBBgwFoAU7mtJPHo/DeOxCbeKyKsZn3MzUOcw | |
DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYwDQYJKoZIhvcNAQELBQAD | |
ggEBACUY1JGPE+6PHh0RU9otRCkZoB5rMZ5NDp6tPVxBb5UrJKF5mDo4Nvu7Zp5I | |
/5CQ7z3UuJu0h3U/IJvOcs+hVcFNZKIZBqEHMwwLKeXx6quj7LUKdJDHfXLy11yf | |
ke+Ri7fc7Waiz45mO7yfOgLgJ90WmMCV1Aqk5IGadZQ1nJBfiDcGrVmVCrDRZ9MZ | |
yonnMlo2HD6CqFqTvsbQZJG2z9m2GM/bftJlo6bEjhcxwft+dtvTheNYsnd6djts | |
L1Ac59v2Z3kf9YKVmgenFK+P3CghZwnS1k1aHBkcjndcw5QkPTJrS37UeJSDvjdN | |
zl/HHk484IkzlQsPpTLWPFp5LBk= | |
-----END CERTIFICATE----- | |
# Issuer: CN=TrustCor RootCert CA-2 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority | |
# Subject: CN=TrustCor RootCert CA-2 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority | |
# Label: "TrustCor RootCert CA-2" | |
# Serial: 2711694510199101698 | |
# MD5 Fingerprint: a2:e1:f8:18:0b:ba:45:d5:c7:41:2a:bb:37:52:45:64 | |
# SHA1 Fingerprint: b8:be:6d:cb:56:f1:55:b9:63:d4:12:ca:4e:06:34:c7:94:b2:1c:c0 | |
# SHA256 Fingerprint: 07:53:e9:40:37:8c:1b:d5:e3:83:6e:39:5d:ae:a5:cb:83:9e:50:46:f1:bd:0e:ae:19:51:cf:10:fe:c7:c9:65 | |
-----BEGIN CERTIFICATE----- | |
MIIGLzCCBBegAwIBAgIIJaHfyjPLWQIwDQYJKoZIhvcNAQELBQAwgaQxCzAJBgNV | |
BAYTAlBBMQ8wDQYDVQQIDAZQYW5hbWExFDASBgNVBAcMC1BhbmFtYSBDaXR5MSQw | |
IgYDVQQKDBtUcnVzdENvciBTeXN0ZW1zIFMuIGRlIFIuTC4xJzAlBgNVBAsMHlRy | |
dXN0Q29yIENlcnRpZmljYXRlIEF1dGhvcml0eTEfMB0GA1UEAwwWVHJ1c3RDb3Ig | |
Um9vdENlcnQgQ0EtMjAeFw0xNjAyMDQxMjMyMjNaFw0zNDEyMzExNzI2MzlaMIGk | |
MQswCQYDVQQGEwJQQTEPMA0GA1UECAwGUGFuYW1hMRQwEgYDVQQHDAtQYW5hbWEg | |
Q2l0eTEkMCIGA1UECgwbVHJ1c3RDb3IgU3lzdGVtcyBTLiBkZSBSLkwuMScwJQYD | |
VQQLDB5UcnVzdENvciBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxHzAdBgNVBAMMFlRy | |
dXN0Q29yIFJvb3RDZXJ0IENBLTIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK | |
AoICAQCnIG7CKqJiJJWQdsg4foDSq8GbZQWU9MEKENUCrO2fk8eHyLAnK0IMPQo+ | |
QVqedd2NyuCb7GgypGmSaIwLgQ5WoD4a3SwlFIIvl9NkRvRUqdw6VC0xK5mC8tkq | |
1+9xALgxpL56JAfDQiDyitSSBBtlVkxs1Pu2YVpHI7TYabS3OtB0PAx1oYxOdqHp | |
2yqlO/rOsP9+aij9JxzIsekp8VduZLTQwRVtDr4uDkbIXvRR/u8OYzo7cbrPb1nK | |
DOObXUm4TOJXsZiKQlecdu/vvdFoqNL0Cbt3Nb4lggjEFixEIFapRBF37120Hape | |
az6LMvYHL1cEksr1/p3C6eizjkxLAjHZ5DxIgif3GIJ2SDpxsROhOdUuxTTCHWKF | |
3wP+TfSvPd9cW436cOGlfifHhi5qjxLGhF5DUVCcGZt45vz27Ud+ez1m7xMTiF88 | |
oWP7+ayHNZ/zgp6kPwqcMWmLmaSISo5uZk3vFsQPeSghYA2FFn3XVDjxklb9tTNM | |
g9zXEJ9L/cb4Qr26fHMC4P99zVvh1Kxhe1fVSntb1IVYJ12/+CtgrKAmrhQhJ8Z3 | |
mjOAPF5GP/fDsaOGM8boXg25NSyqRsGFAnWAoOsk+xWq5Gd/bnc/9ASKL3x74xdh | |
8N0JqSDIvgmk0H5Ew7IwSjiqqewYmgeCK9u4nBit2uBGF6zPXQIDAQABo2MwYTAd | |
BgNVHQ4EFgQU2f4hQG6UnrybPZx9mCAZ5YwwYrIwHwYDVR0jBBgwFoAU2f4hQG6U | |
nrybPZx9mCAZ5YwwYrIwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYw | |
DQYJKoZIhvcNAQELBQADggIBAJ5Fngw7tu/hOsh80QA9z+LqBrWyOrsGS2h60COX | |
dKcs8AjYeVrXWoSK2BKaG9l9XE1wxaX5q+WjiYndAfrs3fnpkpfbsEZC89NiqpX+ | |
MWcUaViQCqoL7jcjx1BRtPV+nuN79+TMQjItSQzL/0kMmx40/W5ulop5A7Zv2wnL | |
/V9lFDfhOPXzYRZY5LVtDQsEGz9QLX+zx3oaFoBg+Iof6Rsqxvm6ARppv9JYx1RX | |
CI/hOWB3S6xZhBqI8d3LT3jX5+EzLfzuQfogsL7L9ziUwOHQhQ+77Sxzq+3+knYa | |
ZH9bDTMJBzN7Bj8RpFxwPIXAz+OQqIN3+tvmxYxoZxBnpVIt8MSZj3+/0WvitUfW | |
2dCFmU2Umw9Lje4AWkcdEQOsQRivh7dvDDqPys/cA8GiCcjl/YBeyGBCARsaU1q7 | |
N6a3vLqE6R5sGtRk2tRD/pOLS/IseRYQ1JMLiI+h2IYURpFHmygk71dSTlxCnKr3 | |
Sewn6EAes6aJInKc9Q0ztFijMDvd1GpUk74aTfOTlPf8hAs/hCBcNANExdqtvArB | |
As8e5ZTZ845b2EzwnexhF7sUMlQMAimTHpKG9n/v55IFDlndmQguLvqcAFLTxWYp | |
5KeXRKQOKIETNcX2b2TmQcTVL8w0RSXPQQCWPUouwpaYT05KnJe32x+SMsj/D1Fu | |
1uwJ | |
-----END CERTIFICATE----- | |
# Issuer: CN=TrustCor ECA-1 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority | |
# Subject: CN=TrustCor ECA-1 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority | |
# Label: "TrustCor ECA-1" | |
# Serial: 9548242946988625984 | |
# MD5 Fingerprint: 27:92:23:1d:0a:f5:40:7c:e9:e6:6b:9d:d8:f5:e7:6c | |
# SHA1 Fingerprint: 58:d1:df:95:95:67:6b:63:c0:f0:5b:1c:17:4d:8b:84:0b:c8:78:bd | |
# SHA256 Fingerprint: 5a:88:5d:b1:9c:01:d9:12:c5:75:93:88:93:8c:af:bb:df:03:1a:b2:d4:8e:91:ee:15:58:9b:42:97:1d:03:9c | |
-----BEGIN CERTIFICATE----- | |
MIIEIDCCAwigAwIBAgIJAISCLF8cYtBAMA0GCSqGSIb3DQEBCwUAMIGcMQswCQYD | |
VQQGEwJQQTEPMA0GA1UECAwGUGFuYW1hMRQwEgYDVQQHDAtQYW5hbWEgQ2l0eTEk | |
MCIGA1UECgwbVHJ1c3RDb3IgU3lzdGVtcyBTLiBkZSBSLkwuMScwJQYDVQQLDB5U | |
cnVzdENvciBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxFzAVBgNVBAMMDlRydXN0Q29y | |
IEVDQS0xMB4XDTE2MDIwNDEyMzIzM1oXDTI5MTIzMTE3MjgwN1owgZwxCzAJBgNV | |
BAYTAlBBMQ8wDQYDVQQIDAZQYW5hbWExFDASBgNVBAcMC1BhbmFtYSBDaXR5MSQw | |
IgYDVQQKDBtUcnVzdENvciBTeXN0ZW1zIFMuIGRlIFIuTC4xJzAlBgNVBAsMHlRy | |
dXN0Q29yIENlcnRpZmljYXRlIEF1dGhvcml0eTEXMBUGA1UEAwwOVHJ1c3RDb3Ig | |
RUNBLTEwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDPj+ARtZ+odnbb | |
3w9U73NjKYKtR8aja+3+XzP4Q1HpGjORMRegdMTUpwHmspI+ap3tDvl0mEDTPwOA | |
BoJA6LHip1GnHYMma6ve+heRK9jGrB6xnhkB1Zem6g23xFUfJ3zSCNV2HykVh0A5 | |
3ThFEXXQmqc04L/NyFIduUd+Dbi7xgz2c1cWWn5DkR9VOsZtRASqnKmcp0yJF4Ou | |
owReUoCLHhIlERnXDH19MURB6tuvsBzvgdAsxZohmz3tQjtQJvLsznFhBmIhVE5/ | |
wZ0+fyCMgMsq2JdiyIMzkX2woloPV+g7zPIlstR8L+xNxqE6FXrntl019fZISjZF | |
ZtS6mFjBAgMBAAGjYzBhMB0GA1UdDgQWBBREnkj1zG1I1KBLf/5ZJC+Dl5mahjAf | |
BgNVHSMEGDAWgBREnkj1zG1I1KBLf/5ZJC+Dl5mahjAPBgNVHRMBAf8EBTADAQH/ | |
MA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAQEABT41XBVwm8nHc2Fv | |
civUwo/yQ10CzsSUuZQRg2dd4mdsdXa/uwyqNsatR5Nj3B5+1t4u/ukZMjgDfxT2 | |
AHMsWbEhBuH7rBiVDKP/mZb3Kyeb1STMHd3BOuCYRLDE5D53sXOpZCz2HAF8P11F | |
hcCF5yWPldwX8zyfGm6wyuMdKulMY/okYWLW2n62HGz1Ah3UKt1VkOsqEUc8Ll50 | |
soIipX1TH0XsJ5F95yIW6MBoNtjG8U+ARDL54dHRHareqKucBK+tIA5kmE2la8BI | |
WJZpTdwHjFGTot+fDz2LYLSCjaoITmJF4PkL0uDgPFveXHEnJcLmA4GLEFPjx1Wi | |
tJ/X5g== | |
-----END CERTIFICATE----- | |
# Issuer: CN=SSL.com Root Certification Authority RSA O=SSL Corporation | |
# Subject: CN=SSL.com Root Certification Authority RSA O=SSL Corporation | |
# Label: "SSL.com Root Certification Authority RSA" | |
# Serial: 8875640296558310041 | |
# MD5 Fingerprint: 86:69:12:c0:70:f1:ec:ac:ac:c2:d5:bc:a5:5b:a1:29 | |
# SHA1 Fingerprint: b7:ab:33:08:d1:ea:44:77:ba:14:80:12:5a:6f:bd:a9:36:49:0c:bb | |
# SHA256 Fingerprint: 85:66:6a:56:2e:e0:be:5c:e9:25:c1:d8:89:0a:6f:76:a8:7e:c1:6d:4d:7d:5f:29:ea:74:19:cf:20:12:3b:69 | |
-----BEGIN CERTIFICATE----- | |
MIIF3TCCA8WgAwIBAgIIeyyb0xaAMpkwDQYJKoZIhvcNAQELBQAwfDELMAkGA1UE | |
BhMCVVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQK | |
DA9TU0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZp | |
Y2F0aW9uIEF1dGhvcml0eSBSU0EwHhcNMTYwMjEyMTczOTM5WhcNNDEwMjEyMTcz | |
OTM5WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv | |
dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNv | |
bSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IFJTQTCCAiIwDQYJKoZIhvcN | |
AQEBBQADggIPADCCAgoCggIBAPkP3aMrfcvQKv7sZ4Wm5y4bunfh4/WvpOz6Sl2R | |
xFdHaxh3a3by/ZPkPQ/CFp4LZsNWlJ4Xg4XOVu/yFv0AYvUiCVToZRdOQbngT0aX | |
qhvIuG5iXmmxX9sqAn78bMrzQdjt0Oj8P2FI7bADFB0QDksZ4LtO7IZl/zbzXmcC | |
C52GVWH9ejjt/uIZALdvoVBidXQ8oPrIJZK0bnoix/geoeOy3ZExqysdBP+lSgQ3 | |
6YWkMyv94tZVNHwZpEpox7Ko07fKoZOI68GXvIz5HdkihCR0xwQ9aqkpk8zruFvh | |
/l8lqjRYyMEjVJ0bmBHDOJx+PYZspQ9AhnwC9FwCTyjLrnGfDzrIM/4RJTXq/LrF | |
YD3ZfBjVsqnTdXgDciLKOsMf7yzlLqn6niy2UUb9rwPW6mBo6oUWNmuF6R7As93E | |
JNyAKoFBbZQ+yODJgUEAnl6/f8UImKIYLEJAs/lvOCdLToD0PYFH4Ih86hzOtXVc | |
US4cK38acijnALXRdMbX5J+tB5O2UzU1/Dfkw/ZdFr4hc96SCvigY2q8lpJqPvi8 | |
ZVWb3vUNiSYE/CUapiVpy8JtynziWV+XrOvvLsi81xtZPCvM8hnIk2snYxnP/Okm | |
+Mpxm3+T/jRnhE6Z6/yzeAkzcLpmpnbtG3PrGqUNxCITIJRWCk4sbE6x/c+cCbqi | |
M+2HAgMBAAGjYzBhMB0GA1UdDgQWBBTdBAkHovV6fVJTEpKV7jiAJQ2mWTAPBgNV | |
HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFN0ECQei9Xp9UlMSkpXuOIAlDaZZMA4G | |
A1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAIBgRlCn7Jp0cHh5wYfGV | |
cpNxJK1ok1iOMq8bs3AD/CUrdIWQPXhq9LmLpZc7tRiRux6n+UBbkflVma8eEdBc | |
Hadm47GUBwwyOabqG7B52B2ccETjit3E+ZUfijhDPwGFpUenPUayvOUiaPd7nNgs | |
PgohyC0zrL/FgZkxdMF1ccW+sfAjRfSda/wZY52jvATGGAslu1OJD7OAUN5F7kR/ | |
q5R4ZJjT9ijdh9hwZXT7DrkT66cPYakylszeu+1jTBi7qUD3oFRuIIhxdRjqerQ0 | |
cuAjJ3dctpDqhiVAq+8zD8ufgr6iIPv2tS0a5sKFsXQP+8hlAqRSAUfdSSLBv9jr | |
a6x+3uxjMxW3IwiPxg+NQVrdjsW5j+VFP3jbutIbQLH+cU0/4IGiul607BXgk90I | |
H37hVZkLId6Tngr75qNJvTYw/ud3sqB1l7UtgYgXZSD32pAAn8lSzDLKNXz1PQ/Y | |
K9f1JmzJBjSWFupwWRoyeXkLtoh/D1JIPb9s2KJELtFOt3JY04kTlf5Eq/jXixtu | |
nLwsoFvVagCvXzfh1foQC5ichucmj87w7G6KVwuA406ywKBjYZC6VWg3dGq2ktuf | |
oYYitmUnDuy2n0Jg5GfCtdpBC8TTi2EbvPofkSvXRAdeuims2cXp71NIWuuA8ShY | |
Ic2wBlX7Jz9TkHCpBB5XJ7k= | |
-----END CERTIFICATE----- | |
# Issuer: CN=SSL.com Root Certification Authority ECC O=SSL Corporation | |
# Subject: CN=SSL.com Root Certification Authority ECC O=SSL Corporation | |
# Label: "SSL.com Root Certification Authority ECC" | |
# Serial: 8495723813297216424 | |
# MD5 Fingerprint: 2e:da:e4:39:7f:9c:8f:37:d1:70:9f:26:17:51:3a:8e | |
# SHA1 Fingerprint: c3:19:7c:39:24:e6:54:af:1b:c4:ab:20:95:7a:e2:c3:0e:13:02:6a | |
# SHA256 Fingerprint: 34:17:bb:06:cc:60:07:da:1b:96:1c:92:0b:8a:b4:ce:3f:ad:82:0e:4a:a3:0b:9a:cb:c4:a7:4e:bd:ce:bc:65 | |
-----BEGIN CERTIFICATE----- | |
MIICjTCCAhSgAwIBAgIIdebfy8FoW6gwCgYIKoZIzj0EAwIwfDELMAkGA1UEBhMC | |
VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T | |
U0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZpY2F0 | |
aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNDAzWhcNNDEwMjEyMTgxNDAz | |
WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hvdXN0 | |
b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNvbSBS | |
b290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49AgEGBSuB | |
BAAiA2IABEVuqVDEpiM2nl8ojRfLliJkP9x6jh3MCLOicSS6jkm5BBtHllirLZXI | |
7Z4INcgn64mMU1jrYor+8FsPazFSY0E7ic3s7LaNGdM0B9y7xgZ/wkWV7Mt/qCPg | |
CemB+vNH06NjMGEwHQYDVR0OBBYEFILRhXMw5zUE044CkvvlpNHEIejNMA8GA1Ud | |
EwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUgtGFczDnNQTTjgKS++Wk0cQh6M0wDgYD | |
VR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2cAMGQCMG/n61kRpGDPYbCWe+0F+S8T | |
kdzt5fxQaxFGRrMcIQBiu77D5+jNB5n5DQtdcj7EqgIwH7y6C+IwJPt8bYBVCpk+ | |
gA0z5Wajs6O7pdWLjwkspl1+4vAHCGht0nxpbl/f5Wpl | |
-----END CERTIFICATE----- | |
# Issuer: CN=SSL.com EV Root Certification Authority RSA R2 O=SSL Corporation | |
# Subject: CN=SSL.com EV Root Certification Authority RSA R2 O=SSL Corporation | |
# Label: "SSL.com EV Root Certification Authority RSA R2" | |
# Serial: 6248227494352943350 | |
# MD5 Fingerprint: e1:1e:31:58:1a:ae:54:53:02:f6:17:6a:11:7b:4d:95 | |
# SHA1 Fingerprint: 74:3a:f0:52:9b:d0:32:a0:f4:4a:83:cd:d4:ba:a9:7b:7c:2e:c4:9a | |
# SHA256 Fingerprint: 2e:7b:f1:6c:c2:24:85:a7:bb:e2:aa:86:96:75:07:61:b0:ae:39:be:3b:2f:e9:d0:cc:6d:4e:f7:34:91:42:5c | |
-----BEGIN CERTIFICATE----- | |
MIIF6zCCA9OgAwIBAgIIVrYpzTS8ePYwDQYJKoZIhvcNAQELBQAwgYIxCzAJBgNV | |
BAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4GA1UEBwwHSG91c3RvbjEYMBYGA1UE | |
CgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQDDC5TU0wuY29tIEVWIFJvb3QgQ2Vy | |
dGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIyMB4XDTE3MDUzMTE4MTQzN1oXDTQy | |
MDUzMDE4MTQzN1owgYIxCzAJBgNVBAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4G | |
A1UEBwwHSG91c3RvbjEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQD | |
DC5TU0wuY29tIEVWIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIy | |
MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAjzZlQOHWTcDXtOlG2mvq | |
M0fNTPl9fb69LT3w23jhhqXZuglXaO1XPqDQCEGD5yhBJB/jchXQARr7XnAjssuf | |
OePPxU7Gkm0mxnu7s9onnQqG6YE3Bf7wcXHswxzpY6IXFJ3vG2fThVUCAtZJycxa | |
4bH3bzKfydQ7iEGonL3Lq9ttewkfokxykNorCPzPPFTOZw+oz12WGQvE43LrrdF9 | |
HSfvkusQv1vrO6/PgN3B0pYEW3p+pKk8OHakYo6gOV7qd89dAFmPZiw+B6KjBSYR | |
aZfqhbcPlgtLyEDhULouisv3D5oi53+aNxPN8k0TayHRwMwi8qFG9kRpnMphNQcA | |
b9ZhCBHqurj26bNg5U257J8UZslXWNvNh2n4ioYSA0e/ZhN2rHd9NCSFg83XqpyQ | |
Gp8hLH94t2S42Oim9HizVcuE0jLEeK6jj2HdzghTreyI/BXkmg3mnxp3zkyPuBQV | |
PWKchjgGAGYS5Fl2WlPAApiiECtoRHuOec4zSnaqW4EWG7WK2NAAe15itAnWhmMO | |
pgWVSbooi4iTsjQc2KRVbrcc0N6ZVTsj9CLg+SlmJuwgUHfbSguPvuUCYHBBXtSu | |
UDkiFCbLsjtzdFVHB3mBOagwE0TlBIqulhMlQg+5U8Sb/M3kHN48+qvWBkofZ6aY | |
MBzdLNvcGJVXZsb/XItW9XcCAwEAAaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAfBgNV | |
HSMEGDAWgBT5YLvU49U09rj1BoAlp3PbRmmonjAdBgNVHQ4EFgQU+WC71OPVNPa4 | |
9QaAJadz20ZpqJ4wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQBW | |
s47LCp1Jjr+kxJG7ZhcFUZh1++VQLHqe8RT6q9OKPv+RKY9ji9i0qVQBDb6Thi/5 | |
Sm3HXvVX+cpVHBK+Rw82xd9qt9t1wkclf7nxY/hoLVUE0fKNsKTPvDxeH3jnpaAg | |
cLAExbf3cqfeIg29MyVGjGSSJuM+LmOW2puMPfgYCdcDzH2GguDKBAdRUNf/ktUM | |
79qGn5nX67evaOI5JpS6aLe/g9Pqemc9YmeuJeVy6OLk7K4S9ksrPJ/psEDzOFSz | |
/bdoyNrGj1E8svuR3Bznm53htw1yj+KkxKl4+esUrMZDBcJlOSgYAsOCsp0FvmXt | |
ll9ldDz7CTUue5wT/RsPXcdtgTpWD8w74a8CLyKsRspGPKAcTNZEtF4uXBVmCeEm | |
Kf7GUmG6sXP/wwyc5WxqlD8UykAWlYTzWamsX0xhk23RO8yilQwipmdnRC652dKK | |
QbNmC1r7fSOl8hqw/96bg5Qu0T/fkreRrwU7ZcegbLHNYhLDkBvjJc40vG93drEQ | |
w/cFGsDWr3RiSBd3kmmQYRzelYB0VI8YHMPzA9C/pEN1hlMYegouCRw2n5H9gooi | |
S9EOUCXdywMMF8mDAAhONU2Ki+3wApRmLER/y5UnlhetCTCstnEXbosX9hwJ1C07 | |
mKVx01QT2WDz9UtmT/rx7iASjbSsV7FFY6GsdqnC+w== | |
-----END CERTIFICATE----- | |
# Issuer: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation | |
# Subject: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation | |
# Label: "SSL.com EV Root Certification Authority ECC" | |
# Serial: 3182246526754555285 | |
# MD5 Fingerprint: 59:53:22:65:83:42:01:54:c0:ce:42:b9:5a:7c:f2:90 | |
# SHA1 Fingerprint: 4c:dd:51:a3:d1:f5:20:32:14:b0:c6:c5:32:23:03:91:c7:46:42:6d | |
# SHA256 Fingerprint: 22:a2:c1:f7:bd:ed:70:4c:c1:e7:01:b5:f4:08:c3:10:88:0f:e9:56:b5:de:2a:4a:44:f9:9c:87:3a:25:a7:c8 | |
-----BEGIN CERTIFICATE----- | |
MIIClDCCAhqgAwIBAgIILCmcWxbtBZUwCgYIKoZIzj0EAwIwfzELMAkGA1UEBhMC | |
VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T | |
U0wgQ29ycG9yYXRpb24xNDAyBgNVBAMMK1NTTC5jb20gRVYgUm9vdCBDZXJ0aWZp | |
Y2F0aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNTIzWhcNNDEwMjEyMTgx | |
NTIzWjB/MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv | |
dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjE0MDIGA1UEAwwrU1NMLmNv | |
bSBFViBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49 | |
AgEGBSuBBAAiA2IABKoSR5CYG/vvw0AHgyBO8TCCogbR8pKGYfL2IWjKAMTH6kMA | |
VIbc/R/fALhBYlzccBYy3h+Z1MzFB8gIH2EWB1E9fVwHU+M1OIzfzZ/ZLg1Kthku | |
WnBaBu2+8KGwytAJKaNjMGEwHQYDVR0OBBYEFFvKXuXe0oGqzagtZFG22XKbl+ZP | |
MA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUW8pe5d7SgarNqC1kUbbZcpuX | |
5k8wDgYDVR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2gAMGUCMQCK5kCJN+vp1RPZ | |
ytRrJPOwPYdGWBrssd9v+1a6cGvHOMzosYxPD/fxZ3YOg9AeUY8CMD32IygmTMZg | |
h5Mmm7I1HrrW9zzRHM76JTymGoEVW/MSD2zuZYrJh6j5B+BimoxcSg== | |
-----END CERTIFICATE----- |
#!/usr/bin/env python | |
# -*- coding: utf-8 -*- | |
""" | |
certifi.py | |
~~~~~~~~~~ | |
This module returns the installation location of cacert.pem. | |
""" | |
import os | |
import warnings | |
class DeprecatedBundleWarning(DeprecationWarning):
    """
    Warning category emitted by ``old_where()``.

    The weak security bundle is being deprecated. Please bother your service
    provider to get them to stop using cross-signed roots.
    """
def where():
    """Return the filesystem path of the bundled ``cacert.pem``.

    The certificate bundle ships in the same directory as this module,
    so the path is derived from ``__file__``.
    """
    return os.path.join(os.path.dirname(__file__), 'cacert.pem')
def old_where():
    """Deprecated alias of :func:`where`.

    Emits a :class:`DeprecatedBundleWarning` and then delegates to
    :func:`where`.
    """
    deprecation_message = (
        "The weak security bundle has been removed. certifi.old_where() is now an alias "
        "of certifi.where(). Please update your code to use certifi.where() instead. "
        "certifi.old_where() will be removed in 2018."
    )
    warnings.warn(deprecation_message, DeprecatedBundleWarning)
    return where()
# Running the module directly prints the bundle's location on disk.
if __name__ == '__main__':
    print(where())
######################## BEGIN LICENSE BLOCK ######################## | |
# This library is free software; you can redistribute it and/or | |
# modify it under the terms of the GNU Lesser General Public | |
# License as published by the Free Software Foundation; either | |
# version 2.1 of the License, or (at your option) any later version. | |
# | |
# This library is distributed in the hope that it will be useful, | |
# but WITHOUT ANY WARRANTY; without even the implied warranty of | |
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU | |
# Lesser General Public License for more details. | |
# | |
# You should have received a copy of the GNU Lesser General Public | |
# License along with this library; if not, write to the Free Software | |
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA | |
# 02110-1301 USA | |
######################### END LICENSE BLOCK ######################### | |
from .compat import PY2, PY3 | |
from .universaldetector import UniversalDetector | |
from .version import __version__, VERSION | |
def detect(byte_str):
    """
    Detect the encoding of the given byte string.

    :param byte_str: The byte sequence to examine.
    :type byte_str: ``bytes`` or ``bytearray``
    :raises TypeError: if ``byte_str`` is neither ``bytes`` nor ``bytearray``.
    :returns: whatever ``UniversalDetector.close()`` reports for the input.
    """
    if not isinstance(byte_str, bytearray):
        # Reject anything that is not a byte sequence, then normalize
        # bytes to bytearray so the detector sees a single input type.
        if not isinstance(byte_str, bytes):
            raise TypeError('Expected object of type bytes or bytearray, got: '
                            '{0}'.format(type(byte_str)))
        byte_str = bytearray(byte_str)
    universal_detector = UniversalDetector()
    universal_detector.feed(byte_str)
    return universal_detector.close()
######################## BEGIN LICENSE BLOCK ######################## | |
# The Original Code is Mozilla Communicator client code. | |
# | |
# The Initial Developer of the Original Code is | |
# Netscape Communications Corporation. | |
# Portions created by the Initial Developer are Copyright (C) 1998 | |
# the Initial Developer. All Rights Reserved. | |
# | |
# Contributor(s): | |
# Mark Pilgrim - port to Python | |
# | |
# This library is free software; you can redistribute it and/or | |
# modify it under the terms of the GNU Lesser General Public | |
# License as published by the Free Software Foundation; either | |
# version 2.1 of the License, or (at your option) any later version. | |
# | |
# This library is distributed in the hope that it will be useful, | |
# but WITHOUT ANY WARRANTY; without even the implied warranty of | |
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU | |
# Lesser General Public License for more details. | |
# | |
# You should have received a copy of the GNU Lesser General Public | |
# License along with this library; if not, write to the Free Software | |
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA | |
# 02110-1301 USA | |
######################### END LICENSE BLOCK ######################### | |
# Big5 frequency table | |
# by Taiwan's Mandarin Promotion Council | |
# <http://www.edu.tw:81/mandr/> | |
# | |
# 128 --> 0.42261 | |
# 256 --> 0.57851 | |
# 512 --> 0.74851 | |
# 1024 --> 0.89384 | |
# 2048 --> 0.97583 | |
# | |
# Ideal Distribution Ratio = 0.74851/(1-0.74851) =2.98 | |
# Random Distribution Ratio = 512/(5401-512)=0.105
# | |
# Typical Distribution Ratio about 25% of Ideal one, still much higher than RDR | |
# Roughly 25% of the ideal distribution ratio (2.98) computed above; still
# well above the random distribution ratio, so it remains discriminative.
BIG5_TYPICAL_DISTRIBUTION_RATIO = 0.75

# Number of entries in the char-to-frequency-order table that follows.
BIG5_TABLE_SIZE = 5376
BIG5_CHAR_TO_FREQ_ORDER = ( | |
1,1801,1506, 255,1431, 198, 9, 82, 6,5008, 177, 202,3681,1256,2821, 110, # 16 | |
3814, 33,3274, 261, 76, 44,2114, 16,2946,2187,1176, 659,3971, 26,3451,2653, # 32 | |
1198,3972,3350,4202, 410,2215, 302, 590, 361,1964, 8, 204, 58,4510,5009,1932, # 48 | |
63,5010,5011, 317,1614, 75, 222, 159,4203,2417,1480,5012,3555,3091, 224,2822, # 64 | |
3682, 3, 10,3973,1471, 29,2787,1135,2866,1940, 873, 130,3275,1123, 312,5013, # 80 | |
4511,2052, 507, 252, 682,5014, 142,1915, 124, 206,2947, 34,3556,3204, 64, 604, # 96 | |
5015,2501,1977,1978, 155,1991, 645, 641,1606,5016,3452, 337, 72, 406,5017, 80, # 112 | |
630, 238,3205,1509, 263, 939,1092,2654, 756,1440,1094,3453, 449, 69,2987, 591, # 128 | |
179,2096, 471, 115,2035,1844, 60, 50,2988, 134, 806,1869, 734,2036,3454, 180, # 144 | |
995,1607, 156, 537,2907, 688,5018, 319,1305, 779,2145, 514,2379, 298,4512, 359, # 160 | |
2502, 90,2716,1338, 663, 11, 906,1099,2553, 20,2441, 182, 532,1716,5019, 732, # 176 | |
1376,4204,1311,1420,3206, 25,2317,1056, 113, 399, 382,1950, 242,3455,2474, 529, # 192 | |
3276, 475,1447,3683,5020, 117, 21, 656, 810,1297,2300,2334,3557,5021, 126,4205, # 208 | |
706, 456, 150, 613,4513, 71,1118,2037,4206, 145,3092, 85, 835, 486,2115,1246, # 224 | |
1426, 428, 727,1285,1015, 800, 106, 623, 303,1281,5022,2128,2359, 347,3815, 221, # 240 | |
3558,3135,5023,1956,1153,4207, 83, 296,1199,3093, 192, 624, 93,5024, 822,1898, # 256 | |
2823,3136, 795,2065, 991,1554,1542,1592, 27, 43,2867, 859, 139,1456, 860,4514, # 272 | |
437, 712,3974, 164,2397,3137, 695, 211,3037,2097, 195,3975,1608,3559,3560,3684, # 288 | |
3976, 234, 811,2989,2098,3977,2233,1441,3561,1615,2380, 668,2077,1638, 305, 228, # 304 | |
1664,4515, 467, 415,5025, 262,2099,1593, 239, 108, 300, 200,1033, 512,1247,2078, # 320 | |
5026,5027,2176,3207,3685,2682, 593, 845,1062,3277, 88,1723,2038,3978,1951, 212, # 336 | |
266, 152, 149, 468,1899,4208,4516, 77, 187,5028,3038, 37, 5,2990,5029,3979, # 352 | |
5030,5031, 39,2524,4517,2908,3208,2079, 55, 148, 74,4518, 545, 483,1474,1029, # 368 | |
1665, 217,1870,1531,3138,1104,2655,4209, 24, 172,3562, 900,3980,3563,3564,4519, # 384 | |
32,1408,2824,1312, 329, 487,2360,2251,2717, 784,2683, 4,3039,3351,1427,1789, # 400 | |
188, 109, 499,5032,3686,1717,1790, 888,1217,3040,4520,5033,3565,5034,3352,1520, # 416 | |
3687,3981, 196,1034, 775,5035,5036, 929,1816, 249, 439, 38,5037,1063,5038, 794, # 432 | |
3982,1435,2301, 46, 178,3278,2066,5039,2381,5040, 214,1709,4521, 804, 35, 707, # 448 | |
324,3688,1601,2554, 140, 459,4210,5041,5042,1365, 839, 272, 978,2262,2580,3456, # 464 | |
2129,1363,3689,1423, 697, 100,3094, 48, 70,1231, 495,3139,2196,5043,1294,5044, # 480 | |
2080, 462, 586,1042,3279, 853, 256, 988, 185,2382,3457,1698, 434,1084,5045,3458, # 496 | |
314,2625,2788,4522,2335,2336, 569,2285, 637,1817,2525, 757,1162,1879,1616,3459, # 512 | |
287,1577,2116, 768,4523,1671,2868,3566,2526,1321,3816, 909,2418,5046,4211, 933, # 528 | |
3817,4212,2053,2361,1222,4524, 765,2419,1322, 786,4525,5047,1920,1462,1677,2909, # 544 | |
1699,5048,4526,1424,2442,3140,3690,2600,3353,1775,1941,3460,3983,4213, 309,1369, # 560 | |
1130,2825, 364,2234,1653,1299,3984,3567,3985,3986,2656, 525,1085,3041, 902,2001, # 576 | |
1475, 964,4527, 421,1845,1415,1057,2286, 940,1364,3141, 376,4528,4529,1381, 7, # 592 | |
2527, 983,2383, 336,1710,2684,1846, 321,3461, 559,1131,3042,2752,1809,1132,1313, # 608 | |
265,1481,1858,5049, 352,1203,2826,3280, 167,1089, 420,2827, 776, 792,1724,3568, # 624 | |
4214,2443,3281,5050,4215,5051, 446, 229, 333,2753, 901,3818,1200,1557,4530,2657, # 640 | |
1921, 395,2754,2685,3819,4216,1836, 125, 916,3209,2626,4531,5052,5053,3820,5054, # 656 | |
5055,5056,4532,3142,3691,1133,2555,1757,3462,1510,2318,1409,3569,5057,2146, 438, # 672 | |
2601,2910,2384,3354,1068, 958,3043, 461, 311,2869,2686,4217,1916,3210,4218,1979, # 688 | |
383, 750,2755,2627,4219, 274, 539, 385,1278,1442,5058,1154,1965, 384, 561, 210, # 704 | |
98,1295,2556,3570,5059,1711,2420,1482,3463,3987,2911,1257, 129,5060,3821, 642, # 720 | |
523,2789,2790,2658,5061, 141,2235,1333, 68, 176, 441, 876, 907,4220, 603,2602, # 736 | |
710, 171,3464, 404, 549, 18,3143,2398,1410,3692,1666,5062,3571,4533,2912,4534, # 752 | |
5063,2991, 368,5064, 146, 366, 99, 871,3693,1543, 748, 807,1586,1185, 22,2263, # 768 | |
379,3822,3211,5065,3212, 505,1942,2628,1992,1382,2319,5066, 380,2362, 218, 702, # 784 | |
1818,1248,3465,3044,3572,3355,3282,5067,2992,3694, 930,3283,3823,5068, 59,5069, # 800 | |
585, 601,4221, 497,3466,1112,1314,4535,1802,5070,1223,1472,2177,5071, 749,1837, # 816 | |
690,1900,3824,1773,3988,1476, 429,1043,1791,2236,2117, 917,4222, 447,1086,1629, # 832 | |
5072, 556,5073,5074,2021,1654, 844,1090, 105, 550, 966,1758,2828,1008,1783, 686, # 848 | |
1095,5075,2287, 793,1602,5076,3573,2603,4536,4223,2948,2302,4537,3825, 980,2503, # 864 | |
544, 353, 527,4538, 908,2687,2913,5077, 381,2629,1943,1348,5078,1341,1252, 560, # 880 | |
3095,5079,3467,2870,5080,2054, 973, 886,2081, 143,4539,5081,5082, 157,3989, 496, # 896 | |
4224, 57, 840, 540,2039,4540,4541,3468,2118,1445, 970,2264,1748,1966,2082,4225, # 912 | |
3144,1234,1776,3284,2829,3695, 773,1206,2130,1066,2040,1326,3990,1738,1725,4226, # 928 | |
279,3145, 51,1544,2604, 423,1578,2131,2067, 173,4542,1880,5083,5084,1583, 264, # 944 | |
610,3696,4543,2444, 280, 154,5085,5086,5087,1739, 338,1282,3096, 693,2871,1411, # 960 | |
1074,3826,2445,5088,4544,5089,5090,1240, 952,2399,5091,2914,1538,2688, 685,1483, # 976 | |
4227,2475,1436, 953,4228,2055,4545, 671,2400, 79,4229,2446,3285, 608, 567,2689, # 992 | |
3469,4230,4231,1691, 393,1261,1792,2401,5092,4546,5093,5094,5095,5096,1383,1672, # 1008 | |
3827,3213,1464, 522,1119, 661,1150, 216, 675,4547,3991,1432,3574, 609,4548,2690, # 1024 | |
2402,5097,5098,5099,4232,3045, 0,5100,2476, 315, 231,2447, 301,3356,4549,2385, # 1040 | |
5101, 233,4233,3697,1819,4550,4551,5102, 96,1777,1315,2083,5103, 257,5104,1810, # 1056 | |
3698,2718,1139,1820,4234,2022,1124,2164,2791,1778,2659,5105,3097, 363,1655,3214, # 1072 | |
5106,2993,5107,5108,5109,3992,1567,3993, 718, 103,3215, 849,1443, 341,3357,2949, # 1088 | |
1484,5110,1712, 127, 67, 339,4235,2403, 679,1412, 821,5111,5112, 834, 738, 351, # 1104 | |
2994,2147, 846, 235,1497,1881, 418,1993,3828,2719, 186,1100,2148,2756,3575,1545, # 1120 | |
1355,2950,2872,1377, 583,3994,4236,2581,2995,5113,1298,3699,1078,2557,3700,2363, # 1136 | |
78,3829,3830, 267,1289,2100,2002,1594,4237, 348, 369,1274,2197,2178,1838,4552, # 1152 | |
1821,2830,3701,2757,2288,2003,4553,2951,2758, 144,3358, 882,4554,3995,2759,3470, # 1168 | |
4555,2915,5114,4238,1726, 320,5115,3996,3046, 788,2996,5116,2831,1774,1327,2873, # 1184 | |
3997,2832,5117,1306,4556,2004,1700,3831,3576,2364,2660, 787,2023, 506, 824,3702, # 1200 | |
534, 323,4557,1044,3359,2024,1901, 946,3471,5118,1779,1500,1678,5119,1882,4558, # 1216 | |
165, 243,4559,3703,2528, 123, 683,4239, 764,4560, 36,3998,1793, 589,2916, 816, # 1232 | |
626,1667,3047,2237,1639,1555,1622,3832,3999,5120,4000,2874,1370,1228,1933, 891, # 1248 | |
2084,2917, 304,4240,5121, 292,2997,2720,3577, 691,2101,4241,1115,4561, 118, 662, # 1264 | |
5122, 611,1156, 854,2386,1316,2875, 2, 386, 515,2918,5123,5124,3286, 868,2238, # 1280 | |
1486, 855,2661, 785,2216,3048,5125,1040,3216,3578,5126,3146, 448,5127,1525,5128, # 1296 | |
2165,4562,5129,3833,5130,4242,2833,3579,3147, 503, 818,4001,3148,1568, 814, 676, # 1312 | |
1444, 306,1749,5131,3834,1416,1030, 197,1428, 805,2834,1501,4563,5132,5133,5134, # 1328 | |
1994,5135,4564,5136,5137,2198, 13,2792,3704,2998,3149,1229,1917,5138,3835,2132, # 1344 | |
5139,4243,4565,2404,3580,5140,2217,1511,1727,1120,5141,5142, 646,3836,2448, 307, # 1360 | |
5143,5144,1595,3217,5145,5146,5147,3705,1113,1356,4002,1465,2529,2530,5148, 519, # 1376 | |
5149, 128,2133, 92,2289,1980,5150,4003,1512, 342,3150,2199,5151,2793,2218,1981, # 1392 | |
3360,4244, 290,1656,1317, 789, 827,2365,5152,3837,4566, 562, 581,4004,5153, 401, # 1408 | |
4567,2252, 94,4568,5154,1399,2794,5155,1463,2025,4569,3218,1944,5156, 828,1105, # 1424 | |
4245,1262,1394,5157,4246, 605,4570,5158,1784,2876,5159,2835, 819,2102, 578,2200, # 1440 | |
2952,5160,1502, 436,3287,4247,3288,2836,4005,2919,3472,3473,5161,2721,2320,5162, # 1456 | |
5163,2337,2068, 23,4571, 193, 826,3838,2103, 699,1630,4248,3098, 390,1794,1064, # 1472 | |
3581,5164,1579,3099,3100,1400,5165,4249,1839,1640,2877,5166,4572,4573, 137,4250, # 1488 | |
598,3101,1967, 780, 104, 974,2953,5167, 278, 899, 253, 402, 572, 504, 493,1339, # 1504 | |
5168,4006,1275,4574,2582,2558,5169,3706,3049,3102,2253, 565,1334,2722, 863, 41, # 1520 | |
5170,5171,4575,5172,1657,2338, 19, 463,2760,4251, 606,5173,2999,3289,1087,2085, # 1536 | |
1323,2662,3000,5174,1631,1623,1750,4252,2691,5175,2878, 791,2723,2663,2339, 232, # 1552 | |
2421,5176,3001,1498,5177,2664,2630, 755,1366,3707,3290,3151,2026,1609, 119,1918, # 1568 | |
3474, 862,1026,4253,5178,4007,3839,4576,4008,4577,2265,1952,2477,5179,1125, 817, # 1584 | |
4254,4255,4009,1513,1766,2041,1487,4256,3050,3291,2837,3840,3152,5180,5181,1507, # 1600 | |
5182,2692, 733, 40,1632,1106,2879, 345,4257, 841,2531, 230,4578,3002,1847,3292, # 1616 | |
3475,5183,1263, 986,3476,5184, 735, 879, 254,1137, 857, 622,1300,1180,1388,1562, # 1632 | |
4010,4011,2954, 967,2761,2665,1349, 592,2134,1692,3361,3003,1995,4258,1679,4012, # 1648 | |
1902,2188,5185, 739,3708,2724,1296,1290,5186,4259,2201,2202,1922,1563,2605,2559, # 1664 | |
1871,2762,3004,5187, 435,5188, 343,1108, 596, 17,1751,4579,2239,3477,3709,5189, # 1680 | |
4580, 294,3582,2955,1693, 477, 979, 281,2042,3583, 643,2043,3710,2631,2795,2266, # 1696 | |
1031,2340,2135,2303,3584,4581, 367,1249,2560,5190,3585,5191,4582,1283,3362,2005, # 1712 | |
240,1762,3363,4583,4584, 836,1069,3153, 474,5192,2149,2532, 268,3586,5193,3219, # 1728 | |
1521,1284,5194,1658,1546,4260,5195,3587,3588,5196,4261,3364,2693,1685,4262, 961, # 1744 | |
1673,2632, 190,2006,2203,3841,4585,4586,5197, 570,2504,3711,1490,5198,4587,2633, # 1760 | |
3293,1957,4588, 584,1514, 396,1045,1945,5199,4589,1968,2449,5200,5201,4590,4013, # 1776 | |
619,5202,3154,3294, 215,2007,2796,2561,3220,4591,3221,4592, 763,4263,3842,4593, # 1792 | |
5203,5204,1958,1767,2956,3365,3712,1174, 452,1477,4594,3366,3155,5205,2838,1253, # 1808 | |
2387,2189,1091,2290,4264, 492,5206, 638,1169,1825,2136,1752,4014, 648, 926,1021, # 1824 | |
1324,4595, 520,4596, 997, 847,1007, 892,4597,3843,2267,1872,3713,2405,1785,4598, # 1840 | |
1953,2957,3103,3222,1728,4265,2044,3714,4599,2008,1701,3156,1551, 30,2268,4266, # 1856 | |
5207,2027,4600,3589,5208, 501,5209,4267, 594,3478,2166,1822,3590,3479,3591,3223, # 1872 | |
829,2839,4268,5210,1680,3157,1225,4269,5211,3295,4601,4270,3158,2341,5212,4602, # 1888 | |
4271,5213,4015,4016,5214,1848,2388,2606,3367,5215,4603, 374,4017, 652,4272,4273, # 1904 | |
375,1140, 798,5216,5217,5218,2366,4604,2269, 546,1659, 138,3051,2450,4605,5219, # 1920 | |
2254, 612,1849, 910, 796,3844,1740,1371, 825,3845,3846,5220,2920,2562,5221, 692, # 1936 | |
444,3052,2634, 801,4606,4274,5222,1491, 244,1053,3053,4275,4276, 340,5223,4018, # 1952 | |
1041,3005, 293,1168, 87,1357,5224,1539, 959,5225,2240, 721, 694,4277,3847, 219, # 1968 | |
1478, 644,1417,3368,2666,1413,1401,1335,1389,4019,5226,5227,3006,2367,3159,1826, # 1984 | |
730,1515, 184,2840, 66,4607,5228,1660,2958, 246,3369, 378,1457, 226,3480, 975, # 2000 | |
4020,2959,1264,3592, 674, 696,5229, 163,5230,1141,2422,2167, 713,3593,3370,4608, # 2016 | |
4021,5231,5232,1186, 15,5233,1079,1070,5234,1522,3224,3594, 276,1050,2725, 758, # 2032 | |
1126, 653,2960,3296,5235,2342, 889,3595,4022,3104,3007, 903,1250,4609,4023,3481, # 2048 | |
3596,1342,1681,1718, 766,3297, 286, 89,2961,3715,5236,1713,5237,2607,3371,3008, # 2064 | |
5238,2962,2219,3225,2880,5239,4610,2505,2533, 181, 387,1075,4024, 731,2190,3372, # 2080 | |
5240,3298, 310, 313,3482,2304, 770,4278, 54,3054, 189,4611,3105,3848,4025,5241, # 2096 | |
1230,1617,1850, 355,3597,4279,4612,3373, 111,4280,3716,1350,3160,3483,3055,4281, # 2112 | |
2150,3299,3598,5242,2797,4026,4027,3009, 722,2009,5243,1071, 247,1207,2343,2478, # 2128 | |
1378,4613,2010, 864,1437,1214,4614, 373,3849,1142,2220, 667,4615, 442,2763,2563, # 2144 | |
3850,4028,1969,4282,3300,1840, 837, 170,1107, 934,1336,1883,5244,5245,2119,4283, # 2160 | |
2841, 743,1569,5246,4616,4284, 582,2389,1418,3484,5247,1803,5248, 357,1395,1729, # 2176 | |
3717,3301,2423,1564,2241,5249,3106,3851,1633,4617,1114,2086,4285,1532,5250, 482, # 2192 | |
2451,4618,5251,5252,1492, 833,1466,5253,2726,3599,1641,2842,5254,1526,1272,3718, # 2208 | |
4286,1686,1795, 416,2564,1903,1954,1804,5255,3852,2798,3853,1159,2321,5256,2881, # 2224 | |
4619,1610,1584,3056,2424,2764, 443,3302,1163,3161,5257,5258,4029,5259,4287,2506, # 2240 | |
3057,4620,4030,3162,2104,1647,3600,2011,1873,4288,5260,4289, 431,3485,5261, 250, # 2256 | |
97, 81,4290,5262,1648,1851,1558, 160, 848,5263, 866, 740,1694,5264,2204,2843, # 2272 | |
3226,4291,4621,3719,1687, 950,2479, 426, 469,3227,3720,3721,4031,5265,5266,1188, # 2288 | |
424,1996, 861,3601,4292,3854,2205,2694, 168,1235,3602,4293,5267,2087,1674,4622, # 2304 | |
3374,3303, 220,2565,1009,5268,3855, 670,3010, 332,1208, 717,5269,5270,3603,2452, # 2320 | |
4032,3375,5271, 513,5272,1209,2882,3376,3163,4623,1080,5273,5274,5275,5276,2534, # 2336 | |
3722,3604, 815,1587,4033,4034,5277,3605,3486,3856,1254,4624,1328,3058,1390,4035, # 2352 | |
1741,4036,3857,4037,5278, 236,3858,2453,3304,5279,5280,3723,3859,1273,3860,4625, # 2368 | |
5281, 308,5282,4626, 245,4627,1852,2480,1307,2583, 430, 715,2137,2454,5283, 270, # 2384 | |
199,2883,4038,5284,3606,2727,1753, 761,1754, 725,1661,1841,4628,3487,3724,5285, # 2400 | |
5286, 587, 14,3305, 227,2608, 326, 480,2270, 943,2765,3607, 291, 650,1884,5287, # 2416 | |
1702,1226, 102,1547, 62,3488, 904,4629,3489,1164,4294,5288,5289,1224,1548,2766, # 2432 | |
391, 498,1493,5290,1386,1419,5291,2056,1177,4630, 813, 880,1081,2368, 566,1145, # 2448 | |
4631,2291,1001,1035,2566,2609,2242, 394,1286,5292,5293,2069,5294, 86,1494,1730, # 2464 | |
4039, 491,1588, 745, 897,2963, 843,3377,4040,2767,2884,3306,1768, 998,2221,2070, # 2480 | |
397,1827,1195,1970,3725,3011,3378, 284,5295,3861,2507,2138,2120,1904,5296,4041, # 2496 | |
2151,4042,4295,1036,3490,1905, 114,2567,4296, 209,1527,5297,5298,2964,2844,2635, # 2512 | |
2390,2728,3164, 812,2568,5299,3307,5300,1559, 737,1885,3726,1210, 885, 28,2695, # 2528 | |
3608,3862,5301,4297,1004,1780,4632,5302, 346,1982,2222,2696,4633,3863,1742, 797, # 2544 | |
1642,4043,1934,1072,1384,2152, 896,4044,3308,3727,3228,2885,3609,5303,2569,1959, # 2560 | |
4634,2455,1786,5304,5305,5306,4045,4298,1005,1308,3728,4299,2729,4635,4636,1528, # 2576 | |
2610, 161,1178,4300,1983, 987,4637,1101,4301, 631,4046,1157,3229,2425,1343,1241, # 2592 | |
1016,2243,2570, 372, 877,2344,2508,1160, 555,1935, 911,4047,5307, 466,1170, 169, # 2608 | |
1051,2921,2697,3729,2481,3012,1182,2012,2571,1251,2636,5308, 992,2345,3491,1540, # 2624 | |
2730,1201,2071,2406,1997,2482,5309,4638, 528,1923,2191,1503,1874,1570,2369,3379, # 2640 | |
3309,5310, 557,1073,5311,1828,3492,2088,2271,3165,3059,3107, 767,3108,2799,4639, # 2656 | |
1006,4302,4640,2346,1267,2179,3730,3230, 778,4048,3231,2731,1597,2667,5312,4641, # 2672 | |
5313,3493,5314,5315,5316,3310,2698,1433,3311, 131, 95,1504,4049, 723,4303,3166, # 2688 | |
1842,3610,2768,2192,4050,2028,2105,3731,5317,3013,4051,1218,5318,3380,3232,4052, # 2704 | |
4304,2584, 248,1634,3864, 912,5319,2845,3732,3060,3865, 654, 53,5320,3014,5321, # 2720 | |
1688,4642, 777,3494,1032,4053,1425,5322, 191, 820,2121,2846, 971,4643, 931,3233, # 2736 | |
135, 664, 783,3866,1998, 772,2922,1936,4054,3867,4644,2923,3234, 282,2732, 640, # 2752 | |
1372,3495,1127, 922, 325,3381,5323,5324, 711,2045,5325,5326,4055,2223,2800,1937, # 2768 | |
4056,3382,2224,2255,3868,2305,5327,4645,3869,1258,3312,4057,3235,2139,2965,4058, # 2784 | |
4059,5328,2225, 258,3236,4646, 101,1227,5329,3313,1755,5330,1391,3314,5331,2924, # 2800 | |
2057, 893,5332,5333,5334,1402,4305,2347,5335,5336,3237,3611,5337,5338, 878,1325, # 2816 | |
1781,2801,4647, 259,1385,2585, 744,1183,2272,4648,5339,4060,2509,5340, 684,1024, # 2832 | |
4306,5341, 472,3612,3496,1165,3315,4061,4062, 322,2153, 881, 455,1695,1152,1340, # 2848 | |
660, 554,2154,4649,1058,4650,4307, 830,1065,3383,4063,4651,1924,5342,1703,1919, # 2864 | |
5343, 932,2273, 122,5344,4652, 947, 677,5345,3870,2637, 297,1906,1925,2274,4653, # 2880 | |
2322,3316,5346,5347,4308,5348,4309, 84,4310, 112, 989,5349, 547,1059,4064, 701, # 2896 | |
3613,1019,5350,4311,5351,3497, 942, 639, 457,2306,2456, 993,2966, 407, 851, 494, # 2912 | |
4654,3384, 927,5352,1237,5353,2426,3385, 573,4312, 680, 921,2925,1279,1875, 285, # 2928 | |
790,1448,1984, 719,2168,5354,5355,4655,4065,4066,1649,5356,1541, 563,5357,1077, # 2944 | |
5358,3386,3061,3498, 511,3015,4067,4068,3733,4069,1268,2572,3387,3238,4656,4657, # 2960 | |
5359, 535,1048,1276,1189,2926,2029,3167,1438,1373,2847,2967,1134,2013,5360,4313, # 2976 | |
1238,2586,3109,1259,5361, 700,5362,2968,3168,3734,4314,5363,4315,1146,1876,1907, # 2992 | |
4658,2611,4070, 781,2427, 132,1589, 203, 147, 273,2802,2407, 898,1787,2155,4071, # 3008 | |
4072,5364,3871,2803,5365,5366,4659,4660,5367,3239,5368,1635,3872, 965,5369,1805, # 3024 | |
2699,1516,3614,1121,1082,1329,3317,4073,1449,3873, 65,1128,2848,2927,2769,1590, # 3040 | |
3874,5370,5371, 12,2668, 45, 976,2587,3169,4661, 517,2535,1013,1037,3240,5372, # 3056 | |
3875,2849,5373,3876,5374,3499,5375,2612, 614,1999,2323,3877,3110,2733,2638,5376, # 3072 | |
2588,4316, 599,1269,5377,1811,3735,5378,2700,3111, 759,1060, 489,1806,3388,3318, # 3088 | |
1358,5379,5380,2391,1387,1215,2639,2256, 490,5381,5382,4317,1759,2392,2348,5383, # 3104 | |
4662,3878,1908,4074,2640,1807,3241,4663,3500,3319,2770,2349, 874,5384,5385,3501, # 3120 | |
3736,1859, 91,2928,3737,3062,3879,4664,5386,3170,4075,2669,5387,3502,1202,1403, # 3136 | |
3880,2969,2536,1517,2510,4665,3503,2511,5388,4666,5389,2701,1886,1495,1731,4076, # 3152 | |
2370,4667,5390,2030,5391,5392,4077,2702,1216, 237,2589,4318,2324,4078,3881,4668, # 3168 | |
4669,2703,3615,3504, 445,4670,5393,5394,5395,5396,2771, 61,4079,3738,1823,4080, # 3184 | |
5397, 687,2046, 935, 925, 405,2670, 703,1096,1860,2734,4671,4081,1877,1367,2704, # 3200 | |
3389, 918,2106,1782,2483, 334,3320,1611,1093,4672, 564,3171,3505,3739,3390, 945, # 3216 | |
2641,2058,4673,5398,1926, 872,4319,5399,3506,2705,3112, 349,4320,3740,4082,4674, # 3232 | |
3882,4321,3741,2156,4083,4675,4676,4322,4677,2408,2047, 782,4084, 400, 251,4323, # 3248 | |
1624,5400,5401, 277,3742, 299,1265, 476,1191,3883,2122,4324,4325,1109, 205,5402, # 3264 | |
2590,1000,2157,3616,1861,5403,5404,5405,4678,5406,4679,2573, 107,2484,2158,4085, # 3280 | |
3507,3172,5407,1533, 541,1301, 158, 753,4326,2886,3617,5408,1696, 370,1088,4327, # 3296 | |
4680,3618, 579, 327, 440, 162,2244, 269,1938,1374,3508, 968,3063, 56,1396,3113, # 3312 | |
2107,3321,3391,5409,1927,2159,4681,3016,5410,3619,5411,5412,3743,4682,2485,5413, # 3328 | |
2804,5414,1650,4683,5415,2613,5416,5417,4086,2671,3392,1149,3393,4087,3884,4088, # 3344 | |
5418,1076, 49,5419, 951,3242,3322,3323, 450,2850, 920,5420,1812,2805,2371,4328, # 3360 | |
1909,1138,2372,3885,3509,5421,3243,4684,1910,1147,1518,2428,4685,3886,5422,4686, # 3376 | |
2393,2614, 260,1796,3244,5423,5424,3887,3324, 708,5425,3620,1704,5426,3621,1351, # 3392 | |
1618,3394,3017,1887, 944,4329,3395,4330,3064,3396,4331,5427,3744, 422, 413,1714, # 3408 | |
3325, 500,2059,2350,4332,2486,5428,1344,1911, 954,5429,1668,5430,5431,4089,2409, # 3424 | |
4333,3622,3888,4334,5432,2307,1318,2512,3114, 133,3115,2887,4687, 629, 31,2851, # 3440 | |
2706,3889,4688, 850, 949,4689,4090,2970,1732,2089,4335,1496,1853,5433,4091, 620, # 3456 | |
3245, 981,1242,3745,3397,1619,3746,1643,3326,2140,2457,1971,1719,3510,2169,5434, # 3472 | |
3246,5435,5436,3398,1829,5437,1277,4690,1565,2048,5438,1636,3623,3116,5439, 869, # 3488 | |
2852, 655,3890,3891,3117,4092,3018,3892,1310,3624,4691,5440,5441,5442,1733, 558, # 3504 | |
4692,3747, 335,1549,3065,1756,4336,3748,1946,3511,1830,1291,1192, 470,2735,2108, # 3520 | |
2806, 913,1054,4093,5443,1027,5444,3066,4094,4693, 982,2672,3399,3173,3512,3247, # 3536 | |
3248,1947,2807,5445, 571,4694,5446,1831,5447,3625,2591,1523,2429,5448,2090, 984, # 3552 | |
4695,3749,1960,5449,3750, 852, 923,2808,3513,3751, 969,1519, 999,2049,2325,1705, # 3568 | |
5450,3118, 615,1662, 151, 597,4095,2410,2326,1049, 275,4696,3752,4337, 568,3753, # 3584 | |
3626,2487,4338,3754,5451,2430,2275, 409,3249,5452,1566,2888,3514,1002, 769,2853, # 3600 | |
194,2091,3174,3755,2226,3327,4339, 628,1505,5453,5454,1763,2180,3019,4096, 521, # 3616 | |
1161,2592,1788,2206,2411,4697,4097,1625,4340,4341, 412, 42,3119, 464,5455,2642, # 3632 | |
4698,3400,1760,1571,2889,3515,2537,1219,2207,3893,2643,2141,2373,4699,4700,3328, # 3648 | |
1651,3401,3627,5456,5457,3628,2488,3516,5458,3756,5459,5460,2276,2092, 460,5461, # 3664 | |
4701,5462,3020, 962, 588,3629, 289,3250,2644,1116, 52,5463,3067,1797,5464,5465, # 3680 | |
5466,1467,5467,1598,1143,3757,4342,1985,1734,1067,4702,1280,3402, 465,4703,1572, # 3696 | |
510,5468,1928,2245,1813,1644,3630,5469,4704,3758,5470,5471,2673,1573,1534,5472, # 3712 | |
5473, 536,1808,1761,3517,3894,3175,2645,5474,5475,5476,4705,3518,2929,1912,2809, # 3728 | |
5477,3329,1122, 377,3251,5478, 360,5479,5480,4343,1529, 551,5481,2060,3759,1769, # 3744 | |
2431,5482,2930,4344,3330,3120,2327,2109,2031,4706,1404, 136,1468,1479, 672,1171, # 3760 | |
3252,2308, 271,3176,5483,2772,5484,2050, 678,2736, 865,1948,4707,5485,2014,4098, # 3776 | |
2971,5486,2737,2227,1397,3068,3760,4708,4709,1735,2931,3403,3631,5487,3895, 509, # 3792 | |
2854,2458,2890,3896,5488,5489,3177,3178,4710,4345,2538,4711,2309,1166,1010, 552, # 3808 | |
681,1888,5490,5491,2972,2973,4099,1287,1596,1862,3179, 358, 453, 736, 175, 478, # 3824 | |
1117, 905,1167,1097,5492,1854,1530,5493,1706,5494,2181,3519,2292,3761,3520,3632, # 3840 | |
4346,2093,4347,5495,3404,1193,2489,4348,1458,2193,2208,1863,1889,1421,3331,2932, # 3856 | |
3069,2182,3521, 595,2123,5496,4100,5497,5498,4349,1707,2646, 223,3762,1359, 751, # 3872 | |
3121, 183,3522,5499,2810,3021, 419,2374, 633, 704,3897,2394, 241,5500,5501,5502, # 3888 | |
838,3022,3763,2277,2773,2459,3898,1939,2051,4101,1309,3122,2246,1181,5503,1136, # 3904 | |
2209,3899,2375,1446,4350,2310,4712,5504,5505,4351,1055,2615, 484,3764,5506,4102, # 3920 | |
625,4352,2278,3405,1499,4353,4103,5507,4104,4354,3253,2279,2280,3523,5508,5509, # 3936 | |
2774, 808,2616,3765,3406,4105,4355,3123,2539, 526,3407,3900,4356, 955,5510,1620, # 3952 | |
4357,2647,2432,5511,1429,3766,1669,1832, 994, 928,5512,3633,1260,5513,5514,5515, # 3968 | |
1949,2293, 741,2933,1626,4358,2738,2460, 867,1184, 362,3408,1392,5516,5517,4106, # 3984 | |
4359,1770,1736,3254,2934,4713,4714,1929,2707,1459,1158,5518,3070,3409,2891,1292, # 4000 | |
1930,2513,2855,3767,1986,1187,2072,2015,2617,4360,5519,2574,2514,2170,3768,2490, # 4016 | |
3332,5520,3769,4715,5521,5522, 666,1003,3023,1022,3634,4361,5523,4716,1814,2257, # 4032 | |
574,3901,1603, 295,1535, 705,3902,4362, 283, 858, 417,5524,5525,3255,4717,4718, # 4048 | |
3071,1220,1890,1046,2281,2461,4107,1393,1599, 689,2575, 388,4363,5526,2491, 802, # 4064 | |
5527,2811,3903,2061,1405,2258,5528,4719,3904,2110,1052,1345,3256,1585,5529, 809, # 4080 | |
5530,5531,5532, 575,2739,3524, 956,1552,1469,1144,2328,5533,2329,1560,2462,3635, # 4096 | |
3257,4108, 616,2210,4364,3180,2183,2294,5534,1833,5535,3525,4720,5536,1319,3770, # 4112 | |
3771,1211,3636,1023,3258,1293,2812,5537,5538,5539,3905, 607,2311,3906, 762,2892, # 4128 | |
1439,4365,1360,4721,1485,3072,5540,4722,1038,4366,1450,2062,2648,4367,1379,4723, # 4144 | |
2593,5541,5542,4368,1352,1414,2330,2935,1172,5543,5544,3907,3908,4724,1798,1451, # 4160 | |
5545,5546,5547,5548,2936,4109,4110,2492,2351, 411,4111,4112,3637,3333,3124,4725, # 4176 | |
1561,2674,1452,4113,1375,5549,5550, 47,2974, 316,5551,1406,1591,2937,3181,5552, # 4192 | |
1025,2142,3125,3182, 354,2740, 884,2228,4369,2412, 508,3772, 726,3638, 996,2433, # 4208 | |
3639, 729,5553, 392,2194,1453,4114,4726,3773,5554,5555,2463,3640,2618,1675,2813, # 4224 | |
919,2352,2975,2353,1270,4727,4115, 73,5556,5557, 647,5558,3259,2856,2259,1550, # 4240 | |
1346,3024,5559,1332, 883,3526,5560,5561,5562,5563,3334,2775,5564,1212, 831,1347, # 4256 | |
4370,4728,2331,3909,1864,3073, 720,3910,4729,4730,3911,5565,4371,5566,5567,4731, # 4272 | |
5568,5569,1799,4732,3774,2619,4733,3641,1645,2376,4734,5570,2938, 669,2211,2675, # 4288 | |
2434,5571,2893,5572,5573,1028,3260,5574,4372,2413,5575,2260,1353,5576,5577,4735, # 4304 | |
3183, 518,5578,4116,5579,4373,1961,5580,2143,4374,5581,5582,3025,2354,2355,3912, # 4320 | |
516,1834,1454,4117,2708,4375,4736,2229,2620,1972,1129,3642,5583,2776,5584,2976, # 4336 | |
1422, 577,1470,3026,1524,3410,5585,5586, 432,4376,3074,3527,5587,2594,1455,2515, # 4352 | |
2230,1973,1175,5588,1020,2741,4118,3528,4737,5589,2742,5590,1743,1361,3075,3529, # 4368 | |
2649,4119,4377,4738,2295, 895, 924,4378,2171, 331,2247,3076, 166,1627,3077,1098, # 4384 | |
5591,1232,2894,2231,3411,4739, 657, 403,1196,2377, 542,3775,3412,1600,4379,3530, # 4400 | |
5592,4740,2777,3261, 576, 530,1362,4741,4742,2540,2676,3776,4120,5593, 842,3913, # 4416 | |
5594,2814,2032,1014,4121, 213,2709,3413, 665, 621,4380,5595,3777,2939,2435,5596, # 4432 | |
2436,3335,3643,3414,4743,4381,2541,4382,4744,3644,1682,4383,3531,1380,5597, 724, # 4448 | |
2282, 600,1670,5598,1337,1233,4745,3126,2248,5599,1621,4746,5600, 651,4384,5601, # 4464 | |
1612,4385,2621,5602,2857,5603,2743,2312,3078,5604, 716,2464,3079, 174,1255,2710, # 4480 | |
4122,3645, 548,1320,1398, 728,4123,1574,5605,1891,1197,3080,4124,5606,3081,3082, # 4496 | |
3778,3646,3779, 747,5607, 635,4386,4747,5608,5609,5610,4387,5611,5612,4748,5613, # 4512 | |
3415,4749,2437, 451,5614,3780,2542,2073,4388,2744,4389,4125,5615,1764,4750,5616, # 4528 | |
4390, 350,4751,2283,2395,2493,5617,4391,4126,2249,1434,4127, 488,4752, 458,4392, # 4544 | |
4128,3781, 771,1330,2396,3914,2576,3184,2160,2414,1553,2677,3185,4393,5618,2494, # 4560 | |
2895,2622,1720,2711,4394,3416,4753,5619,2543,4395,5620,3262,4396,2778,5621,2016, # 4576 | |
2745,5622,1155,1017,3782,3915,5623,3336,2313, 201,1865,4397,1430,5624,4129,5625, # 4592 | |
5626,5627,5628,5629,4398,1604,5630, 414,1866, 371,2595,4754,4755,3532,2017,3127, # 4608 | |
4756,1708, 960,4399, 887, 389,2172,1536,1663,1721,5631,2232,4130,2356,2940,1580, # 4624 | |
5632,5633,1744,4757,2544,4758,4759,5634,4760,5635,2074,5636,4761,3647,3417,2896, # 4640 | |
4400,5637,4401,2650,3418,2815, 673,2712,2465, 709,3533,4131,3648,4402,5638,1148, # 4656 | |
502, 634,5639,5640,1204,4762,3649,1575,4763,2623,3783,5641,3784,3128, 948,3263, # 4672 | |
121,1745,3916,1110,5642,4403,3083,2516,3027,4132,3785,1151,1771,3917,1488,4133, # 4688 | |
1987,5643,2438,3534,5644,5645,2094,5646,4404,3918,1213,1407,2816, 531,2746,2545, # 4704 | |
3264,1011,1537,4764,2779,4405,3129,1061,5647,3786,3787,1867,2897,5648,2018, 120, # 4720 | |
4406,4407,2063,3650,3265,2314,3919,2678,3419,1955,4765,4134,5649,3535,1047,2713, # 4736 | |
1266,5650,1368,4766,2858, 649,3420,3920,2546,2747,1102,2859,2679,5651,5652,2000, # 4752 | |
5653,1111,3651,2977,5654,2495,3921,3652,2817,1855,3421,3788,5655,5656,3422,2415, # 4768 | |
2898,3337,3266,3653,5657,2577,5658,3654,2818,4135,1460, 856,5659,3655,5660,2899, # 4784 | |
2978,5661,2900,3922,5662,4408, 632,2517, 875,3923,1697,3924,2296,5663,5664,4767, # 4800 | |
3028,1239, 580,4768,4409,5665, 914, 936,2075,1190,4136,1039,2124,5666,5667,5668, # 4816 | |
5669,3423,1473,5670,1354,4410,3925,4769,2173,3084,4137, 915,3338,4411,4412,3339, # 4832 | |
1605,1835,5671,2748, 398,3656,4413,3926,4138, 328,1913,2860,4139,3927,1331,4414, # 4848 | |
3029, 937,4415,5672,3657,4140,4141,3424,2161,4770,3425, 524, 742, 538,3085,1012, # 4864 | |
5673,5674,3928,2466,5675, 658,1103, 225,3929,5676,5677,4771,5678,4772,5679,3267, # 4880 | |
1243,5680,4142, 963,2250,4773,5681,2714,3658,3186,5682,5683,2596,2332,5684,4774, # 4896 | |
5685,5686,5687,3536, 957,3426,2547,2033,1931,2941,2467, 870,2019,3659,1746,2780, # 4912 | |
2781,2439,2468,5688,3930,5689,3789,3130,3790,3537,3427,3791,5690,1179,3086,5691, # 4928 | |
3187,2378,4416,3792,2548,3188,3131,2749,4143,5692,3428,1556,2549,2297, 977,2901, # 4944 | |
2034,4144,1205,3429,5693,1765,3430,3189,2125,1271, 714,1689,4775,3538,5694,2333, # 4960 | |
3931, 533,4417,3660,2184, 617,5695,2469,3340,3539,2315,5696,5697,3190,5698,5699, # 4976 | |
3932,1988, 618, 427,2651,3540,3431,5700,5701,1244,1690,5702,2819,4418,4776,5703, # 4992 | |
3541,4777,5704,2284,1576, 473,3661,4419,3432, 972,5705,3662,5706,3087,5707,5708, # 5008 | |
4778,4779,5709,3793,4145,4146,5710, 153,4780, 356,5711,1892,2902,4420,2144, 408, # 5024 | |
803,2357,5712,3933,571 |
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)
(Sorry about that, but we can’t show files that are this big right now.)