-
Star
(136)
You must be signed in to star a gist -
Fork
(24)
You must be signed in to fork a gist
-
-
Save erikbern/756b1d8df2d1487497d29b90e81f8068 to your computer and use it in GitHub Desktop.
import contextlib
import OpenSSL.crypto
import os
import requests
import ssl
import tempfile


@contextlib.contextmanager
def pfx_to_pem(pfx_path, pfx_password):
    """Decrypt a .pfx (PKCS#12) file into a temporary PEM file for use with requests.

    :param pfx_path: path to the .pfx file on disk
    :param pfx_password: password protecting the .pfx file
    :yields: path to a temporary .pem file containing the private key,
        the client certificate, and any CA chain certificates; the file
        is deleted when the context exits.

    NOTE(review): OpenSSL.crypto.load_pkcs12 was deprecated and then
    removed in recent pyOpenSSL releases; on pyOpenSSL >= 23.3 use
    cryptography.hazmat.primitives.serialization.pkcs12.load_key_and_certificates
    instead.
    """
    with tempfile.NamedTemporaryFile(suffix='.pem') as t_pem:
        # Read the .pfx inside a context manager so the handle is closed
        # promptly (the original leaked it until garbage collection).
        with open(pfx_path, 'rb') as f_pfx:
            pfx = f_pfx.read()
        p12 = OpenSSL.crypto.load_pkcs12(pfx, pfx_password)
        # Write directly to the already-open temp file: private key first,
        # then the client certificate, then any CA chain certificates.
        t_pem.write(OpenSSL.crypto.dump_privatekey(OpenSSL.crypto.FILETYPE_PEM, p12.get_privatekey()))
        t_pem.write(OpenSSL.crypto.dump_certificate(OpenSSL.crypto.FILETYPE_PEM, p12.get_certificate()))
        ca = p12.get_ca_certificates()
        if ca is not None:
            for cert in ca:
                t_pem.write(OpenSSL.crypto.dump_certificate(OpenSSL.crypto.FILETYPE_PEM, cert))
        # Flush so the consumer sees the full contents while we hold the
        # file open (the original achieved this by closing a second handle).
        t_pem.flush()
        yield t_pem.name

# HOW TO USE:
# with pfx_to_pem('foo.pfx', 'bar') as cert:
#     requests.post(url, cert=cert, data=payload)
Based on your code, I tried using the scoped method from Azure Key Vault; however, I am getting this error message:
ImportError: cannot import name 'load_pkcs12' from 'OpenSSL.crypto' (unknown location)
ImportError Traceback (most recent call last)
File , line 5
1 import sys
2 import requests
----> 5 from OpenSSL.crypto import FILETYPE_PEM, dump_privatekey, load_pkcs12
6 from cryptography.hazmat.primitives.serialization.pkcs12 import load_key_and_certificates
7 from cryptography.hazmat.primitives.serialization import Encoding, NoEncryption, PrivateFormat
ImportError: cannot import name 'load_pkcs12' from 'OpenSSL.crypto' (unknown location)
So the question is: will it work, or do I need to upload my .pfx file onto DBFS?
@learnprofile Are you using Databricks? Have you uploaded the certificate to Azure Key Vault, and are you reading it like below?
cert = dbutils.secrets.get(scope = "", key = "<key_name>")
Hi Shafique, yes, I am using it in Azure Databricks, and I have uploaded the certificate to my Azure Key Vault certificate store.
and then here is my code
import sys
import requests
# FIX: 'load_pkcs12' was removed from OpenSSL.crypto in recent pyOpenSSL
# releases, which is exactly the ImportError reported here. The name is
# never used in this snippet; the cryptography equivalent
# (load_key_and_certificates, imported below) is what the class uses.
from OpenSSL.crypto import FILETYPE_PEM, dump_privatekey
from cryptography.hazmat.primitives.serialization.pkcs12 import load_key_and_certificates
from cryptography.hazmat.primitives.serialization import Encoding, NoEncryption, PrivateFormat
import msal

# NOTE(review): dbutils.secrets.get returns the secret *value*, not a
# filesystem path — despite its name, 'pfx_path' holds the certificate
# content stored in the key vault. Confirm whether the stored secret is
# the (base64-encoded) .pfx bytes or an actual path before passing it on.
pfx_path = dbutils.secrets.get("SPN-Certificate-Scope", "1649-new")
pfx_password = dbutils.secrets.get("SPN-Certificate-Scope", "1649-pfx-secret")
# Placeholders redacted by the author.
certificate_thumbprint = "xxxxxxxxxxx"
app_id = "xxxxxxxxxxxxxx"
tenant_id = "xxxxxxxxxxx"
def __init__(
        self,
        tenant_id: str,
        spn_app_id: str,
        certificate_thumbprint: str,
        pfx_password: str = None,
        pfx_bytes: bytes = None):
    """Build an AzureSPNWithCertificate holder for SPN certificate auth.

    :param tenant_id: Azure tenant id
    :param spn_app_id: SPN application (client) id
    :param certificate_thumbprint: thumbprint of the SPN's certificate
    :param pfx_password: password protecting the certificate, if any
    :param pfx_bytes: raw .pfx file content; leave None and call
        read_pfx_file later to load it from disk instead.
    """
    # Identity of the service principal being authenticated.
    self.tenant_id = tenant_id
    self.spn_app_id = spn_app_id
    self.certificate_thumbprint = certificate_thumbprint
    # Certificate material; pfx_bytes may be filled in later via
    # read_pfx_file, and private_key_bytes is derived on demand.
    self.pfx_password = pfx_password
    self.pfx_bytes = pfx_bytes
    self.private_key_bytes = None
def read_pfx_file(self, pfx_path: str):
    """Load the certificate file's raw bytes into 'pfx_bytes'.

    :param pfx_path: path to the certificate (.pfx) file on disk
    """
    with open(pfx_path, "rb") as handle:
        self.pfx_bytes = handle.read()
# see https://stackoverflow.com/questions/6345786/python-reading-a-pkcs12-certificate-with-pyopenssl-crypto
def _set_private_key_from_certificate(self) -> None:
    """Extract the private key from 'pfx_bytes' and store it in 'private_key_bytes'.

    Stores the key as unencrypted PKCS#8 PEM bytes. Mutates the instance
    and returns nothing (the original's '-> bytes' annotation was wrong).

    :raises ValueError: if 'pfx_bytes' has not been loaded yet
    """
    if not self.pfx_bytes:
        # Caller must pass pfx_bytes to __init__ or call read_pfx_file first.
        # (ValueError subclasses Exception, so existing handlers still match;
        # the original also used a pointless f-string with no placeholders.)
        raise ValueError("Parameter 'pfx_bytes' is missing.")
    # BUG FIX: the original read the module-level global 'pfx_password'
    # instead of the instance attribute, silently using the wrong password
    # for any instance (and crashing with NameError absent the global).
    # load_key_and_certificates expects bytes, or None for no password.
    password = self.pfx_password.encode() if self.pfx_password else None
    private_key, _, _ = load_key_and_certificates(self.pfx_bytes, password)
    self.private_key_bytes = private_key.private_bytes(
        encoding=Encoding.PEM,
        format=PrivateFormat.PKCS8,
        encryption_algorithm=NoEncryption(),
    )
Am I missing anything?
I need more details: what system are you using — local or cloud? Are you also using Databricks?