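"""
BinaryCookieReader: print the cookies stored in an Apple Cookies.binarycookies file.

File layout, as read by this script (unlabelled bytes are skipped as unknown):
  * 4-byte magic b'cook', then a big-endian page count and one big-endian size per page
  * each page: a 00 00 01 00 header, a little-endian cookie count, one little-endian
    offset per cookie, then 4 bytes expected to be 0 or 1 (little-endian)
  * each cookie record: little-endian size, 4 unknown bytes, flags, 4 unknown bytes,
    offsets of domain/name/path/value (relative to the start of the record), 8 bytes
    not used here, then expiry and creation dates as little-endian doubles counted
    from the Mac epoch (1 Jan 2001); the strings at the offsets are NUL-terminated
"""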
import sys
from struct import unpack
from io import BytesIO
from time import strftime, gmtime

# ~/Library/Containers/com.microsoft.AzureVpnMac/Data/Library/Cookies/Cookies.binarycookies

if len(sys.argv) != 2:
    print("Usage: python3 BinaryCookieReader.py Cookies.binarycookies")
    sys.exit(1)

file_path = sys.argv[1]
try:
    binary_file = open(file_path, 'rb')
except IOError:
    print('File not found: ' + file_path)
    sys.exit(1)

if binary_file.read(4) != b'cook':
    print("Not a Cookies.binarycookies file (bad magic)")
    sys.exit(1)

def read_int(fmt, f):
    # Read a 4-byte integer; fmt is '>i' (big-endian) or '<i' (little-endian).
    return unpack(fmt, f.read(4))[0]

# TODO: decode flags as a bitmask (1 = Secure, 4 = HttpOnly) rather than a fixed lookup
COOKIE_LOOKUP = {
    0: '',
    1: 'Secure',
    4: 'HttpOnly',
    5: 'Secure; HttpOnly',
}

# Number of pages in the binary file: 4 bytes, big-endian
num_pages = read_int('>i', binary_file)

page_sizes = []
for np in range(num_pages):
    page_sizes.append(read_int('>i', binary_file))

pages = []
for ps in page_sizes:
    pages.append(BytesIO(binary_file.read(ps)))

for page in pages:
    if page.read(4) != b'\x00\x00\x01\x00':
        raise ValueError("Unexpected page header")
    num_cookies = read_int('<i', page)
    cookie_offsets = []
    for nc in range(num_cookies):
        cookie_offsets.append(read_int('<i', page))
    if page.read(4) not in [b'\x01\x00\x00\x00', b'\x00\x00\x00\x00']:
        raise ValueError("Unexpected bytes after cookie offsets")
    for offset in cookie_offsets:
        page.seek(offset)
        cookiesize = read_int('<i', page)
        cookie = BytesIO(page.read(cookiesize))
        # unknown (4 bytes)
        cookie.read(4)
        flags = read_int('<i', cookie)
        cookie_flags = COOKIE_LOOKUP.get(flags, 'Unknown')
        # unknown (4 bytes)
        cookie.read(4)
        urloffset = read_int('<i', cookie)    # cookie domain offset from cookie starting point
        nameoffset = read_int('<i', cookie)   # cookie name offset from cookie starting point
        pathoffset = read_int('<i', cookie)   # cookie path offset from cookie starting point
        valueoffset = read_int('<i', cookie)  # cookie value offset from cookie starting point
        endofcookie = cookie.read(8)
        # Dates are stored as seconds since the Mac epoch (1/Jan/2001); 978307200 is
        # that date's Unix timestamp, so adding it converts to a Unix epoch value.
        expiry_date_epoch = unpack('<d', cookie.read(8))[0] + 978307200
        expiry_date = strftime("%a, %d %b %Y ", gmtime(expiry_date_epoch))[:-1]  # [:-1] strips the trailing space
        create_date_epoch = unpack('<d', cookie.read(8))[0] + 978307200  # cookie creation time
        create_date = strftime("%a, %d %b %Y ", gmtime(create_date_epoch))[:-1]
        cookie.seek(urloffset - 4)  # fetch domain value from url offset
        url = b''
        u = cookie.read(1)
        while unpack('<b', u)[0] != 0:
            url = url + u
            u = cookie.read(1)

        cookie.seek(nameoffset - 4)  # fetch cookie name from name offset
        name = b''
        n = cookie.read(1)
        while unpack('<b', n)[0] != 0:
            name = name + n
            n = cookie.read(1)

        cookie.seek(pathoffset - 4)  # fetch cookie path from path offset
        path = b''
        pa = cookie.read(1)
        while unpack('<b', pa)[0] != 0:
            path = path + pa
            pa = cookie.read(1)

        cookie.seek(valueoffset - 4)  # fetch cookie value from value offset
        value = b''
        va = cookie.read(1)
        while unpack('<b', va)[0] != 0:
            value = value + va
            va = cookie.read(1)

        print(f'Cookie: {name.decode()}={value.decode()}; domain={url.decode()}; path={path.decode()}; expires={expiry_date}; {cookie_flags}')

binary_file.close()
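
# Example run (output values are illustrative; the line format matches the print() above):
#   $ python3 BinaryCookieReader.py Cookies.binarycookies
#   Cookie: sessionid=abc123; domain=.example.com; path=/; expires=Mon, 01 Jan 2024; Secure; HttpOnly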