I hereby claim:
- I am controversial on github.
- I am controversial (https://keybase.io/controversial) on keybase.
- I have a public key ASCN4emQHep5ufb0y0GPo4q3mRzJXxsiUdiNtAr0J2GKnQo
To claim this, I am signing this object:
const darkQuery = '(prefers-color-scheme: dark)';

// Make mymind’s color scheme match the system’s
function updateDarkMode() {
  const darkModeToggle = document.querySelector('.color-scheme-toggle');
  const isDark = !darkModeToggle.classList.contains('light');
  if (window.matchMedia(darkQuery).matches !== isDark) {
    darkModeToggle.click();
  }
}

// Sync on load and whenever the system setting changes
updateDarkMode();
window.matchMedia(darkQuery).addEventListener('change', updateDarkMode);
import numpy as np

# Press 'Setup Parameters' in the OP to call this function to re-create the
# parameters. The CHOP will not work until you do.
def onSetupParameters(scriptOp):
    page = scriptOp.appendCustomPage('Custom')
    p = page.appendInt('Samplerate', label='Sample Rate')
    p.val = 44100
    p2 = page.appendFloat('Time', label='Time')
    p2.expr = 'absTime.seconds'
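For context, a Script CHOP pairs onSetupParameters with an onCook callback that actually fills the channel data. A minimal sketch, assuming the standard Script CHOP API (clear(), appendChan(), numSamples, rate) and reading the two custom parameters defined above; the channel name and the 440 Hz sine waveform are arbitrary choices, not from the gist:

def onCook(scriptOp):
    scriptOp.clear()
    rate = scriptOp.par.Samplerate.eval()
    t = scriptOp.par.Time.eval()
    n = max(rate // 60, 1)  # roughly one frame's worth of samples at 60 fps
    scriptOp.numSamples = n
    scriptOp.rate = rate
    chan = scriptOp.appendChan('wave')  # arbitrary channel name
    chan.vals = np.sin(2 * np.pi * 440 * (t + np.arange(n) / rate)).tolist()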
// Sketch plugin to generate an icon library from a directory of SVGs
const sketchDom = require('sketch/dom');

// Prompt the user to select a folder
const dialog = NSOpenPanel.openPanel();
dialog.setCanChooseFiles(false);
dialog.setCanChooseDirectories(true);
dialog.setAllowsMultipleSelection(false);
dialog.setCanCreateDirectories(false);
const dialogResponse = dialog.runModal();
module.exports = {
  config: {
    // default font size in pixels for all tabs
    fontSize: 12,
    // font family with optional fallbacks
    fontFamily: '"Fira Code", Menlo, "DejaVu Sans Mono", "Lucida Console", monospace',
    // terminal cursor background color and opacity (hex, rgb, hsl, hsv, hwb or cmyk)
    cursorColor: '#cdd2e9',
  },
};
# Tool to sort organization repos by star count, as a workaround for https://github.com/isaacs/github/issues/201.
# It would be *awesome* if GitHub could implement this...
import requests, json

USERNAME = "YOUR_USERNAME_HERE"
PASSWORD = "YOUR_PASSWORD_HERE"

def parse_links(linkstr):
    # At the end of paging, 'last' is no longer returned as second link
    links = {}
    for part in linkstr.split(", "):
        url, rel = part.split("; ")
        links[rel.split('"')[1]] = url.strip("<>")
    return links
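The preview cuts off here. Continuing from the definitions above, a minimal sketch of the rest under stated assumptions: the GitHub v3 /orgs/:org/repos endpoint, basic auth as above, and sorting on each repo's stargazers_count field. ORG and fetch_all_repos are illustrative names, not from the gist:

ORG = "YOUR_ORG_HERE"

def fetch_all_repos():
    # Walk every page of the org's repo list by following the Link header
    url = "https://api.github.com/orgs/{}/repos".format(ORG)
    repos = []
    while url:
        resp = requests.get(url, auth=(USERNAME, PASSWORD))
        resp.raise_for_status()
        repos.extend(resp.json())
        link = resp.headers.get("Link")
        url = parse_links(link).get("next") if link else None
    return repos

for repo in sorted(fetch_all_repos(), key=lambda r: r["stargazers_count"], reverse=True):
    print("{:6d}  {}".format(repo["stargazers_count"], repo["full_name"]))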
| """A set of operations for working with the file lists returned from zipfile.ZipFile.namelist()""" | |
| import os | |
| def getParentDirs(path): | |
| """ "test/again/hello" -> ["test", "test/again"] """ | |
| dirs = path.split("/")[:-1] | |
| if dirs: | |
| return ["/".join(dirs[:i+1]) for i,d in enumerate(dirs) if dirs[:i+1]] | |
| else: |
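An illustrative usage example (not part of the gist): deriving the full set of directories implied by an archive's file list, since namelist() often omits bare directory entries. The archive name is hypothetical.

import zipfile

with zipfile.ZipFile('archive.zip') as zf:  # hypothetical archive
    all_dirs = set()
    for name in zf.namelist():
        all_dirs.update(getParentDirs(name))
    print(sorted(all_dirs))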
classes=['NSAKDeserializer', 'NSAKDeserializerStream', 'NSAKSerializer', 'NSAKSerializerStream', 'NSAbstractLayoutGuide', 'NSAddressCheckingResult', 'NSAffineTransform', 'NSArchiver', 'NSArrayChange', 'NSArrayChanges', 'NSAssertionHandler', 'NSAttributedString', 'NSAutoreleasePool', 'NSAutoresizingMaskLayoutConstraint', 'NSBigMutableString', 'NSBlockExpression', 'NSBlockOperation', 'NSBlockPredicate', 'NSBoundKeyPath', 'NSBundle', 'NSBundleResourceRequest', 'NSByteCountFormatter', 'NSCachedURLResponse', 'NSCalendarDate', 'NSCharacterSet', 'NSCheapMutableString', 'NSCoder', 'NSComparisonPredicate', 'NSComparisonPredicateOperator', 'NSComplexOrthography', 'NSComplexRegularExpressionCheckingResult', 'NSCompoundPredicate', 'NSCompoundPredicateOperator', 'NSConcreteAttributedString', 'NSConcreteMutableAttributedString', 'NSConcreteValue', 'NSCondition', 'NSConditionLock', 'NSConstantString', 'NSConstantValueExpression', 'NSContentSizeLayoutConstraint', 'NSCorrectionCheckingResult', 'NSCountedSet', 'NSDashCheckingR
# Scrapes Wikipedia. Start with the name of a page. Then, it will click the first
# 5 links on this page. For each of these links, it will click the first 5 links
# on *that* page. It will not stray more than 5 pages away from the start page.
# These attributes can be adjusted by changing BREADTH and MAXDEPTH. This script
# will output a PNG file of your Wikipedia map.
# REQUIREMENTS: `wikipedia` and `pydot`
import wikipedia as wp
import pydot
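The preview ends at the imports. A minimal sketch of how the described crawl might look, under assumptions not in the gist: wp.page() with auto_suggest=False, the wikipedia library's WikipediaPage.links list standing in for "the first 5 links on the page", and a hypothetical start page.

BREADTH = 5
MAXDEPTH = 5

graph = pydot.Dot(graph_type='digraph')

def crawl(title, depth=0):
    # Stop when we've strayed MAXDEPTH pages from the start page
    if depth >= MAXDEPTH:
        return
    try:
        page = wp.page(title, auto_suggest=False)
    except (wp.DisambiguationError, wp.PageError):
        return
    # NOTE: the wikipedia library alphabetizes .links, so this takes the
    # first 5 alphabetically, not the first 5 in reading order; with
    # BREADTH = MAXDEPTH = 5 this can make thousands of requests
    for link in page.links[:BREADTH]:
        graph.add_edge(pydot.Edge(title, link))
        crawl(link, depth + 1)

crawl('Python (programming language)')  # hypothetical start page
graph.write_png('wikipedia_map.png')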
import os
import urllib.request
from collections import Counter

# FETCH DATA
files = os.listdir(os.path.expanduser("~/Library/Caches/Metadata/Safari/History"))
webpages = [f for f in files if f.startswith("http")]  # https also starts with http
print("Found {} pieces of data".format(len(webpages)))