Attention: this is the key used to sign certificate requests; anyone holding it can sign certificates on your behalf, so keep it in a safe place!
openssl genrsa -des3 -out rootCA.key 4096
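For reference, a roughly equivalent key generation in Python using the cryptography package (the passphrase below is a placeholder, and BestAvailableEncryption stands in for openssl's -des3 cipher):

from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import rsa

# 4096-bit RSA key, comparable to `openssl genrsa -des3 4096`
key = rsa.generate_private_key(public_exponent=65537, key_size=4096)
with open("rootCA.key", "wb") as f:
    f.write(key.private_bytes(
        encoding=serialization.Encoding.PEM,
        format=serialization.PrivateFormat.TraditionalOpenSSL,
        # placeholder passphrase; protect the real key with a strong one
        encryption_algorithm=serialization.BestAvailableEncryption(b"change-me"),
    ))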
<?php
if(!isset($_GET['a'])) {
    die('<form action="" method="get"><input type="text" name="a"/><input type="submit"/></form>');
}
$url = "https://1fichier.com/?" . urlencode($_GET['a']) . "?auth=1&inline";
stream_context_set_default(array(
    'http' => array(
        'method' => 'GET',
        'header' => 'Authorization: Basic ' . '[BASE64 encoded login credentials. Format: "user:pass" (without quotes)]'
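The same Authorization header can be built in Python with requests; the credentials and file id below are placeholders:

import base64
import requests

user, password = "user", "pass"                    # placeholder credentials
token = base64.b64encode(f"{user}:{password}".encode()).decode()
resp = requests.get(
    "https://1fichier.com/?FILEID?auth=1&inline",  # FILEID is a placeholder
    headers={"Authorization": "Basic " + token},
)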
#!/usr/bin/env ruby
# usage: stripiT.rb
# the original files will be OVERWRITTEN.
# Script to remove extraneous/unwanted atoms from iTunes purchased files by way of AtomicParsley.
# Output should be comparable to the atoms left over after reencoding the file in iTunes itself.
# I only care about songs, so I have no clue how well this applies to video files.
# Some information taken from: https://code.google.com/p/mp4v2/wiki/iTunesMetadata
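The Ruby code itself is not shown above; as a rough sketch of the same idea in Python, shelling out to AtomicParsley (the atom list here is an example assumption, not the script's actual list):

import subprocess
import sys

# iTunes Store account atoms, as an example; the original script's list is not shown
ATOMS = ["moov.udta.meta.ilst.apID", "moov.udta.meta.ilst.ownr"]

for path in sys.argv[1:]:
    for atom in ATOMS:
        # --manualAtomRemove deletes one named atom, --overWrite rewrites the file in place
        subprocess.run(["AtomicParsley", path, "--manualAtomRemove", atom, "--overWrite"],
                       check=True)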
<?php
/*
 * YOURLS: sample file showing how to use the API
 * This shows how to tap into your YOURLS install API from *ANOTHER* server,
 * not from a file hosted on the same server. It's just a bit dumb to make a
 * remote HTTP request to the server the request originates from.
 *
 * Rename to .php
 *
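The sample file is PHP, but the remote call is easy to sketch in any language; here is the equivalent in Python, with a placeholder install URL and credentials (the endpoint and parameter names follow the standard YOURLS API):

import requests

resp = requests.post(
    "https://sho.rt/yourls-api.php",          # placeholder: your YOURLS install
    data={
        "username": "your-username",          # or pass a 'signature' token instead
        "password": "your-password",
        "action": "shorturl",
        "url": "https://example.com/some/long/page",
        "format": "json",
    },
)
print(resp.json().get("shorturl"))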
# after AppCleaner does its magic, do this
sudo rm -rf "/Library/Application Support/Paragon Software/"
sudo rm /Library/LaunchDaemons/com.paragon-software.installer.plist
sudo rm /Library/LaunchDaemons/com.paragon-software.ntfs.loader.plist
sudo rm /Library/LaunchDaemons/com.paragon-software.ntfsd.plist
sudo rm /Library/LaunchAgents/com.paragon-software.ntfs.notification-agent.plist
sudo rm -rf /Library/Filesystems/ufsd_NTFS.fs/
sudo rm -rf /Library/PrivilegedHelperTools/com.paragon-software.installer
sudo rm -rf /Library/Extensions/ufsd_NTFS.kext/
# Here's my bash script using "nice" for CPU prioritization on a Synology DiskStation NAS:
#!/bin/bash
echo "Backing up server folder to Google Drive."
#echo
# run rclone using nice, with config options
# >> appends to the log file
# 2>&1 redirects stderr to stdout, so errors land in the log file too instead of the terminal
# & runs the command in the background
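For comparison, a rough Python equivalent of that invocation; the niceness value, paths and remote name are placeholders, not the script's actual settings:

import subprocess

log = open("/volume1/logs/rclone_backup.log", "a")  # >> append to the log file
subprocess.Popen(
    ["nice", "-n", "19", "rclone", "sync", "/volume1/server", "gdrive:server-backup"],
    stdout=log,
    stderr=subprocess.STDOUT,                        # 2>&1: send errors to the log too
)                                                    # Popen returns immediately, like &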
import requests
import base64
from tqdm import tqdm

master_json_url = 'https://178skyfiregce-a.akamaihd.net/exp=1474107106~acl=%2F142089577%2F%2A~hmac=0d9becc441fc5385462d53bf59cf019c0184690862f49b414e9a2f1c5bafbe0d/142089577/video/426274424,426274425,426274423,426274422/master.json?base64_init=1'
base_url = master_json_url[:master_json_url.rfind('/', 0, -26) + 1]

resp = requests.get(master_json_url)
content = resp.json()
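A hedged sketch of how such a script usually continues: pick the highest-resolution stream, decode the base64 init segment, then append every segment. The key names used here ('video', 'height', 'base_url', 'init_segment', 'segments', 'url') are assumptions about the master.json layout rather than something shown above:

# pick the stream with the largest height and rebuild its absolute base URL
video = max(content["video"], key=lambda s: s.get("height", 0))
video_base = base_url + content.get("base_url", "") + video["base_url"]

with open("video.mp4", "wb") as out:
    out.write(base64.b64decode(video["init_segment"]))
    for segment in tqdm(video["segments"]):
        out.write(requests.get(video_base + segment["url"]).content)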
function convertMS( milliseconds ) {
    var day, hour, minute, seconds;
    seconds = Math.floor(milliseconds / 1000);
    minute = Math.floor(seconds / 60);
    seconds = seconds % 60;
    hour = Math.floor(minute / 60);
    minute = minute % 60;
    day = Math.floor(hour / 24);
    hour = hour % 24;
    // return the remainder in each unit as an object
    return {
        day: day,
        hour: hour,
        minute: minute,
        seconds: seconds
    };
}
import re
import requests
import json
import urlparse
from sys import stderr

def connect(url):
    """ Extract source from passed URL. """
    headers = {"User-Agent": "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:30.0)"\
IMPORTANT: Read this before implementing one of the configuration files below (for either Varnish 3.x or 4.x+).
USE: Replace the contents of the main Varnish configuration file, /etc/varnish/default.vcl (root server access required, obviously), with the configuration matching your Varnish version from the 2 examples provided below.
IMPORTANT: The following setup assumes a 180-second (3-minute) cache time for cacheable content that does not send correct Cache-Control HTTP headers. You can safely increase this to 300 seconds or more for less busy sites, or drop it to 60 or even 30 seconds for high-traffic sites. It obviously depends on your use case.