_config.yml
Jekyll 的全局配置文件。
比如网站的名字,网站的域名,网站的链接格式等等。
#!/usr/bin/env ruby | |
require 'nokogiri' | |
ibooks_home = '/Users/xxx/Library/Containers/com.apple.BKAgentService/Data/Documents/iBooks/Books/' | |
Dir.foreach(ibooks_home) do |dir| | |
if File.extname(dir) == ".epub" | |
dir = ibooks_home + dir | |
File.open(dir + "/iTunesMetadata.plist") do |f| |
<!DOCTYPE html> | |
<html> | |
<head> | |
<meta http-equiv="Content-Type" content="text/html;charset=utf-8"> | |
<title>Treemap - Neat Zoom Effect</title> | |
<script type="text/javascript" src="./d3/d3.js"></script> | |
<style type="text/css"> | |
body { | |
overflow: hidden; | |
margin: 0; |
license: gpl-3.0 | |
height: 1060 | |
redirect: https://observablehq.com/@d3/nested-treemap |
# Dump DynamoDB data to a JSON file in S3:
#   s3://[bucket]/offine-cms-[table-name].json
# NOTE(review): "offine" looks like a typo for "offline" — confirm against the
# actual S3 object key before renaming anything.
# Refs: https://boto3.amazonaws.com/v1/documentation/api/latest/guide/quickstart.html
import decimal
import json

import boto3
# Fix: `Key` was imported twice (`import Key, Key, Attr`); import each name once.
from boto3.dynamodb.conditions import Attr, Key
#
# systemd unit file for Debian
#
# Put this in /lib/systemd/system
# Run:
#   - systemctl enable sidekiq
#   - systemctl {start,stop,restart} sidekiq
#
# This file corresponds to a single Sidekiq process. Add multiple copies
# to run multiple processes (sidekiq-1, sidekiq-2, etc.).
// Install String.toRegExp unless the host environment already provides one.
// The returned RegExp matches `pattern` as a literal string: every regex
// metacharacter in `pattern` is backslash-escaped before construction.
if (!String.toRegExp)
    String.toRegExp = function String_toRegExp(pattern, flags) {
        var escaped = pattern.replace(/[\[\]\\{}()+*?.$^|]/g, function (match) { return '\\' + match; });
        return new RegExp(escaped, flags);
    };
package pubsub | |
import ( | |
"github.com/garyburd/redigo/redis" | |
log "github.com/sirupsen/logrus" | |
) | |
// Service service | |
type Service struct { | |
pool *redis.Pool |
ticker := time.NewTicker(5 * time.Second) | |
quit := make(chan struct{}) | |
go func() { | |
for { | |
select { | |
case <- ticker.C: | |
// do stuff | |
case <- quit: | |
ticker.Stop() | |
return |
""" | |
Copy objects from one bucket/prefix to another bucket with the same prefix. | |
Used to allow CloudFront logs to get parsed for uploading to ES *AND* analyzed | |
by WAF. | |
CloudFront Distribution logs -> s3://es-bucket/incoming -> Lambda (this) -> s3://waf-bucket/ | |
Set environment variable `destination_bucket` |