- user_id: user/:uid
- feed_id: feed/:feed_uri
- category_id: :user_id/category/:category (special categories: global.all, global.uncategorized)
- tag_id: :user_id/tag/:tag (special tag: global.saved)

API requests follow the pattern http://cloud.feedly.com/:version/:api
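For example, to read a stream you URL-encode one of these IDs into the v3 streams endpoint. A minimal sketch in Node (18+, for the built-in fetch); the `<uid>` and `<access-token>` values are placeholders, and the streams/contents endpoint is an assumption based on the v3 API:

async function fetchCategoryStream() {
  // Stream IDs contain slashes and colons, so they must be URL-encoded.
  const streamId = encodeURIComponent('user/<uid>/category/global.all')
  const res = await fetch(
    `https://cloud.feedly.com/v3/streams/contents?streamId=${streamId}`,
    { headers: { Authorization: 'OAuth <access-token>' } }
  )
  return res.json()
}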
'use latest'

const express = require('express')
const graphqlHTTP = require('express-graphql')
const { buildSchema } = require('graphql')
const webtask = require('webtask-tools')
const bodyParser = require('body-parser')

const app = express()

const schema = buildSchema(`
  type Query {
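    # The original snippet breaks off here; a placeholder field keeps the
    # schema valid (# is the comment marker inside GraphQL SDL).
    hello: String
  }
`)

// Resolver for the placeholder field above.
const root = { hello: () => 'Hello from a webtask!' }

// Assumed finish: mount the GraphQL endpoint and export the app through
// webtask-tools, the usual adapter for running Express on webtask.io.
app.use('/graphql', graphqlHTTP({ schema, rootValue: root, graphiql: true }))
module.exports = webtask.fromExpress(app)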
To get the `action` prop:

1. Go to your dashboard on mailchimp.com and navigate to Lists > Signup Forms > Embedded Forms.
2. Copy the `<form>` action from the generated HTML code.
3. Pass that into the component via the prop, like so (the action URL is the value you copied in step 2):

   <mailchimp-subscribe action="…" />
const functions = require('firebase-functions')
const admin = require('firebase-admin')
admin.initializeApp(functions.config().firebase)
const stripe = require('stripe')(functions.config().stripe.testkey)

exports.stripeCharge = functions.database
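  // The original snippet is truncated at the line above. A hypothetical
  // continuation using the database-trigger API of this SDK generation;
  // the /payments path and field names are assumptions, not the author's schema.
  .ref('/payments/{paymentId}')
  .onWrite(event => {
    const payment = event.data.val()
    if (!payment || payment.charge) return null   // missing, or already charged

    return stripe.charges.create({
      amount: payment.amount,   // amount in the smallest currency unit
      currency: 'usd',
      source: payment.token,    // card token collected on the client
    }).then(charge => event.data.ref.child('charge').set(charge.id))
  })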
A simple app built with Vue.js and Firebase, with authentication. See the DEMO.
variable "bucket_site" {} | |
variable "region" {} | |
variable "route53_domain_name" {} | |
variable "route53_domain_zoneid" {} | |
variable "route53_domain_alias_name" {} | |
variable "route53_domain_alias_zoneid" {} | |
provider "aws" { | |
region = "${var.region}" | |
} |
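A sketch of the resources these variables presumably feed: an S3 website bucket plus a Route 53 alias record pointing at it. The resource names and the index document are assumptions:

resource "aws_s3_bucket" "site" {
  bucket = "${var.bucket_site}"

  website {
    index_document = "index.html"   # assumed entry point
  }
}

resource "aws_route53_record" "site" {
  zone_id = "${var.route53_domain_zoneid}"
  name    = "${var.route53_domain_name}"
  type    = "A"

  alias {
    name                   = "${var.route53_domain_alias_name}"
    zone_id                = "${var.route53_domain_alias_zoneid}"
    evaluate_target_health = false
  }
}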
variable "bucket_site" {} | |
variable "region" {} | |
variable "route53_domain_name" {} | |
variable "route53_domain_zoneid" {} | |
variable "route53_domain_alias_name" {} | |
variable "route53_domain_alias_zoneid" {} | |
provider "aws" { | |
region = "${var.region}" | |
} |
On July 22, GitHub announced the 3rd Annual GitHub Data Challenge, presenting the multiple sources of data it makes available.
This sounded like a good opportunity to import that data into Neo4j and have fun analyzing data that fits naturally into a graph.
As I work mainly offline or behind military proxies that do not let me use the REST API, I went with the GitHub Archive, available here; you can download JSON files representing GitHub events on an hourly or daily basis.
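For instance, one hour of events can be pulled down and unpacked with a few lines of Node; a sketch assuming the archive's data.githubarchive.org host and YYYY-MM-DD-H.json.gz file naming (core modules only):

const https = require('https')
const fs = require('fs')
const zlib = require('zlib')

const hour = '2015-01-01-15'   // placeholder hour: 3pm UTC, Jan 1 2015
https.get(`https://data.githubarchive.org/${hour}.json.gz`, res => {
  res
    .pipe(zlib.createGunzip())                    // one JSON event per line
    .pipe(fs.createWriteStream(`${hour}.json`))
})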
The count of contributions (the sum of pull requests, opened issues, and commits) to public repositories at GitHub.com from Fri, 19 Jun 2015 15:17:38 GMT until Sun, 19 Jun 2016 15:17:38 GMT.
Only the first 1,000 GitHub users by follower count are taken, because of limitations of GitHub search. The selection in pseudocode (the original snippet breaks off after the filter; the ranking step is implied by the description above):

githubUsers
  .filter(user => user.followers > 635)    // follower cut-off for the top 1,000
  .sortBy(user => user.contributions)      // descending
select repository_name, count(repository_name) as pushes, repository_description, repository_url
from [githubarchive:github.timeline]
where type = "PushEvent"
  and repository_language = "Emacs Lisp"
  and parse_utc_usec(created_at) >= parse_utc_usec('2014-01-01 00:00:00')
group by repository_name, repository_description, repository_url
order by pushes desc
limit 100
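The [githubarchive:github.timeline] bracket syntax is BigQuery legacy SQL, so a client has to opt into legacy mode. A sketch of running the query from Node with the official client, assuming credentials are already configured and the SQL above is passed in as a string:

const { BigQuery } = require('@google-cloud/bigquery')

async function topEmacsLispRepos(sql) {
  const bigquery = new BigQuery()
  // useLegacySql is required for the [dataset:table] syntax above.
  const [rows] = await bigquery.query({ query: sql, useLegacySql: true })
  return rows   // one row per repository, ordered by push count
}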