You can install the command line tools without installing Xcode:
$ xcode-select --install
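To confirm the tools are in place afterwards, print the active developer directory:

$ xcode-select -p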
# Requirements

* Python 2.7
* Google Chrome
* ChromeDriver - WebDriver for Chrome
  - Download the latest chromedriver (2.28 at the time of writing) from https://sites.google.com/a/chromium.org/chromedriver/downloads
  - Extract and move the `chromedriver` binary to `/usr/local/bin/chromedriver` (a quick sanity check follows this list)
- git clone https://github.com/li-xinyang/OS_FrontendMaster-dl
- cd OS_FrontendMaster-dl
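With the binary on your PATH, this prints the installed driver version:

chromedriver --version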
sudo docker pull elasticsearch:1.7
sudo docker run -d -p 9200:9200 -p 9300:9300 -v "/data-0":/usr/share/elasticsearch/data -v "$PWD/scripts":/usr/share/elasticsearch/config/scripts elasticsearch:1.7 elasticsearch -Des.cluster.name="avengers"
sudo docker run -d -P -v "/data-1":/usr/share/elasticsearch/data -v "$PWD/scripts":/usr/share/elasticsearch/config/scripts elasticsearch:1.7 elasticsearch -Des.cluster.name="avengers"
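Both containers join the same `avengers` cluster. To check that they found each other, query the cluster health API on the published port (it should report `number_of_nodes: 2` once discovery completes):

curl http://localhost:9200/_cluster/health?pretty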
# Local server

# Create new Docker virtual machine
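A docker-machine sketch matching this heading (the VM name `default` and the VirtualBox driver are assumptions; docker-machine is the Docker Toolbox-era tooling contemporary with elasticsearch:1.7):

docker-machine create --driver virtualbox default
eval $(docker-machine env default)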
// Based on gulpfile.js from Google Web Starter Kit
// and https://gist.github.com/soin08/4793992d8cc537f62df3
// https://github.com/google/web-starter-kit
'use strict';

// Include Gulp & tools we'll use
var gulp = require('gulp');
var autoPrefixer = require('gulp-autoprefixer');
var sourceMaps = require('gulp-sourcemaps');
var concat = require('gulp-concat');
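// The require list ends here in the original. A minimal sketch of a task
// wiring these plugins together (the task name and paths are assumptions,
// not taken from the original gulpfile):
gulp.task('styles', function () {
  return gulp.src('app/styles/**/*.css')
    .pipe(sourceMaps.init())
    .pipe(autoPrefixer({browsers: ['last 2 versions']}))
    .pipe(concat('main.css'))
    .pipe(sourceMaps.write('.'))
    .pipe(gulp.dest('dist/styles'));
});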
var fs = require('fs');
var querystring = require('querystring');
var http = require('http');

// Index settings/mappings previously dumped to indexes.json (see the curl
// command in the elasticdump section below).
var indexes = JSON.parse(fs.readFileSync('indexes.json', 'utf8'));
var host = '192.168.186.143',
    port = '9200',
    postData = '',
    indexName = '';
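// A sketch of how the restore loop might continue; the per-index PUT layout
// is an assumption based on the _settings,_mapping dump fetched below:
Object.keys(indexes).forEach(function (name) {
  indexName = name;
  postData = JSON.stringify(indexes[name]);
  var req = http.request({
    host: host,
    port: port,
    path: '/' + indexName,
    method: 'PUT',
    headers: { 'Content-Type': 'application/json' }
  }, function (res) {
    console.log(indexName + ': HTTP ' + res.statusCode);
  });
  req.write(postData);
  req.end();
});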
# https://www.npmjs.com/package/elasticdump
git clone https://github.com/taskrabbit/elasticsearch-dump.git
cd elasticsearch-dump/
curl -XGET http://localhost:9200/_all/_settings,_mapping > indexes.json
sudo docker build -t elasticdump .
time sudo docker run --rm -ti -v ~/dumps/:/data elasticdump --all=true --type=data --input=http://<ip>:9200/ --output=/data/data-backup.json
# Back up an index to a gzip using stdout:
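The matching example from the elasticsearch-dump README (`--output=$` means stdout; the host and index name are the README's placeholders):

elasticdump \
  --input=http://production.es.com:9200/my_index \
  --output=$ \
  | gzip > /data/my_index.json.gz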
# https://github.com/taskrabbit/elasticsearch-dump
# Install Node.js and npm on Ubuntu:
curl -sL https://deb.nodesource.com/setup_5.x | sudo -E bash -
sudo apt-get install -y nodejs
# Optional: build tools for native addons
sudo apt-get install -y build-essential
# Install elasticdump globally
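# (standard npm global install; puts the elasticdump CLI on your PATH)
sudo npm install -g elasticdump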
function detectChromeExtension(extensionId, accessibleResource, callback) {
    if (typeof(chrome) !== 'undefined') {
        var xmlHttp = new XMLHttpRequest(),
            testUrl = 'chrome-extension://' + extensionId + '/' + accessibleResource;
        xmlHttp.open('HEAD', testUrl, true);
        xmlHttp.setRequestHeader('Content-type', 'application/x-www-form-urlencoded');
        xmlHttp.timeout = 1000;
        xmlHttp.onreadystatechange = function () {
            if (xmlHttp.readyState == 4 && typeof(callback) == 'function') {
                // HTTP 200 means the web-accessible resource exists,
                // i.e. the extension is installed.
                callback(xmlHttp.status === 200);
            }
        };
        xmlHttp.send();
    }
}
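// Example usage; the 32-character extension ID and the resource path are
// placeholders, and the resource must be listed in the extension's
// web_accessible_resources for the probe to succeed:
detectChromeExtension('abcdefghijklmnopabcdefghijklmnop', 'icon.png', function (installed) {
    console.log(installed ? 'extension detected' : 'extension not found');
});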
# Read the whole file into a string; the with block closes the file automatically
with open("filename") as f:
    newfile = f.read()