### Modify hostname
## Modify the master's hostname
# vim /etc/hostname
master
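On systemd-based distributions the same change can also be applied without a reboot, and the new name should be added to /etc/hosts as well (the IP address below is a placeholder):

# hostnamectl set-hostname master
# vim /etc/hosts
192.168.1.10    master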
#!/bin/bash

if [[ "$(sw_vers -productVersion)" != 10\.15* ]]; then
  echo "This is only meant to run on macOS 10.15.* Catalina" >&2
  exit 1
fi

reply=
printf "Are you pretty damn sure you want to run this? (Yes/No) "
read -r reply
[[ $reply != Yes ]] && exit 1
#!/bin/bash

# IMPORTANT: Don't forget to log out from your Apple ID in Settings before running it!
# IMPORTANT: You will need to run this script from Recovery. macOS Catalina ships with a read-only system volume, which prevents this script from working from the main OS.
# This script needs to be run from the volume you wish to use.
# E.g. run it like this: cd /Volumes/Macintosh\ HD && sh /Volumes/Macintosh\ HD/Users/sabri/Desktop/disable.sh
# WARNING: It might disable things that you may not like. Please double-check the services in the TODISABLE vars.
# Get active services: launchctl list | grep -v "\-\t0"
# Find a service: grep -lR [service] /System/Library/Launch* /Library/Launch* ~/Library/LaunchAgents
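The comments above reference a TODISABLE list of services. A minimal sketch of what the disabling loop might look like, assuming the target volume has been remounted read-write and the plists are simply renamed out of launchd's search path (the service names below are placeholders, not a recommendation, and this is not the original script):

# Placeholder service names -- double check each one before disabling anything
TODISABLE=('com.apple.analyticsd' 'com.apple.screensharing')

for daemon in "${TODISABLE[@]}"; do
  plist="./System/Library/LaunchDaemons/${daemon}.plist"
  if [ -f "$plist" ]; then
    # Renaming the plist stops launchd from loading the service on the next boot
    mv "$plist" "${plist}.bak"
  fi
done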
// Resolve after the given number of milliseconds
const waitFor = (ms) => new Promise(r => setTimeout(r, ms))

// Array.prototype.forEach does not await async callbacks, so use a plain for loop
const asyncForEach = async (array, callback) => {
  for (let index = 0; index < array.length; index++) {
    await callback(array[index], index, array)
  }
}

const start = async () => {
  await asyncForEach([1, 2, 3], async (num) => {
    await waitFor(50)
    console.log(num)
  })
  console.log('Done')
}

start()
-- List all tables:
select db_id, id, name, sum(rows) as mysum
from stv_tbl_perm where db_id = 100546
group by db_id, id, name order by mysum desc;

-- List all running processes:
select pid, query from stv_recents where status = 'Running';

-- Describe a table:
select * from PG_TABLE_DEF where tablename = 'audit_trail';
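Note that PG_TABLE_DEF only returns rows for tables in schemas that are on the search_path, so the relevant schema may need to be made visible first:

-- make the relevant schemas visible to PG_TABLE_DEF
set search_path to '$user', public;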
import boto3
from io import StringIO

# NOTE: the bucket below and the upload step at the end are an assumed
# completion of the truncated snippet -- replace with your own bucket.
S3_BUCKET = "my-example-bucket"

def _write_dataframe_to_csv_on_s3(dataframe, filename):
    """ Write a dataframe to a CSV on S3 """
    print("Writing {} records to {}".format(len(dataframe), filename))
    # Create buffer
    csv_buffer = StringIO()
    # Write dataframe to buffer
    dataframe.to_csv(csv_buffer, sep="|", index=False)
    # Upload the buffer contents to S3
    s3 = boto3.resource("s3")
    s3.Object(S3_BUCKET, filename).put(Body=csv_buffer.getvalue())
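A quick usage sketch for the helper above (the sample frame is illustrative; the target bucket is the placeholder defined at the top of the snippet):

import pandas as pd

df = pd.DataFrame({"id": [1, 2, 3], "name": ["a", "b", "c"]})
_write_dataframe_to_csv_on_s3(df, "example.csv")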
#!/bin/sh
# Script to convert a MySQL schema to be compatible with data warehouse software.
# Make sure the s3cmd and maatkit utilities are installed.

# database to convert (defaults to 'test')
db_name=${1:-'test'}
# truncate the output file
> /root/$db_name.txt

temppath='/mnt/data/pdump1'
host='localhost'
user='maatkit'
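The snippet stops after the variable setup; a sketch of one possible continuation (not the original script -- the dump flags, sed rewrites and S3 bucket are assumptions) that dumps each table's schema, strips MySQL-specific clauses and uploads the result with s3cmd:

# assumed continuation: dump and clean the schema of every table in $db_name
for table in $(mysql -h "$host" -u "$user" -N -e "show tables from $db_name"); do
    mysqldump -h "$host" -u "$user" --no-data "$db_name" "$table" \
      | sed -e 's/ AUTO_INCREMENT=[0-9]*//' -e 's/ ENGINE=[A-Za-z]*//' \
      >> /root/$db_name.txt
done
# upload the converted schema (bucket name is a placeholder)
s3cmd put /root/$db_name.txt s3://my-warehouse-bucket/schemas/$db_name.txt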
from pyramid.config import Configurator
from zope.interface.registry import Components
import simplejson as json
import datetime
import decimal
from .. import lib
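These imports suggest a Pyramid app that renders JSON with simplejson and needs to handle datetime and decimal values; a minimal, self-contained sketch of how such a renderer could be wired up (the adapter choices and the main() factory are assumptions, not the original module):

from pyramid.config import Configurator
from pyramid.renderers import JSON
import simplejson as json
import datetime
import decimal

def main(global_config, **settings):
    config = Configurator(settings=settings)
    # JSON renderer backed by simplejson, with adapters for types the
    # default encoder cannot serialize
    json_renderer = JSON(serializer=json.dumps)
    json_renderer.add_adapter(datetime.datetime, lambda obj, request: obj.isoformat())
    json_renderer.add_adapter(datetime.date, lambda obj, request: obj.isoformat())
    json_renderer.add_adapter(decimal.Decimal, lambda obj, request: float(obj))
    config.add_renderer('json', json_renderer)
    return config.make_wsgi_app()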
//Add some constraints for good measure; constraints must be run individually
CREATE CONSTRAINT ON (c:Customer) ASSERT c.email IS UNIQUE;
CREATE CONSTRAINT ON (p:Product) ASSERT p.uuid IS UNIQUE;

//:Start Product and customer query:
//Products, bundles and categories
CREATE (product1:Product {name: "Product 1", uuid: "d8d177cc-1542-11e5-b60b-1697f925ec7b", price: 10})
CREATE (product2:Product {name: "Product 2", uuid: "d8d17b28-1542-11e5-b60b-1697f925ec7b", price: 20})
CREATE (product3:Product {name: "Product 3", uuid: "d8d17c72-1542-11e5-b60b-1697f925ec7b", price: 30})
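With the uuid constraint in place, a product created above can be looked up directly by its uuid, for example:

//Look up Product 1 by uuid
MATCH (p:Product {uuid: "d8d177cc-1542-11e5-b60b-1697f925ec7b"})
RETURN p.name, p.price;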
Use apt-mirror to sync the Ambari, HDP, and HDP-UTILS repositories from the Hortonworks mirror server, which makes it easy to set up a Hadoop cluster locally. Suitable for Ubuntu/Debian-based operating systems.
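A minimal /etc/apt/mirror.list sketch (the repository URLs and version numbers are examples only -- check the Hortonworks documentation for the paths that match your HDP release and Ubuntu version):

set base_path /var/spool/apt-mirror
set nthreads  20
set _tilde    0

# Ambari (example URL/version -- adjust to your release)
deb http://public-repo-1.hortonworks.com/ambari/ubuntu16/2.x/updates/2.6.2.2 Ambari main
# HDP and HDP-UTILS (example URLs/versions -- adjust to your release)
deb http://public-repo-1.hortonworks.com/HDP/ubuntu16/2.x/updates/2.6.5.0 HDP main
deb http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.22/repos/ubuntu16 HDP-UTILS main

clean http://public-repo-1.hortonworks.com

After apt-mirror finishes, the mirrored directories under base_path can be served by any web server and referenced from the cluster nodes' sources.list.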