# One-time setup for contributing to a forked GitHub repository.
# 1. Clone your fork (replace the placeholders with your account/repo).
git clone git@github.com:YOUR-USERNAME/YOUR-FORKED-REPO.git
cd into/cloned/fork-repo
# 2. Track the original repository as the "upstream" remote.
#    NOTE: GitHub disabled the unauthenticated git:// protocol in March 2022,
#    so use https:// (or an ssh URL) instead of git://.
git remote add upstream https://github.com/ORIGINAL-DEV-USERNAME/REPO-YOU-FORKED-FROM.git
# 3. Fetch upstream branches so you can merge/rebase your fork against them.
git fetch upstream
| 1. Convert our ".jks" file to ".p12" (PKCS12 key store format): | |
| keytool -importkeystore -srckeystore oldkeystore.jks -destkeystore newkeystore.p12 -deststoretype PKCS12 | |
| 1.1. List new keystore file contents: | |
| keytool -deststoretype PKCS12 -keystore newkeystore.p12 -list | |
| 2. Extract pem (certificate) from ".p12" keystore file: | |
| # to generate your dhparam.pem file, run in the terminal | |
| openssl dhparam -out /etc/nginx/ssl/dhparam.pem 2048 |
| var | |
| // Local ip address that we're trying to calculate | |
| address | |
| // Provides a few basic operating-system related utility functions (built-in) | |
| ,os = require('os') | |
| // Network interfaces | |
| ,ifaces = os.networkInterfaces(); | |
| // Iterate over interfaces ... |
| import org.apache.spark.mllib.linalg.distributed.RowMatrix | |
| import org.apache.spark.mllib.linalg._ | |
| import org.apache.spark.{SparkConf, SparkContext} | |
| // To use the latest sparse SVD implementation, please build your spark-assembly after this | |
| // change: https://github.com/apache/spark/pull/1378 | |
| // Input tsv with 3 fields: rowIndex(Long), columnIndex(Long), weight(Double), indices start with 0 | |
| // Assume the number of rows is larger than the number of columns, and the number of columns is | |
| // smaller than Int.MaxValue |
# Create a RabbitMQ user "test" with password "test".
| rabbitmqctl add_user test test | |
# Tag the user as "administrator" (full management-UI and admin rights).
| rabbitmqctl set_user_tags test administrator | |
# Grant full configure/write/read permissions (".*" each) on the "/" vhost.
| rabbitmqctl set_permissions -p / test ".*" ".*" ".*" |
^([0-9]+)\.([0-9]+)\.([0-9]+)(?:-([0-9A-Za-z-]+(?:\.[0-9A-Za-z-]+)*))?(?:\+([0-9A-Za-z-]+(?:\.[0-9A-Za-z-]+)*))?$
| """ | |
| Copies all keys from the source Redis host to the destination Redis host. | |
| Useful to migrate Redis instances where commands like SLAVEOF and MIGRATE are | |
| restricted (e.g. on Amazon ElastiCache). | |
| The script scans through the keyspace of the given database number and uses | |
| a pipeline of DUMP and RESTORE commands to migrate the keys. | |
| Requires Redis 2.8.0 or higher. |
| import * as aws from "@pulumi/aws"; | |
| import * as awsx from "@pulumi/awsx"; | |
| import * as eks from "@pulumi/eks"; | |
| import * as k8s from "@pulumi/kubernetes"; | |
| /* | |
| * 1) Single-step deployment of three IAM Roles | |
| */ | |
| // Administrator AWS IAM clusterAdminRole with full access to all AWS resources |
| import ec2 = require('@aws-cdk/aws-ec2'); | |
| import ecs = require('@aws-cdk/aws-ecs'); | |
| import elbv2 = require('@aws-cdk/aws-elasticloadbalancingv2'); | |
| import cdk = require('@aws-cdk/core'); | |
| class PublicFargateService extends cdk.Stack { | |
| constructor(scope: cdk.App, id: string) { | |
| super(scope, id); | |
| // Create VPC and Fargate Cluster |