The script relies on two dependencies:
- Python Requests
- BeautifulSoup4
Install both by running:
pip3 install requests
pip3 install beautifulsoup4
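To verify that both packages installed correctly, a one-line import check (assuming python3 is the interpreter pip3 installs into):
python3 -c "import requests, bs4; print(requests.__version__, bs4.__version__)"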
# Become root
sudo su
# Update apt package cache
apt update
# Install NGINX
apt install nginx
# Create a non-sudo user
docker swarm init
docker service create --mode global --publish target=8000,published=80 serpsbot/flask-test
# Get join token for workers
docker swarm join-token worker
## Will return
docker swarm join --token xyz ip-address:port
## Execute this command on all worker nodes. We have deployed the service in global mode, so every worker that joins the cluster will automatically run one task of the service.
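Once the workers have joined, the deployment can be verified from the manager node. The create command above did not pass --name, so Docker generates a service name; look it up first:
# List all nodes that have joined the swarm
docker node ls
# Find the generated service name, then confirm a task runs on every node
docker service ls
docker service ps <service-name>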
var localContext = {
  "window": {
    location: {
      href: "https://www.excise.gos.pk/vehicle/vehicle_search"
    }
  }
}
echo """#!/bin/bash | |
theUrl=$1 | |
theFileName=$2 | |
theDirectoryName=$3 | |
if [ -z "$theUrl" ]; then | |
echo "Please provide a URL" | |
else | |
echo "Processing $theUrl" | |
if curl --output /dev/null --silent --head --fail "$theUrl"; then |
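Assuming the script is saved as download.sh (the filename is illustrative), a typical invocation passes the URL, target filename, and target directory:
chmod +x download.sh
./download.sh https://example.com/index.html index.html ./downloads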
openapi: "3.0.0"
info:
  version: 2.0.0
  title: iScraper API
  license:
    name: MIT
servers:
  - url: https://api.iscraper.io/api/v2
paths:
  /profile-data/:
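The excerpt cuts off at the /profile-data/ path, so its HTTP method, request body, and auth scheme are not shown here; purely as a hypothetical sketch against the listed server URL:
# Method, header, and body are placeholders, not taken from the spec
curl -X POST "https://api.iscraper.io/api/v2/profile-data/" \
  -H "Content-Type: application/json" \
  -d '{}'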
""" | |
A very tiny Python script to dump all databases for a user as .sql files. | |
""" | |
DB_USER='' | |
DB_PASS='' | |
import os | |
import mysql.connector | |
conn = mysql.connector.connect(user=DB_USER, password=DB_PASS) |
#
# This is an example VCL file for Varnish.
#
# It does not do anything by default, delegating control to the
# builtin VCL. The builtin VCL is called when there is no explicit
# return statement.
#
# See the VCL chapters in the Users Guide at https://www.varnish-cache.org/docs/
# and https://www.varnish-cache.org/trac/wiki/VCLExamples for more examples.
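Since the builtin VCL only takes over where the file itself returns nothing, it is worth confirming that a candidate VCL file at least compiles before loading it; varnishd can compile the file to C and exit, assuming it lives at the stock path:
varnishd -C -f /etc/varnish/default.vcl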
This guide walks through deploying a Django application on an Ubuntu 18.x server from a Git repo. It was written for one specific client's needs and is not intended as general-purpose documentation.
Step 1: Install needed packages
sudo apt-get install nginx mysql-server software-properties-common libmysqlclient-dev virtualenv
Step 2: Create a system user with sudo privileges if you are signed in as root
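For example, running as root and using djangoapp as a placeholder username:
adduser djangoapp
usermod -aG sudo djangoapp
su - djangoapp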
#!/bin/bash
sudo yum update -y
sudo systemctl stop NetworkManager
sudo systemctl disable NetworkManager
sudo yum remove NetworkManager -y
sudo systemctl stop firewalld
sudo systemctl disable firewalld
sudo yum remove firewalld -y
sudo curl -o /home/latest -L https://securedownloads.cpanel.net/latest
sudo chmod +x /home/latest
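The snippet stops after making the installer executable; per cPanel's standard installation flow, the remaining step is to run the downloaded script (the install takes a long time, so running it inside screen or tmux is common practice):
sudo /home/latest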