```python
#!/usr/bin/env python3
"""
Generic PDF Text Splitter

Splits extracted PDF text into multiple files based on chapter/section patterns.

Usage:
    python3 pdf_splitter.py --input input.txt --output ./sections --pattern "^(\d+)\.\s+(.+)$"
    python3 pdf_splitter.py --input input.txt --output ./sections --pattern "^Chapter\s+(\d+):\s+(.+)$"
    python3 pdf_splitter.py --input input.txt --output ./sections --pattern "^#+\s+(.+)$"  # Markdown headers
"""
```
```lua
--[[
github_ref.lua

A small, editor-native Neovim helper for opening GitHub references found
under the cursor or in a visual selection.

This file intentionally avoids plugins, GitHub API calls, Tree-sitter,
LSP integration, or background jobs. It is designed to be fast, predictable,
and usable anywhere text appears (code, documentation, comments, or notes).
--]]
```
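The header comment describes resolving textual GitHub references (such as `owner/repo#123`) into URLs without any API calls. A minimal sketch of that resolution step, written in Python for illustration; the accepted reference forms and the default-repo fallback are assumptions, not taken from the Lua source:

```python
import re

# Hypothetical reference forms: "owner/repo", "owner/repo#123", and bare "#123".
REPO = re.compile(r"^([\w.-]+)/([\w.-]+)(?:#(\d+))?$")
ISSUE = re.compile(r"^#(\d+)$")

def resolve(ref, default_repo="user/project"):
    """Map a GitHub reference found under the cursor to a URL, or None."""
    m = REPO.match(ref)
    if m:
        owner, repo, issue = m.groups()
        base = "https://github.com/{}/{}".format(owner, repo)
        return base + "/issues/" + issue if issue else base
    m = ISSUE.match(ref)
    if m:  # bare issue number: resolve against a configured default repository
        return "https://github.com/{}/issues/{}".format(default_repo, m.group(1))
    return None

assert resolve("neovim/neovim#123") == "https://github.com/neovim/neovim/issues/123"
```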
```bash
function activate_env() {
    # Collect immediate subdirectories that contain a bin/activate script
    local filtered_envs=($(find . -maxdepth 1 -type d -exec test -e '{}/bin/activate' \; -print))

    # If no environments found
    if [[ ${#filtered_envs[@]} -eq 0 ]]; then
        echo "No suitable environments found."
        return
    fi
```
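A minimal Python equivalent of the discovery step above, assuming the same virtualenv-style layout in which each environment directory carries a `bin/activate` script:

```python
from pathlib import Path

def find_envs(root="."):
    """Immediate subdirectories of root that contain bin/activate."""
    return [d for d in Path(root).iterdir()
            if d.is_dir() and (d / "bin" / "activate").is_file()]

envs = find_envs()
if not envs:
    print("No suitable environments found.")
```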
```scala
import org.apache.spark.sql.SparkSession
import org.slf4j.LoggerFactory
import sun.security.krb5.internal.ktab.KeyTab
import org.springframework.security.kerberos.client.KerberosRestTemplate

object SparkApp {
  def main(args: Array[String]): Unit = {
    // The Spark session is not strictly needed here, but it lets this run as a Spark app for testing
    val spark = SparkSession.builder.appName("KerberosTest Spark Job").getOrCreate()
    val logger = LoggerFactory.getLogger(this.getClass)
```
```bash
today=$(date +'%s')           # current time as epoch seconds
granularity=$(( 24*60*60 ))   # granularity of time, currently set to days
olderThan=7                   # granularity times olderThan gives the age at which files are deleted

# Read the listing line by line; iterating a plain `for` over the unquoted
# output would split on every word, not every line
hdfs dfs -ls /tmp | tail -n +2 | while read -r line; do
    dir_date=$(echo "${line}" | awk '{print $6}')
    # difference=$(( ( ${today} - $(date -j -u -f "%Y-%m-%d %H:%M" "${dir_date}" +%s) ) / ${granularity} )) # macOS
    difference=$(( ( ${today} - $(date -d "${dir_date}" +%s) ) / ${granularity} )) # Linux
    filePath=$(echo "${line}" | awk '{print $8}')
```
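The arithmetic above turns the listing date into an age in whole days (epoch-second difference divided by the day granularity). The same computation as a minimal Python sketch; the sample date is purely illustrative:

```python
from datetime import datetime, timezone

GRANULARITY = 24 * 60 * 60   # seconds per day
OLDER_THAN = 7               # entries older than this many days are candidates for deletion

def age_in_days(listing_date, now):
    """Whole-day age of an entry whose HDFS listing date looks like '2019-10-15'."""
    ts = datetime.strptime(listing_date, "%Y-%m-%d").replace(tzinfo=timezone.utc).timestamp()
    return int((now - ts) // GRANULARITY)

now = datetime.now(timezone.utc).timestamp()
print(age_in_days("2019-10-15", now) > OLDER_THAN)  # True once the entry is old enough
```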
```bash
#!/bin/bash
# Back up every top-level HDFS entry into a dated local directory, then zip it
now=$(date +"%Y_%m_%d")
file=hadoop_backup_$now

cd ~ || exit 1
mkdir -p "$file"
hdfs dfs -ls / | grep "^[d-]" | awk '{print $8}' | while read -r line; do
    hdfs dfs -get "$line" "$file"
done
zip -r "$file.zip" "$file"
```
```yaml
---
version: '3'
services:
  arangodb:
    image: arangodb:3.5.1
    environment:
      ARANGO_NO_AUTH: 1
    volumes:
      - /tmp/arangodb:/var/lib/arangodb3
```
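Saved as docker-compose.yml, `docker-compose up -d` starts the database with authentication disabled (ARANGO_NO_AUTH). A minimal Python check of the server, assuming a `ports: ["8529:8529"]` mapping is added so the HTTP API is reachable from the host:

```python
import json
import urllib.request

# GET /_api/version is ArangoDB's version endpoint; with ARANGO_NO_AUTH=1
# the request needs no credentials. Port 8529 assumes a published port mapping.
with urllib.request.urlopen("http://localhost:8529/_api/version") as resp:
    print(json.load(resp))
```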
```javascript
// https://github.com/SmartJSONEditor/PublicDocuments/wiki/ValueTransformers
var ValueTransformer = function () {
    this.displayName = "Decimal(38,18)";
    this.shortDescription = "https://spark.apache.org/docs/2.4.0/api/java/org/apache/spark/sql/types/Decimal.html";
    this.transform = function (inputValue, jsonValue, arrayIndex, parameters, info) {
        var result = '';
        var characters = '0123456789';
        var charactersLength = characters.length;
        var precision = 38;
```
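The truncated transform appears to assemble a random digit string for a Spark Decimal(38,18) value (38 digits of precision, 18 of scale). A minimal Python sketch of that idea; the exact generation strategy is an assumption, since the body is cut off:

```python
import random

def random_decimal(precision=38, scale=18):
    """Random decimal string with `precision` total digits, `scale` of them fractional."""
    digits = "0123456789"
    integer_part = "".join(random.choice(digits) for _ in range(precision - scale))
    fraction_part = "".join(random.choice(digits) for _ in range(scale))
    # Strip leading zeros but keep at least one integer digit
    return (integer_part.lstrip("0") or "0") + "." + fraction_part

print(random_decimal())
```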
```bash
#!/bin/bash
# Switch this for the list of addresses you want to ssh to
ADDRESS=(127.0.0.1 localhost)
# Switch this for the name of the user you are sshing as
NAME=$USER

# Expand the whole array; a bare $ADDRESS would yield only its first element
for i in "${ADDRESS[@]}"
do
    echo "Trying to connect as $NAME to $i"
```