$ autoload zmv
$ zmv '*' '${(L)f}'
$ zmv -n '(*.)(<->)(.[^.]#)' '$1$(($2+1))$3'   # would rename x.0001.y to x.2.y.
$ zmv -n '(*.0#)(<->)(.[^.]#)' '$1$(($2+1))$3' # variant that keeps the zero padding (x.0001.y -> x.0002.y)
import cats.data.{ EitherT, State }
import cats.implicits._
import cats.{ Monad, ~> }
import io.aecor.liberator.macros.free
import io.aecor.liberator.syntax._
import io.aecor.liberator.{ ProductKK, Term }

@free
trait Api[F[_]] {
  def doThing(aThing: String, params: Map[String, String]): F[Either[String, String]]
}
// originally by @SethTisue, see http://stackoverflow.com/questions/40622878/how-do-i-tell-sbt-to-use-a-nightly-build-of-scala-2-11-or-2-12/40622879#40622879

resolvers += "nightlies" at "https://scala-ci.typesafe.com/artifactory/scala-release-temp/"

scalaVersion := {
  val propsUrl = new URL("https://scala-ci.typesafe.com/job/scala-2.12.x-integrate-bootstrap/lastSuccessfulBuild/artifact/jenkins.properties/*view*/")
  val props = new java.util.Properties
  props.load(propsUrl.openStream)
  props.getProperty("version")
}

scalaBinaryVersion := "2.12"
// These lines go in ~/.sbt/0.13/global.sbt

watchSources ++= (
  (baseDirectory.value * "*.sbt").get
    ++ (baseDirectory.value / "project" * "*.scala").get
    ++ (baseDirectory.value / "project" * "*.sbt").get
)

addCommandAlias("rtu", "; reload ; test:update")

addCommandAlias("rtc", "; reload ; test:compile")

addCommandAlias("ru", "; reload ; update")
(defconst doom-fringe-size 3 "Default fringe width")
;;; Setting up the fringe
;; switches order of fringe and margin
(setq-default fringes-outside-margins t)
;; standardize fringe width
(fringe-mode doom-fringe-size)
(push `(left-fringe  . ,doom-fringe-size) default-frame-alist)
(push `(right-fringe . ,doom-fringe-size) default-frame-alist)
% sbtx dependencyGraph
... blah blah ...
[info] *** Welcome to the sbt build definition for Scala! ***
[info] Check README.md for more information.
[error] Not a valid command: dependencyGraph
[error] Not a valid project ID: dependencyGraph
% sbtx -Dplugins=graph dependencyGraph
... blah blah ...
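The transcript above is from the Scala build itself, where `-Dplugins=graph` toggles the plugin on. For an ordinary project, the `dependencyGraph` command typically comes from the sbt-dependency-graph plugin; a minimal sketch of enabling it (the version number is an assumption, use whatever the plugin currently publishes):

```scala
// project/plugins.sbt -- provides the dependencyGraph task for an ordinary sbt project
addSbtPlugin("net.virtual-void" % "sbt-dependency-graph" % "0.8.2")
```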
package com.databricks.spark.jira

import scala.io.Source

import org.apache.spark.rdd.RDD
import org.apache.spark.sql._
import org.apache.spark.sql.functions._
import org.apache.spark.sql.sources.{BaseRelation, RelationProvider, TableScan}
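The snippet stops at the imports. As a rough sketch of where they typically lead, here is a minimal source-API relation built from the same pieces; the class names, schema, and placeholder rows are assumptions for illustration, not the gist's actual JIRA implementation.

```scala
package com.databricks.spark.jira

import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{Row, SQLContext}
import org.apache.spark.sql.sources.{BaseRelation, RelationProvider, TableScan}
import org.apache.spark.sql.types.{StringType, StructField, StructType}

// Entry point for the data source: spark.read.format("com.databricks.spark.jira")
// resolves to a class named DefaultSource in this package.
class DefaultSource extends RelationProvider {
  override def createRelation(sqlContext: SQLContext,
                              parameters: Map[String, String]): BaseRelation =
    new IssueRelation(parameters.getOrElse("url", ""))(sqlContext)
}

// A BaseRelation with TableScan: declare a schema and produce an RDD[Row] for full scans.
class IssueRelation(url: String)(@transient val sqlContext: SQLContext)
    extends BaseRelation with TableScan {

  override def schema: StructType =
    StructType(Seq(StructField("key", StringType), StructField("summary", StringType)))

  override def buildScan(): RDD[Row] =
    // placeholder rows; a real implementation would fetch and parse issues
    // from the JIRA REST endpoint at `url`
    sqlContext.sparkContext.parallelize(Seq(Row("DEMO-1", "example issue")))
}
```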
"""Download Dash User Docsets and install them in Zeal""" | |
import configparser | |
import json | |
import tarfile | |
import urllib.request | |
from pathlib import Path | |
from sys import platform | |
import easygui |
A primer/refresher on the category theory concepts that most commonly crop up in conversations about Scala or FP. (Because it's embarrassing when I forget this stuff!)

I'll be assuming Scalaz imports in code samples, and some of the code may be pseudo-Scala.

A functor is something that supports `map`.
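A minimal sketch of what that means in Scalaz-flavoured code; the `Box` type below is made up for the example:

```scala
import scalaz._, Scalaz._

// map over an existing functor: the structure stays the same, only the values change
Functor[Option].map(Option(2))(_ + 1)    // Some(3)
Functor[List].map(List(1, 2, 3))(_ * 2)  // List(2, 4, 6)

// a hand-rolled instance for a made-up single-value container
case class Box[A](value: A)

implicit val boxFunctor: Functor[Box] = new Functor[Box] {
  def map[A, B](fa: Box[A])(f: A => B): Box[B] = Box(f(fa.value))
}

Box(1).map(_ + 1) // Box(2), via Scalaz's functor syntax
```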
// This allows transforming a single entity (from a given RDD or DStream) in different ways
// using an HList of mappers, and binding the results into a given output type.
// Potentially useful for feature extraction from a single entity / basic ETL.
// Usage:
import twitter4j.Status
import shapeless._

val user: Status => String = ... // a function that extracts the author of a tweet
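The gist cuts off here. As a rough sketch of the pattern it describes (applying an HList of extractor functions to one value and binding the results into a case class), assuming a stand-in `Tweet` type instead of twitter4j's `Status` and made-up extractor and feature names:

```scala
import shapeless._

// stand-in for twitter4j.Status so the sketch is self-contained
case class Tweet(author: String, text: String)

// the target output type we want to bind the extracted features into
case class Features(author: String, length: Int, hasHashtag: Boolean)

// the "HList of mappers": one extractor function per feature
val author:  Tweet => String  = _.author
val length:  Tweet => Int     = _.text.length
val hashtag: Tweet => Boolean = _.text.contains("#")

val extractors = author :: length :: hashtag :: HNil

def extract(t: Tweet): Features = {
  // a Poly that applies each extractor to the same tweet
  object applyTo extends Poly1 {
    implicit def caseFn[A]: Case.Aux[Tweet => A, A] = at[Tweet => A](f => f(t))
  }
  // map the extractors over the tweet and rebuild the case class generically
  Generic[Features].from(extractors.map(applyTo))
}

extract(Tweet("alice", "hello #scala")) // Features("alice", 12, true)
```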