This mini-guide covers the tools needed to run contracts with Mist and to develop Dapps against a private local network.
- Install Mist
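Once a node for the private chain is running locally, it helps to confirm its JSON-RPC endpoint is reachable before attaching Mist to it. A minimal sketch, assuming web3j on the classpath and a local node on the default `http://localhost:8545` (both are assumptions, not part of the original guide):

```scala
import org.web3j.protocol.Web3j
import org.web3j.protocol.http.HttpService

object PrivateNetCheck extends App {
  // Assumes a local node (e.g. geth on a private chain) serving JSON-RPC
  // on the default endpoint; Mist can be pointed at the same node.
  val web3 = Web3j.build(new HttpService("http://localhost:8545"))
  println("client: " + web3.web3ClientVersion().send().getWeb3ClientVersion)
}
```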
Example `[core]` settings from `airflow.cfg`, pointing Airflow at a custom home directory, DAGs folder, and log folder:

```ini
[core]
# The home folder for airflow, default is ~/airflow
airflow_home = /Users/p1nox/airflow

# The folder where your airflow pipelines live, most likely a
# subfolder in a code repository
dags_folder = /Users/p1nox/airflow/dags

# The folder where airflow should store its log files. This location
# must be absolute.
base_log_folder = /Users/p1nox/airflow/logs
```
Bulk-loading a Spark `DataFrame` into PostgreSQL through the driver's `COPY` support, via `CopyManager`; the original snippet stops after the connection properties, completed minimally here:

```scala
import java.io.InputStream

import org.apache.spark.sql.execution.datasources.jdbc.JdbcUtils
import org.apache.spark.sql.{DataFrame, Row}
import org.postgresql.copy.CopyManager
import org.postgresql.core.BaseConnection

val jdbcUrl = s"jdbc:postgresql://..." // db credentials elided

// Driver properties handed to JDBC when opening connections.
val connectionProperties = {
  val props = new java.util.Properties()
  props.setProperty("driver", "org.postgresql.Driver")
  props
}
```
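A sketch of the copy step itself, assuming a plain `DriverManager` connection (the imported `JdbcUtils` also exposes connection factories, but its API shifts between Spark versions); `copyToPostgres` and the naive CSV rendering are illustrative, not from the original:

```scala
import java.io.ByteArrayInputStream
import java.sql.DriverManager

// Hypothetical helper: stream each partition into Postgres with COPY.
def copyToPostgres(df: DataFrame, table: String): Unit =
  df.foreachPartition { (rows: Iterator[Row]) =>
    val conn = DriverManager.getConnection(jdbcUrl, connectionProperties)
    try {
      val copy = new CopyManager(conn.asInstanceOf[BaseConnection])
      // Naive CSV rendering: no quoting or escaping, illustration only.
      val csv = rows.map(_.mkString(",") + "\n").mkString.getBytes("UTF-8")
      copy.copyIn(s"COPY $table FROM STDIN WITH (FORMAT csv)", new ByteArrayInputStream(csv))
    } finally conn.close()
  }
```

`COPY ... FROM STDIN` avoids the per-row `INSERT` round-trips of a plain JDBC write, which is the usual motivation for this pattern.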
A 0/1 knapsack solved functionally: each item is a `(weight, value)` pair, and `is` is the DP row holding the best achievable value for each capacity `0` to `is.size - 1`:

```scala
// Update the DP row with one item x = (weight, value): for each capacity c,
// keep max(is(c), is(c - weight) + value); take/::: builds the shifted row.
def knapsack_aux(x: (Int, Int), is: List[Int]): List[Int] = {
  for {
    w <- is.zip(is.take(x._1) ::: is.take(is.size - x._1).map(_ + x._2))
  } yield math.max(w._1, w._2)
}

// Thread the row through all items, keeping every intermediate row.
def knapsack_rec(xs: List[(Int, Int)], is: List[Int]): List[List[Int]] = {
  xs match {
    case x :: rest =>
      val row = knapsack_aux(x, is) // compute once, reuse for head and recursion
      row :: knapsack_rec(rest, row)
    case _ => Nil
  }
}
```
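A worked call with made-up items (illustrative, not from the original): capacities 0 through 5, two items, final row last:

```scala
val items = List((2, 3), (3, 4))   // (weight, value) pairs, illustrative only
val init  = List.fill(6)(0)        // best value per capacity 0..5 before any item
val rows  = knapsack_rec(items, init)
println(rows.last)                 // List(0, 0, 3, 4, 4, 7): both items fit at capacity 5
```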
A thin Scala wrapper over Apache Commons Net's `FTPClient`; only the header and the `client` field survive in the original, so the method bodies below are a plausible completion using the real Commons Net API:

```scala
package io.forward.ftp

import java.io.{File, FileOutputStream, InputStream}
import org.apache.commons.net.ftp._
import scala.util.Try

final class FTP() {
  private val client = new FTPClient

  def connect(host: String): Try[Unit] = Try(client.connect(host))
  def login(user: String, password: String): Try[Boolean] = Try(client.login(user, password))
  def downloadFileStream(remote: String): InputStream = client.retrieveFileStream(remote)
  def disconnect(): Unit = client.disconnect()
}
```
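Hypothetical usage (host and credentials are placeholders):

```scala
val ftp = new FTP()
ftp.connect("ftp.example.com")   // placeholder host
ftp.login("user", "password")    // placeholder credentials
val in: InputStream = ftp.downloadFileStream("/remote/file.txt")
ftp.disconnect()
```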
A minimal Django model using `choices` to restrict a `CharField` to two-letter codes (the field name `tipo` is kept from the original):

```python
# -*- coding: utf-8 -*-
from django.db import models
import uuid  # imported in the original; not used in the lines shown

# Two-letter codes mapped to human-readable labels.
ANIMALS = (('DO', 'Dog'), ('CA', 'Cat'))


class Animal(models.Model):
    # Stored as the 2-char code; Django shows the label in forms and the admin.
    tipo = models.CharField(max_length=2, choices=ANIMALS)
```
A shell one-liner that polls google.com once per second, separating responses with blank lines and a timestamp:

```sh
while true; do sleep 1; curl http://www.google.com; echo -e '\n\n\n\n'$(date); done
```