Skip to content

Instantly share code, notes, and snippets.

// NOTE(review): fragment truncated by the page scrape — "unc" is the tail of
// "func", and the test body continues past the visible lines.
// Sets up a transfer-transaction test: a Store over the shared testDB and two
// randomly created accounts, logging starting balances so the before/after
// delta is visible in test output.
unc TestTransferTransaction(t *testing.T) {
store := NewStore(testDB)
sourceAccount := CreateRandomAccount(t)
targetAccount := CreateRandomAccount(t)
fmt.Println(">> before:", sourceAccount.Balance, targetAccount.Balance)
n := 5              // number of transfers to run — presumably concurrently; TODO confirm against full source
amount := int64(10) // amount moved per transfer
// transferTx moves money between two accounts inside a single database
// transaction: all steps run in the callback passed to executeTx, and the
// populated result is returned together with executeTx's error.
// NOTE(review): fragment truncated — the error handling after CreateTransfer
// and the remaining steps of the transaction callback are not visible here.
func (transaction *DBTransaction) transferTx(ctx context.Context, arg AccountTransferTxParams) (TransferTxResult, error) {
var result TransferTxResult
err := transaction.executeTx(ctx, func(queries *Queries) error {
var err error
// Record the transfer row (source account, target account, amount).
result.Transfer, err = queries.CreateTransfer(ctx, CreateTransferParams{
SourceAccountID: arg.SourceAccountID,
TargetAccountID: arg.TargetAccountID,
Amount: arg.Amount,
})
if err != nil {
// executeTx begins a database transaction, runs fn with a Queries instance
// bound to that transaction, and rolls back if fn returns an error.
// NOTE(review): fragment truncated — the rollback-error branch and the
// Commit path on success are cut off after the visible lines.
func (transaction *DBTransaction) executeTx(ctx context.Context, fn func(*Queries) error) error {
tx, err := transaction.db.BeginTx(ctx, nil)
if err != nil {
return err
}
query := New(tx)
err = fn(query)
if err != nil {
// fn failed: roll back; presumably the rollback error is combined with
// fn's error when rollback itself fails — TODO confirm in full source.
if rbkerr := tx.Rollback(); rbkerr != nil {
# Logstash pipeline input: consume JSON-encoded messages from the local
# Kafka broker on topic "elastic-test".
input {
kafka{
codec => json
bootstrap_servers => "localhost:9092"
topics => ["elastic-test"]
}
}
# NOTE(review): filter section truncated — the mutate settings are not
# visible in this fragment.
filter {
mutate {
// Kafka producer bootstrap (kafkajs): client pointed at the local broker.
// Part of a larger producer script — the send loop is not visible here.
import { Kafka, logLevel } from "kafkajs";
import { IEvent, IUser } from "./model";
import { v4 as uuidv4 } from "uuid";
const kafka = new Kafka({
clientId: "random-producer",
brokers: ["localhost:9092"],
connectionTimeout: 3000, // milliseconds
});
// NOTE(review): CommonJS require mixed with ES-module imports above —
// consider `import randomstring from "randomstring"` for consistency.
var randomstring = require("randomstring");
Using JAVA_HOME defined java: /Library/Java/JavaVirtualMachines/jdk-11.0.9.jdk/Contents/Home
WARNING, using JAVA_HOME while Logstash distribution comes with a bundled JDK
Sending Logstash logs to /Users/ereshgorantla/Documents/Dev/logstash-7.13.0/logs which is now configured via log4j2.properties
[2021-06-21T18:03:54,197][INFO ][logstash.runner ] Log4j configuration path used is: /Users/ereshgorantla/Documents/Dev/logstash-7.13.0/config/log4j2.properties
[2021-06-21T18:03:54,205][INFO ][logstash.runner ] Starting Logstash {"logstash.version"=>"7.13.0", "jruby.version"=>"jruby 9.2.16.0 (2.5.7) 2021-03-03 f82228dc32 Java HotSpot(TM) 64-Bit Server VM 11.0.9+7-LTS on 11.0.9+7-LTS +indy +jit [darwin-x86_64]"}
[2021-06-21T18:03:54,262][WARN ][logstash.config.source.multilocal] Ignoring the 'pipelines.yml' file because modules or command line options are specified
[2021-06-21T18:03:54,726][INFO ][logstash.agent ] Successfully started Logstash API endpoint {:port=>9600}
[2021-06-21T18:03:5
# Logstash JDBC input: pull user rows (with PostGIS location serialized as
# GeoJSON and jsonb additional_data cast to text) from the Postgres
# address_service database. Fragment is truncated — closing braces and any
# output section are not visible.
input {
jdbc {
jdbc_driver_library => "/Users/ereshgorantla/Documents/Dev/postgresql-42.2.19.jar"
jdbc_driver_class => "org.postgresql.Driver"
jdbc_connection_string => "jdbc:postgresql://localhost:5434/address_service"
# NOTE(review): unquoted values — Logstash config expects quoted strings,
# e.g. jdbc_user => "postgres"; verify this parses in the full file.
jdbc_user => postgres
jdbc_password => postgres
jdbc_paging_enabled => true
# Scheduled polling disabled; pipeline runs the statement once.
#schedule => "* * * * * *"
statement => "select user_id as userId, first_name as firstName, last_name as lastName, date_of_birth as dateOfBirth, city as city, country as country, st_asgeojson(location) as location, created_at as createdAt, updated_at as updatedAt, additional_data::text as additionalData from users"
PUT users_test
{
"mappings" : {
"properties" : {
"userId" : {
"type" : "text",
"fields" : {
"keyword" : {
"type" : "keyword",
"ignore_above" : 256
# docker-compose fragment: first node (es01) of a three-node Elasticsearch
# 7.12.1 cluster; es02/es03 are referenced below but their service blocks are
# not visible in this fragment.
# NOTE(review): the scrape appears to have stripped YAML indentation — this
# block will not parse as-is; restore nesting from the original gist.
version: '2.2'
services:
es01:
image: docker.elastic.co/elasticsearch/elasticsearch:7.12.1
container_name: es01
environment:
- node.name=es01
- cluster.name=es-docker-cluster
- discovery.seed_hosts=es02,es03
- cluster.initial_master_nodes=es01,es02,es03
QUERY PLAN |
---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
GroupAggregate (cost=51300.47..51488.39 rows=8352 width=40) (actual time=6.380..7.642 rows=12 loops=1) |
Group Key: (date_trunc('hour'::text, created_at)) |
-> Sort (cost=51300.47..51321.35 rows=8352 width=12) (actual time=6.255..6.717 rows=8640 loops=1) |
Sort Key: (date_trunc('hour'::text, created_at))