
@dunithd
dunithd / customer_feedback.csv
Created September 29, 2025 12:01
A CSV file containing a list of customer feedback.
"customer_id","channel","feedback_text","product_id","timestamp"
"742","mobile_app","I wish ACME had better investment tools like other banks offer. Your competitors provide more options.","ACME Savings","2023-05-12 09:23:45"
"158","survey","The loan approval process takes way too long compared to XYZ Bank. I've been waiting 3 weeks!","ACME Loan","2023-06-18 14:12:33"
"315","branch_visit","I'm interested in premium banking services. What benefits do you offer beyond standard accounts?","ACME Card","2023-07-02 10:45:21"
"889","mobile_app","Mobile app crashes every time I try to check my balance. This has been happening for a week.","ACME Savings","2023-05-29 17:30:15"
"426","email","Would love an automatic savings feature that rounds up purchases and saves the difference","ACME Savings","2023-04-15 11:20:09"
"203","branch_visit","As a premium customer, I'm disappointed with the long wait times at your downtown branch","ACME Premium","2023-07-22 15:45:30"
"567","survey","How does your mortgage rate compare to B
from pinotdb import connect
import pandas as pd
import streamlit as st
import numpy as np
import altair as alt
import plotly.express as px
st.title('SpaceX Launch Statistics')
st.markdown("Perform exploratory data analysis on the SpaceX launch data set with Apache Pinot")
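
A minimal sketch of how the app might query launch data from Pinot and chart it with Streamlit; the broker address (localhost:8099), table name (launches), and column names are assumptions, not from the gist:

# Hypothetical broker location, table, and columns; adjust to the actual Pinot deployment.
conn = connect(host='localhost', port=8099, path='/query/sql', scheme='http')
curs = conn.cursor()
curs.execute("SELECT Rocket, COUNT(*) AS total_launches FROM launches GROUP BY Rocket")
df = pd.DataFrame(curs, columns=[item[0] for item in curs.description])

st.dataframe(df)
st.bar_chart(df.set_index('Rocket'))
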
import psycopg2
import pandas.io.sql as sqlio
import pandas as pd
import dash
from dash import dcc
from dash import html
import plotly.express as px
app = dash.Dash(__name__)
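
A sketch of how this Dash app might pull data from Postgres and render a figure; the connection settings, table, and column names below are assumptions for illustration:

# Hypothetical connection settings and query; adapt to the actual database.
conn = psycopg2.connect(host='localhost', port=5432, dbname='analytics', user='postgres', password='postgres')
sql = "SELECT channel, COUNT(*) AS feedback_count FROM customer_feedback GROUP BY channel"
df = sqlio.read_sql_query(sql, conn)
conn.close()

fig = px.bar(df, x='channel', y='feedback_count', title='Feedback volume by channel')

app.layout = html.Div([
    html.H1('Customer Feedback Dashboard'),
    dcc.Graph(figure=fig),
])

if __name__ == '__main__':
    app.run_server(debug=True)
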
asyncapi: '2.0.0'
info:
  title: Metrics Service
  version: 1.0.0
  description: This service is in charge of providing health data for a computer cluster
servers:
  production:
    url: 'ws://localhost:8080'
    protocol: ws
channels:
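
A rough sketch of a client for the WebSocket server declared under servers.production; the channel path and payload format are not shown above, so both are assumed:

import asyncio
import websockets

async def consume_metrics():
    # Server URL taken from the AsyncAPI fragment; path and message shape are assumptions.
    async with websockets.connect('ws://localhost:8080') as ws:
        while True:
            message = await ws.recv()
            print('received health data:', message)

asyncio.run(consume_metrics())
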
# database.dbname and the closing braces below are an assumed minimal completion;
# the database name shipment_db comes from the Postgres healthcheck in the compose file.
curl -H 'Content-Type: application/json' debezium:8083/connectors --data '
{
  "name": "shipments-connector",
  "config": {
    "connector.class": "io.debezium.connector.postgresql.PostgresConnector",
    "plugin.name": "pgoutput",
    "database.hostname": "postgres",
    "database.port": "5432",
    "database.user": "postgresuser",
    "database.password": "postgrespw",
    "database.dbname": "shipment_db"
  }
}'
version: "3.7"
services:
  postgres:
    image: debezium/postgres:13
    ports:
      - 5432:5432
    # Credentials assumed from the Debezium connector config and the healthcheck below.
    environment:
      - POSTGRES_USER=postgresuser
      - POSTGRES_PASSWORD=postgrespw
      - POSTGRES_DB=shipment_db
    healthcheck:
      test: "pg_isready -U postgresuser -d shipment_db"
      interval: 2s
      timeout: 20s
{
  "tableName": "steps",
  "tableType": "REALTIME",
  "segmentsConfig": {
    "timeColumnName": "loggedAt",
    "timeType": "MILLISECONDS",
    "schemaName": "steps",
    "replicasPerPartition": "1"
  },
  "tenants": {},
{
  "schemaName": "steps",
  "dimensionFieldSpecs": [
    {
      "name": "userId",
      "dataType": "INT"
    },
    {
      "name": "userName",
      "dataType": "STRING"
    }
  ],
  "dateTimeFieldSpecs": [
    { "name": "loggedAt", "dataType": "LONG", "format": "1:MILLISECONDS:EPOCH", "granularity": "1:MILLISECONDS" }
  ]
}
package com.edu.samples;
import com.edu.samples.messagelog.MessageLog;
import com.edu.samples.serde.OrderEvent;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.enterprise.context.ApplicationScoped;
import javax.inject.Inject;
import javax.transaction.Transactional;
package com.edu.samples.messagelog;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Table;
import java.time.Instant;

@Entity
@Table(name = "consumed_messages")
public class ConsumedMessage {
    // The class body is cut off in the gist; the fields below are an assumed minimal completion.
    @Id
    private String messageId;
    private Instant timeOfReceiving;
}