- Hardcoded URL settings in the database
- Permalinks not configured correctly
- Persistent cache (both in WordPress and on the server)
- Outdated .htaccess file
- Inconsistent GUID table
WITH daily_sales AS (
    SELECT
        p.producer_id,
        EXTRACT(DOW FROM s.purchase_date::DATE) AS day_of_week,
        EXTRACT(YEAR FROM s.purchase_date::DATE) AS year,
        COUNT(s.purchase_id) AS total_sales
    FROM
        "case".sales s
    JOIN "case".products p ON
        p.product_id = s.product_id
    GROUP BY
        p.producer_id, day_of_week, year
)
SELECT * FROM daily_sales;
Clearing all cache from a Windows system can help improve performance and free up disk space. Here are the steps to clear various types of cache on Windows:

- Press `Windows + R` to open the Run dialog box.
- Type `temp` and press Enter. This will open the Temp folder.
- Select all files and folders (`Ctrl + A`) and press `Delete`. You may need administrator permissions for some files.
- Repeat the process with `%temp%` in the Run dialog box.
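The same cleanup can also be scripted. A minimal sketch in Python (an assumption here, not part of the original steps) that empties the current user's temp folder and skips files locked by running programs, mirroring the administrator-permissions caveat above:

```python
import os
import shutil
import tempfile

# Resolves the same folder the %temp% Run command opens.
temp_dir = tempfile.gettempdir()

for entry in os.listdir(temp_dir):
    path = os.path.join(temp_dir, entry)
    try:
        if os.path.isdir(path):
            shutil.rmtree(path)  # remove subfolders and their contents
        else:
            os.remove(path)
    except OSError:
        # Files in use by running processes cannot be deleted; skip them.
        pass
```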
use std::fs::File;
use parquet::arrow::arrow_reader::ParquetRecordBatchReaderBuilder;

fn main() {
    let file_path = "Flights_1m.parquet";
    let file = File::open(file_path).unwrap();

    let builder = ParquetRecordBatchReaderBuilder::try_new(file).unwrap();
    println!("Converted arrow schema is: {}", builder.schema());

    let mut reader = builder.build().unwrap();

    // Read the first record batch from the Parquet file.
    let record_batch = reader.next().unwrap().unwrap();
    println!("Read {} records.", record_batch.num_rows());
}
WITH RECURSIVE
cpf(digits, len) AS (
    SELECT ARRAY[CAST(FLOOR(RANDOM() * 10) AS INTEGER)], 1
    UNION ALL
    SELECT digits || CAST(FLOOR(RANDOM() * 10) AS INTEGER), len + 1
    FROM cpf
    WHERE len < 9
),
cpf_with_first_verifier(digits, len) AS (
    SELECT digits || CAST(CASE WHEN 11 - MOD(SUM((10 - i + 1) * v), 11) >= 10 THEN 0 ELSE 11 - MOD(SUM((10 - i + 1) * v), 11) END AS INTEGER), len + 1
    FROM cpf, UNNEST(digits) WITH ORDINALITY AS t(v, i)
    WHERE len = 9
    GROUP BY digits, len
),
cpf_with_both_verifiers(digits) AS (
    SELECT digits || CAST(CASE WHEN 11 - MOD(SUM((11 - i + 1) * v), 11) >= 10 THEN 0 ELSE 11 - MOD(SUM((11 - i + 1) * v), 11) END AS INTEGER)
    FROM cpf_with_first_verifier, UNNEST(digits) WITH ORDINALITY AS t(v, i)
    GROUP BY digits
)
SELECT ARRAY_TO_STRING(digits, '') AS cpf FROM cpf_with_both_verifiers;
O erro "TypeError: cannot pickle 'SSLContext' object" ocorre quando você está tentando serializar um objeto SSLContext
, que não é serializável por padrão no PySpark.
Para contornar esse erro, você pode definir uma função personalizada para converter cada linha em JSON usando a biblioteca json
do Python. Aqui está um exemplo de como fazer isso:
from pyspark.sql import SparkSession
import json
# Função para converter uma linha em JSON
def row_to_json(row):
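A quick sketch of applying it (the DataFrame below is made up for illustration): mapping over `df.rdd` runs `row_to_json` on the executors, and since the function closes over nothing unpicklable, no `SSLContext` has to be serialized:

```python
spark = SparkSession.builder.getOrCreate()
df = spark.createDataFrame([(1, "a"), (2, "b")], ["id", "value"])

# Each Row becomes a JSON string on the executors.
json_rdd = df.rdd.map(row_to_json)
print(json_rdd.collect())  # ['{"id": 1, "value": "a"}', '{"id": 2, "value": "b"}']
```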
from pyspark.sql import SparkSession
import boto3

# Set up the Spark session
spark = SparkSession.builder.getOrCreate()

# Create a DataFrame with sample data
data = [("mensagem1",), ("mensagem2",), ("mensagem3",)]
df = spark.createDataFrame(data, ["message"])
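The embed cuts off before the rows are actually sent. A minimal sketch of the sending side, assuming the target is an SQS queue (the queue URL below is a placeholder, not a value from the original): the `boto3` client is created inside the partition function, on the executor, so the client and its `SSLContext` never have to be pickled:

```python
QUEUE_URL = "https://sqs.us-east-1.amazonaws.com/123456789012/my-queue"  # placeholder

def send_partition(rows):
    # Instantiated per partition on the executor; never serialized by Spark.
    sqs = boto3.client("sqs")
    for row in rows:
        sqs.send_message(QueueUrl=QUEUE_URL, MessageBody=row["message"])

df.foreachPartition(send_partition)
```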
from pyspark.sql import SparkSession

def init_spark():
    # Ship the Postgres JDBC driver jar with the session
    spark = SparkSession.builder.config("spark.jars", "/postgresql-42.5.4.jar") \
        .master("local").appName("PySpark_Postgres_test").getOrCreate()
    sc = spark.sparkContext
    return spark, sc

def main():
    spark, sc = init_spark()

if __name__ == '__main__':
    main()
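The body of `main` is truncated in the embed. A sketch of the typical next step, a JDBC read through the driver configured above; the host, database, table, and credentials are placeholders, not values from the original:

```python
def read_table(spark):
    # Hypothetical connection details; replace with your own.
    return (spark.read.format("jdbc")
            .option("url", "jdbc:postgresql://localhost:5432/mydb")
            .option("dbtable", "public.my_table")
            .option("user", "postgres")
            .option("password", "secret")
            .option("driver", "org.postgresql.Driver")
            .load())

# Inside main(), after init_spark():
# df = read_table(spark)
# df.show()
```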
-- create delta table on athena
CREATE EXTERNAL TABLE cliente
LOCATION 's3://bucket_name/cliente/'
TBLPROPERTIES ('table_type' = 'DELTA')