Created
January 30, 2026 04:40
-
-
Save terremoth/2fe325c7e906cf4b456b96b06c712972 to your computer and use it in GitHub Desktop.
Código "vibe-coded" com chatgpt+gemini para "prever" a quantidade de consoles handhelds que vão sair em 2026.
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| import numpy as np | |
| import pandas as pd | |
| from sklearn.linear_model import LinearRegression | |
| from sklearn.preprocessing import PolynomialFeatures | |
| from statsmodels.tsa.holtwinters import ExponentialSmoothing | |
| from statsmodels.tsa.arima.model import ARIMA | |
| from statsmodels.tsa.statespace.structural import UnobservedComponents | |
| from scipy.optimize import curve_fit | |
| import warnings | |
| warnings.filterwarnings("ignore") | |
# =========================
# DATASET
# =========================
# Handheld console releases per year, 2004-2025.
data = {
    2004: 1, 2005: 3, 2006: 1, 2007: 2, 2008: 1, 2009: 5, 2010: 7,
    2011: 3, 2012: 15, 2013: 14, 2014: 5, 2015: 2, 2016: 5, 2017: 8,
    2018: 12, 2019: 33, 2020: 38, 2021: 40, 2022: 36, 2023: 58, 2024: 84, 2025: 91,
}
df = pd.DataFrame({"Year": list(data.keys()), "Y": list(data.values())})
X = df[["Year"]]
y = df["Y"]
TARGET_YEAR = 2026
def as_int(x):
    """Round *x* to the nearest whole number, clamped at zero, as an int."""
    rounded = round(float(x))
    return rounded if rounded > 0 else 0

print(f"--- REFINANDO PREVISÃO 2026 (Busca por Saturação) ---")
# =========================
# 1) LOGISTIC GROWTH WITH LIMITS (BOUNDS)
# =========================
# Rationale: the S-curve must know it cannot grow forever; the fit below
# constrains the ceiling parameter 'c' so it stays finite.
def logistic_model(x, a, b, c):
    """Standard logistic (S-curve): c is the ceiling, b the midpoint year,
    a the steepness scale."""
    z = (x - b) / a
    return c / (1.0 + np.exp(-z))
# Bounds: ([min_a, min_b, min_c], [max_a, max_b, max_c])
# min_c = 84: the ceiling may not drop below the 2024 level
#   (NOTE(review): the last observation is 91, so a ceiling in [84, 91)
#   would sit below the data — confirm 84 is intentional).
# max_c = 140: conservative cap; the market is assumed not to exceed it soon.
try:
    p0 = [2, 2022, 100]  # initial guess: steepness, midpoint year, ceiling
    bounds = ([0.1, 2000, 84], [10, 2030, 140])
    popt, _ = curve_fit(logistic_model, df["Year"], df["Y"], p0=p0, bounds=bounds, maxfev=10000)
    pred_logistic = logistic_model(TARGET_YEAR, *popt)
except (RuntimeError, ValueError):
    # curve_fit raises RuntimeError when it fails to converge and ValueError
    # on invalid inputs; fall back to the last observed value.
    # (Was a bare `except:`, which also swallowed KeyboardInterrupt.)
    pred_logistic = df["Y"].iloc[-1]
print(f"Logistic (Bounded): {as_int(pred_logistic)}")
# =========================
# 2) HOLT-WINTERS (Damped Strict)
# =========================
try:
    # damping_trend forces the model to decelerate the future trend.
    # NOTE: the original code passed the removed keyword `damping_slope`,
    # which on current statsmodels raises TypeError and silently fell
    # through to the fallback below — the damped model never actually ran.
    model_hw = ExponentialSmoothing(
        df["Y"],
        trend="add",
        damped_trend=True,
        seasonal=None,
    ).fit(damping_trend=0.9)  # 0.9 means "brake 10% at each step"
    pred_hw = model_hw.forecast(1).iloc[0]
except (ValueError, TypeError):
    # Fall back to the last observed value if the fit cannot be computed.
    pred_hw = df["Y"].iloc[-1]
print(f"Holt-Winters (Damped): {as_int(pred_hw)}")
# =========================
# 3) POLYNOMIAL (the conservative one)
# =========================
# Degree 2 tends to be more conservative than degree 3 on short series.
poly = PolynomialFeatures(degree=2)
X_poly = poly.fit_transform(X)
model_poly = LinearRegression().fit(X_poly, y)
# Transform a DataFrame carrying the same column name used at fit time:
# a bare [[2026]] triggers sklearn's "X does not have valid feature names"
# warning, which was only hidden by the global filterwarnings("ignore").
X_target = pd.DataFrame({"Year": [TARGET_YEAR]})
pred_poly = model_poly.predict(poly.transform(X_target))[0]
print(f"Polynomial (Deg 2): {as_int(pred_poly)}")
# =========================
# 4) KALMAN (smoothed trend)
# =========================
try:
    model_kalman = UnobservedComponents(df["Y"], level="local linear trend").fit(disp=False)
    pred_kalman = model_kalman.forecast(1).iloc[0]
except (ValueError, np.linalg.LinAlgError):
    # Consistency/robustness fix: every other model has a fallback, but a
    # state-space fit failure here would have aborted the whole script.
    pred_kalman = df["Y"].iloc[-1]
print(f"Kalman: {as_int(pred_kalman)}")
# =========================
# SMART ENSEMBLE AGGREGATION
# =========================
# Strategy: Poly saw the whole history, HW tracks the recent momentum,
# and the bounded Logistic knows the physical ceiling.
preds = {
    "logistic": pred_logistic,
    "hw": pred_hw,
    "poly": pred_poly,
    "kalman": pred_kalman,
}
# 1. Split the models into "optimists" (above the last observation) and
#    "pessimists" (unlikely here, but possible).
last_val = df["Y"].iloc[-1]
optimists = [p for p in preds.values() if p > last_val]
pessimists = [p for p in preds.values() if p <= last_val]
if len(optimists) == len(preds):
    # Everyone is optimistic: weighted average favouring the Logistic
    # (physically bounded) and the Poly (smooth curve). HW and Kalman get
    # less weight because they tend to extrapolate the last climb linearly.
    weights = {"logistic": 0.4, "poly": 0.3, "hw": 0.15, "kalman": 0.15}
    final_pred = sum(preds[name] * w for name, w in weights.items())
else:
    # Disagreement between models: the median is robust to the outliers.
    final_pred = np.median(list(preds.values()))
print(f"\n>>> PREVISÃO FINAL AJUSTADA: {as_int(final_pred)}")
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| numpy | |
| pandas | |
| scikit-learn | |
| statsmodels | |
| ruptures | |
| pykalman | |
| pymc | |
| xgboost | |
| torch |
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment