Skip to content

Instantly share code, notes, and snippets.

@rth
Last active May 10, 2023 07:24
Show Gist options
  • Save rth/c791274f9768081a53f27b9114a7115d to your computer and use it in GitHub Desktop.
Display the source blob
Display the rendered blob
Raw
{
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Influence of dropout on text classification with MLPClassifier (FastText)\n",
"\n",
"Adapted from @ogrisel's [FastText notebook](https://github.com/ogrisel/notebooks/blob/master/sklearn_demos/fastText.ipynb) for scikit-learn"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Simple example"
]
},
{
"cell_type": "code",
"execution_count": 1,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"from sklearn.datasets import fetch_20newsgroups\n",
"from sklearn.feature_extraction.text import HashingVectorizer\n",
"\n",
"# Use just 2 groups out of 20 for faster computations.\n",
"categories = ['alt.atheism', 'comp.graphics']#, 'sci.med', 'sci.space']\n",
"\n",
"# Fetch raw text documents and integer targets for train/test splits.\n",
"twentyng_train = fetch_20newsgroups(subset='train', categories=categories)\n",
"docs_train, target_train = twentyng_train.data, twentyng_train.target\n",
"\n",
"twentyng_test = fetch_20newsgroups(subset='test', categories=categories)\n",
"docs_test, target_test = twentyng_test.data, twentyng_test.target"
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {
"collapsed": false
},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"/home/rth/.miniconda/envs/sklearn-test/lib/python3.5/site-packages/sklearn/neural_network/multilayer_perceptron.py:629: ConvergenceWarning: Stochastic Optimizer: Maximum iterations reached and the optimization hasn't converged yet.\n",
" % (), ConvergenceWarning)\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"== MLP Classification scores ==\n",
" * dropout: None => score: 0.97\n",
"== MLP Classification scores ==\n",
" * dropout: (0.3, 0.3) => score: 0.96\n",
"== MLP Classification scores ==\n",
" * dropout: (0.5, 0.5) => score: 0.96\n",
"== MLP Classification scores ==\n",
" * dropout: (0.7, 0.7) => score: 0.96\n"
]
}
],
"source": [
"from sklearn.neural_network import MLPClassifier\n",
"import numpy as np\n",
"\n",
"# Binary unigram/bigram hashed features with L1 normalization so each\n",
"# document's features sum to 1 (FastText-style averaged bag of ngrams).\n",
"vec = HashingVectorizer(encoding='latin-1', binary=True, ngram_range=(1, 2),\n",
" norm='l1', n_features=2 ** 18)\n",
"\n",
"X_train = vec.transform(docs_train)\n",
"X_test = vec.transform(docs_test)\n",
"\n",
"hidden_layer_sizes=(10,)\n",
"\n",
"# NOTE(review): the `dropout` keyword is not a parameter of released\n",
"# scikit-learn's MLPClassifier -- this notebook presumably requires a\n",
"# patched branch that adds it (one rate per weight layer, hence the\n",
"# tuple built below). Confirm the branch before re-running.\n",
"for dropout_val in [None, 0.3, 0.5, 0.7]:\n",
" if dropout_val is not None:\n",
" # One rate per weight layer: len(hidden_layer_sizes) + 1 entries.\n",
" dropout = tuple((len(hidden_layer_sizes)+1)*[dropout_val])\n",
" else:\n",
" dropout = dropout_val\n",
" cmod = MLPClassifier(solver='adam', hidden_layer_sizes=hidden_layer_sizes,\n",
" max_iter=200, verbose=0, activation='tanh',\n",
" dropout=dropout)\n",
" \n",
" cmod.fit(X_train, target_train)\n",
" \n",
" score = cmod.score(X_test, target_test)\n",
" print('== MLP Classification scores ==')\n",
" print(' * dropout: {} => score: {:.2f}'.format(dropout, score))"
]
}
],
"metadata": {
"anaconda-cloud": {},
"kernelspec": {
"display_name": "Python [default]",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.5.2"
}
},
"nbformat": 4,
"nbformat_minor": 1
}
@Solenyalyl
Copy link

Have you added a dropout layer to MLPClassifier?

Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment