update blog post content
commit 797ab81b4a (parent 4db50a370a)

@@ -35,6 +35,8 @@ tags:
- [Defining the Interface with ELI5](#defining-the-interface-with-eli5)
- [Getting an Explanation](#getting-an-explanation-1)
- [ELI5 and a Remotely Hosted Model / API](#eli5-and-a-remotely-hosted-model--api)
- [Setting up](#setting-up)
- [Building a Remote Model Adapter](#building-a-remote-model-adapter)

@@ -355,4 +357,59 @@ Et voila! Hopefully you will get some output that looks like the below:

{{<figure src="images/explanation_example.png" caption="The output of the explain function should look something like this">}}

## ELI5 and a Remotely Hosted Model / API

This one is quite fun and exciting. Since LIME is model agnostic, we can get an explanation for a remotely hosted model, provided we have access to the full probability distribution over its labels (and enough API credits to train the local surrogate model).

In this example I'm using Huggingface's [inference API](https://api-inference.huggingface.co/docs/python/html/quicktour.html), where they host transformer models on your behalf - you can pay to have your models run on GPUs for higher throughput. I made this guide with the free tier allowance, which gives you 30k tokens per month. If you are using LIME with default settings you could easily eat through this whilst generating a single explanation, so this is yet again a contrived example that gives you a taster of what is possible.
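
Before wiring anything up, it helps to know roughly what the API sends back for a text classification model: one list of label/score pairs per input text. The scores below are invented purely for illustration, but this is the shape the adapter in the next section assumes:

```python
# Illustrative only: the rough shape of a response from the hosted sentiment
# model for a single input text (the scores here are made up).
example_response = [
    [
        {"label": "1 star", "score": 0.01},
        {"label": "2 stars", "score": 0.02},
        {"label": "3 stars", "score": 0.05},
        {"label": "4 stars", "score": 0.30},
        {"label": "5 stars", "score": 0.62},
    ]
]
```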

### Setting up

For this part of the tutorial you will need the Python [requests](https://docs.python-requests.org/en/latest/) library, and we are also going to make use of [scipy](https://docs.scipy.org); the adapter below also uses numpy and pandas. You will also need a Huggingface account, and you will need to set up your API key as described in the [documentation](https://api-inference.huggingface.co/docs/python/html/quicktour.html).
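
The snippet in the next section hard-codes the key as a placeholder; if you prefer, you could read it from an environment variable instead. A minimal sketch, assuming a (hypothetical) `HF_API_TOKEN` variable exported in your shell:

```python
import os

# Read the Huggingface API key from the environment instead of hard-coding it.
# "HF_API_TOKEN" is just an example name - use whatever variable you exported.
API_TOKEN = os.environ.get("HF_API_TOKEN")

if API_TOKEN is None:
    raise RuntimeError("Set the HF_API_TOKEN environment variable to your Huggingface API key")
```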

### Building a Remote Model Adapter

Firstly we need to build a model adapter function that allows ELI5 to interface with Huggingface's models: it takes a list of texts and returns a matrix of class probabilities, one row per input text.

```python
import json
from typing import List

import numpy as np
import pandas as pd
import requests
from scipy.special import softmax

MODEL = "nlptown/bert-base-multilingual-uncased-sentiment"
API_TOKEN = "YOUR API KEY HERE"
API_URL = f"https://api-inference.huggingface.co/models/{MODEL}"
headers = {"Authorization": f"Bearer {API_TOKEN}"}


def query(payload):
    """Send a single text to the hosted model and return the parsed JSON response."""
    data = json.dumps(payload)
    response = requests.request("POST", API_URL, headers=headers, data=data)
    return json.loads(response.content.decode("utf-8"))


def result_to_df(result):
    """Flatten the API's lists of {label, score} dicts into a dataframe, one column per label."""
    rows = []

    for result_row in result:
        row = {}
        for lbl_score in result_row:
            row[lbl_score['label']] = lbl_score['score']

        rows.append(row)

    return pd.DataFrame(rows)


def remote_model_adapter(texts: List[str]):
    """Score each text via the API and return a probability matrix in the shape ELI5/LIME expects."""
    all_scores = []

    for text in texts:
        data = query(text)
        all_scores.extend(result_to_df(data).values)

    return softmax(np.array(all_scores), axis=1)
```
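
With the adapter in place, it can be handed to ELI5's `TextExplainer` just like a local model. The following is a sketch of how I'd wire it up rather than code lifted from the post - the review text and the `TextExplainer(n_samples=20, random_state=42)` settings mirror the notebook output further down:

```python
from eli5.lime import TextExplainer

text = "The restaurant was amazing, the quality of their food was exceptional. The waiters were so polite."

# n_samples is kept deliberately small so that a single explanation stays within
# the free tier's token allowance - the default generates many more perturbed
# texts, and each one is a separate API call.
te = TextExplainer(n_samples=20, random_state=42)
te.fit(text, remote_model_adapter)

# Label the classes using the column order result_to_df produces for this text
# (this costs one extra API call).
te.show_prediction(target_names=list(result_to_df(query(text)).columns))
```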

@@ -1182,7 +1182,7 @@
"outputs": [],
"source": [
"import json\n",
"\n",
"from scipy.special import softmax\n",
"import requests\n",
"\n",
"MODEL=\"nlptown/bert-base-multilingual-uncased-sentiment\"\n",

@@ -1297,35 +1297,440 @@
},
{
"cell_type": "code",
"execution_count": null,
"execution_count": 136,
"metadata": {},
"outputs": [
[cell output omitted: a scikit-learn FutureWarning on stderr, followed by the eli5 TextExplainer HTML explanation for "the restaurant was amazing, the quality of their food was exceptional. the waiters were so polite." - y=1 star (probability 0.001, score -7.683), y=2 stars (0.021, -3.995), y=3 stars (0.858, 0.965), y=4 stars (0.016, -4.281), y=5 stars (0.104, -2.343) - and the text/plain repr of TextExplainer(n_samples=20, random_state=42, clf=SGDClassifier(alpha=0.001, loss='log', penalty='elasticnet', ...), vec=CountVectorizer(ngram_range=(1, 2), ...)); the "output_type" changes from "display_data" to "execute_result"]
],
"source": [