# NLP笔记Day3：逻辑回归模型

## 逻辑回归模型

• 使用散点图表示正向/负向推特的统计
• 可视化逻辑回归模型的输出结果

### 导入库

import nltk
from os import getcwd
import pandas as pd
import matplotlib.pyplot as plt
import numpy as np

from utils import process_tweet, build_freqs

### 加载NLTK样本数据集

# Load the NLTK twitter_samples corpus (5000 positive + 5000 negative tweets).
# The original notes omitted these lines, leaving all_positive_tweets /
# all_negative_tweets undefined (NameError at first use).
nltk.download('twitter_samples')
from nltk.corpus import twitter_samples
all_positive_tweets = twitter_samples.strings('positive_tweets.json')
all_negative_tweets = twitter_samples.strings('negative_tweets.json')

# Full corpus: positives first, then negatives.
tweets = all_positive_tweets + all_negative_tweets
# Label column vector of shape (N, 1): 1.0 for positive tweets, 0.0 for negative.
labels = np.append(np.ones((len(all_positive_tweets),1)), np.zeros((len(all_negative_tweets),1)), axis = 0)

# Training split: the first 4000 tweets of each class (8000 total).
train_pos = all_positive_tweets[:4000]
train_neg = all_negative_tweets[:4000]

train_x = train_pos + train_neg

print("Number of tweets: ", len(train_x))
Number of tweets:  8000

### 加载提取特征

# Build the feature matrix and label vector from `data`.
# NOTE(review): `data` is a DataFrame constructed elsewhere (not shown in this
# file) — presumably one row per tweet with bias/positive/negative counts and a
# sentiment column; confirm against the full notebook.
# Double brackets select a sub-DataFrame, so .values yields a 2-D array; a
# single bracket would give a 1-D Series instead — compare with type().
X = data[['bias', 'positive', 'negative']].values  # 大家可以自行测试一下如果用一个中括号会发生什么？ 使用type()函数去看不同的调用方式有助于理解
# The trailing semicolon suppresses the echoed value in a Jupyter cell; it has
# no effect in a plain Python script.
Y = data['sentiment'].values;  # 为什么要使用分号？如果不用会怎么样？

print(X.shape)
print(X)
(8000, 3)
[[  1.00000000e+00   3.02000000e+03   6.10000000e+01]
[  1.00000000e+00   3.57300000e+03   4.44000000e+02]
[  1.00000000e+00   3.00500000e+03   1.15000000e+02]
...,
[  1.00000000e+00   1.44000000e+02   7.83000000e+02]
[  1.00000000e+00   2.05000000e+02   3.89000000e+03]
[  1.00000000e+00   1.89000000e+02   3.97400000e+03]]

### 加载训练好的逻辑回归模型

# Pre-trained logistic-regression weights: [bias, positive-count, negative-count].
# NOTE(review): presumably the result of a training run not shown in this file.
theta = [7e-08, 0.0005239, -0.00055517]

### 样本数据可视化

# Scatter-plot the tweets in feature space: x = positive-word count,
# y = negative-word count, colored by sentiment label (0 -> red, 1 -> green).
fig, ax = plt.subplots(figsize=(8, 8))

colors = ['red', 'green']

point_colors = [colors[int(label)] for label in Y]
ax.scatter(X[:, 1], X[:, 2], c=point_colors, s=0.1)
plt.xlabel("Positive")
plt.ylabel("Negative")
Text(0,0.5,'Negative')

## 在数据旁绘制模型

def neg(theta, pos):
    """Return the 'negative' coordinate of the decision boundary at `pos`.

    Solves theta[0]*1 + theta[1]*pos + theta[2]*neg = 0 for neg, i.e. the
    line in (positive, negative) feature space where the model score is 0.
    The original body was not indented, which raises IndentationError.
    """
    return (-theta[0] - pos * theta[1]) / theta[2]

def direction(theta, pos):
    """Return pos scaled by the ratio theta[2] / theta[1].

    Used to draw direction offsets relative to the decision boundary.
    The original body was not indented, which raises IndentationError.
    """
    return pos * theta[2] / theta[1]

# Re-draw the sentiment scatter plot, then overlay the model on top of it.
fig, ax = plt.subplots(figsize = (8,8))

colors = ['red', 'green']

# Color each point by its label: 0 -> red (negative), 1 -> green (positive).
ax.scatter(X[:,1], X[:,2], c=[colors[int(k)] for k in Y], s = 0.1)
plt.xlabel("Positive")
plt.ylabel("Negative")

# Largest 'positive' feature value — used as the x-extent of the boundary line.
maxpos = np.max(X[:,1])

offset = 5000  # NOTE(review): unused in the lines shown; presumably used further below (e.g. to place direction arrows) — confirm against the full notebook

# Decision boundary: the line where theta . [1, pos, neg] = 0, with neg(theta, pos)
# solving for the y (negative-count) coordinate at pos = 0 and pos = maxpos.
ax.plot([0, maxpos], [neg(theta, 0), neg(theta, maxpos)], color = 'gray')