In [1]:
# encoding=utf8
import numpy as np
import pandas as pd
import re
import matplotlib.pyplot as plt
import os
from bs4 import BeautifulSoup

# The backend must be chosen before Keras is imported, or the setting is ignored.
os.environ['KERAS_BACKEND'] = 'tensorflow'

from keras.preprocessing.text import Tokenizer
from keras.preprocessing.sequence import pad_sequences
from keras.layers import Embedding, Dense, Input, Flatten
from keras.layers import Conv1D, MaxPooling1D, Dropout
from keras.layers.merge import Concatenate
from keras.models import Model
from keras import optimizers

MAX_SEQUENCE_LENGTH = 1000
MAX_NB_WORDS = 20000
EMBEDDING_DIM = 100
TRAINING_SPLIT = 0.6
VALIDATION_SPLIT = 0.2
epochs = 20  # used by the fit call below
batch_size = 128
np.random.seed(7)


Using TensorFlow backend.

In [2]:
def clean_str(string):
    # Strip whitespace plus ASCII and full-width CJK punctuation. Unicode
    # pattern literals replace the Python 2 ".decode('utf8')" calls.
    string = re.sub(u"[\s+.!,\"']+", u"", string)
    string = re.sub(u"[!,。?、~()]+", u"", string)
    string = re.sub(u"[【】『』「」︰:]+", u"", string)
    string = re.sub(u"[《》”“;]+", u"", string)
    return string.strip()

def one_hot_encoding(labels):
    # Map each class name to a one-hot vector. A label outside the three
    # known classes is silently dropped, so the input must be clean.
    one_hot_labels = []
    for label in labels:
        label = label.decode("utf8")
        if label == u"足球":
            one_hot_labels.append([0., 0., 1.])
        elif label == u"梁振英":
            one_hot_labels.append([0., 1., 0.])
        elif label == u"美國大選":
            one_hot_labels.append([1., 0., 0.])
    return one_hot_labels
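
For reference, the same mapping can be written table-driven, which makes adding a class a one-line change. A hypothetical variant (`LABEL_TO_ONEHOT` and `one_hot_encoding_v2` are my names, not from the original run) that raises on an unknown label instead of silently dropping it:

LABEL_TO_ONEHOT = {
    u"足球": [0., 0., 1.],
    u"梁振英": [0., 1., 0.],
    u"美國大選": [1., 0., 0.],
}

def one_hot_encoding_v2(labels):
    # KeyError on an unseen label keeps texts and labels aligned
    return [LABEL_TO_ONEHOT[label.decode("utf8")] for label in labels]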

In [3]:
train_path = "./dataset/train_set.csv"
test_path = "./dataset/test_set.csv"
data_train = pd.read_csv(train_path)
data_test = pd.read_csv(test_path)
print ("training data: ", data_train.shape)
print ("testing data: ", data_test.shape)


('training data: ', (3894, 3))
('testing data: ', (974, 2))

In [4]:
texts = []
labels = []
# Strip the HTML markup from each article, clean the punctuation,
# and collect the matching label.
for idx in xrange(data_train.content.shape[0]):
    soup = BeautifulSoup(data_train.content[idx], "lxml")
    text = soup.get_text()
    texts.append(clean_str(text).encode("utf8"))
    labels.append(data_train.label[idx])

In [5]:
test_texts = []
for idx in xrange(data_test.content.shape[0]):
    soup = BeautifulSoup(data_test.content[idx],"lxml")
    text = soup.get_text()
    test_texts.append(clean_str(text).encode("utf8"))

In [6]:
# Fit the vocabulary on train and test texts together so they share one index.
tokenizer = Tokenizer(num_words=MAX_NB_WORDS)
tokenizer.fit_on_texts(texts+test_texts)
sequences = tokenizer.texts_to_sequences(texts)
test_sequences = tokenizer.texts_to_sequences(test_texts)
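
One caveat: Keras' Tokenizer splits on whitespace, and clean_str stripped whitespace out, so the Chinese text is only broken where leftover punctuation or Latin fragments break it. Running a word segmenter first is the usual fix; a minimal sketch assuming the jieba package (not used in the original run):

import jieba  # assumed dependency, not part of the original environment

def segment(text):
    # insert spaces between Chinese words so Tokenizer can split on them
    return u" ".join(jieba.cut(text.decode("utf8")))

segmented_texts = [segment(t) for t in texts]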

In [7]:
word_index = tokenizer.word_index
print('Found %s unique tokens.' % len(word_index))


Found 24455 unique tokens.

In [8]:
data = pad_sequences(sequences, maxlen=MAX_SEQUENCE_LENGTH)
labels = np.array(one_hot_encoding(labels)).reshape(-1,3)

print('Shape of training tensor:', data.shape)
print('Shape of label tensor:', labels.shape)


('Shape of training tensor:', (3894, 1000))
('Shape of label tensor:', (3894, 3))

In [9]:
test_data = pad_sequences(test_sequences, maxlen=MAX_SEQUENCE_LENGTH)
print('Shape of testing tensor:', test_data.shape)


('Shape of testing tensor:', (974, 1000))

In [10]:
indices = np.arange(data.shape[0])
np.random.shuffle(indices)
data = data[indices]
labels = labels[indices]

In [11]:
train_index = int(TRAINING_SPLIT*data.shape[0])
validation_index = train_index + int(VALIDATION_SPLIT*data.shape[0])

x_train = data[:train_index]
y_train = labels[:train_index]
x_val = data[train_index:validation_index]
y_val = labels[train_index:validation_index]
x_test = data[validation_index:]
y_test = labels[validation_index:]
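
A plain shuffled slice does not guarantee that the three classes stay balanced across the splits. Stratified splitting is a common alternative; a sketch assuming scikit-learn is available (not what the original run used):

from sklearn.model_selection import train_test_split

# 60/20/20 split that preserves the label proportions in every part
x_tr, x_rest, y_tr, y_rest = train_test_split(
    data, labels, train_size=0.6, stratify=labels.argmax(axis=1), random_state=7)
x_v, x_te, y_v, y_te = train_test_split(
    x_rest, y_rest, test_size=0.5, stratify=y_rest.argmax(axis=1), random_state=7)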

In [12]:
print ("train: ", x_train.shape, y_train.shape)
print ("validation: ", x_val.shape, y_val.shape)
print ("test: ",x_test.shape, y_test.shape)


('train: ', (2336, 1000), (2336, 3))
('validation: ', (778, 1000), (778, 3))
('test: ', (780, 1000), (780, 3))

The text CNN does not perform well here because I use the pretrained GloVe embeddings published by Stanford NLP, which contain very few Chinese tokens, so almost all Chinese words keep their random initialisation. Training my own embeddings on a Chinese corpus should give a competitive result.
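
A minimal sketch of that idea, assuming the gensim package of that era (size= became vector_size= in gensim 4) and a pre-segmented corpus such as segmented_texts from the earlier jieba sketch; neither is part of the original run:

from gensim.models import Word2Vec  # assumed dependency

# train 100-d vectors on the corpus itself instead of loading English GloVe
sentences = [t.split() for t in segmented_texts]
w2v = Word2Vec(sentences, size=EMBEDDING_DIM, window=5, min_count=1, workers=4)
embeddings_index = {w: w2v.wv[w] for w in w2v.wv.vocab}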


In [13]:
GLOVE_DIR = "./glove.6B"
embeddings_index = {}
f = open(os.path.join(GLOVE_DIR, 'glove.6B.100d.txt'))
for line in f:
    # each line holds a word followed by its 100 float components
    values = line.split()
    word = values[0]
    coefs = np.asarray(values[1:], dtype='float32')
    embeddings_index[word] = coefs
f.close()

print('Total %s word vectors in Glove 6B 100d.' % len(embeddings_index))

# Rows start out random; words found in GloVe get their pretrained vector.
# Row 0 stays unused because Tokenizer indices start at 1.
embedding_matrix = np.random.random((len(word_index) + 1, EMBEDDING_DIM))
for word, i in word_index.items():
    if i >= MAX_NB_WORDS:
        continue
    embedding_vector = embeddings_index.get(word)
    if embedding_vector is not None:
        embedding_matrix[i] = embedding_vector

# trainable=True lets the (mostly random) Chinese rows be fine-tuned.
embedding_layer = Embedding(len(word_index) + 1,
                            EMBEDDING_DIM,
                            weights=[embedding_matrix],
                            input_length=MAX_SEQUENCE_LENGTH,
                            trainable=True)


Total 400000 word vectors in Glove 6B 100d.
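
Since GloVe 6B is English-only, it is worth measuring how much of the largely Chinese vocabulary it actually covers. A quick check (my addition, not part of the original run):

covered = sum(1 for w in word_index if w in embeddings_index)
print('GloVe covers %d of %d vocabulary entries (%.1f%%)'
      % (covered, len(word_index), 100.0 * covered / len(word_index)))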

In [14]:
from keras.callbacks import ModelCheckpoint

# Three parallel convolution branches with kernel sizes 3/4/5, each followed
# by max-pooling, are concatenated along the sequence axis (Kim-style text CNN).
convs = []
kernel_sizes = [3, 4, 5]

sequence_input = Input(shape=(MAX_SEQUENCE_LENGTH,), dtype='int32')
embedded_sequences = embedding_layer(sequence_input)

for ksz in kernel_sizes:
    l_conv = Conv1D(filters=128, kernel_size=ksz, activation='relu')(embedded_sequences)
    l_pool = MaxPooling1D(5)(l_conv)
    convs.append(l_pool)

merged = Concatenate(axis=1)(convs)
conv1 = Conv1D(128, 5, activation='relu')(merged)
pool1 = MaxPooling1D(5)(conv1)
conv2 = Conv1D(128, 5, activation='relu')(pool1)
pool2 = MaxPooling1D(30)(conv2)
flat_layer = Flatten()(pool2)
dense_layer = Dense(128, activation='relu')(flat_layer)
dropout_layer = Dropout(0.5)(dense_layer)
preds_layer = Dense(3, activation='softmax')(dropout_layer)

model = Model(sequence_input, preds_layer)
rmsprop = optimizers.RMSprop(lr=0.001, decay=1e-5)

#model.load_weights("./best_weights.hdf5")
model.compile(loss='categorical_crossentropy',
              optimizer=rmsprop,
              metrics=['acc'])

print("model fitting")
model.summary()
# Keep only the weights with the best validation loss seen so far.
checkpointer = ModelCheckpoint(filepath='./best_weights.hdf5', verbose=1, save_best_only=True)
callbacks_list = [checkpointer]
history = model.fit(x_train, y_train, validation_data=(x_val, y_val),
          epochs=epochs, batch_size=batch_size, callbacks=callbacks_list)


model fitting
____________________________________________________________________________________________________
Layer (type)                     Output Shape          Param #     Connected to                     
====================================================================================================
input_1 (InputLayer)             (None, 1000)          0                                            
____________________________________________________________________________________________________
embedding_1 (Embedding)          (None, 1000, 100)     2445600     input_1[0][0]                    
____________________________________________________________________________________________________
conv1d_1 (Conv1D)                (None, 998, 128)      38528       embedding_1[0][0]                
____________________________________________________________________________________________________
conv1d_2 (Conv1D)                (None, 997, 128)      51328       embedding_1[0][0]                
____________________________________________________________________________________________________
conv1d_3 (Conv1D)                (None, 996, 128)      64128       embedding_1[0][0]                
____________________________________________________________________________________________________
max_pooling1d_1 (MaxPooling1D)   (None, 199, 128)      0           conv1d_1[0][0]                   
____________________________________________________________________________________________________
max_pooling1d_2 (MaxPooling1D)   (None, 199, 128)      0           conv1d_2[0][0]                   
____________________________________________________________________________________________________
max_pooling1d_3 (MaxPooling1D)   (None, 199, 128)      0           conv1d_3[0][0]                   
____________________________________________________________________________________________________
concatenate_1 (Concatenate)      (None, 597, 128)      0           max_pooling1d_1[0][0]            
                                                                   max_pooling1d_2[0][0]            
                                                                   max_pooling1d_3[0][0]            
____________________________________________________________________________________________________
conv1d_4 (Conv1D)                (None, 593, 128)      82048       concatenate_1[0][0]              
____________________________________________________________________________________________________
max_pooling1d_4 (MaxPooling1D)   (None, 118, 128)      0           conv1d_4[0][0]                   
____________________________________________________________________________________________________
conv1d_5 (Conv1D)                (None, 114, 128)      82048       max_pooling1d_4[0][0]            
____________________________________________________________________________________________________
max_pooling1d_5 (MaxPooling1D)   (None, 3, 128)        0           conv1d_5[0][0]                   
____________________________________________________________________________________________________
flatten_1 (Flatten)              (None, 384)           0           max_pooling1d_5[0][0]            
____________________________________________________________________________________________________
dense_1 (Dense)                  (None, 128)           49280       flatten_1[0][0]                  
____________________________________________________________________________________________________
dropout_1 (Dropout)              (None, 128)           0           dense_1[0][0]                    
____________________________________________________________________________________________________
dense_2 (Dense)                  (None, 3)             387         dropout_1[0][0]                  
====================================================================================================
Total params: 2,813,347
Trainable params: 2,813,347
Non-trainable params: 0
____________________________________________________________________________________________________
Train on 2336 samples, validate on 778 samples
Epoch 1/20
2304/2336 [============================>.] - ETA: 0s - loss: 1.0983 - acc: 0.5182Epoch 00000: val_loss improved from inf to 1.00478, saving model to ./best_weights.hdf5
2336/2336 [==============================] - 53s - loss: 1.0973 - acc: 0.5184 - val_loss: 1.0048 - val_acc: 0.5437
Epoch 2/20
2304/2336 [============================>.] - ETA: 0s - loss: 1.0124 - acc: 0.5434Epoch 00001: val_loss improved from 1.00478 to 0.99893, saving model to ./best_weights.hdf5
2336/2336 [==============================] - 56s - loss: 1.0122 - acc: 0.5437 - val_loss: 0.9989 - val_acc: 0.5437
Epoch 3/20
2304/2336 [============================>.] - ETA: 0s - loss: 0.9972 - acc: 0.5473Epoch 00002: val_loss did not improve
2336/2336 [==============================] - 55s - loss: 0.9986 - acc: 0.5458 - val_loss: 1.0684 - val_acc: 0.4177
Epoch 4/20
2304/2336 [============================>.] - ETA: 0s - loss: 0.9201 - acc: 0.5846Epoch 00003: val_loss improved from 0.99893 to 0.92184, saving model to ./best_weights.hdf5
2336/2336 [==============================] - 56s - loss: 0.9195 - acc: 0.5839 - val_loss: 0.9218 - val_acc: 0.5977
Epoch 5/20
2304/2336 [============================>.] - ETA: 0s - loss: 0.8405 - acc: 0.6233Epoch 00004: val_loss improved from 0.92184 to 0.87049, saving model to ./best_weights.hdf5
2336/2336 [==============================] - 57s - loss: 0.8428 - acc: 0.6220 - val_loss: 0.8705 - val_acc: 0.6362
Epoch 6/20
2304/2336 [============================>.] - ETA: 0s - loss: 0.7527 - acc: 0.6602Epoch 00005: val_loss did not improve
2336/2336 [==============================] - 57s - loss: 0.7543 - acc: 0.6580 - val_loss: 0.9557 - val_acc: 0.5733
Epoch 7/20
2304/2336 [============================>.] - ETA: 0s - loss: 0.7087 - acc: 0.6853Epoch 00006: val_loss improved from 0.87049 to 0.82972, saving model to ./best_weights.hdf5
2336/2336 [==============================] - 55s - loss: 0.7079 - acc: 0.6841 - val_loss: 0.8297 - val_acc: 0.6401
Epoch 8/20
2304/2336 [============================>.] - ETA: 0s - loss: 0.6493 - acc: 0.7157Epoch 00007: val_loss improved from 0.82972 to 0.74769, saving model to ./best_weights.hdf5
2336/2336 [==============================] - 54s - loss: 0.6491 - acc: 0.7158 - val_loss: 0.7477 - val_acc: 0.6697
Epoch 9/20
2304/2336 [============================>.] - ETA: 0s - loss: 0.6028 - acc: 0.7326Epoch 00008: val_loss did not improve
2336/2336 [==============================] - 51s - loss: 0.6024 - acc: 0.7342 - val_loss: 0.8513 - val_acc: 0.6812
Epoch 10/20
2304/2336 [============================>.] - ETA: 0s - loss: 0.5631 - acc: 0.7548Epoch 00009: val_loss improved from 0.74769 to 0.73551, saving model to ./best_weights.hdf5
2336/2336 [==============================] - 50s - loss: 0.5742 - acc: 0.7504 - val_loss: 0.7355 - val_acc: 0.6774
Epoch 11/20
2304/2336 [============================>.] - ETA: 0s - loss: 0.5131 - acc: 0.7799Epoch 00010: val_loss did not improve
2336/2336 [==============================] - 55s - loss: 0.5104 - acc: 0.7808 - val_loss: 0.7512 - val_acc: 0.7211
Epoch 12/20
2304/2336 [============================>.] - ETA: 0s - loss: 0.4974 - acc: 0.7817Epoch 00011: val_loss did not improve
2336/2336 [==============================] - 54s - loss: 0.4972 - acc: 0.7812 - val_loss: 0.8871 - val_acc: 0.6889
Epoch 13/20
2304/2336 [============================>.] - ETA: 0s - loss: 0.4432 - acc: 0.7977Epoch 00012: val_loss did not improve
2336/2336 [==============================] - 54s - loss: 0.4425 - acc: 0.7975 - val_loss: 1.0938 - val_acc: 0.6787
Epoch 14/20
2304/2336 [============================>.] - ETA: 0s - loss: 0.4555 - acc: 0.8043Epoch 00013: val_loss did not improve
2336/2336 [==============================] - 55s - loss: 0.4532 - acc: 0.8048 - val_loss: 0.9933 - val_acc: 0.6928
Epoch 15/20
2304/2336 [============================>.] - ETA: 0s - loss: 0.3919 - acc: 0.8190Epoch 00014: val_loss did not improve
2336/2336 [==============================] - 55s - loss: 0.3917 - acc: 0.8189 - val_loss: 1.3321 - val_acc: 0.5835
Epoch 16/20
2304/2336 [============================>.] - ETA: 0s - loss: 0.3912 - acc: 0.8346Epoch 00015: val_loss did not improve
2336/2336 [==============================] - 56s - loss: 0.3895 - acc: 0.8356 - val_loss: 0.8854 - val_acc: 0.7069
Epoch 17/20
2304/2336 [============================>.] - ETA: 0s - loss: 0.3599 - acc: 0.8407Epoch 00016: val_loss did not improve
2336/2336 [==============================] - 53s - loss: 0.3596 - acc: 0.8408 - val_loss: 1.1314 - val_acc: 0.6504
Epoch 18/20
2304/2336 [============================>.] - ETA: 0s - loss: 0.3281 - acc: 0.8563Epoch 00017: val_loss did not improve
2336/2336 [==============================] - 51s - loss: 0.3273 - acc: 0.8557 - val_loss: 0.9518 - val_acc: 0.6979
Epoch 19/20
2304/2336 [============================>.] - ETA: 0s - loss: 0.3222 - acc: 0.8498Epoch 00018: val_loss did not improve
2336/2336 [==============================] - 51s - loss: 0.3207 - acc: 0.8502 - val_loss: 1.0689 - val_acc: 0.7224
Epoch 20/20
2304/2336 [============================>.] - ETA: 0s - loss: 0.2763 - acc: 0.8780Epoch 00019: val_loss did not improve
2336/2336 [==============================] - 52s - loss: 0.2753 - acc: 0.8789 - val_loss: 1.1553 - val_acc: 0.7031
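
Validation loss bottoms out around epoch 10 while training loss keeps falling, a clear overfitting pattern. An EarlyStopping callback next to the checkpointer would end the run once val_loss stalls; a sketch (the original run completes all 20 epochs):

from keras.callbacks import EarlyStopping

# stop once 5 consecutive epochs bring no new best validation loss
early_stop = EarlyStopping(monitor='val_loss', patience=5, verbose=1)
callbacks_list = [checkpointer, early_stop]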

In [19]:
model.load_weights("./model_weight/tc1.h5")  # reload previously saved weights

In [20]:
scores = model.evaluate(x_test, y_test, verbose=0)
print("Accuracy: %.2f%%" % (scores[1]*100))


Accuracy: 70.64%

In [16]:
print("Saved model to disk")
model_json = model.to_json()
with open("tc1.json", "w") as json_file:
    json_file.write(model_json)
model.save_weights("tc1.h5", overwrite=True)


Saved model to disk
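
The saved architecture and weights can be restored later without redefining the model; a sketch of reloading (`restored` is my name):

from keras.models import model_from_json

# rebuild the architecture from JSON, then restore the trained weights
with open("tc1.json") as json_file:
    restored = model_from_json(json_file.read())
restored.load_weights("tc1.h5")
restored.compile(loss='categorical_crossentropy', optimizer='rmsprop', metrics=['acc'])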

In [21]:
# matplotlib was imported above; plot training vs. validation loss per epoch
plt.plot(history.history['loss'])
plt.plot(history.history['val_loss'])
plt.title('model loss')
plt.ylabel('loss')
plt.xlabel('epoch')
plt.legend(['train', 'val'], loc='upper left')
plt.show()
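
The same history object also records accuracy, so a matching plot (my addition, not in the original run) would be:

plt.plot(history.history['acc'])
plt.plot(history.history['val_acc'])
plt.title('model accuracy')
plt.ylabel('accuracy')
plt.xlabel('epoch')
plt.legend(['train', 'val'], loc='upper left')
plt.show()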



In [22]:
y_pred = model.predict(test_data)
y_pred = np.round(y_pred)  # threshold each softmax probability at 0.5
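
Rounding at 0.5 can leave a row all-zero, which the loop below maps to 其他 ("other"). Taking the argmax with an explicit confidence threshold expresses the same rule more directly; a sketch (`CLASS_NAMES` and `test_label_v2` are my names, with the order matching the one-hot columns above):

probs = model.predict(test_data)
CLASS_NAMES = [u"美國大選", u"梁振英", u"足球"]  # column order of the one-hot labels

# fall back to "other" when no class clears 0.5, mirroring the rounding rule
pred_idx = probs.argmax(axis=1)
test_label_v2 = [CLASS_NAMES[i] if p[i] > 0.5 else u"其他"
                 for i, p in zip(pred_idx, probs)]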

In [23]:
test_label = []
for i in range(len(y_pred)):
    # decode the rounded one-hot vector back to a class name; an all-zero
    # row (no class above 0.5) falls back to "其他" (other)
    pred = y_pred[i].tolist()
    if pred == [0, 0, 1]:
        test_label.append(u"足球")
    elif pred == [0, 1, 0]:
        test_label.append(u"梁振英")
    elif pred == [1, 0, 0]:
        test_label.append(u"美國大選")
    else:
        test_label.append(u"其他")

In [24]:
data_test["label"] = test_label

In [25]:
print(data_test)


        id                                            content label
0        6  南華添鋒力 簽前厄瓜多爾國腳保耶 港超勁旅南華宣布羅致前厄瓜多爾國腳菲力斯保耶(Felix ...    足球
1      128  如果大學$0捐款 科大嶺南將蝕過千萬元 據now新聞台報道,身兼8大校監的特首梁振英曾以大學...    足球
2      136  英超最強火力對碰 雙城爭冠靠鋒霸 英超今季風起雲湧,傳統「Big 5」只剩兩隊名列積分榜前5...    足球
3      213  【01球評】膺半程冠軍 阿仙奴今季不奪標更待何時? 近年「兵工廠」每季的起落都離不開一個循環...    足球
4      658  【書商失蹤】梁振英:希望失蹤的李波本人提供資料 行政長官梁振英出席行政會議前見記者,被問及李...   梁振英
5      700  【施政盤點】三份施政報告 僅一半政策達標 行政長官梁振英即將公布任內第四份施政報告,《香港0...    足球
6      729  【施政盤點】「治港絕招」 設19委員會 空談多實務少 行政長官梁振英上任3年多,先後成立多個...    足球
7      837  高普首簽 「新馬迪」來季投紅軍 利物浦傷兵滿營及戰績不穩,主帥高普仍不忘投資未來,昨以510...    足球
8     1037  「最潮主帥」鬥利物浦:我已領先1:0 英乙球隊埃克塞特在明晨的足總盃於主場迎戰利物浦,雖然越...    足球
9     1095  紅軍超殘陣逼和英乙隊 高普:負擔不起重賽 逾十名球員受傷的利物浦,今晨在足總盃第三圈以大部份...    足球
10    1113  【施丹上馬】皇馬六條A 退下來各自精彩 僅卡路士教波 2003年碧咸轉投皇家馬德里,與施丹、...   梁振英
11    1153  【施丹上馬】踢而優則教 碧根鮑華告魯夫完美球王 足球史上,獲譽為球王的寥寥可數,踢而優則教的...   梁振英
12    1200  【01球評】施丹首戰 回歸原點抄足肥安 防守毛病未解決 所謂「新官上任三把火」,不過從皇家馬...    足球
13    1261  新兵白鶴對辦 東方大破南華踞榜首現霸氣 星期日下午的港超榜首大戰,東方在4130名觀眾面前盡...    足球
14    1332  【意甲半程總結】四軍混戰 拿玻里勢破祖記壟斷 兵多將廣 拿玻里攻上榜首\r\n\r\n\r\...   梁振英
15    1374  【01球評】協同效應+品牌角力 美斯C朗壟斷金球獎 「美斯定C朗?」球迷間討論的問題,在明晨...    足球
16    1407  金球獎合併5年 首屆美斯得獎爭議最大 自國際足協在2010年把世界足球先生及《法國足球》的歐...    足球
17    1564  梁振英批司法覆核遭濫用 對政府代價大 現任終院首席法官馬道立日前強調,司法覆核維護公眾利益,...   梁振英
18    1643  【港大民調】支持度僅37%創新低 數據顯示董曾梁一蟹不如一蟹 特首梁振英昨日宣讀施政報告,香...   梁振英
19    1667  【獨家民調】梁治三年 市民最不滿政制司法 最滿意房屋規劃 今年的施政報告重點力推一帶一路經濟...    足球
20    1776  奧巴馬炮轟特朗普耍選戰手段 承認未能團結美國 美國總統奧巴馬在電視節目上,批評共和黨總統參選...  美國大選
21    1831  【施政報告】預留20億元成立創科創投基金 行政長官梁振英在新一份施政報告提到本港創新科技發展...   梁振英
22    2011  由象徵式政策到象徵式施政報告 師父教落,自我介紹中說出來的,通常都不重要,沒有說出來的才重要...  美國大選
23    2036  【施政報告】工聯會考慮不支持梁振英 梁連任夢添陰霾 行政長官梁振英發表任內第4份《施政報告》...    足球
24    2040  港美同日發表施政藍圖 梁振英奧巴馬4大看點 演說長度奧巴馬 58分44秒\r\n\r\n梁振...    其他
25    2176  【獨家民調】青年人拒撐梁 無學生支持梁振英連任特首 梁振英昨日(13日)發表《施政報告》後,...    足球
26    2221  港中戰球迷噓國歌 FIFA再罰香港足總7.7萬 去年「11‧17」港中大戰餘波未了,國際足協...    足球
27    2254  【01拆局】曾俊華民望雖高 未獲左派認可 與泛民友好成雙刃劍 特首梁振英新一份施政報告劣評如...   梁振英
28    2287  【港足日與夜】從日與夜開始 了解五個香港足球員的故事 經歷去年世界盃外圍賽的熱潮,香港足球再...    足球
29    2327  【施政報告】梁振英:即使當年有普選我仍會當選 今年《施政報告》被指「區議會化」,及變成「一帶...   梁振英
..     ...                                                ...   ...
944  91666  【梁振英UGL案.博評】梁周瓜田李下與議會歪風 梁振英、周浩鼎因UGL調查而鬧得輿論一片沸沸...   梁振英
945  91693  【趣聞】女足球員打架誤一生 毆打對手被終身禁賽 有時候一次衝動,足以改變一生。早前波斯尼亞女...    足球
946  91699  【01觀點.梁振英UGL案】調查演成爛局 要解釋的不止周浩鼎 若非其身不正 何不大方受查\r...   梁振英
947  91764  【足球明星選舉】費蘭度奪四大獎項 朱志光首嘗最佳教練 11支球隊經過一整季的努力,港超聯決出...    足球
948  91774  【拆局】夕陽政府再打郊野公園主意 梁振英搞邊科? 房協應政府邀請,研究在郊野公園邊陲地帶興建...    足球
949  91887  【英冠附加賽】高普伴郎率利記借將發功 哈特斯菲爾德晉級鬥雷丁 英超戰局大定,英冠則仍然有一場...    足球
950  92044  【拆局】梁振英循「正途」去信委員會 研UGL文件真偽有利案情? 梁振英今日下午去信調查UGL...   梁振英
951  92099  【英超】熱刺大炒李斯特城 卡尼大四喜有望衛冕神射手(有片) 熱刺作客憑哈利卡尼大四喜加上孫興...    足球
952  92124  【UGL案】周浩鼎宣布即時辭任委員會委員 重申無違規違法無隱瞞 讓特首梁振英修改交予立法會調...   梁振英
953  92165  【港足日與夜.富力3】由中超降格港超 忍受球迷辱罵是成長特訓 R&F富力留在港超踢波被罵得狗...    足球
954  92413  【德甲】韋比女友來季成德甲主球證 創歐洲主流聯賽歷史(有片) 德甲來季少了拿姆及沙比阿朗素,...    足球
955  92455  【港超】南華連續三季四大皆空 錯在球隊停在80年代? 南華自班主張廣勇入主後,連續3季班費都...    足球
956  92462  梁振英送蘭花 證青協總幹事王<U+44EA>鳴退休 青協職員稱不捨 &nbsp; &nbsp...   梁振英
957  92494  【UGL案】謝偉俊稱無機制換走梁繼昌 集體決定無人可隻手遮天 行政長官梁振英經民建聯議員周浩...   梁振英
958  92506  【德甲】漢堡絕殺反勝禾夫斯堡 連續54季未降班保招牌(有片) 漢堡與禾夫斯堡兩支德甲老牌球隊...    足球
959  92584  【港超附加賽】南區3:1慘勝元朗 折多員大將下周決賽撼東方 地區打<U+5421>再次於港超...    足球
960  92586  【梁振英UGL案.博評】鼎,請鍾樹根回去做議員吧 【梁振英UGL案】近期熱爆政界的周浩鼎——...    足球
961  92633  【西甲】皇馬壓巴塞5年首奪西甲 施丹獲贈香檳浴(有片) 5年的等待,終於由施丹達成力壓巴塞隆...    足球
962  92706  【英超】阿仙奴成史上最高分第5名 雲格未定下季去留 雲格執教阿仙奴20年,首次未能帶領球隊獲...    足球
963  92742  【港超】盧卡斯率先轉會傑志 有望周五鬥熱刺 今季為和富大埔贏得菁英盃的功臣盧卡斯,一直盛傳會...   梁振英
964  92744  【UGL案】梁振英繼續狙擊梁繼昌 指無回應質疑「道理何在」? 梁繼昌在多名民主派議員陪同下召...   梁振英
965  92770  【英超】泰利盧卡斯薩巴列達說再見 別了熟悉的面孔(有片) 一代新人換舊人,下一季英超,追捧多...    足球
966  92817  梁振英罕有推小冊子晒政績 尹兆堅:他想幫自己政治上風光大葬 本屆政府臨近尾聲,行政長官梁振英...    足球
967  92990  【熱刺訪港】卡尼阿里簽名不停手 普捷天奴揚言下季爭英超 應屆英超亞軍熱刺周二抵港,準備周五於...   梁振英
968  93063  【政圈風聲】UGL餘波愈演愈烈 林鄭或淪另類受害者? 「浩鼎門」一石激起千重浪,梁振英連日與...   梁振英
969  93507  【熱刺訪港】普捷天奴成搶手貨 主席利維開腔派定心丸 英超今季群雄割據,摩連奴、干地及哥迪奧拿...  美國大選
970  93651  【熱刺訪港】孫興<U+615C>林志堅再聚舊 承諾賽後交換球衣 熱刺周五將與傑志於香港大球場...   梁振英
971  93690  【港足日與夜.王振鵬】膠唔會膠一世 神經刀變神龍(有片) 有些球員出道十多年,一直都被人睇低...    足球
972  93985  【中超】泰維斯抱怨遭中超球員踢傷 澄清無意離開上海申花 受人錢財不一定替人消災,阿根廷前鋒泰...    足球
973  94324  【傑志對熱刺.來稿】睇波睇到開party咁先至過癮 剛過去的周末,香港刮起一股足球熱。先是上...   梁振英

[974 rows x 3 columns]

In [26]:
data_test.to_csv("./prediction.csv", encoding='utf-8')