Machine Learning Algorithms 7 -- Naive Bayes

Installing the feedparser module for Python -----http://blog.csdn.net/u013709332/article/details/31820267

https://www.cnblogs.com/rhongp/p/6383909.html



Text classification with Python

Preparing the data: building word vectors from text

Relevant functions:

from numpy import *

def loadDataSet(): # experimental data: token lists for each post, plus the class label vector
    postingList=[['my', 'dog', 'has', 'flea', 'problems', 'help', 'please'],
                 ['maybe', 'not', 'take', 'him', 'to', 'dog', 'park', 'stupid'],
                 ['my', 'dalmation', 'is', 'so', 'cute', 'I', 'love', 'him'],
                 ['stop', 'posting', 'stupid', 'worthless', 'garbage'],
                 ['mr', 'licks', 'ate', 'my', 'steak', 'how', 'to', 'stop', 'him'],
                 ['quit', 'buying', 'worthless', 'dog', 'food', 'stupid']]
    classVec = [0,1,0,1,0,1]    #1 is abusive, 0 not
    return postingList,classVec
                 
def createVocabList(dataSet):    # return a list of all unique words across the documents
    vocabSet = set([])  #create empty set
    for document in dataSet:
        vocabSet = vocabSet | set(document) #union of the two sets
    return list(vocabSet)

def setOfWords2Vec(vocabList, inputSet):  # convert a document (word list) into a 0/1 document vector
    returnVec = [0]*len(vocabList)
    for word in inputSet:
        if word in vocabList:
            returnVec[vocabList.index(word)] = 1
        else: print "the word: %s is not in my Vocabulary!" % word
    return returnVec

Test code

import bayes
listOPosts, listClasses = bayes.loadDataSet()
myVocabList = bayes.createVocabList(listOPosts)
print myVocabList

print bayes.setOfWords2Vec(myVocabList, listOPosts[0])

Result:

['cute', 'love', 'help', 'garbage', 'quit', 'I', 'problems', 'is', 'park', 'stop', 'flea', 'dalmation', 'licks', 'food', 'not', 'him', 'buying', 'posting', 'has', 'worthless', 'ate', 'to', 'maybe', 'please', 'dog', 'how', 'stupid', 'so', 'take', 'mr', 'steak', 'my']
[0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 1]


Training the algorithm: computing probabilities from word vectors

Relevant functions

def trainNB0(trainMatrix,trainCategory):    # naive Bayes classifier training function
    numTrainDocs = len(trainMatrix)   # number of rows in the document matrix, i.e. the number of documents
    numWords = len(trainMatrix[0])    # length of the first row, i.e. the vocabulary size
    pAbusive = sum(trainCategory)/float(numTrainDocs)   # trainCategory is the vector of class labels; abusive docs / total docs gives the prior probability of the abusive class
    p0Num = ones(numWords); p1Num = ones(numWords)      # initialize counts to ones (p0Num is a vector)
    p0Denom = 2.0; p1Denom = 2.0                        # initialize denominators to 2.0 to avoid zero probabilities
    for i in range(numTrainDocs):
        if trainCategory[i] == 1:   # document belongs to the abusive class
            p1Num += trainMatrix[i]
            p1Denom += sum(trainMatrix[i])
        else:
            p0Num += trainMatrix[i]
            p0Denom += sum(trainMatrix[i])
    p1Vect = log(p1Num/p1Denom)          # take log() to avoid numerical underflow
    p0Vect = log(p0Num/p0Denom)          # take log() to avoid numerical underflow
    return p0Vect,p1Vect,pAbusive    # intuitively, p0Vect holds the (log) probability of each vocabulary word given the non-abusive class, p1Vect the same for the abusive class, and pAbusive is the prior probability of the abusive class

def classifyNB(vec2Classify, p0Vec, p1Vec, pClass1):    # the actual classification function
    p1 = sum(vec2Classify * p1Vec) + log(pClass1)    #element-wise mult
    p0 = sum(vec2Classify * p0Vec) + log(1.0 - pClass1)
    print p1
    print p0
    if p1 > p0:
        return 1
    else: 
        return 0
   
def testingNB():       # run the classifier on the two example entries below
    listOPosts,listClasses = loadDataSet()
    myVocabList = createVocabList(listOPosts)
    trainMat=[]
    for postinDoc in listOPosts:
        trainMat.append(setOfWords2Vec(myVocabList, postinDoc))
    p0V,p1V,pAb = trainNB0(array(trainMat),array(listClasses))
    testEntry = ['love', 'my', 'dalmation']
    thisDoc = array(setOfWords2Vec(myVocabList, testEntry))
    print testEntry,'classified as: ',classifyNB(thisDoc,p0V,p1V,pAb)
    testEntry = ['stupid', 'garbage']
    thisDoc = array(setOfWords2Vec(myVocabList, testEntry))
    print testEntry,'classified as: ',classifyNB(thisDoc,p0V,p1V,pAb)
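
classifyNB works entirely in log space: under the naive conditional-independence assumption, log p(c) + sum_i log p(w_i | c) is computed for each class and the larger value wins. Multiplying the raw per-word probabilities instead would underflow to zero for longer documents. A minimal sketch of the underflow problem and the log-sum fix (the probability values below are made up purely for illustration):

from numpy import array, log, prod

probs = array([1e-4] * 100)   # hypothetical per-word conditional probabilities for a 100-word document

print prod(probs)             # 1e-400 underflows to 0.0 in double precision
print sum(log(probs))         # stays finite: 100 * log(1e-4), roughly -921.03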

Test code

# run the training step on the example posts
from numpy import *
trainMat = []
for postinDoc in listOPosts:
    trainMat.append(bayes.setOfWords2Vec(myVocabList,postinDoc))
p0V,p1V,pAb = bayes.trainNB0(trainMat, listClasses)
print pAb
print p0V
print p1V

bayes.testingNB()

Result (note that classifyNB prints p1 and p0 before returning, so for each test entry p1 appears on the "classified as" line, p0 on the next line, and the returned class label on the line after that)

0.5
[-2.56494936 -2.56494936 -2.56494936 -3.25809654 -3.25809654 -2.56494936
 -2.56494936 -2.56494936 -3.25809654 -2.56494936 -2.56494936 -2.56494936
 -2.56494936 -3.25809654 -3.25809654 -2.15948425 -3.25809654 -3.25809654
 -2.56494936 -3.25809654 -2.56494936 -2.56494936 -3.25809654 -2.56494936
 -2.56494936 -2.56494936 -3.25809654 -2.56494936 -3.25809654 -2.56494936
 -2.56494936 -1.87180218]
[-3.04452244 -3.04452244 -3.04452244 -2.35137526 -2.35137526 -3.04452244
 -3.04452244 -3.04452244 -2.35137526 -2.35137526 -3.04452244 -3.04452244
 -3.04452244 -2.35137526 -2.35137526 -2.35137526 -2.35137526 -2.35137526
 -3.04452244 -1.94591015 -3.04452244 -2.35137526 -2.35137526 -3.04452244
 -1.94591015 -3.04452244 -1.65822808 -3.04452244 -2.35137526 -3.04452244
 -3.04452244 -3.04452244]
['love', 'my', 'dalmation'] classified as:  -9.82671449373
-7.69484807238
0
['stupid', 'garbage'] classified as:  -4.70275051433
-7.2093402566
1


Filtering spam email with naive Bayes

Text parsing

import re    # regular expressions
regEx = re.compile('\\W*')
emailText = open('email/ham/6.txt').read()
listOfTokens = regEx.split(emailText)
print listOfTokens
print [tok.lower() for tok in listOfTokens if len(tok)>0]  # list comprehension

Result

['Hello', 'Since', 'you', 'are', 'an', 'owner', 'of', 'at', 'least', 'one', 'Google', 'Groups', 'group', 'that', 'uses', 'the', 'customized', 'welcome', 'message', 'pages', 'or', 'files', 'we', 'are', 'writing', 'to', 'inform', 'you', 'that', 'we', 'will', 'no', 'longer', 'be', 'supporting', 'these', 'features', 'starting', 'February', '2011', 'We', 'made', 'this', 'decision', 'so', 'that', 'we', 'can', 'focus', 'on', 'improving', 'the', 'core', 'functionalities', 'of', 'Google', 'Groups', 'mailing', 'lists', 'and', 'forum', 'discussions', 'Instead', 'of', 'these', 'features', 'we', 'encourage', 'you', 'to', 'use', 'products', 'that', 'are', 'designed', 'specifically', 'for', 'file', 'storage', 'and', 'page', 'creation', 'such', 'as', 'Google', 'Docs', 'and', 'Google', 'Sites', 'For', 'example', 'you', 'can', 'easily', 'create', 'your', 'pages', 'on', 'Google', 'Sites', 'and', 'share', 'the', 'site', 'http', 'www', 'google', 'com', 'support', 'sites', 'bin', 'answer', 'py', 'hl', 'en', 'answer', '174623', 'with', 'the', 'members', 'of', 'your', 'group', 'You', 'can', 'also', 'store', 'your', 'files', 'on', 'the', 'site', 'by', 'attaching', 'files', 'to', 'pages', 'http', 'www', 'google', 'com', 'support', 'sites', 'bin', 'answer', 'py', 'hl', 'en', 'answer', '90563', 'on', 'the', 'site', 'If', 'you', 're', 'just', 'looking', 'for', 'a', 'place', 'to', 'upload', 'your', 'files', 'so', 'that', 'your', 'group', 'members', 'can', 'download', 'them', 'we', 'suggest', 'you', 'try', 'Google', 'Docs', 'You', 'can', 'upload', 'files', 'http', 'docs', 'google', 'com', 'support', 'bin', 'answer', 'py', 'hl', 'en', 'answer', '50092', 'and', 'share', 'access', 'with', 'either', 'a', 'group', 'http', 'docs', 'google', 'com', 'support', 'bin', 'answer', 'py', 'hl', 'en', 'answer', '66343', 'or', 'an', 'individual', 'http', 'docs', 'google', 'com', 'support', 'bin', 'answer', 'py', 'hl', 'en', 'answer', '86152', 'assigning', 'either', 'edit', 'or', 'download', 'only', 'access', 'to', 'the', 'files', 'you', 'have', 'received', 'this', 'mandatory', 'email', 'service', 'announcement', 'to', 'update', 'you', 'about', 'important', 'changes', 'to', 'Google', 'Groups', '']
['hello', 'since', 'you', 'are', 'an', 'owner', 'of', 'at', 'least', 'one', 'google', 'groups', 'group', 'that', 'uses', 'the', 'customized', 'welcome', 'message', 'pages', 'or', 'files', 'we', 'are', 'writing', 'to', 'inform', 'you', 'that', 'we', 'will', 'no', 'longer', 'be', 'supporting', 'these', 'features', 'starting', 'february', '2011', 'we', 'made', 'this', 'decision', 'so', 'that', 'we', 'can', 'focus', 'on', 'improving', 'the', 'core', 'functionalities', 'of', 'google', 'groups', 'mailing', 'lists', 'and', 'forum', 'discussions', 'instead', 'of', 'these', 'features', 'we', 'encourage', 'you', 'to', 'use', 'products', 'that', 'are', 'designed', 'specifically', 'for', 'file', 'storage', 'and', 'page', 'creation', 'such', 'as', 'google', 'docs', 'and', 'google', 'sites', 'for', 'example', 'you', 'can', 'easily', 'create', 'your', 'pages', 'on', 'google', 'sites', 'and', 'share', 'the', 'site', 'http', 'www', 'google', 'com', 'support', 'sites', 'bin', 'answer', 'py', 'hl', 'en', 'answer', '174623', 'with', 'the', 'members', 'of', 'your', 'group', 'you', 'can', 'also', 'store', 'your', 'files', 'on', 'the', 'site', 'by', 'attaching', 'files', 'to', 'pages', 'http', 'www', 'google', 'com', 'support', 'sites', 'bin', 'answer', 'py', 'hl', 'en', 'answer', '90563', 'on', 'the', 'site', 'if', 'you', 're', 'just', 'looking', 'for', 'a', 'place', 'to', 'upload', 'your', 'files', 'so', 'that', 'your', 'group', 'members', 'can', 'download', 'them', 'we', 'suggest', 'you', 'try', 'google', 'docs', 'you', 'can', 'upload', 'files', 'http', 'docs', 'google', 'com', 'support', 'bin', 'answer', 'py', 'hl', 'en', 'answer', '50092', 'and', 'share', 'access', 'with', 'either', 'a', 'group', 'http', 'docs', 'google', 'com', 'support', 'bin', 'answer', 'py', 'hl', 'en', 'answer', '66343', 'or', 'an', 'individual', 'http', 'docs', 'google', 'com', 'support', 'bin', 'answer', 'py', 'hl', 'en', 'answer', '86152', 'assigning', 'either', 'edit', 'or', 'download', 'only', 'access', 'to', 'the', 'files', 'you', 'have', 'received', 'this', 'mandatory', 'email', 'service', 'announcement', 'to', 'update', 'you', 'about', 'important', 'changes', 'to', 'google', 'groups']


Spam detection

Relevant functions

def textParse(bigString):    # text parsing: input is a big string, output is a word list
    import re
    listOfTokens = re.split(r'\W*', bigString)
    return [tok.lower() for tok in listOfTokens if len(tok) > 2] 
    
def spamTest():    # spam filter test function
    docList=[]; classList = []; fullText =[]
    for i in range(1,26):
        wordList = textParse(open('email/spam/%d.txt' % i).read())   # spam emails are labeled 1
        docList.append(wordList)
        fullText.extend(wordList)
        classList.append(1)
        wordList = textParse(open('email/ham/%d.txt' % i).read())  # ham (non-spam) emails are labeled 0
        docList.append(wordList)
        fullText.extend(wordList)
        classList.append(0)
    vocabList = createVocabList(docList)#create vocabulary
    trainingSet = range(50); testSet=[]           #create test set
    for i in range(10):     # randomly pick 10 documents as the test set
        randIndex = int(random.uniform(0,len(trainingSet)))
        testSet.append(trainingSet[randIndex])
        del(trainingSet[randIndex])  
    trainMat=[]; trainClasses = []
    for docIndex in trainingSet:#train the classifier (get probs) trainNB0
        trainMat.append(bagOfWords2VecMN(vocabList, docList[docIndex]))
        trainClasses.append(classList[docIndex])
    p0V,p1V,pSpam = trainNB0(array(trainMat),array(trainClasses))
    errorCount = 0
    for docIndex in testSet:        #classify the remaining items
        wordVector = bagOfWords2VecMN(vocabList, docList[docIndex])
        if classifyNB(array(wordVector),p0V,p1V,pSpam) != classList[docIndex]:
            errorCount += 1
            print "classification error",docList[docIndex]
    print 'the error rate is: ',float(errorCount)/len(testSet)
    #return vocabList,fullText
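
Note that spamTest (and localWords below) calls bagOfWords2VecMN, which is not listed in this post. It is presumably the bag-of-words counterpart of setOfWords2Vec from the book: instead of marking whether a word appears, it counts how many times it appears. A sketch of that helper:

def bagOfWords2VecMN(vocabList, inputSet):   # bag-of-words model: count word occurrences
    returnVec = [0]*len(vocabList)
    for word in inputSet:
        if word in vocabList:
            returnVec[vocabList.index(word)] += 1
    return returnVec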

Test code

# run the spam filter test
bayes.spamTest()

Result

classification error ['benoit', 'mandelbrot', '1924', '2010', 'benoit', 'mandelbrot', '1924', '2010', 'wilmott', 'team', 'benoit', 'mandelbrot', 'the', 'mathematician', 'the', 'father', 'fractal', 'mathematics', 'and', 'advocate', 'more', 'sophisticated', 'modelling', 'quantitative', 'finance', 'died', '14th', 'october', '2010', 'aged', 'wilmott', 'magazine', 'has', 'often', 'featured', 'mandelbrot', 'his', 'ideas', 'and', 'the', 'work', 'others', 'inspired', 'his', 'fundamental', 'insights', 'you', 'must', 'logged', 'view', 'these', 'articles', 'from', 'past', 'issues', 'wilmott', 'magazine']
the error rate is:  0.1

Using a naive Bayes classifier to discover regional preferences from personal ads

Relevant functions

def calcMostFreq(vocabList,fullText):    # return the 30 most frequent words and their counts
    import operator
    freqDict = {}
    for token in vocabList:
        freqDict[token]=fullText.count(token)
    sortedFreq = sorted(freqDict.iteritems(), key=operator.itemgetter(1), reverse=True) 
    return sortedFreq[:30]       

def localWords(feed1,feed0):   # takes two RSS feeds as input
    import feedparser
    docList=[]; classList = []; fullText =[]
    minLen = min(len(feed1['entries']),len(feed0['entries']))   # number of entries to use from each feed
    for i in range(minLen):
        wordList = textParse(feed1['entries'][i]['summary']) # parse the 'summary' field of each entry
        docList.append(wordList)
        fullText.extend(wordList)
        classList.append(1) #NY is class 1
        wordList = textParse(feed0['entries'][i]['summary'])
        docList.append(wordList)
        fullText.extend(wordList)
        classList.append(0)
    vocabList = createVocabList(docList)#create vocabulary
    top30Words = calcMostFreq(vocabList,fullText)   # remove the 30 most frequent words, since most of them are redundant stop/structure words
    for pairW in top30Words:
        if pairW[0] in vocabList: vocabList.remove(pairW[0])
    trainingSet = range(2*minLen); testSet=[]           #create test set
    for i in range(20):
        randIndex = int(random.uniform(0,len(trainingSet)))
        testSet.append(trainingSet[randIndex])
        del(trainingSet[randIndex])  
    trainMat=[]; trainClasses = []
    for docIndex in trainingSet:#train the classifier (get probs) trainNB0
        trainMat.append(bagOfWords2VecMN(vocabList, docList[docIndex]))
        trainClasses.append(classList[docIndex])
    p0V,p1V,pSpam = trainNB0(array(trainMat),array(trainClasses))
    errorCount = 0
    for docIndex in testSet:        #classify the remaining items
        wordVector = bagOfWords2VecMN(vocabList, docList[docIndex])
        if classifyNB(array(wordVector),p0V,p1V,pSpam) != classList[docIndex]:
            errorCount += 1
    print 'the error rate is: ',float(errorCount)/len(testSet)
    return vocabList,p0V,p1V

def getTopWords(ny,sf):
    import operator
    vocabList,p0V,p1V=localWords(ny,sf)
    topNY=[]; topSF=[]
    for i in range(len(p0V)):
        if p0V[i] > -6.0 : topSF.append((vocabList[i],p0V[i]))
        if p1V[i] > -6.0 : topNY.append((vocabList[i],p1V[i]))
    sortedSF = sorted(topSF, key=lambda pair: pair[1], reverse=True)
    print "SF**SF**SF**SF**SF**SF**SF**SF**SF**SF**SF**SF**SF**SF**SF**SF**"
    for item in sortedSF:
        print item[0]
    sortedNY = sorted(topNY, key=lambda pair: pair[1], reverse=True)
    print "NY**NY**NY**NY**NY**NY**NY**NY**NY**NY**NY**NY**NY**NY**NY**NY**"
    for item in sortedNY:
        print item[0]

Test code

# import the RSS feeds; RSS is a simple way for one site to share content with other sites
import feedparser
ny = feedparser.parse('http://newyork.craigslist.org/stp/index.rss')
print ny['entries']
print len(ny['entries'])

sf = feedparser.parse('http://sfbay.craigslist.org/stp/index.rss')  # San Francisco Bay Area
vocabList, pSF, pNY = bayes.localWords(ny,sf)

#显示地域相关的用词
bayes.getTopWords(ny, sf)

Uses and understanding of RSS: http://blog.sina.com.cn/s/blog_7e599baa0100vy5l.html

http://blog.csdn.net/perfectguyipeng/article/details/73556278

What advantages do WeChat official accounts have over RSS?----https://www.zhihu.com/question/23318820



Results

the error rate is:  0.4
the error rate is:  0.55
SF**SF**SF**SF**SF**SF**SF**SF**SF**SF**SF**SF**SF**SF**SF**SF**
here
old
very
what
meet
travel
might
going
married
come
easy
year
all
asian
tickle
healthy
extra
pic
work
watch
years
sucking
little
area
first
another
together
kind
any
sometimes
professional
availability
title
hairy
sex
visiting
doesn
will
situation
perhaps
hang
fucked
well
know
fwb
audience
play
host
weekend
lot
hello
time
smoking
send
must
masturbate
ticket
soles
school
leave
guy
says
270
sub
neg
find
access
indian
blur
new
seeks
body
socialize
change
bible
study
smoke
weird
prefer
confidante
total
working
next
live
music
taken
type
until
more
relax
company
train
women
join
give
share
chit
states
want
pink
needs
ping
attend
spot
mesh
man
maybe
green
road
talk
over
top
vary
130
bigboydick20
difference
expectations
then
them
seeking
food
name
good
side
week
drinker
taught
open
since
got
friday
ask
interaction
bisexuality
days
american
ish
think
enjoy
feel
number
lol
177
given
anyone
too
friend
exploring
eyes
treasured
way
happily
serve
sync
than
lbs
san
mind
comfortable
say
also
which
most
don
clean
gentleman
bright
relation
one
hope
outside
haven
black
pretty
lady
get
familiar
trees
pleasures
naked
respond
physically
see
are
state
enjoys
email
country
quiet
etc
weirdo
grooming
active
church
blonde
light
companionship
much
feather
wants
chub
cash
while
fun
non
optimistic
make
several
pal
material
drink
kik
running
improv
off
older
anybody
kinky
being
thanks
touch
discipline
jose
discuss
attractive
real
around
lacking
vacaville
necessary
should
nipple
because
hair
happens
does
fitness
about
lifestyle
anything
getting
own
ethnic
into
down
right
art
introverted
there
hey
start
was
happy
45p
hear
line
planet
mature
nerdy
promises
moved
educated
curious
education
whit
when
occasional
field
interested
occasion
friends
oral
age
stripping
husky
smoker
starting
coach
discreetly
ouch
four
sleep
decisions
buddy
destress
flash
father
compensate
sent
returning
minded
respectful
level
marry
cloudy
sane
small
pleas
ten
190
cost
pass
7th
depression
drinks
blue
asap
handed
ever
outgoing
full
never
met
sexu
others
alone
sexy
along
strong
great
experience
pics
social
makes
massage
highly
guests
lose
two
few
wondering
clicking
award
basically
glad
town
backgroun
room
hour
servants
learn
male
heart
distractions
figure
slim
fuck
something
sense
honest
conversation
siz
comfort
how
winners
pure
nyc
ordinary
intelligent
profile
after
membership
coming
such
guys
stress
pleasure
help
welcomed
partnered
course
through
satisfies
still
its
fit
better
production
wry
dad
workout
safe
break
hands
now
day
term
pregnant
always
40s
each
found
friendship
england
weight
doing
energy
universities
sexually
girl
special
living
research
dominate
overweight
ssage
quite
favs
wanted
havent
training
could
times
thing
organize
260lbs
yourself
done
traveler
message
realistically
miss
size
city
caught
least
swgjm
twenties
white
free
muscles
relationship
pnp
explanation
hotel
part
somewhat
butt
king
anxiety
mm4m
project
pampered
spoiled
treated
interests
phobia
philippina
half
need
strength
offering
studying
wasters
mid
build
online
honestly
latino
gym
200
begin
experienced
theatre
normal
paid
brooklyn
tall
nothing
alpha
class
singer
spends
request
face
bodybuilder
came
shot
queen
bring
decade
zumba
only
achieve
his
routine
reps
reply
workouts
intuitive
transitional
artist
twice
bad
stuff
including
where
equinox
choreographer
best
subject
satisfying
pictures
hopefully
mainly
wide
solo
men
weighing
missing
lotion
coffee
both
hours
connection
otherwise
afterwards
headed
period
height
dancer
likes
boat
late
marc
decent
fro
quickly
whom
bbw
warmth
life
lived
relaxing
photos
immersive
165
general
chill
myself
smouldering
53y
ive
seven
hoping
sincere
player
reads
tissue
ready
partner
shar
150
same
trip
member
moms
educa
european
status
opportunity
shalom
youre
hispanic
cheer
edge
massaging
executive
rest
schools
shape
financially
human
world
yet
generous
cut
cup
photographer
workers
lets
other
m4m
mannered
transformation
smart
take
submissive
march
read
big
mwm
february
dark
260
five
using
bit
loss
either
night
become
people
proper
intere
humor
bottom
creative
everything
business
reaching
meaningf
eye
post
months
drama
dinner
youthful
seeing
grammy
communication
commitment
ppl
appropriate
amsterdam
bubbly
her
long
brazilian
versitle
chatting
converse
oil
back
goals
hopes
trying
bud
handsome
count
toned
places
below
problem
daddy
similar
mails
videos
direction
deep
package
midweek
sessions
actor
lately
trans
nice
fot
francisco
football
drag
younger
studio
assets
spend
home
daily
far
having
NY**NY**NY**NY**NY**NY**NY**NY**NY**NY**NY**NY**NY**NY**NY**NY**
want
all
mind
life
time
father
guy
asap
body
full
alone
massage
pic
women
male
man
seeking
safe
good
weight
wanted
yourself
white
part
latino
most
don
clean
bring
one
artist
sex
will
fun
start
chatting
nice
friends
coach
discreetly
buddy
destress
send
sent
level
pass
depression
what
access
new
seeks
sexu
others
sexy
along
great
social
makes
guests
working
lose
type
more
relax
company
basically
room
hour
work
servants
meet
figure
give
slim
fuck
something
siz
comfort
nyc
ordinary
membership
such
maybe
pleasure
welcomed
partnered
satisfies
still
its
fit
might
then
workout
now
day
40s
each
week
england
doing
energy
sexually
special
open
dominate
overweight
quite
training
feel
260lbs
done
miss
size
together
least
twenties
free
hotel
butt
king
anxiety
pampered
treated
lbs
phobia
half
say
need
strength
offering
wasters
mid
build
which
gym
begin
paid
nothing
alpha
class
spends
face
bodybuilder
came
queen
decade
zumba
black
his
routine
reps
reply
transitional
married
twice
equinox
see
best
subject
satisfying
pictures
mainly
men
weighing
lotion
come
period
height
likes
companionship
whom
bbw
lived
relaxing
165
myself
while
player
tissue
partner
member
status
older
well
hispanic
edge
being
shape
world
workers
take
submissive
big
mwm
260
know
using
loss
necessary
night
become
people
bottom
creative
everything
about
drama
own
commitment
ppl
art
hey
lot
brazilian
was
versitle
oil
line
trying
count
toned
places
mature
daddy
similar
mails
direction
deep
package
midweek
sessions
interested
trans
fot
football
drag
younger
home
daily
hello
having
ouch
four
sleep
asian
smoking
decisions
flash
tickle
compensate
must
returning
very
minded
masturbate
ticket
soles
school
respectful
marry
cloudy
leave
sane
small
pleas
says
ten
190
270
cost
7th
drinks
blue
sub
neg
find
indian
handed
blur
ever
outgoing
never
here
socialize
met
strong
change
bible
healthy
study
experience
pics
smoke
weird
extra
prefer
highly
confidante
total
two
next
few
live
music
wondering
taken
until
clicking
award
train
glad
town
backgroun
join
learn
heart
distractions
share
chit
states
sense
honest
pink
needs
conversation
travel
ping
how
winners
pure
intelligent
profile
attend
watch
after
spot
mesh
coming
guys
stress
green
road
talk
help
over
top
vary
years
course
130
through
bigboydick20
difference
expectations
better
production
wry
dad
them
sucking
food
break
hands
term
name
pregnant
always
found
friendship
side
universities
drinker
taught
girl
living
since
research
got
friday
little
ssage
ask
favs
havent
interaction
could
area
bisexuality
days
times
thing
american
ish
think
first
enjoy
organize
number
lol
another
177
traveler
message
realistically
city
given
caught
swgjm
anyone
too
friend
exploring
eyes
muscles
treasured
way
relationship
happily
pnp
explanation
serve
sync
somewhat
than
kind
mm4m
project
spoiled
interests
san
philippina
comfortable
any
studying
also
online
honestly
200
experienced
theatre
normal
brooklyn
tall
singer
sometimes
request
professional
shot
gentleman
bright
relation
availability
title
hope
outside
haven
only
going
pretty
achieve
lady
get
familiar
trees
workouts
intuitive
pleasures
naked
bad
stuff
including
where
respond
hairy
choreographer
physically
are
visiting
state
hopefully
enjoys
email
wide
solo
missing
coffee
both
country
hours
quiet
etc
connection
weirdo
grooming
otherwise
afterwards
headed
active
dancer
church
blonde
boat
late
light
marc
decent
fro
quickly
much
feather
warmth
wants
chub
photos
doesn
immersive
general
chill
smouldering
cash
53y
situation
ive
seven
hoping
sincere
non
reads
optimistic
ready
shar
150
perhaps
make
same
trip
moms
educa
several
pal
european
material
drink
hang
kik
running
improv
fucked
opportunity
off
shalom
youre
anybody
cheer
massaging
kinky
executive
rest
schools
financially
thanks
human
touch
yet
discipline
jose
generous
cut
cup
photographer
discuss
lets
other
attractive
easy
m4m
mannered
transformation
smart
real
march
around
lacking
read
february
dark
five
bit
vacaville
should
fwb
audience
nipple
either
because
old
play
hair
proper
intere
happens
humor
does
business
reaching
fitness
meaningf
host
eye
post
lifestyle
anything
getting
months
dinner
youthful
seeing
grammy
ethnic
communication
into
down
appropriate
right
amsterdam
bubbly
her
introverted
weekend
there
long
happy
converse
back
45p
hear
goals
hopes
bud
handsome
planet
below
nerdy
problem
promises
videos
year
moved
educated
curious
education
whit
when
actor
lately
occasional
field
francisco
occasion
studio
assets
spend
oral
age
stripping
far
husky
smoker
starting





Reprinted from blog.csdn.net/qq_28088259/article/details/79217783