# -*- coding: UTF-8 -*-
import multiprocessing as mp
from multiprocessing import Queue
import bs4
from Queue import Empty
import urllib
import json
'''
getPageData acts as the producer: it fetches the job listing pages.
getDetail acts as the consumer: it fetches each job's detail page.
`queue` holds the IDs of the job detail pages.
'''
interface = 'http://www.lagou.com/jobs/positionAjax.json?px=default&yx=10k-15k&needAddtionalResult=false'
detailUrl = 'http://www.lagou.com/jobs/{0}.html'
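
# Data flow across the three queues:
#   task   -- page numbers the producer still has to fetch
#   queue  -- positionIds harvested from each listing page
#   result -- formatted job details, drained to a file at the end
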
def getPageData(task, queue, keyword='python'):
    while True:
        try:
            page = task.get(timeout=1)
        except Empty:
            break
        post_data = {'kd': keyword, 'pn': page, 'first': 'false'}
        # urllib.urlopen with a data argument issues a POST request
        opener = urllib.urlopen(interface, urllib.urlencode(post_data))
        jsonData = json.loads(opener.read())
        results = jsonData['content']['positionResult']['result']
        for result in results:
            queue.put(result['positionId'])
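
# Consumer: pulls positionIds off the queue and scrapes each detail page.
# Note: a worker exits once the queue has stayed empty for a full second,
# which assumes the producer keeps it topped up faster than that.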
def getDetail(queue, result):
    while True:
        try:
            positionId = queue.get(timeout=1)
        except Empty:
            print mp.current_process().name + ' exit'
            break
        url = detailUrl.format(positionId)
        print url, mp.current_process().name
        opener = urllib.urlopen(url)
        html = opener.read()
        soup = bs4.BeautifulSoup(html)
        # the job description lives in the element with class "job_bt"
        content = soup.findAll(attrs={"class": "job_bt"})[0]
        result.put('{0}\n{1}'.format(url, content))
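
# Orchestration: fetch page 1 synchronously to learn the total page count,
# seed the queues, then spawn cpu_count() consumers plus one producer.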
def start(keyword='python'):
    task = Queue()
    queue = Queue()
    result = Queue()
    post_data = {'kd': keyword, 'pn': 1, 'first': 'true'}
    opener = urllib.urlopen(interface, urllib.urlencode(post_data))
    jsonData = json.loads(opener.read())
    # number of pages, rounded up (Python 2 '/' is integer division here)
    totalCount = jsonData['content']['positionResult']['totalCount']
    resultSize = jsonData['content']['positionResult']['resultSize']
    pageNums = totalCount / resultSize
    if totalCount % resultSize:
        pageNums += 1
    results = jsonData['content']['positionResult']['result']
    for r in results:
        queue.put(r['positionId'])
    # for debugging, only crawl the first three pages
    pageNums = 3
    for i in range(2, pageNums + 1):
        task.put(i)
    num_consumers = mp.cpu_count()
    processes = [mp.Process(target=getDetail, args=(queue, result))
                 for _ in range(num_consumers)]
    processes.append(mp.Process(target=getPageData, args=(task, queue)))
    for p in processes:
        p.start()
    for p in processes:
        p.join()
    print 'processes over'
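    # Caveat: joining workers before draining `result` can deadlock if the
    # queue's feeder buffer is still full; fine here for a few pages of data.
    # result.empty() is only reliable because every producer has exited.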
    with open('jobs', 'w+') as f:
        while not result.empty():
            a = result.get()
            f.write(a)

if __name__ == '__main__':
    start()