from scrapy.crawler import CrawlerProcess
from scrapy.utils.project import get_project_settings
from douban.spiders.douban_spider import Douban_spider

# The imports below are never used directly. Scrapy loads its extensions,
# middlewares, and pipelines by dotted path at runtime, so they are imported
# explicitly here to make them visible to static bundlers such as PyInstaller.
import urllib.robotparser
import scrapy.spiderloader
import scrapy.statscollectors
import scrapy.logformatter
import scrapy.dupefilters
import scrapy.squeues
import scrapy.extensions.spiderstate
import scrapy.extensions.corestats
import scrapy.extensions.telnet
import scrapy.extensions.logstats
import scrapy.extensions.memusage
import scrapy.extensions.memdebug
import scrapy.extensions.feedexport
import scrapy.extensions.closespider
import scrapy.extensions.debug
import scrapy.extensions.httpcache
import scrapy.extensions.statsmailer
import scrapy.extensions.throttle
import scrapy.core.scheduler
import scrapy.core.engine
import scrapy.core.scraper
import scrapy.core.spidermw
import scrapy.core.downloader
import scrapy.downloadermiddlewares.stats
import scrapy.downloadermiddlewares.httpcache
import scrapy.downloadermiddlewares.cookies
import scrapy.downloadermiddlewares.useragent
import scrapy.downloadermiddlewares.httpproxy
import scrapy.downloadermiddlewares.ajaxcrawl
import scrapy.downloadermiddlewares.chunked
import scrapy.downloadermiddlewares.decompression
import scrapy.downloadermiddlewares.defaultheaders
import scrapy.downloadermiddlewares.downloadtimeout
import scrapy.downloadermiddlewares.httpauth
import scrapy.downloadermiddlewares.httpcompression
import scrapy.downloadermiddlewares.redirect
import scrapy.downloadermiddlewares.retry
import scrapy.downloadermiddlewares.robotstxt
import scrapy.spidermiddlewares.depth
import scrapy.spidermiddlewares.httperror
import scrapy.spidermiddlewares.offsite
import scrapy.spidermiddlewares.referer
import scrapy.spidermiddlewares.urllength
import scrapy.pipelines
import scrapy.core.downloader.handlers.http
import scrapy.core.downloader.contextfactory

# Project-specific modules, imported for the same reason.
from douban.pipelines import DoubanPipeline
from douban.items import DoubanItem
import douban.settings

if __name__ == '__main__':
    # Run the spider programmatically instead of via the `scrapy crawl` CLI.
    setting = get_project_settings()
    process = CrawlerProcess(settings=setting)
    process.crawl(Douban_spider)
    process.start()  # blocks here until the crawl finishes
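
# A minimal sketch of overriding individual settings before the crawl starts,
# e.g. to reduce log noise. 'LOG_LEVEL' is a standard Scrapy setting and
# Settings.set() is part of Scrapy's API; the WARNING value is illustrative
# and not part of the original script:
#
#     setting = get_project_settings()
#     setting.set('LOG_LEVEL', 'WARNING')
#     process = CrawlerProcess(settings=setting)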