我在将 django-dynamic-scraper 与 scrapyd 一起使用时遇到了下面这个错误,感到很困惑(见最后一行,其中 ScrapyDemo
是我的 scrapy 项目名称)。
请大家帮帮我:
2015-08-16 20:18:31+0800 [Launcher,12972/stderr] Unhandled error in Deferred:
2015-08-16 20:18:31+0800 [Launcher,12972/stderr]
Traceback (most recent call last):
File "/Users/zhushajun/.pyenv/versions/scrapy_env2/lib/python2.7/site-packages/scrapy/cmdline.py", line 150, in _run_command
cmd.run(args, opts)
File "/Users/zhushajun/.pyenv/versions/scrapy_env2/lib/python2.7/site-packages/scrapy/commands/crawl.py", line 57, in run
self.crawler_process.crawl(spname, **opts.spargs)
File "/Users/zhushajun/.pyenv/versions/scrapy_env2/lib/python2.7/site-packages/scrapy/crawler.py", line 153, in crawl
d = crawler.crawl(*args, **kwargs)
File "/Users/zhushajun/.pyenv/versions/scrapy_env2/lib/python2.7/site-packages/twisted/internet/defer.py", line 1274, in unwindGenerator
return _inlineCallbacks(None, gen, Deferred())
--- <exception caught here> ---
File "/Users/zhushajun/.pyenv/versions/scrapy_env2/lib/python2.7/site-packages/twisted/internet/defer.py", line 1128, in _inlineCallbacks
result = g.send(result)
File "/Users/zhushajun/.pyenv/versions/scrapy_env2/lib/python2.7/site-packages/scrapy/crawler.py", line 71, in crawl
self.engine = self._create_engine()
File "/Users/zhushajun/.pyenv/versions/scrapy_env2/lib/python2.7/site-packages/scrapy/crawler.py", line 83, in _create_engine
return ExecutionEngine(self, lambda _: self.stop())
File "/Users/zhushajun/.pyenv/versions/scrapy_env2/lib/python2.7/site-packages/scrapy/core/engine.py", line 67, in __init__
self.scraper = Scraper(crawler)
File "/Users/zhushajun/.pyenv/versions/scrapy_env2/lib/python2.7/site-packages/scrapy/core/scraper.py", line 70, in __init__
self.itemproc = itemproc_cls.from_crawler(crawler)
File "/Users/zhushajun/.pyenv/versions/scrapy_env2/lib/python2.7/site-packages/scrapy/middleware.py", line 56, in from_crawler
return cls.from_settings(crawler.settings, crawler)
File "/Users/zhushajun/.pyenv/versions/scrapy_env2/lib/python2.7/site-packages/scrapy/middleware.py", line 34, in from_settings
mw = mwcls.from_crawler(crawler)
File "/Users/zhushajun/.pyenv/versions/scrapy_env2/lib/python2.7/site-packages/scrapy/pipelines/media.py", line 33, in from_crawler
pipe = cls.from_settings(crawler.settings)
File "/Users/zhushajun/.pyenv/versions/scrapy_env2/lib/python2.7/site-packages/scrapy/pipelines/images.py", line 57, in from_settings
return cls(store_uri)
File "/Users/zhushajun/.pyenv/versions/scrapy_env2/lib/python2.7/site-packages/dynamic_scraper/pipelines.py", line 15, in __init__
super(DjangoImagesPipeline, self).__init__(*args, **kwargs)
File "/Users/zhushajun/.pyenv/versions/scrapy_env2/lib/python2.7/site-packages/scrapy/pipelines/files.py", line 160, in __init__
self.store = self._get_store(store_uri)
File "/Users/zhushajun/.pyenv/versions/scrapy_env2/lib/python2.7/site-packages/scrapy/pipelines/files.py", line 181, in _get_store
return store_cls(uri)
File "/Users/zhushajun/.pyenv/versions/scrapy_env2/lib/python2.7/site-packages/scrapy/pipelines/files.py", line 43, in __init__
self._mkdir(self.basedir)
File "/Users/zhushajun/.pyenv/versions/scrapy_env2/lib/python2.7/site-packages/scrapy/pipelines/files.py", line 72, in _mkdir
os.makedirs(dirname)
File "/Users/zhushajun/.pyenv/versions/scrapy_env2/lib/python2.7/os.py", line 150, in makedirs
makedirs(head, mode)
File "/Users/zhushajun/.pyenv/versions/scrapy_env2/lib/python2.7/os.py", line 150, in makedirs
makedirs(head, mode)
File "/Users/zhushajun/.pyenv/versions/scrapy_env2/lib/python2.7/os.py", line 150, in makedirs
makedirs(head, mode)
File "/Users/zhushajun/.pyenv/versions/scrapy_env2/lib/python2.7/os.py", line 157, in makedirs
mkdir(name, mode)
exceptions.OSError: [Errno 20] Not a directory: '/private/var/folders/32/2_lmw4t55m5fx_gb5v7d_0vr0000gn/T/ScrapyDemo-1439724559-dDctsA.egg/ScrapyDemo'
目前没有回答
相关问题 更多 >
编程相关推荐