Error report when querying MongoDB in Python

Traceback (most recent call last):
  File "/home/shenjianlin/.local/lib/python3.4/site-packages/twisted/internet/defer.py", line 653, in _runCallbacks
    current.result = callback(current.result, *args, **kw)
  File "/home/shenjianlin/my_project/Espider/Espider/pipelines/MongoPipelines.py", line 25, in process_item
    res = self.Mongodb.find(key_word)
  File "/usr/lib64/python3.4/site-packages/pymongo/collection.py", line 3100, in __call__
    self.__name)
TypeError: "Collection" object is not callable. If you meant to call the "find" method on a "Database" object it is failing because no such method exists.
2018-10-11 19:30:41 [scrapy.core.engine] DEBUG: Crawled (200) <GET https://api.zsxq.com/v1.10/files/51114252822224/download_url> (referer: https://wx.zsxq.com/dweb/)
2018-10-11 19:30:41 [scrapy.core.engine] DEBUG: Crawled (200) <GET https://api.zsxq.com/v1.10/files/51114252822254/download_url> (referer: https://wx.zsxq.com/dweb/)
2018-10-11 19:30:41 [scrapy.core.scraper] ERROR: Error processing {"file_url": "https://files.zsxq.com/lpawwLc8YufA3n22KiwDGuc1-7Mu?attname=%E5%8D%8E%E7%99%BB%E5%9B%BD%E9%99%85-MEMS%EF%BC%8C%E6%9C%BA%E5%99%A8%E4%BA%BA%E5%92%8C%E6%99%BA%E8%83%BD%E6%B1%BD%E8%BD%A6-2018.03-32%E9%A1%B5.pdf&e=1874736000&token=kIxbL07-8jAj8w1n4s9zv64FuZZNEATmlU_Vm6zD:IfFwug2_hLLVlHfp8RUz-ky4x3A=",
 "name": "-MEMS-2018.03-32.pdf"}
Traceback (most recent call last):


import pymongo
from scrapy.exceptions import DropItem


class MongodbPipeline(object):

    def __init__(self, MongodbHost, MongodbPort, MongodbName, MongodbCollection):
        self.MongodbHost = MongodbHost
        self.MongodbPort = MongodbPort
        self.MongodbName = MongodbName
        self.MongodbCollection = MongodbCollection

    @classmethod
    def from_crawler(cls, crawler):
        return cls(MongodbHost=crawler.settings.get("MONGODB_HOST"), MongodbPort=crawler.settings.get("MONGODB_PORT"),
                   MongodbName=crawler.settings.get("MONGODB_DBNAME"),
                   MongodbCollection=crawler.settings.get("MONGODB_COLLECTION"))

    def open_spider(self, spider):
        self.Client = pymongo.MongoClient(self.MongodbHost, self.MongodbPort)
        self.Mongodb = self.Client[self.MongodbName]

    def process_item(self, item, spider):
        key_word = {"file_url": item["file_url"], "name": item["name"]}
        res = self.Mongodb.find(key_word)
        for i in res:
            if i:
                raise DropItem("Duplicate item found: %s" % item)
            else:
                self.Mongodb.insert({"file_url": item["file_url"], "name": item["name"]})
                return item

    def close_spider(self, spider):
        self.Client.close()

Aug. 22, 2021

The second line of the process_item function, self.Mongodb.find(key_word), is incorrect: self.Mongodb is a Database object, and a Database object has no find method. The lookup has to go through a Collection object inside the database, i.e. self.Mongodb[collection_name].find(...).
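For reference, a minimal sketch of the fix, assuming the collection name is the MONGODB_COLLECTION setting already read in from_crawler (the self.Collection attribute below is a name introduced here for illustration). Using find_one also sidesteps a second problem in the original code: when find returns no documents, the for loop body never runs, so nothing is ever inserted or returned.

    def open_spider(self, spider):
        self.Client = pymongo.MongoClient(self.MongodbHost, self.MongodbPort)
        self.Db = self.Client[self.MongodbName]
        # Query a Collection object, not the Database itself.
        self.Collection = self.Db[self.MongodbCollection]

    def process_item(self, item, spider):
        key_word = {"file_url": item["file_url"], "name": item["name"]}
        # find_one returns a matching document, or None if there is no match.
        if self.Collection.find_one(key_word) is not None:
            raise DropItem("Duplicate item found: %s" % item)
        # insert_one replaces the deprecated insert used in the question's code.
        self.Collection.insert_one(key_word)
        return item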
