__author__ = 'DarkWeb'

'''
Starting point of the Darkweb Markets Mining
'''

import configparser
import os
import subprocess
from datetime import date

from MarketPlaces.DarkFox.crawler_selenium import crawler as crawlerDarkFox
from MarketPlaces.MikesGrandStore.crawler_selenium import crawler as crawlerMikesGrandStore
from MarketPlaces.DarkTor.crawler_selenium import crawler as crawlerDarkTor
from MarketPlaces.AnonymousMarketplace.crawler_selenium import crawler as crawlerAnonymousMarketplace
from MarketPlaces.CityMarket.crawler_selenium import crawler as crawlerCityMarket
from MarketPlaces.M00nkeyMarket.crawler_selenium import crawler as crawlerM00nkeyMarket
from MarketPlaces.ViceCity.crawler_selenium import crawler as crawlerViceCity
from MarketPlaces.CypherMarketplace.crawler_selenium import crawler as crawlerCypher
from MarketPlaces.PabloEscobarMarket.crawler_selenium import crawler as crawlerPabloEscobar
from MarketPlaces.DarkBazar.crawler_selenium import crawler as crawlerDarkBazar
from MarketPlaces.Sonanza.crawler_selenium import crawler as crawlerSonanza
from MarketPlaces.Kingdom.crawler_selenium import crawler as crawlerKingdom
from MarketPlaces.BlackPyramid.crawler_selenium import crawler as crawlerBlackPyramid
from MarketPlaces.Quest.crawler_selenium import crawler as crawlerQuest
from MarketPlaces.Ares.crawler_selenium import crawler as crawlerAres
from MarketPlaces.Bohemia.crawler_selenium import crawler as crawlerBohemia
from MarketPlaces.TheDarkMarket.crawler_selenium import crawler as crawlerTheDarkMarket
from MarketPlaces.GoFish.crawler_selenium import crawler as crawlerGoFish
from MarketPlaces.ZeroDay.crawler_selenium import crawler as crawlerZeroDay

config = configparser.ConfigParser()
config.read('../../setup.ini')

# Today's date as MMDDYYYY — used to name the per-day scrape directories.
# strftime produces the same zero-padded string the original built from
# three separate "%02d"/"%04d" formats.
CURRENT_DATE = date.today().strftime('%m%d%Y')


# reads list of marketplaces
def getMarkets():
    """Return the marketplace names read from MarketsList.txt.

    Returns:
        list[str]: one entry per line of the file; each entry keeps its
        trailing newline — callers are expected to strip it themselves.
    """
    with open('MarketsList.txt') as f:
        return f.readlines()
# Creates needed directories for marketplace if doesn't exist
def createDirectory(mkt):
    """Create the HTML_Pages directory tree for marketplace *mkt*.

    Layout created under the configured shared folder:
        MarketPlaces/<mkt>/HTML_Pages/<CURRENT_DATE>/Listing/Read
        MarketPlaces/<mkt>/HTML_Pages/<CURRENT_DATE>/Description/Read

    Args:
        mkt (str): marketplace name; its package (crawler + parser)
            should already exist.
    """
    # Package should already be there, holding crawler and parser
    pagesDir = os.path.join(config.get('Project', 'shared_folder'),
                            "MarketPlaces/" + mkt + "/HTML_Pages")
    currentDateDir = pagesDir + '/' + CURRENT_DATE

    # makedirs(exist_ok=True) builds each full chain idempotently,
    # replacing the original five isdir-then-mkdir pairs.
    for subDir in ('Listing/Read', 'Description/Read'):
        os.makedirs(currentDateDir + '/' + subDir, exist_ok=True)


# Opens Tor Browser
def opentor():
    """Launch the Tor Browser binary and wait for manual confirmation.

    Reads the binary path from the [TOR] section of setup.ini and stores
    the spawned process id in the module-global ``pid`` (kept for parity
    with the original; nothing in this file reads it back).
    """
    global pid
    print("Connecting Tor...")
    pro = subprocess.Popen(config.get('TOR', 'firefox_binary_path'))
    pid = pro.pid
    input('Press ENTER when Tor is connected to continue')
    return


if __name__ == '__main__':
    opentor()

    # Market name -> crawler entry point; replaces the original 18-branch
    # if/elif chain. Unknown names are silently skipped, exactly as the
    # original chain's missing else-branch did.
    # NOTE(review): crawlerBohemia is imported but was never dispatched in
    # the original chain — left out here to preserve behavior; confirm
    # whether "Bohemia" should be wired in.
    crawlers = {
        "DarkFox": crawlerDarkFox,
        "MikesGrandStore": crawlerMikesGrandStore,
        "DarkTor": crawlerDarkTor,
        "AnonymousMarketplace": crawlerAnonymousMarketplace,
        "CityMarket": crawlerCityMarket,
        "M00nkeyMarket": crawlerM00nkeyMarket,
        "ViceCity": crawlerViceCity,
        "CypherMarketplace": crawlerCypher,
        "PabloEscobarMarket": crawlerPabloEscobar,
        "DarkBazar": crawlerDarkBazar,
        "Sonanza": crawlerSonanza,
        "Kingdom": crawlerKingdom,
        "BlackPyramid": crawlerBlackPyramid,
        "Quest": crawlerQuest,
        "Ares": crawlerAres,
        "GoFish": crawlerGoFish,
        "TheDarkMarket": crawlerTheDarkMarket,
        "ZeroDay": crawlerZeroDay,
    }

    mktsList = getMarkets()

    for mkt in mktsList:
        mkt = mkt.replace('\n', '')

        print("\nCreating listing and description directories ... for " + mkt)
        createDirectory(mkt)
        print("Directories created.")

        crawler = crawlers.get(mkt)
        if crawler is not None:
            crawler()

    print("\nScraping process completed!")