__author__ = 'DarkWeb'

'''
Starting point of the Darkweb Mining Platform
'''

import os
from datetime import *
from MarketPlaces.DarkFox.crawler_selenium import crawler as crawlerDarkFox
from MarketPlaces.Ares.crawler_selenium import crawler as crawlerAres
from MarketPlaces.Quest.crawler_selenium import crawler as crawlerQuest
from MarketPlaces.Royal.crawler_selenium import crawler as crawlerRoyal
from MarketPlaces.Kingdom.crawler_selenium import crawler as crawlerKingdom
from MarketPlaces.Tor2door.crawler_selenium import crawler as crawlerTor2door
from MarketPlaces.WeTheNorth.crawler_selenium import crawler as crawlerWeTheNorth
from MarketPlaces.Bohemia.crawler_selenium import crawler as crawlerBohemia
# from MarketPlaces.Kerberos.crawler_selenium import crawler as crawlerKerberos

import time


# Reads the list of marketplaces to crawl, one name per line
def getMarkets():
    mkts = []
    with open('MarketsList.txt') as f:
        mkts = f.readlines()
    return mkts


# Creates the needed directories for a marketplace if they don't exist
def createDirectory(mkt):
    # Package should already be there, holding crawler and parser
    pagesDir = '../' + mkt + '/HTML_Pages'
    # sharedFolderPath = r'\\VBoxSvr\VM_Files_(shared)'
    # pagesDir = os.path.join(sharedFolderPath, 'HTML/MarketPlaces/' + mkt + '/HTML_Pages')

    if not os.path.isdir(pagesDir):
        os.makedirs(pagesDir)

    # Date-stamped subdirectory in MMDDYYYY format
    currentDateDir = pagesDir + '/' + str("%02d" % date.today().month) + str("%02d" % date.today().day) + str("%04d" % date.today().year)

    if not os.path.isdir(currentDateDir):
        os.mkdir(currentDateDir)

    listingDir = currentDateDir + '/Listing'

    if not os.path.isdir(listingDir):
        os.mkdir(listingDir)

    listReadDir = listingDir + '/Read'

    if not os.path.isdir(listReadDir):
        os.mkdir(listReadDir)

    descriptionDir = currentDateDir + '/Description'

    if not os.path.isdir(descriptionDir):
        os.mkdir(descriptionDir)

    descReadDir = descriptionDir + '/Read'

    if not os.path.isdir(descReadDir):
        os.mkdir(descReadDir)


if __name__ == '__main__':

    mktsList = getMarkets()

    for mkt in mktsList:
        mkt = mkt.replace('\n', '')

        print("Creating listing and description directories ...")
        createDirectory(mkt)
        time.sleep(5)
        # input("Directories created successfully. Press ENTER to continue\n")

        # if mkt == "DarkFox":
        #     crawlerDarkFox()
        # elif mkt == "Ares":
        #     crawlerAres()
        # elif mkt == "Quest":
        #     crawlerQuest()
        # elif mkt == "Royal":
        #     crawlerRoyal()
        # elif mkt == 'Kingdom':
        #     crawlerKingdom()
        # elif mkt == 'Tor2door':
        #     crawlerTor2door()
        # elif mkt == 'WeTheNorth':
        #     crawlerWeTheNorth()
        # elif mkt == 'Bohemia':
        #     crawlerBohemia()

    print("Scraping process completed successfully!")