__author__ = 'DarkWeb'

'''
Entry point of the darkweb-marketplace mining pipeline: opens Tor, then runs the crawler for each marketplace listed in MarketsList.txt.
'''

from datetime import *
from MarketPlaces.DarkFox.crawler_selenium import crawler as crawlerDarkFox
from MarketPlaces.MikesGrandStore.crawler_selenium import crawler as crawlerMikesGrandStore
from MarketPlaces.DarkTor.crawler_selenium import crawler as crawlerDarkTor
from MarketPlaces.AnonymousMarketplace.crawler_selenium import crawler as crawlerAnonymousMarketplace
from MarketPlaces.CityMarket.crawler_selenium import crawler as crawlerCityMarket
from MarketPlaces.M00nkeyMarket.crawler_selenium import crawler as crawlerM00nkeyMarket
from MarketPlaces.ViceCity.crawler_selenium import crawler as crawlerViceCity
from MarketPlaces.CypherMarketplace.crawler_selenium import crawler as crawlerCypher
from MarketPlaces.PabloEscobarMarket.crawler_selenium import crawler as crawlerPabloEscobar
from MarketPlaces.Ares.crawler_selenium import crawler as crawlerAres

import configparser
import os
import subprocess

config = configparser.ConfigParser()
config.read('../../setup.ini')
# MMDDYYYY stamp used to name per-run output directories.
# Single date.today() call (instead of three) so month/day/year cannot
# disagree if the script starts right at midnight.
CURRENT_DATE = date.today().strftime('%m%d%Y')


# reads list of marketplaces
def getMarkets():
    """Read the list of marketplaces to crawl from MarketsList.txt.

    Returns:
        list[str]: one entry per line, trailing newlines preserved
        (callers strip them).
    """
    # Dead `mkts = []` initialization removed: readlines() already
    # returns a fresh list.
    with open('MarketsList.txt') as f:
        return f.readlines()


# Creates needed directories for marketplace if doesn't exist
# Creates needed directories for marketplace if doesn't exist
def createDirectory(mkt):
    """Create the HTML output tree for one marketplace.

    Layout created under the shared folder from setup.ini:
        MarketPlaces/<mkt>/HTML_Pages/<CURRENT_DATE>/{Listing,Description}/Read

    Args:
        mkt (str): marketplace package name, e.g. "DarkFox".
    """
    # Package should already be there, holding crawler and parser
    pagesDir = os.path.join(config.get('Project', 'shared_folder'),
                            "MarketPlaces", mkt, "HTML_Pages")
    currentDateDir = os.path.join(pagesDir, CURRENT_DATE)

    # makedirs(..., exist_ok=True) creates every intermediate directory
    # in one atomic-per-level call, replacing six race-prone
    # isdir/mkdir check-then-act pairs.
    for leaf in ('Listing', 'Description'):
        os.makedirs(os.path.join(currentDateDir, leaf, 'Read'), exist_ok=True)


# Opens Tor Browser
def opentor():
    global pid
    print("Connecting Tor...")
    pro = subprocess.Popen(config.get('TOR', 'firefox_binary_path'))
    pid = pro.pid
    # time.sleep(7.5)
    input('Press ENTER when Tor is connected to continue')
    return


if __name__ == '__main__':

    opentor()

    # Dispatch table: marketplace name -> crawler entry point.
    # Replaces a long if/elif chain; adding a market is now one line.
    CRAWLERS = {
        "DarkFox": crawlerDarkFox,
        "MikesGrandStore": crawlerMikesGrandStore,
        "DarkTor": crawlerDarkTor,
        "AnonymousMarketplace": crawlerAnonymousMarketplace,
        "CityMarket": crawlerCityMarket,
        "M00nkeyMarket": crawlerM00nkeyMarket,
        "ViceCity": crawlerViceCity,
        "CypherMarketplace": crawlerCypher,
        "PabloEscobarMarket": crawlerPabloEscobar,
        "Ares": crawlerAres,
    }

    mktsList = getMarkets()

    for mkt in mktsList:
        mkt = mkt.replace('\n', '')

        # Blank lines in MarketsList.txt previously created an empty
        # "MarketPlaces//HTML_Pages" tree; skip them.
        if not mkt:
            continue

        print("\nCreating listing and description directories ... for " + mkt)
        createDirectory(mkt)
        print("Directories created.")

        crawler = CRAWLERS.get(mkt)
        if crawler is None:
            # Previously unknown names were silently ignored; warn so a
            # typo in MarketsList.txt is visible.
            print("No crawler registered for '" + mkt + "', skipping.")
            continue
        crawler()

    print("\nScraping process completed!")