__author__ = 'DarkWeb'

'''
Starting point of the Darkweb Mining Platform
'''
|
|
|
|
import os
|
|
from datetime import *
|
|
from Forums.BestCardingWorld.crawler_selenium import crawler as crawlerBestCardingWorld
|
|
from Forums.CryptBB.crawler_selenium import crawler as crawlerCryptBB
|
|
from Forums.DWForums.crawler_selenium import crawler as crawlerDWForums
|
|
from Forums.Dread.crawler_selenium import crawler as crawlerDread
|
|
from Forums.Helium.crawler_selenium import crawler as crawlerHelium
|
|
# from Forums.Nulled.crawler_selenium import crawler as crawlerNulled
|
|
|
|
import time
|
|
|
|
|
|
# reads list of marketplaces
|
|
def getForums():
    """Read the list of forums to crawl from 'forumsList.txt' in the
    current working directory.

    Returns:
        list[str]: one entry per line of the file; entries keep their
        trailing newline (the caller strips it).

    Raises:
        FileNotFoundError: if forumsList.txt is missing — a configuration
        error, so it is deliberately not swallowed here.
    """
    with open('forumsList.txt') as f:
        return f.readlines()
|
|
|
|
|
|
# Creates needed directories for marketplace if doesn't exist
|
|
def createDirectory(forum):
    """Create the on-disk output tree for *forum* if it does not exist.

    Regular forums get '../<forum>/HTML_Pages'; the special 'Reddits'
    package keeps its pages directly under '../Reddits' with one subtree
    per subreddit (see createRedditsSubdirectories).
    """
    # Package should already be there, holding crawler and parser
    if forum == 'Reddits':
        pagesMainDir = '../' + forum
    else:
        pagesMainDir = '../' + forum + "/HTML_Pages"
    # sharedFolderPath = r'\\VBoxSvr\VM_Files_(shared)'
    # pagesMainDir = os.path.join(sharedFolderPath, 'HTML/Forums/' + forum + '/HTML_Pages')

    # exist_ok=True removes the isdir()-then-makedirs() race and makes
    # reruns a no-op.
    os.makedirs(pagesMainDir, exist_ok=True)

    if forum == 'Reddits':
        createRedditsSubdirectories(pagesMainDir)
    else:
        createSubdirectories(pagesMainDir)
|
|
|
|
|
|
def createRedditsSubdirectories(pagesMainDir):
    """Create one '<reddit>/HTML_Pages' subtree (plus the dated inner
    folders) under *pagesMainDir* for every subreddit listed in
    '../Reddits/redditsList.txt', one name per line.
    """
    with open('../Reddits/redditsList.txt', 'r') as f:
        reddits = f.readlines()

    for reddit in reddits:
        reddit = reddit.strip('\n')
        redditMainDir = pagesMainDir + '/' + reddit + '/HTML_Pages'
        # makedirs, not mkdir: the original os.mkdir() failed with
        # FileNotFoundError whenever the '<reddit>' parent directory did
        # not exist yet, since mkdir only creates the leaf component.
        # exist_ok=True also makes reruns safe.
        os.makedirs(redditMainDir, exist_ok=True)
        # Create inner time folders
        createSubdirectories(redditMainDir)
|
|
|
|
|
|
def createSubdirectories(pagesDir):
    """Create today's dated folder tree under *pagesDir*.

    Layout created:
        <pagesDir>/MMDDYYYY/Listing/Read
        <pagesDir>/MMDDYYYY/Description/Read
    ('Read' presumably holds pages already consumed downstream — TODO
    confirm against the parsers.)
    """
    # Take the date once so a run that spans midnight uses a single,
    # consistent date stamp (the original called date.today() six times).
    today = date.today()
    currentDateDir = pagesDir + '/' + today.strftime("%m%d%Y")

    for subPath in ('Listing/Read', 'Description/Read'):
        # One makedirs call creates the date dir and both nested levels;
        # exist_ok=True makes a same-day rerun a no-op.
        os.makedirs(currentDateDir + '/' + subPath, exist_ok=True)
|
|
|
|
|
|
if __name__ == '__main__':

    # One forum name per line of forumsList.txt; entries still carry
    # their trailing newline at this point.
    forumsList = getForums()

    for forum in forumsList:
        forum = forum.replace('\n','')

        print("Creating listing and description directories ...")
        createDirectory(forum)
        # NOTE(review): pause between forums — presumably to let the
        # shared-folder/filesystem settle; confirm it is still needed.
        time.sleep(5)
        # input("Directories created successfully. Press ENTER to continue\n")

        # Crawler dispatch is currently disabled; only the directory
        # scaffolding above actually runs per forum.
        # if forum == "BestCardingWorld":
        #     crawlerBestCardingWorld()
        # elif forum == "CryptBB":
        #     crawlerCryptBB()
        # elif forum == "DWForums":
        #     crawlerDWForums()
        # elif forum == "Dread":
        #     crawlerDread()
        # elif forum == "Helium":
        #     crawlerHelium()
        # elif forum == "Nulled":
        #     crawlerNulled()

    print("Scraping process completed successfully!")
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|