This is based on the calsyslab project.
__author__ = 'DarkWeb'

'''
Starting point of the Darkweb Markets Mining
'''

from datetime import *
from MarketPlaces.DarkFox.crawler_selenium import crawler as crawlerDarkFox
from MarketPlaces.MikesGrandStore.crawler_selenium import crawler as crawlerMikesGrandStore
from MarketPlaces.DarkTor.crawler_selenium import crawler as crawlerDarkTor
from MarketPlaces.AnonymousMarketplace.crawler_selenium import crawler as crawlerAnonymousMarketplace
from MarketPlaces.CityMarket.crawler_selenium import crawler as crawlerCityMarket
from MarketPlaces.M00nkeyMarket.crawler_selenium import crawler as crawlerM00nkeyMarket
from MarketPlaces.ViceCity.crawler_selenium import crawler as crawlerViceCity
from MarketPlaces.CypherMarketplace.crawler_selenium import crawler as crawlerCypher
from MarketPlaces.PabloEscobarMarket.crawler_selenium import crawler as crawlerPabloEscobar
from MarketPlaces.DarkBazar.crawler_selenium import crawler as crawlerDarkBazar
from MarketPlaces.Sonanza.crawler_selenium import crawler as crawlerSonanza
from MarketPlaces.Kingdom.crawler_selenium import crawler as crawlerKingdom
from MarketPlaces.BlackPyramid.crawler_selenium import crawler as crawlerBlackPyramid
from MarketPlaces.Quest.crawler_selenium import crawler as crawlerQuest
from MarketPlaces.Ares.crawler_selenium import crawler as crawlerAres
from MarketPlaces.Bohemia.crawler_selenium import crawler as crawlerBohemia
from MarketPlaces.TheDarkMarket.crawler_selenium import crawler as crawlerTheDarkMarket
from MarketPlaces.GoFish.crawler_selenium import crawler as crawlerGoFish
from MarketPlaces.ZeroDay.crawler_selenium import crawler as crawlerZeroDay
from MarketPlaces.Torzon.crawler_selenium import crawler as crawlerTorzon

import configparser
import os
import subprocess

config = configparser.ConfigParser()
config.read('../../setup.ini')

CURRENT_DATE = str("%02d" % date.today().month) + str("%02d" % date.today().day) + str("%04d" % date.today().year)


# reads list of marketplaces
def getMarkets():
    mkts = []
    with open('MarketsList.txt') as f:
        mkts = f.readlines()
    return mkts


# Creates needed directories for marketplace if doesn't exist
def createDirectory(mkt):
    # Package should already be there, holding crawler and parser
    # pagesDir = '../' + mkt + '/HTML_Pages'
    pagesDir = os.path.join(config.get('Project', 'shared_folder'), "MarketPlaces/" + mkt + "/HTML_Pages")
    if not os.path.isdir(pagesDir):
        os.makedirs(pagesDir)

    currentDateDir = pagesDir + '/' + CURRENT_DATE
    if not os.path.isdir(currentDateDir):
        os.mkdir(currentDateDir)

    listingDir = currentDateDir + '/Listing'
    if not os.path.isdir(listingDir):
        os.mkdir(listingDir)

    listReadDir = listingDir + '/Read'
    if not os.path.isdir(listReadDir):
        os.mkdir(listReadDir)

    descriptionDir = currentDateDir + '/Description'
    if not os.path.isdir(descriptionDir):
        os.mkdir(descriptionDir)

    descReadDir = descriptionDir + '/Read'
    if not os.path.isdir(descReadDir):
        os.mkdir(descReadDir)


# Opens Tor Browser
def opentor():
    global pid
    print("Connecting Tor...")
    pro = subprocess.Popen(config.get('TOR', 'firefox_binary_path'))
    pid = pro.pid
    # time.sleep(7.5)
    input('Press ENTER when Tor is connected to continue')
    return


if __name__ == '__main__':
    opentor()

    mktsList = getMarkets()

    for mkt in mktsList:
        mkt = mkt.replace('\n', '')

        print("\nCreating listing and description directories ... for " + mkt)
        createDirectory(mkt)
        print("Directories created.")

        if mkt == "DarkFox":
            # for base in json["DarkFox"]["base"]:
            #     if crawlerDarkFox(base["url"], base["categories"]):
            #         break
            crawlerDarkFox()
        elif mkt == "MikesGrandStore":
            crawlerMikesGrandStore()
        elif mkt == "DarkTor":
            crawlerDarkTor()
        elif mkt == "AnonymousMarketplace":
            crawlerAnonymousMarketplace()
        elif mkt == "CityMarket":
            crawlerCityMarket()
        elif mkt == "M00nkeyMarket":
            crawlerM00nkeyMarket()
        elif mkt == "ViceCity":
            crawlerViceCity()
        elif mkt == "CypherMarketplace":
            crawlerCypher()
        elif mkt == "PabloEscobarMarket":
            crawlerPabloEscobar()
        elif mkt == "DarkBazar":
            crawlerDarkBazar()
        elif mkt == "Sonanza":
            crawlerSonanza()
        elif mkt == "Kingdom":
            crawlerKingdom()
        elif mkt == "BlackPyramid":
            crawlerBlackPyramid()
        elif mkt == "Quest":
            crawlerQuest()
        elif mkt == "Ares":
            crawlerAres()
        elif mkt == "GoFish":
            crawlerGoFish()
        elif mkt == "TheDarkMarket":
            crawlerTheDarkMarket()
        elif mkt == "ZeroDay":
            crawlerZeroDay()
        elif mkt == "Torzon":
            crawlerTorzon()

    print("\nScraping process completed!")