this is based on calsyslab project
You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.

139 lines
4.8 KiB

1 year ago
1 year ago
1 year ago
1 year ago
1 year ago
1 year ago
1 year ago
1 year ago
1 year ago
1 year ago
  1. __author__ = 'DarkWeb'
  2. '''
  3. Starting point of the Darkweb Markets Mining
  4. '''
  5. from datetime import *
  6. from MarketPlaces.DarkFox.crawler_selenium import crawler as crawlerDarkFox
  7. from MarketPlaces.MikesGrandStore.crawler_selenium import crawler as crawlerMikesGrandStore
  8. from MarketPlaces.DarkTor.crawler_selenium import crawler as crawlerDarkTor
  9. from MarketPlaces.AnonymousMarketplace.crawler_selenium import crawler as crawlerAnonymousMarketplace
  10. from MarketPlaces.CityMarket.crawler_selenium import crawler as crawlerCityMarket
  11. from MarketPlaces.M00nkeyMarket.crawler_selenium import crawler as crawlerM00nkeyMarket
  12. from MarketPlaces.ViceCity.crawler_selenium import crawler as crawlerViceCity
  13. from MarketPlaces.CypherMarketplace.crawler_selenium import crawler as crawlerCypher
  14. from MarketPlaces.PabloEscobarMarket.crawler_selenium import crawler as crawlerPabloEscobar
  15. from MarketPlaces.DarkBazar.crawler_selenium import crawler as crawlerDarkBazar
  16. from MarketPlaces.Sonanza.crawler_selenium import crawler as crawlerSonanza
  17. from MarketPlaces.Kingdom.crawler_selenium import crawler as crawlerKingdom
  18. from MarketPlaces.BlackPyramid.crawler_selenium import crawler as crawlerBlackPyramid
  19. from MarketPlaces.Quest.crawler_selenium import crawler as crawlerQuest
  20. from MarketPlaces.Ares.crawler_selenium import crawler as crawlerAres
  21. from MarketPlaces.Bohemia.crawler_selenium import crawler as crawlerBohemia
  22. from MarketPlaces.TheDarkMarket.crawler_selenium import crawler as crawlerTheDarkMarket
  23. from MarketPlaces.GoFish.crawler_selenium import crawler as crawlerGoFish
  24. from MarketPlaces.ZeroDay.crawler_selenium import crawler as crawlerZeroDay
  25. import configparser
  26. import os
  27. import subprocess
  28. config = configparser.ConfigParser()
  29. config.read('../../setup.ini')
  30. CURRENT_DATE = str("%02d" % date.today().month) + str("%02d" % date.today().day) + str("%04d" % date.today().year)
  31. # reads list of marketplaces
  32. def getMarkets():
  33. mkts = []
  34. with open('MarketsList.txt') as f:
  35. mkts = f.readlines()
  36. return mkts
  37. # Creates needed directories for marketplace if doesn't exist
  38. def createDirectory(mkt):
  39. # Package should already be there, holding crawler and parser
  40. # pagesDir = '../' + mkt + '/HTML_Pages'
  41. pagesDir = os.path.join(config.get('Project', 'shared_folder'), "MarketPlaces/" + mkt + "/HTML_Pages")
  42. if not os.path.isdir(pagesDir):
  43. os.makedirs(pagesDir)
  44. currentDateDir = pagesDir + '/' + CURRENT_DATE
  45. if not os.path.isdir(currentDateDir):
  46. os.mkdir(currentDateDir)
  47. listingDir = currentDateDir + '/Listing'
  48. if not os.path.isdir(listingDir):
  49. os.mkdir(listingDir)
  50. listReadDir = listingDir + '/Read'
  51. if not os.path.isdir(listReadDir):
  52. os.mkdir(listReadDir)
  53. descriptionDir = currentDateDir + '/Description'
  54. if not os.path.isdir(descriptionDir):
  55. os.mkdir(descriptionDir)
  56. descReadDir = descriptionDir + '/Read'
  57. if not os.path.isdir(descReadDir):
  58. os.mkdir(descReadDir)
  59. # Opens Tor Browser
  60. def opentor():
  61. global pid
  62. print("Connecting Tor...")
  63. pro = subprocess.Popen(config.get('TOR', 'firefox_binary_path'))
  64. pid = pro.pid
  65. # time.sleep(7.5)
  66. input('Press ENTER when Tor is connected to continue')
  67. return
  68. if __name__ == '__main__':
  69. opentor()
  70. mktsList = getMarkets()
  71. for mkt in mktsList:
  72. mkt = mkt.replace('\n','')
  73. print("\nCreating listing and description directories ... for " + mkt)
  74. createDirectory(mkt)
  75. print("Directories created.")
  76. if mkt == "DarkFox":
  77. # for base in json["DarkFox"]["base"]:
  78. # if crawlerDarkFox(base["url"], base["categories"]):
  79. # break
  80. crawlerDarkFox()
  81. elif mkt == "MikesGrandStore":
  82. crawlerMikesGrandStore()
  83. elif mkt == "DarkTor":
  84. crawlerDarkTor()
  85. elif mkt == "AnonymousMarketplace":
  86. crawlerAnonymousMarketplace()
  87. elif mkt == "CityMarket":
  88. crawlerCityMarket()
  89. elif mkt == "M00nkeyMarket":
  90. crawlerM00nkeyMarket()
  91. elif mkt == "ViceCity":
  92. crawlerViceCity()
  93. elif mkt == "CypherMarketplace":
  94. crawlerCypher()
  95. elif mkt == "PabloEscobarMarket":
  96. crawlerPabloEscobar()
  97. elif mkt == "DarkBazar":
  98. crawlerDarkBazar()
  99. elif mkt == "Sonanza":
  100. crawlerSonanza()
  101. elif mkt == "Kingdom":
  102. crawlerKingdom()
  103. elif mkt == "BlackPyramid":
  104. crawlerBlackPyramid()
  105. elif mkt == "Quest":
  106. crawlerQuest()
  107. elif mkt == "Ares":
  108. crawlerAres()
  109. elif mkt == "GoFish":
  110. crawlerGoFish()
  111. elif mkt == "TheDarkMarket":
  112. crawlerTheDarkMarket()
  113. elif mkt == "ZeroDay":
  114. crawlerZeroDay()
  115. print("\nScraping process completed!")