This is based on the calsyslab project.

__author__ = 'DarkWeb'

'''
Starting point of the Darkweb Markets Mining
'''

from datetime import *
from MarketPlaces.DarkFox.crawler_selenium import crawler as crawlerDarkFox
from MarketPlaces.MikesGrandStore.crawler_selenium import crawler as crawlerMikesGrandStore
from MarketPlaces.DarkTor.crawler_selenium import crawler as crawlerDarkTor
from MarketPlaces.AnonymousMarketplace.crawler_selenium import crawler as crawlerAnonymousMarketplace
from MarketPlaces.CityMarket.crawler_selenium import crawler as crawlerCityMarket
from MarketPlaces.M00nkeyMarket.crawler_selenium import crawler as crawlerM00nkeyMarket
from MarketPlaces.ViceCity.crawler_selenium import crawler as crawlerViceCity
from MarketPlaces.CypherMarketplace.crawler_selenium import crawler as crawlerCypher
from MarketPlaces.PabloEscobarMarket.crawler_selenium import crawler as crawlerPabloEscobar

import configparser
import os
import subprocess

config = configparser.ConfigParser()
config.read('../../setup.ini')

# Current date as an MMDDYYYY string, used to name each day's scrape directories
CURRENT_DATE = str("%02d" % date.today().month) + str("%02d" % date.today().day) + str("%04d" % date.today().year)
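
# setup.ini is not included in this file; judging from the config.get() calls
# above and in opentor() below, it must define at least the two options
# sketched here. The values are illustrative placeholders, not paths from the
# original project:
#
#   [Project]
#   shared_folder = /path/to/shared/output
#
#   [TOR]
#   firefox_binary_path = /path/to/tor-browser/Browser/firefox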

# Reads list of marketplaces
def getMarkets():
    mkts = []
    with open('MarketsList.txt') as f:
        mkts = f.readlines()
    return mkts
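
# MarketsList.txt is read one line per marketplace, and each name (with its
# trailing newline stripped) is matched against the dispatch in __main__
# below, so it is assumed to look something like this (names taken from that
# dispatch; the selection here is arbitrary):
#
#   DarkFox
#   M00nkeyMarket
#   ViceCity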

# Creates the needed directories for a marketplace if they don't exist
def createDirectory(mkt):
    # Package should already be there, holding crawler and parser
    # pagesDir = '../' + mkt + '/HTML_Pages'
    pagesDir = os.path.join(config.get('Project', 'shared_folder'), "MarketPlaces/" + mkt + "/HTML_Pages")
    if not os.path.isdir(pagesDir):
        os.makedirs(pagesDir)

    currentDateDir = pagesDir + '/' + CURRENT_DATE
    if not os.path.isdir(currentDateDir):
        os.mkdir(currentDateDir)

    listingDir = currentDateDir + '/Listing'
    if not os.path.isdir(listingDir):
        os.mkdir(listingDir)

    listReadDir = listingDir + '/Read'
    if not os.path.isdir(listReadDir):
        os.mkdir(listReadDir)

    descriptionDir = currentDateDir + '/Description'
    if not os.path.isdir(descriptionDir):
        os.mkdir(descriptionDir)

    descReadDir = descriptionDir + '/Read'
    if not os.path.isdir(descReadDir):
        os.mkdir(descReadDir)
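
# For example, with shared_folder set to the placeholder /path/to/shared/output
# and a run on June 28, 2024 (CURRENT_DATE = "06282024"),
# createDirectory("DarkFox") would produce:
#
#   /path/to/shared/output/MarketPlaces/DarkFox/HTML_Pages/06282024/
#       Listing/
#           Read/
#       Description/
#           Read/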

# Opens Tor Browser
def opentor():
    global pid
    print("Connecting Tor...")
    pro = subprocess.Popen(config.get('TOR', 'firefox_binary_path'))
    pid = pro.pid
    # time.sleep(7.5)
    input('Press ENTER when Tor is connected to continue')
    return

if __name__ == '__main__':

    opentor()
    mktsList = getMarkets()

    for mkt in mktsList:
        mkt = mkt.replace('\n','')

        print("\nCreating listing and description directories ... for " + mkt)
        createDirectory(mkt)
        print("Directories created.")

        if mkt == "DarkFox":
            # for base in json["DarkFox"]["base"]:
            #     if crawlerDarkFox(base["url"], base["categories"]):
            #         break
            crawlerDarkFox()
        elif mkt == "MikesGrandStore":
            crawlerMikesGrandStore()
        elif mkt == "DarkTor":
            crawlerDarkTor()
        elif mkt == "AnonymousMarketplace":
            crawlerAnonymousMarketplace()
        elif mkt == "CityMarket":
            crawlerCityMarket()
        elif mkt == "M00nkeyMarket":
            crawlerM00nkeyMarket()
        elif mkt == "ViceCity":
            crawlerViceCity()
        elif mkt == "CypherMarketplace":
            crawlerCypher()
        elif mkt == "PabloEscobarMarket":
            crawlerPabloEscobar()

    print("\nScraping process completed!")