This is based on the calsyslab project.

__author__ = 'DarkWeb'

'''
Helium Forum Crawler (Selenium)
'''

from selenium import webdriver
from selenium.common.exceptions import NoSuchElementException
from selenium.webdriver.firefox.firefox_profile import FirefoxProfile
from selenium.webdriver.firefox.firefox_binary import FirefoxBinary
from selenium.webdriver.firefox.service import Service
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.by import By

from PIL import Image
import urllib.parse as urlparse
import os, time
from datetime import date
import subprocess
from bs4 import BeautifulSoup

from Forums.Initialization.prepare_parser import new_parse
from Forums.Helium.parser import helium_links_parser
from Forums.Utilities.utilities import cleanHTML

counter = 1
baseURL = 'http://fahue6hb7odzns36vfoi2dqfvqvjq4btt7vo52a67jivmyz6a6h3vzqd.onion/'

# Opens Tor Browser, crawls the website
def startCrawling():
    # opentor()
    # forumName = getForumName()

    driver = getAccess()

    if driver != 'down':
        try:
            login(driver)
            crawlForum(driver)
        except Exception as e:
            print(driver.current_url, e)
        closetor(driver)

    # new_parse(forumName, False)


# Opens Tor Browser
def opentor():
    global pid
    print("Connecting Tor...")
    path = open('../../path.txt').readline().strip()
    pro = subprocess.Popen(path)
    pid = pro.pid
    time.sleep(7.5)
    input('Tor Connected. Press ENTER to continue\n')
    return

# Logs in with premade account credentials; the login captcha is completed manually
def login(driver):
    # wait for the login page
    WebDriverWait(driver, 100).until(EC.visibility_of_element_located(
        (By.XPATH, "/html/body/div[2]/div/div[1]/div/div/div[2]/form/div[5]/div/button")))

    # enter username and password into the input boxes
    usernameBox = driver.find_element(by=By.XPATH, value='//*[@id="username"]')
    # username here
    usernameBox.send_keys('holyre')
    passwordBox = driver.find_element(by=By.XPATH, value='//*[@id="password"]')
    # password here
    passwordBox.send_keys('PlatinumBorn2')

    '''
    # wait for the captcha page to show up
    WebDriverWait(driver, 100).until(EC.visibility_of_element_located(
        (By.XPATH, '//*[@id="captcha_img"]')))

    # save the captcha image locally
    driver.find_element(by=By.XPATH, value='//*[@id="captcha_img"]').screenshot(r'..\Helium\captcha.png')

    # show the image in the default image viewer
    im = Image.open(r'..\Helium\captcha.png')
    im.show()

    # locate the captcha input box
    inputBox = driver.find_element(by=By.XPATH, value='//*[@id="captcha"]')

    # ask the user to type the captcha solution in the terminal
    userIn = input("Enter solution: ")

    # send the user's solution into the input box
    inputBox.send_keys(userIn)

    # click the verify (submit) button
    driver.find_element(by=By.XPATH, value="/html/body/div[2]/div/div[1]/div/div/div[2]/form/div[5]/div/button").click()
    '''

    input("Press ENTER when CAPTCHA is completed\n")

    # wait for the listing page to show up (this XPath may need to change for a different seed URL)
    WebDriverWait(driver, 50).until(EC.visibility_of_element_located(
        (By.XPATH, '/html/body/div[2]/div/p')))

# Returns the name of the website
def getForumName():
    name = 'Helium'
    return name


# Returns the link of the website
def getFixedURL():
    url = 'http://fahue6hb7odzns36vfoi2dqfvqvjq4btt7vo52a67jivmyz6a6h3vzqd.onion/login'
    return url


# Closes Tor Browser
def closetor(driver):
    # global pid
    # os.system("taskkill /pid " + str(pro.pid))
    # os.system("taskkill /t /f /im tor.exe")
    print('Closing Tor...')
    driver.close()
    time.sleep(3)
    return

# Creates the FireFox 'driver' and configures its 'Profile'
# to use the Tor proxy and socket
def createFFDriver():
    # path.txt supplies, line by line: the Tor Browser firefox binary,
    # the Firefox profile directory, and the geckodriver executable
    with open('../../path.txt', 'r') as file:
        lines = file.readlines()

    ff_binary = FirefoxBinary(lines[0].strip())

    ff_prof = FirefoxProfile(lines[1].strip())
    ff_prof.set_preference("places.history.enabled", False)
    ff_prof.set_preference("privacy.clearOnShutdown.offlineApps", True)
    ff_prof.set_preference("privacy.clearOnShutdown.passwords", True)
    ff_prof.set_preference("privacy.clearOnShutdown.siteSettings", True)
    ff_prof.set_preference("privacy.sanitize.sanitizeOnShutdown", True)
    ff_prof.set_preference("signon.rememberSignons", False)
    ff_prof.set_preference("network.cookie.lifetimePolicy", 2)
    ff_prof.set_preference("network.dns.disablePrefetch", True)
    ff_prof.set_preference("network.http.sendRefererHeader", 0)
    # ff_prof.set_preference("permissions.default.image", 2)
    ff_prof.set_preference("browser.download.folderList", 2)
    ff_prof.set_preference("browser.download.manager.showWhenStarting", False)
    ff_prof.set_preference("browser.helperApps.neverAsk.saveToDisk", "text/plain")
    # route all traffic through the Tor SOCKS5 proxy on 127.0.0.1:9150
    ff_prof.set_preference('network.proxy.type', 1)
    ff_prof.set_preference("network.proxy.socks_version", 5)
    ff_prof.set_preference('network.proxy.socks', '127.0.0.1')
    ff_prof.set_preference('network.proxy.socks_port', 9150)
    ff_prof.set_preference('network.proxy.socks_remote_dns', True)
    ff_prof.set_preference("javascript.enabled", True)
    ff_prof.update_preferences()

    service = Service(lines[2].strip())

    driver = webdriver.Firefox(firefox_binary=ff_binary, firefox_profile=ff_prof, service=service)

    return driver

def getAccess():
    url = getFixedURL()
    driver = createFFDriver()
    try:
        driver.get(url)
        return driver
    except:
        driver.close()
        return 'down'


# Saves the crawled html page
def savePage(page, url):
    cleanPage = cleanHTML(page)
    filePath = getFullPathName(url)
    os.makedirs(os.path.dirname(filePath), exist_ok=True)
    with open(filePath, 'wb') as file:
        file.write(cleanPage.encode('utf-8'))
    return

# Gets the full path of the page to be saved along with its appropriate file name
def getFullPathName(url):
    fileName = getNameFromURL(url)
    if isDescriptionLink(url):
        fullPath = r'..\Helium\HTML_Pages\\' + str(
            "%02d" % date.today().month) + str("%02d" % date.today().day) + str(
            "%04d" % date.today().year) + r'\\' + r'Description\\' + fileName + '.html'
    else:
        fullPath = r'..\Helium\HTML_Pages\\' + str(
            "%02d" % date.today().month) + str("%02d" % date.today().day) + str(
            "%04d" % date.today().year) + r'\\' + r'Listing\\' + fileName + '.html'
    return fullPath


# Creates the file name from passed URL
def getNameFromURL(url):
    global counter
    name = ''.join(e for e in url if e.isalnum())
    if name == '':
        name = str(counter)
        counter = counter + 1
    return name
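
# e.g. getNameFromURL('http://host.onion/board/17') returns 'httphostonionboard17':
# every non-alphanumeric character is stripped, so each saved page gets a
# filesystem-safe file name; fully non-alphanumeric URLs fall back to a counter.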

def getInterestedLinks():
    links = []

    # # General Discussion
    # links.append('http://fahue6hb7odzns36vfoi2dqfvqvjq4btt7vo52a67jivmyz6a6h3vzqd.onion/board/6')
    # # Anonymity and Security
    # links.append('http://fahue6hb7odzns36vfoi2dqfvqvjq4btt7vo52a67jivmyz6a6h3vzqd.onion/board/8')
    # # Programming
    # links.append('http://fahue6hb7odzns36vfoi2dqfvqvjq4btt7vo52a67jivmyz6a6h3vzqd.onion/board/9')
    # # Carding Discussions
    # links.append('http://fahue6hb7odzns36vfoi2dqfvqvjq4btt7vo52a67jivmyz6a6h3vzqd.onion/board/10')
    # # Hacked Database (free)
    # links.append('http://fahue6hb7odzns36vfoi2dqfvqvjq4btt7vo52a67jivmyz6a6h3vzqd.onion/board/11')
    # Hacking tools, exploits and POC
    links.append('http://fahue6hb7odzns36vfoi2dqfvqvjq4btt7vo52a67jivmyz6a6h3vzqd.onion/board/17')
    # # Hacked Database
    # links.append('http://fahue6hb7odzns36vfoi2dqfvqvjq4btt7vo52a67jivmyz6a6h3vzqd.onion/board/12')
    # # Hacking and other Services
    # links.append('http://fahue6hb7odzns36vfoi2dqfvqvjq4btt7vo52a67jivmyz6a6h3vzqd.onion/board/13')
    # # Selling/Buying Malware, Exploits etc
    # links.append('http://fahue6hb7odzns36vfoi2dqfvqvjq4btt7vo52a67jivmyz6a6h3vzqd.onion/board/22')
    # # General Tutorials
    # links.append('http://fahue6hb7odzns36vfoi2dqfvqvjq4btt7vo52a67jivmyz6a6h3vzqd.onion/board/18')
    # # Hacking Tutorials
    # links.append('http://fahue6hb7odzns36vfoi2dqfvqvjq4btt7vo52a67jivmyz6a6h3vzqd.onion/board/19')

    return links

def crawlForum(driver):
    print("Crawling the Helium forum")

    linksToCrawl = getInterestedLinks()
    # visited = set(linksToCrawl)
    # initialTime = time.time()

    i = 0
    count = 0
    while i < len(linksToCrawl):
        link = linksToCrawl[i]
        print('Crawling :', link)
        try:
            try:
                driver.get(link)
            except:
                driver.refresh()
            html = driver.page_source
            savePage(html, link)

            has_next_page = True
            while has_next_page:
                # topic links found on the current listing page
                topics = topicPages(html)
                for topic in topics:
                    itemURL = urlparse.urljoin(baseURL, str(topic))
                    try:
                        driver.get(itemURL)
                    except:
                        driver.refresh()
                    savePage(driver.page_source, topic)
                    driver.back()

                    # comment out to crawl every topic on the page
                    break

                # comment out to follow more than one next-page link
                if count == 1:
                    count = 0
                    break

                try:
                    # the last <li> of the pagination bar holds the next-page link
                    bar = driver.find_element(by=By.XPATH, value='/html/body/div[2]/div/div[3]/ul')
                    li = bar.find_elements(By.TAG_NAME, 'li')[-1]
                    link = li.find_element(By.TAG_NAME, 'a').get_attribute('href')

                    if link == "":
                        raise NoSuchElementException
                    try:
                        driver.get(link)
                    except:
                        driver.refresh()
                    html = driver.page_source
                    savePage(html, link)
                    count += 1

                except NoSuchElementException:
                    has_next_page = False

        except Exception as e:
            print(link, e)
        i += 1

    # finalTime = time.time()
    # print(finalTime - initialTime)

    input("Crawling Helium forum done successfully. Press ENTER to continue\n")

# Returns 'True' if the link is a Topic link
def isDescriptionLink(url):
    if 'topic' in url:
        return True
    return False


# Returns 'True' if the link is a listing page link
def isListingLink(url):
    if 'board' in url:
        return True
    return False
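
# For example, a URL containing 'topic' (an individual thread) is saved under
# Description, while one containing 'board' (a forum section, like the board/17
# link above) is saved under Listing; see getFullPathName.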

# calling the parser to define the links
def topicPages(html):
    soup = BeautifulSoup(html, "html.parser")
    # helium_links_parser is expected to return the topic links found on the listing page
    return helium_links_parser(soup)


def crawler():
    startCrawling()
    # print("Crawling and Parsing Helium .... DONE!")
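
# Optional entry point so the module can also be run directly as a script
# (an assumption; the project may invoke crawler() from elsewhere):
if __name__ == '__main__':
    crawler()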