This is based on the calsyslab project.

__author__ = 'DarkWeb'

import glob
import os
import codecs
import shutil
import traceback  # used by the parse/persist error handlers below

from bs4 import BeautifulSoup  # used by read_file (also re-exported by the wildcard parser imports)

from MarketPlaces.DB_Connection.db_connection import *
from MarketPlaces.DarkFox.parser import *
from MarketPlaces.Tor2door.parser import *
from MarketPlaces.Apocalypse.parser import *
from MarketPlaces.ThiefWorld.parser import *
from MarketPlaces.AnonymousMarketplace.parser import *
from MarketPlaces.ViceCity.parser import *
from MarketPlaces.TorBay.parser import *
from MarketPlaces.M00nkeyMarket.parser import *
from MarketPlaces.DarkMatter.parser import *
from MarketPlaces.DigitalThriftShop.parser import *
from MarketPlaces.LionMarketplace.parser import *
from MarketPlaces.TorMarket.parser import *
from MarketPlaces.HiddenMarket.parser import *
from MarketPlaces.RobinhoodMarket.parser import *
from MarketPlaces.Nexus.parser import *
from MarketPlaces.MikesGrandStore.parser import *
from MarketPlaces.DarkBazar.parser import *
from MarketPlaces.Classifier.classify_product import predict

nError = 0
def mergePages(rmm, rec):
    # Fill every field the listing page could not provide (placeholder "-1")
    # with the value parsed from the matching description page.

    # key = u"Pr:" + rec[1].upper() + u" Vendor:" + rec[18].upper()
    # key = rec[23]

    print("----------------- Matched: " + rec[4] + "--------------------")

    if rec[1] == "-1":  # name_vendor
        rec[1] = rmm[0]
    if rec[2] == "-1":  # rating_vendor
        rec[2] = rmm[1]
    if rec[3] == "-1":  # success_vendor
        rec[3] = rmm[2]
    if rec[4] == "-1":  # name_item
        rec[4] = rmm[3]
    if rec[5] == "-1":  # description_item
        rec[5] = rmm[4]
    if rec[6] == "-1":  # cve_item
        rec[6] = rmm[5]
    if rec[7] == "-1":  # ms_item
        rec[7] = rmm[6]
    if rec[8] == "-1":  # category_item
        rec[8] = rmm[7]
    if rec[9] == "-1":  # views_item
        rec[9] = rmm[8]
    if rec[10] == "-1":  # reviews_item
        rec[10] = rmm[9]
    if rec[11] == "-1":  # rating_item
        rec[11] = rmm[10]
    if rec[12] == "-1":  # adddate_item
        rec[12] = rmm[11]
    if rec[13] == "-1":  # btc_item
        rec[13] = rmm[12]
    if rec[14] == "-1":  # usd_item
        rec[14] = rmm[13]
    if rec[15] == "-1":  # euro_item
        rec[15] = rmm[14]
    if rec[16] == "-1":  # quantitysold_item
        rec[16] = rmm[15]
    if rec[17] == "-1":  # quantityleft_item
        rec[17] = rmm[16]
    if rec[18] == "-1":  # shippedfrom_item
        rec[18] = rmm[17]
    if rec[19] == "-1":  # shippedto_item
        rec[19] = rmm[18]

    return rec
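# For illustration (hypothetical values, not from the project): if the listing
# parser could not extract the vendor name, rec[1] holds the placeholder "-1"
# and mergePages fills it from the description record, e.g.
#   rec[1] == "-1", rmm[0] == "someVendor"  ->  after mergePages, rec[1] == "someVendor"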
def persist_data(url, row, cur):
    # Insert the marketplace, vendor, and item rows parsed from one record.
    marketPlace = create_marketPlace(cur, row, url)
    vendor = create_vendor(cur, row, marketPlace)
    create_items(cur, row, marketPlace, vendor)
def incrementError():
    global nError
    nError += 1
def read_file(filePath, createLog, logFile):
    try:
        html = codecs.open(filePath.strip('\n'), encoding='utf8')
        soup = BeautifulSoup(html, "html.parser")
        html.close()
        return soup
    except Exception:
        try:
            # Fall back to the platform default encoding.
            html = open(filePath.strip('\n'))
            soup = BeautifulSoup(html, "html.parser")
            html.close()
            return soup
        except Exception:
            incrementError()
            print("There was a problem reading the file " + filePath)
            if createLog:
                logFile.write(
                    str(nError) + ". There was a problem reading the file " + filePath + "\n")
            return None
def parse_listing(marketPlace, listingFile, soup, createLog, logFile):
    try:
        if marketPlace == "DarkFox":
            rw = darkfox_listing_parser(soup)
        elif marketPlace == "Tor2door":
            rw = tor2door_listing_parser(soup)
        elif marketPlace == "Apocalypse":
            rw = apocalypse_listing_parser(soup)
        elif marketPlace == "ThiefWorld":
            rw = thiefWorld_listing_parser(soup)
        elif marketPlace == "AnonymousMarketplace":
            rw = anonymousMarketplace_listing_parser(soup)
        elif marketPlace == "ViceCity":
            rw = vicecity_listing_parser(soup)
        elif marketPlace == "TorBay":
            rw = torbay_listing_parser(soup)
        elif marketPlace == "M00nkeyMarket":
            rw = m00nkey_listing_parser(soup)
        elif marketPlace == "HiddenMarket":
            rw = hiddenmarket_listing_parser(soup)
        elif marketPlace == "DarkMatter":
            rw = darkmatter_listing_parser(soup)
        elif marketPlace == "DigitalThriftShop":
            rw = digitalThriftShop_listing_parser(soup)
        elif marketPlace == "LionMarketplace":
            rw = lionmarketplace_listing_parser(soup)
        elif marketPlace == "TorMarket":
            rw = tormarket_listing_parser(soup)
        elif marketPlace == "RobinhoodMarket":
            rw = Robinhood_listing_parser(soup)
        elif marketPlace == "Nexus":
            rw = nexus_listing_parser(soup)
        elif marketPlace == "MikesGrandStore":
            rw = mikesGrandStore_listing_parser(soup)
        elif marketPlace == "DarkBazar":
            rw = darkbazar_listing_parser(soup)
        else:
            print("MISSING CALL TO LISTING PARSER IN PREPARE_PARSER.PY!")
            raise Exception
        return rw

    except Exception:
        incrementError()
        print("There was a problem parsing the file " + listingFile + " in the Listing section!")
        traceback.print_exc()
        if createLog:
            logFile.write(
                str(nError) + ". There was a problem parsing the file " + listingFile + " in the Listing section.\n")
        return None
def parse_description(marketPlace, descriptionFile, soup, createLog, logFile):
    try:
        if marketPlace == "DarkFox":
            rmm = darkfox_description_parser(soup)
        elif marketPlace == "Tor2door":
            rmm = tor2door_description_parser(soup)
        elif marketPlace == "Apocalypse":
            rmm = apocalypse_description_parser(soup)
        elif marketPlace == "ThiefWorld":
            rmm = thiefWorld_description_parser(soup)
        elif marketPlace == "AnonymousMarketplace":
            rmm = anonymousMarketplace_description_parser(soup)
        elif marketPlace == "ViceCity":
            rmm = vicecity_description_parser(soup)
        elif marketPlace == "TorBay":
            rmm = torbay_description_parser(soup)
        elif marketPlace == "M00nkeyMarket":
            rmm = m00nkey_description_parser(soup)
        elif marketPlace == "HiddenMarket":
            rmm = hiddenmarket_description_parser(soup)
        elif marketPlace == "DarkMatter":
            rmm = darkmatter_description_parser(soup)
        elif marketPlace == "DigitalThriftShop":
            rmm = digitalThriftShop_description_parser(soup)
        elif marketPlace == "LionMarketplace":
            rmm = lionmarketplace_description_parser(soup)
        elif marketPlace == "TorMarket":
            rmm = tormarket_description_parser(soup)
        elif marketPlace == "RobinhoodMarket":
            rmm = Robinhood_description_parser(soup)
        elif marketPlace == "Nexus":
            rmm = nexus_description_parser(soup)
        elif marketPlace == "MikesGrandStore":
            rmm = mikesGrandStore_description_parser(soup)
        elif marketPlace == "DarkBazar":
            rmm = darkbazar_description_parser(soup)
        else:
            print("MISSING CALL TO DESCRIPTION PARSER IN PREPARE_PARSER.PY!")
            raise Exception
        return rmm

    except Exception:
        incrementError()
        print("There was a problem parsing the file " + descriptionFile + " in the Description section!")
        traceback.print_exc()
        if createLog:
            logFile.write(
                str(nError) + ". There was a problem parsing the file " + descriptionFile + " in the Description section.\n")
        return None
def persist_record(url, rec, cur, con, createLog, logFile, listingFile, descriptionFile):
    try:
        persist_data(url, tuple(rec), cur)
        con.commit()
        return True
    except Exception:
        con.rollback()

        trace = traceback.format_exc()

        # Duplicate-key failures ("already exists") are expected on re-runs
        # and are not counted as errors.
        if trace.find("already exists") == -1:
            incrementError()
            print(f"There was a problem persisting the files ({listingFile} + {descriptionFile}) in the database!")
            traceback.print_exc()
            if createLog:
                logFile.write(
                    str(nError) + f". There was a problem persisting the files ({listingFile} + {descriptionFile}) in the database!\n")
            return False
        else:
            return True
def move_file(filePath, createLog, logFile):
    # source = line2.replace(os.path.basename(line2), "") + filename
    source = filePath
    destination = filePath.replace(os.path.basename(filePath), "") + r'Read/'

    try:
        # Create the 'Read' folder on first use so shutil.move places the
        # file inside it instead of failing on a missing destination.
        os.makedirs(destination, exist_ok=True)
        shutil.move(source, destination)
        return True
    except Exception:
        print("There was a problem moving the file " + filePath)
        incrementError()
        if createLog:
            logFile.write(
                str(nError) + ". There was a problem moving the file " + filePath + "\n")
        return False
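# For illustration (hypothetical path): a processed page at
#   .../HTML_Pages/<date>/Description/item1.html
# is moved to
#   .../HTML_Pages/<date>/Description/Read/item1.html
# so that already-processed files are not picked up again by the next run's glob.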
def new_parse(marketPlace, url, createLog):

    from MarketPlaces.Initialization.markets_mining import config, CURRENT_DATE

    print("Parsing the " + marketPlace + " market and conducting data classification to store the information in the database.")

    # Connecting to the database
    con = connectDataBase()
    cur = con.cursor()

    # Creating the tables (the database itself should be created manually)
    create_database(cur, con)

    mainDir = os.path.join(config.get('Project', 'shared_folder'), "MarketPlaces/" + marketPlace + "/HTML_Pages")

    # Creating the log file for each market
    if createLog:
        try:
            logFile = open(os.path.join(mainDir, CURRENT_DATE, marketPlace + "_" + CURRENT_DATE + ".log"), "w")
        except Exception:
            print("Could not open log file!")
            createLog = False
            logFile = None
            # raise SystemExit
    else:
        logFile = None

    # Reading the Listing HTML pages (os.path.join instead of a hard-coded
    # backslash keeps the glob pattern portable across platforms)
    listings = glob.glob(os.path.join(mainDir, CURRENT_DATE, "Listing", '*.html'))

    for listingIndex, listingFile in enumerate(listings):

        print("Reading listing folder of '" + marketPlace + "', file '" + os.path.basename(listingFile) + "', index= " + str(
            listingIndex + 1) + " ... " + str(len(listings)))

        listingSoup = read_file(listingFile, createLog, logFile)

        # Listing flags
        doParseListing = listingSoup is not None
        doDescription = False

        readDescriptionError = False
        parseDescriptionError = False
        persistDescriptionError = False
        moveDescriptionError = False
        findDescriptionError = False

        rw = []

        if doParseListing:
            rw = parse_listing(marketPlace, listingFile, listingSoup, createLog, logFile)
            doDescription = rw is not None

        if doDescription:

            nFound = 0

            for rec in rw:

                rec = rec.split(',')

                # cleanLink is provided by the wildcard parser imports above
                descriptionPattern = cleanLink(rec[20]) + ".html"

                # Reading the associated description HTML pages
                descriptions = glob.glob(os.path.join(mainDir, CURRENT_DATE, "Description", descriptionPattern))

                nFound += len(descriptions)

                for descriptionIndex, descriptionFile in enumerate(descriptions):

                    print("Reading description folder of '" + marketPlace + "', file '" + os.path.basename(
                        descriptionFile) + "', index= " + str(descriptionIndex + 1) + " ... " + str(len(descriptions)))

                    descriptionSoup = read_file(descriptionFile, createLog, logFile)

                    # Description flags
                    doParseDescription = descriptionSoup is not None
                    doPersistRecord = False
                    doMoveDescription = False

                    rmm = []

                    if doParseDescription:
                        rmm = parse_description(marketPlace, descriptionFile, descriptionSoup, createLog, logFile)
                        doPersistRecord = rmm is not None
                    else:
                        readDescriptionError = True
                        parseDescriptionError = True

                    if doPersistRecord:

                        # Combining the information from the Listing and Description pages
                        rec = mergePages(rmm, rec)

                        # Appending the classification of the product to the record
                        rec.append(str(predict(rec[4], rec[5], language='sup_english')))

                        # Persisting the information in the database
                        persistSuccess = persist_record(url, rec, cur, con, createLog, logFile, listingFile,
                                                        descriptionFile)

                        doMoveDescription = persistSuccess

                    else:
                        parseDescriptionError = True

                    if doMoveDescription:

                        # Moving the description file to the 'Read' (completed) folder
                        moveSuccess = move_file(descriptionFile, createLog, logFile)

                        if not moveSuccess:
                            moveDescriptionError = True

                    else:
                        moveDescriptionError = True

            if nFound == 0:
                findDescriptionError = True
                incrementError()
                print(f"There was a problem locating the file(s) for {listingFile} in the Description section!")
                if createLog:
                    logFile.write(
                        str(nError) + f". There was a problem locating the file(s) for {listingFile}"
                                      f" in the Description section!\n")

        if not (readDescriptionError or parseDescriptionError or persistDescriptionError
                or moveDescriptionError or findDescriptionError):

            # Moving the listing file to the 'Read' (completed) folder
            move_file(listingFile, createLog, logFile)

    if createLog:
        logFile.close()

    print("Parsing the " + marketPlace + " market and data classification done.")
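
if __name__ == '__main__':
    # Minimal usage sketch (hypothetical values): the crawler normally calls
    # new_parse from the MarketPlaces initialization script after mining the
    # HTML pages. The market name must match one of the parser branches above;
    # the onion URL here is a placeholder, not a real address.
    new_parse(marketPlace="DarkBazar", url="http://example.onion", createLog=False)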