This is based on the calsyslab project.

__author__ = 'DarkWeb'

import glob
import os
import codecs
import shutil
import traceback
import time

# time and BeautifulSoup are used below; imported explicitly here rather than
# relying on the wildcard parser imports to provide them.
from bs4 import BeautifulSoup
from psycopg2.extras import RealDictCursor

from MarketPlaces.DB_Connection.db_connection import *
from MarketPlaces.DarkFox.parser import *
from MarketPlaces.AnonymousMarketplace.parser import *
from MarketPlaces.TheDarkMarket.parser import *
from MarketPlaces.ViceCity.parser import *
from MarketPlaces.M00nkeyMarket.parser import *
from MarketPlaces.MikesGrandStore.parser import *
from MarketPlaces.PabloEscobarMarket.parser import *
from MarketPlaces.CityMarket.parser import *
from MarketPlaces.DarkBazar.parser import *
from MarketPlaces.Sonanza.parser import *
from MarketPlaces.Kingdom.parser import *
from MarketPlaces.BlackPyramid.parser import *
from MarketPlaces.Quest.parser import *
from MarketPlaces.Ares.parser import *
from MarketPlaces.CypherMarketplace.parser import *
from MarketPlaces.WeTheNorth.parser import *
from MarketPlaces.Torzon.parser import *
from MarketPlaces.GoFish.parser import *
from MarketPlaces.ZeroDay.parser import *

from MarketPlaces.Classifier.classify_product import predict

nError = 0
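

# mergePages combines one listing-page record (rec) with the matching description-page
# record (rmm): listing fields still holding the "-1" placeholder are filled in from the
# description parser's output, and the image fields are taken from the description page
# when available.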
def mergePages(rmm, rec):

    # key = u"Pr:" + rec[1].upper() + u" Vendor:" + rec[18].upper()
    # key = rec[23]

    print("----------------- Matched: " + rec[4] + "--------------------")

    if rec[1] == "-1":  # name_vendor
        rec[1] = rmm[0]
    if rec[2] == "-1":  # rating_vendor
        rec[2] = rmm[1]
    if rec[3] == "-1":  # success_vendor
        rec[3] = rmm[2]
    if rec[4] == "-1":  # name_item
        rec[4] = rmm[3]
    if rec[5] == "-1":  # description_item
        rec[5] = rmm[4]
    if rec[6] == "-1":  # cve_item
        rec[6] = rmm[5]
    if rec[7] == "-1":  # ms_item
        rec[7] = rmm[6]
    if rec[8] == "-1":  # category_item
        rec[8] = rmm[7]
    if rec[9] == "-1":  # views_item
        rec[9] = rmm[8]
    if rec[10] == "-1":  # reviews_item
        rec[10] = rmm[9]
    if rec[11] == "-1":  # rating_item
        rec[11] = rmm[10]
    if rec[12] == "-1":  # adddate_item
        rec[12] = rmm[11]
    if rec[13] == "-1":  # btc_item
        rec[13] = rmm[12]
    if rec[14] == "-1":  # usd_item
        rec[14] = rmm[13]
    if rec[15] == "-1":  # euro_item
        rec[15] = rmm[14]
    if rec[16] == "-1":  # quantitysold_item
        rec[16] = rmm[15]
    if rec[17] == "-1":  # quantityleft_item
        rec[17] = rmm[16]
    if rec[18] == "-1":  # shippedfrom_item
        rec[18] = rmm[17]
    if rec[19] == "-1":  # shippedto_item
        rec[19] = rmm[18]

    if rmm[19] != "-1":  # image
        rec[20] = rmm[19]
    if rmm[20] != "-1":  # image_vendor
        rec[21] = rmm[20]

    return rec
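

# persist_data writes one merged record through the DB_Connection helpers:
# the marketplace row first, then the vendor row, then the item row(s).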
def persist_data(url, row, cur):

    marketPlace = create_marketPlace(cur, row, url)

    vendor = create_vendor(cur, row, marketPlace)

    create_items(cur, row, marketPlace, vendor)


def incrementError():
    global nError
    nError += 1
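

# read_file loads a saved HTML page into a BeautifulSoup object, first as UTF-8 and,
# if that fails, with the platform default encoding; on failure the error is counted,
# optionally logged, and None is returned.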
def read_file(filePath, createLog, logFile):

    try:
        html = codecs.open(filePath.strip('\n'), encoding='utf8')
        soup = BeautifulSoup(html, "html.parser")
        html.close()
        time.sleep(0.01)  # making sure the file is closed before returning soup object
        return soup
    except:
        try:
            html = open(filePath.strip('\n'))
            soup = BeautifulSoup(html, "html.parser")
            html.close()
            time.sleep(0.01)  # making sure the file is closed before returning soup object
            return soup
        except:
            incrementError()
            print("There was a problem to read the file " + filePath)
            if createLog:
                logFile.write(
                    str(nError) + ". There was a problem to read the file " + filePath + "\n" + traceback.format_exc() + "\n")
            return None
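

# parse_listing and parse_description dispatch a parsed page to the market-specific
# parser selected by the marketPlace name. Each listing parser is expected to return a
# list of comma-separated records (one per product), and each description parser a
# positional list matching the rmm indices used in mergePages above.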
def parse_listing(marketPlace, listingFile, soup, createLog, logFile):

    try:
        if marketPlace == "DarkFox":
            rw = darkfox_listing_parser(soup)
        elif marketPlace == "AnonymousMarketplace":
            rw = anonymousMarketplace_listing_parser(soup)
        elif marketPlace == "ViceCity":
            rw = vicecity_listing_parser(soup)
        elif marketPlace == "M00nkeyMarket":
            rw = m00nkey_listing_parser(soup)
        elif marketPlace == "MikesGrandStore":
            rw = MikesGrandStore_listing_parser(soup)
        elif marketPlace == "PabloEscobarMarket":
            rw = pabloescobarmarket_listing_parser(soup)
        elif marketPlace == "CityMarket":
            rw = city_listing_parser(soup)
        elif marketPlace == "Ares":
            rw = ares_listing_parser(soup)
        elif marketPlace == "DarkBazar":
            rw = darkbazar_listing_parser(soup)
        elif marketPlace == "Sonanza":
            rw = sonanza_listing_parser(soup)
        elif marketPlace == "Kingdom":
            rw = kingdom_listing_parser(soup)
        elif marketPlace == "BlackPyramid":
            rw = blackpyramid_listing_parser(soup)
        elif marketPlace == "Quest":
            rw = quest_listing_parser(soup)
        elif marketPlace == "CypherMarketplace":
            rw = cyphermarketplace_listing_parser(soup)
        elif marketPlace == "TheDarkMarket":
            rw = darkmarket_listing_parser(soup)
        elif marketPlace == "WeTheNorth":
            rw = wethenorth_listing_parser(soup)
        elif marketPlace == "GoFish":
            rw = gofish_listing_parser(soup)
        elif marketPlace == "ZeroDay":
            rw = zeroday_listing_parser(soup)
        elif marketPlace == "Torzon":
            rw = torzon_listing_parser(soup)
        else:
            print("MISSING CALL TO LISTING PARSER IN PREPARE_PARSER.PY!")
            raise Exception
        return rw

    except:
        incrementError()
        print("There was a problem to parse the file " + listingFile + " in the listing section!")
        traceback.print_exc()
        if createLog:
            logFile.write(
                str(nError) + ". There was a problem to parse the file " + listingFile + " in the Listing section.\n"
                + traceback.format_exc() + "\n")
        return None


def parse_description(marketPlace, descriptionFile, soup, createLog, logFile):

    try:
        if marketPlace == "DarkFox":
            rmm = darkfox_description_parser(soup)
        elif marketPlace == "AnonymousMarketplace":
            rmm = anonymousMarketplace_description_parser(soup)
        elif marketPlace == "ViceCity":
            rmm = vicecity_description_parser(soup)
        elif marketPlace == "M00nkeyMarket":
            rmm = m00nkey_description_parser(soup)
        elif marketPlace == "MikesGrandStore":
            rmm = MikesGrandStore_description_parser(soup)
        elif marketPlace == "PabloEscobarMarket":
            rmm = pabloescobarmarket_description_parser(soup)
        elif marketPlace == "CityMarket":
            rmm = city_description_parser(soup)
        elif marketPlace == "Ares":
            rmm = ares_description_parser(soup)
        elif marketPlace == "DarkBazar":
            rmm = darkbazar_description_parser(soup)
        elif marketPlace == "Sonanza":
            rmm = sonanza_description_parser(soup)
        elif marketPlace == "Kingdom":
            rmm = kingdom_description_parser(soup)
        elif marketPlace == "BlackPyramid":
            rmm = blackpyramid_description_parser(soup)
        elif marketPlace == "Quest":
            rmm = quest_description_parser(soup)
        elif marketPlace == "CypherMarketplace":
            rmm = cyphermarketplace_description_parser(soup)
        elif marketPlace == "TheDarkMarket":
            rmm = darkmarket_description_parser(soup)
        elif marketPlace == "WeTheNorth":
            rmm = wethenorth_description_parser(soup)
        elif marketPlace == "GoFish":
            rmm = gofish_description_parser(soup)
        elif marketPlace == "ZeroDay":
            rmm = zeroday_description_parser(soup)
        elif marketPlace == "Torzon":
            rmm = torzon_description_parser(soup)
        else:
            print("MISSING CALL TO DESCRIPTION PARSER IN PREPARE_PARSER.PY!")
            raise Exception
        return rmm

    except:
        incrementError()
        print("There was a problem to parse the file " + descriptionFile + " in the Description section!")
        traceback.print_exc()
        if createLog:
            logFile.write(
                str(nError) + ". There was a problem to parse the file " + descriptionFile + " in the Description section.\n"
                + traceback.format_exc() + "\n")
        return None
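

# persist_record wraps persist_data in a transaction: the insert is committed on
# success and rolled back (with the error counted and optionally logged) on failure.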
def persist_record(url, rec, cur, con, createLog, logFile, listingFile, descriptionFile):
    try:
        persist_data(url, tuple(rec), cur)
        con.commit()
        return True
    except:
        con.rollback()

        incrementError()
        print(f"There was a problem to persist the files ({listingFile} + {descriptionFile}) in the database!")
        traceback.print_exc()
        if createLog:
            logFile.write(
                str(nError) + f". There was a problem to persist the files ({listingFile} + {descriptionFile}) in the database!\n"
                + traceback.format_exc() + "\n")
        return False
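

# move_file moves a processed HTML file into the 'Read' subfolder next to the original,
# so pages that were already parsed are not picked up again by the globs below.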
def move_file(filePath, createLog, logFile):

    source = filePath
    destination = filePath.replace(os.path.basename(filePath), "") + 'Read\\' + os.path.basename(filePath)

    try:
        shutil.move(source, destination, shutil.copy2)
        return True
    except:
        try:
            shutil.move(source, destination, shutil.copytree)
            return True
        except:
            incrementError()
            print("There was a problem to move the file " + filePath)
            traceback.print_exc()
            if createLog:
                logFile.write(
                    str(nError) + ". There was a problem to move the file " + filePath + "\n" + traceback.format_exc() + "\n")
            return False
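

# new_parse is the driver: it connects to the database, then for every listing page saved
# under <shared_folder>\MarketPlaces\<marketPlace>\HTML_Pages\<CURRENT_DATE>\Listing it
# parses the listing, locates and parses the matching Description pages, merges and
# classifies each record, persists it, and finally records the market status for the day.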
def new_parse(marketPlace, url, createLog):

    from MarketPlaces.Initialization.markets_mining import config, CURRENT_DATE

    global nError
    nError = 0

    print("Parsing the " + marketPlace + " market and conducting data classification to store the information in the database.")

    # Connecting to the database
    con = connectDataBase()
    cur = con.cursor(cursor_factory=RealDictCursor)

    # Creating the tables (The database should be created manually)
    create_database(cur, con)

    mainDir = os.path.join(config.get('Project', 'shared_folder'), "MarketPlaces\\" + marketPlace + "\\HTML_Pages")

    # Creating the log file for the marketplace
    if createLog:
        try:
            logFile = open(mainDir + f"/{CURRENT_DATE}/" + marketPlace + "_" + CURRENT_DATE + ".log", "w")
        except:
            print("Could not open log file!")
            createLog = False
            logFile = None
            # raise SystemExit
    else:
        logFile = None

    # Reading the Listing Html Pages
    listings = glob.glob(os.path.join(mainDir, CURRENT_DATE + "\\Listing", '*.html'))
    listings.sort(key=os.path.getmtime)

    for listingIndex, listingFile in enumerate(listings):

        print("Reading listing folder of '" + marketPlace + "', file '" + os.path.basename(listingFile) + "', index= " + str(
            listingIndex + 1) + " ... " + str(len(listings)))

        listingSoup = read_file(listingFile, createLog, logFile)

        # listing flags
        doParseListing = listingSoup is not None
        doDescription = False

        readDescriptionError = False
        parseDescriptionError = False
        persistDescriptionError = False
        moveDescriptionError = False
        findDescriptionError = False

        rw = []

        if doParseListing:
            rw = parse_listing(marketPlace, listingFile, listingSoup, createLog, logFile)
            doDescription = rw is not None

        if doDescription:
            nFound = 0

            for rec in rw:
                rec = rec.split(',')

                descriptionPattern = cleanLink(rec[22]) + ".html"

                # Reading the associated description Html Pages
                descriptions = glob.glob(os.path.join(mainDir, CURRENT_DATE + "\\Description", descriptionPattern))
                descriptions.sort(key=os.path.getmtime)

                nFound += len(descriptions)

                for descriptionIndex, descriptionFile in enumerate(descriptions):

                    print("Reading description folder of '" + marketPlace + "', file '" + os.path.basename(
                        descriptionFile) + "', index= " + str(descriptionIndex + 1) + " ... " + str(len(descriptions)))

                    descriptionSoup = read_file(descriptionFile, createLog, logFile)

                    # description flags
                    doParseDescription = descriptionSoup is not None
                    doPersistRecord = False
                    doMoveDescription = False

                    rmm = []

                    if doParseDescription:
                        rmm = parse_description(marketPlace, descriptionFile, descriptionSoup, createLog, logFile)
                        doPersistRecord = rmm is not None
                    else:
                        readDescriptionError = True
                        parseDescriptionError = True

                    if doPersistRecord:
                        # Combining the information from Listing and Description Pages
                        rec = mergePages(rmm, rec)

                        # Append to the list the classification of the topic
                        rec.append(str(predict(rec[4], rec[5], language='sup_english')))

                        # Persisting the information in the database
                        persistSuccess = persist_record(url, rec, cur, con, createLog, logFile, listingFile,
                                                        descriptionFile)

                        doMoveDescription = persistSuccess
                    else:
                        parseDescriptionError = True

                    if doMoveDescription:
                        # move description file to the completed (Read) folder
                        moveSuccess = move_file(descriptionFile, createLog, logFile)

                        if not moveSuccess:
                            moveDescriptionError = True
                    else:
                        moveDescriptionError = True

            if not (nFound > 0):
                findDescriptionError = True

                incrementError()
                print(f"There was a problem to locate the file(s) for {listingFile} in the Description section!")
                if createLog:
                    logFile.write(
                        str(nError) + f". There was a problem to locate the file(s) for {listingFile}"
                                      f" in the Description section!\n\n")

        if not (readDescriptionError or parseDescriptionError or persistDescriptionError
                or moveDescriptionError or findDescriptionError):

            # move listing file to the completed (Read) folder
            move_file(listingFile, createLog, logFile)

    # registering the current marketplace status (up/down) and the number of scraped pages in the database
    marketId = verifyMarketPlace(cur, marketPlace)
    if (marketId > 0):

        readListings = glob.glob(os.path.join(mainDir, CURRENT_DATE + "\\Listing\\read", '*.html'))
        readDescriptions = glob.glob(os.path.join(mainDir, CURRENT_DATE + "\\Description\\read", '*.html'))

        create_status(cur, marketId, CURRENT_DATE, len(readListings), len(readDescriptions), '1' if len(listings) > 0 else '0')
        con.commit()

    if createLog:
        logFile.close()

    cur.close()
    con.close()

    print("Parsing the " + marketPlace + " market and data classification done.")
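

# Illustrative standalone call (hypothetical values, not part of the original pipeline);
# the url argument is only passed through to persist_data/create_marketPlace:
#
#     new_parse(marketPlace='DarkBazar', url='http://example.onion', createLog=True)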