This is based on the calsyslab project.
__author__ = 'DarkWeb'

import glob
import os
import codecs
import shutil
import time
import traceback

from bs4 import BeautifulSoup
from psycopg2.extras import RealDictCursor

from MarketPlaces.DB_Connection.db_connection import *
from MarketPlaces.DarkFox.parser import *
from MarketPlaces.AnonymousMarketplace.parser import *
from MarketPlaces.TheDarkMarket.parser import *
from MarketPlaces.ViceCity.parser import *
from MarketPlaces.M00nkeyMarket.parser import *
from MarketPlaces.MikesGrandStore.parser import *
from MarketPlaces.PabloEscobarMarket.parser import *
from MarketPlaces.CityMarket.parser import *
from MarketPlaces.DarkBazar.parser import *
from MarketPlaces.Sonanza.parser import *
from MarketPlaces.Kingdom.parser import *
from MarketPlaces.BlackPyramid.parser import *
from MarketPlaces.Quest.parser import *
from MarketPlaces.Ares.parser import *
from MarketPlaces.CypherMarketplace.parser import *
from MarketPlaces.WeTheNorth.parser import *
from MarketPlaces.Torzon.parser import *
from MarketPlaces.GoFish.parser import *
from MarketPlaces.ZeroDay.parser import *
from MarketPlaces.Classifier.classify_product import predict
from Translator.translate import translate

nError = 0
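# Record layout: each listing record (rec) is a comma-separated row produced by
# a listing parser. Indices 1-19 hold the vendor/item fields named in the
# inline comments below, rec[20] and rec[21] hold the item and vendor images,
# rec[22] holds the item href used to locate the matching description page, and
# the product classification is appended at the end. "-1" marks a field the
# listing page did not provide; mergePages fills those gaps from the
# description-page fields (rmm).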
def mergePages(rmm, rec):

    # key = u"Pr:" + rec[1].upper() + u" Vendor:" + rec[18].upper()
    # key = rec[23]

    print("----------------- Matched: " + rec[4] + "--------------------")

    if rec[1] == "-1":  # name_vendor
        rec[1] = rmm[0]
    if rec[2] == "-1":  # rating_vendor
        rec[2] = rmm[1]
    if rec[3] == "-1":  # success_vendor
        rec[3] = rmm[2]
    if rec[4] == "-1":  # name_item
        rec[4] = rmm[3]
    if rec[5] == "-1":  # description_item
        rec[5] = rmm[4]
    if rec[6] == "-1":  # cve_item
        rec[6] = rmm[5]
    if rec[7] == "-1":  # ms_item
        rec[7] = rmm[6]
    if rec[8] == "-1":  # category_item
        rec[8] = rmm[7]
    if rec[9] == "-1":  # views_item
        rec[9] = rmm[8]
    if rec[10] == "-1":  # reviews_item
        rec[10] = rmm[9]
    if rec[11] == "-1":  # rating_item
        rec[11] = rmm[10]
    if rec[12] == "-1":  # adddate_item
        rec[12] = rmm[11]
    if rec[13] == "-1":  # btc_item
        rec[13] = rmm[12]
    if rec[14] == "-1":  # usd_item
        rec[14] = rmm[13]
    if rec[15] == "-1":  # euro_item
        rec[15] = rmm[14]
    if rec[16] == "-1":  # quantitysold_item
        rec[16] = rmm[15]
    if rec[17] == "-1":  # quantityleft_item
        rec[17] = rmm[16]
    if rec[18] == "-1":  # shippedfrom_item
        rec[18] = rmm[17]
    if rec[19] == "-1":  # shippedto_item
        rec[19] = rmm[18]
    if rmm[19] != "-1":  # image
        rec[20] = rmm[19]
    if rmm[20] != "-1":  # image_vendor
        rec[21] = rmm[20]

    return rec
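# Persist one merged record: the marketplace row is created first, then the
# vendor, then the item, so each step can reference the keys of the previous one.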
def persist_data(url, row, cur):

    marketPlace = create_marketPlace(cur, row, url)

    vendor = create_vendor(cur, row, marketPlace)

    create_items(cur, row, marketPlace, vendor)


def incrementError():
    global nError
    nError += 1
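# Parse an HTML page into a BeautifulSoup object, trying UTF-8 first and
# falling back to the platform default encoding before logging a failure.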
def read_file(filePath, createLog, logFile):
    try:
        html = codecs.open(filePath.strip('\n'), encoding='utf8')
        soup = BeautifulSoup(html, "html.parser")
        html.close()
        time.sleep(0.01)  # make sure the file is closed before returning the soup object
        return soup
    except:
        try:
            html = open(filePath.strip('\n'))
            soup = BeautifulSoup(html, "html.parser")
            html.close()
            time.sleep(0.01)  # make sure the file is closed before returning the soup object
            return soup
        except:
            incrementError()
            print("There was a problem reading the file " + filePath)
            if createLog:
                logFile.write(
                    str(nError) + ". There was a problem reading the file " + filePath + "\n" + traceback.format_exc() + "\n")
            return None
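# The two dispatchers below route a page to its marketplace-specific parser:
# parse_listing returns the list of listing records (rw) and parse_description
# returns the parsed description fields (rmm). Both return None on failure.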
def parse_listing(marketPlace, listingFile, soup, createLog, logFile):
    try:
        if marketPlace == "DarkFox":
            rw = darkfox_listing_parser(soup)
        elif marketPlace == "AnonymousMarketplace":
            rw = anonymousMarketplace_listing_parser(soup)
        elif marketPlace == "ViceCity":
            rw = vicecity_listing_parser(soup)
        elif marketPlace == "M00nkeyMarket":
            rw = m00nkey_listing_parser(soup)
        elif marketPlace == "MikesGrandStore":
            rw = MikesGrandStore_listing_parser(soup)
        elif marketPlace == "PabloEscobarMarket":
            rw = pabloescobarmarket_listing_parser(soup)
        elif marketPlace == "CityMarket":
            rw = city_listing_parser(soup)
        elif marketPlace == "Ares":
            rw = ares_listing_parser(soup)
        elif marketPlace == "DarkBazar":
            rw = darkbazar_listing_parser(soup)
        elif marketPlace == "Sonanza":
            rw = sonanza_listing_parser(soup)
        elif marketPlace == "Kingdom":
            rw = kingdom_listing_parser(soup)
        elif marketPlace == "BlackPyramid":
            rw = blackpyramid_listing_parser(soup)
        elif marketPlace == "Quest":
            rw = quest_listing_parser(soup)
        elif marketPlace == "CypherMarketplace":
            rw = cyphermarketplace_listing_parser(soup)
        elif marketPlace == "TheDarkMarket":
            rw = darkmarket_listing_parser(soup)
        elif marketPlace == "WeTheNorth":
            rw = wethenorth_listing_parser(soup)
        elif marketPlace == "GoFish":
            rw = gofish_listing_parser(soup)
        elif marketPlace == "ZeroDay":
            rw = zeroday_listing_parser(soup)
        elif marketPlace == "Torzon":
            rw = torzon_listing_parser(soup)
        else:
            print("MISSING CALL TO LISTING PARSER IN PREPARE_PARSER.PY!")
            raise Exception
        return rw
    except:
        incrementError()
        print("There was a problem parsing the file " + listingFile + " in the Listing section!")
        traceback.print_exc()
        if createLog:
            logFile.write(
                str(nError) + ". There was a problem parsing the file " + listingFile + " in the Listing section.\n"
                + traceback.format_exc() + "\n")
        return None
def parse_description(marketPlace, descriptionFile, soup, createLog, logFile):
    try:
        if marketPlace == "DarkFox":
            rmm = darkfox_description_parser(soup)
        elif marketPlace == "AnonymousMarketplace":
            rmm = anonymousMarketplace_description_parser(soup)
        elif marketPlace == "ViceCity":
            rmm = vicecity_description_parser(soup)
        elif marketPlace == "M00nkeyMarket":
            rmm = m00nkey_description_parser(soup)
        elif marketPlace == "MikesGrandStore":
            rmm = MikesGrandStore_description_parser(soup)
        elif marketPlace == "PabloEscobarMarket":
            rmm = pabloescobarmarket_description_parser(soup)
        elif marketPlace == "CityMarket":
            rmm = city_description_parser(soup)
        elif marketPlace == "Ares":
            rmm = ares_description_parser(soup)
        elif marketPlace == "DarkBazar":
            rmm = darkbazar_description_parser(soup)
        elif marketPlace == "Sonanza":
            rmm = sonanza_description_parser(soup)
        elif marketPlace == "Kingdom":
            rmm = kingdom_description_parser(soup)
        elif marketPlace == "BlackPyramid":
            rmm = blackpyramid_description_parser(soup)
        elif marketPlace == "Quest":
            rmm = quest_description_parser(soup)
        elif marketPlace == "CypherMarketplace":
            rmm = cyphermarketplace_description_parser(soup)
        elif marketPlace == "TheDarkMarket":
            rmm = darkmarket_description_parser(soup)
        elif marketPlace == "WeTheNorth":
            rmm = wethenorth_description_parser(soup)
        elif marketPlace == "GoFish":
            rmm = gofish_description_parser(soup)
        elif marketPlace == "ZeroDay":
            rmm = zeroday_description_parser(soup)
        elif marketPlace == "Torzon":
            rmm = torzon_description_parser(soup)
        else:
            print("MISSING CALL TO DESCRIPTION PARSER IN PREPARE_PARSER.PY!")
            raise Exception
        return rmm
    except:
        incrementError()
        print("There was a problem parsing the file " + descriptionFile + " in the Description section!")
        traceback.print_exc()
        if createLog:
            logFile.write(
                str(nError) + ". There was a problem parsing the file " + descriptionFile + " in the Description section.\n"
                + traceback.format_exc() + "\n")
        return None
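# Map a site to the language of its content so the translator knows the source
# language. Note that none of the marketplaces imported above are listed here,
# so they currently fall through to automatic detection ('auto').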
def get_source_language(marketPlace):
    if marketPlace == "BestCardingWorld":
        lang = 'english'
    elif marketPlace == "CryptBB":
        lang = 'english'
    elif marketPlace == "Incogsnoo":
        lang = 'english'
    else:
        print("MISSING CALL TO GET LANGUAGE IN PREPARE_PARSER.PY!")
        lang = 'auto'
    return lang
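# Write one record in its own transaction: commit on success, roll back on any
# database error so a bad record does not poison the connection.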
def persist_record(url, rec, cur, con, createLog, logFile, listingFile, descriptionFile):
    try:
        persist_data(url, tuple(rec), cur)
        con.commit()
        return True
    except:
        con.rollback()
        incrementError()
        print(f"There was a problem persisting the files ({listingFile} + {descriptionFile}) in the database!")
        traceback.print_exc()
        if createLog:
            logFile.write(
                str(nError) + f". There was a problem persisting the files ({listingFile} + {descriptionFile}) in the database!\n"
                + traceback.format_exc() + "\n")
        return False
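# Move a processed HTML file into the 'Read' subfolder next to it; the
# copytree fallback covers the case where the source is a directory.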
def move_file(filePath, createLog, logFile):

    source = filePath
    destination = filePath.replace(os.path.basename(filePath), "") + 'Read\\' + os.path.basename(filePath)

    try:
        shutil.move(source, destination, shutil.copy2)
        return True
    except:
        try:
            shutil.move(source, destination, shutil.copytree)
            return True
        except:
            incrementError()
            print("There was a problem moving the file " + filePath)
            traceback.print_exc()
            if createLog:
                logFile.write(
                    str(nError) + ". There was a problem moving the file " + filePath + "\n" + traceback.format_exc() + "\n")
            return False
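# Top-level entry point: walk today's Listing pages for the given marketplace,
# match each record to its Description page, merge the two, translate and
# classify the text, and persist the result. Files are moved to 'Read' only
# after a fully successful pass, and a per-run status row is recorded at the end.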
def new_parse(marketPlace, url, createLog):
    from MarketPlaces.Initialization.markets_mining import config, CURRENT_DATE

    global nError
    nError = 0

    print("Parsing the " + marketPlace + " market and conducting data classification to store the information in the database.")

    # Connecting to the database
    con = connectDataBase()
    cur = con.cursor(cursor_factory=RealDictCursor)

    # Creating the tables (the database itself should be created manually)
    create_database(cur, con)

    mainDir = os.path.join(config.get('Project', 'shared_folder'), "MarketPlaces\\" + marketPlace + "\\HTML_Pages")

    # Creating the log file for each marketplace
    if createLog:
        try:
            logFile = open(mainDir + f"/{CURRENT_DATE}/" + marketPlace + "_" + CURRENT_DATE + ".log", "w")
        except:
            print("Could not open log file!")
            createLog = False
            logFile = None
            # raise SystemExit
    else:
        logFile = None

    source_lang = get_source_language(marketPlace)

    # Reading the Listing HTML pages
    listings = glob.glob(os.path.join(mainDir, CURRENT_DATE + "\\Listing", '*.html'))
    listings.sort(key=os.path.getmtime)

    for listingIndex, listingFile in enumerate(listings):

        print("Reading listing folder of '" + marketPlace + "', file '" + os.path.basename(listingFile) + "', index= " + str(
            listingIndex + 1) + " ... " + str(len(listings)))

        listingSoup = read_file(listingFile, createLog, logFile)

        # listing flags
        doParseListing = listingSoup is not None
        doDescription = False

        readDescriptionError = False
        parseDescriptionError = False
        persistDescriptionError = False
        moveDescriptionError = False
        findDescriptionError = False

        rw = []

        if doParseListing:
            rw = parse_listing(marketPlace, listingFile, listingSoup, createLog, logFile)
            doDescription = rw is not None

        if doDescription:
            nFound = 0

            for rec in rw:
                rec = rec.split(',')

                descriptionPattern = cleanLink(rec[22]) + ".html"

                # Reading the associated Description HTML pages
                descriptions = glob.glob(os.path.join(mainDir, CURRENT_DATE + "\\Description", descriptionPattern))
                descriptions.sort(key=os.path.getmtime)

                nFound += len(descriptions)

                for descriptionIndex, descriptionFile in enumerate(descriptions):

                    print("Reading description folder of '" + marketPlace + "', file '" + os.path.basename(
                        descriptionFile) + "', index= " + str(descriptionIndex + 1) + " ... " + str(len(descriptions)))

                    descriptionSoup = read_file(descriptionFile, createLog, logFile)

                    # description flags
                    doParseDescription = descriptionSoup is not None
                    doPersistRecord = False
                    doMoveDescription = False

                    rmm = []

                    if doParseDescription:
                        rmm = parse_description(marketPlace, descriptionFile, descriptionSoup, createLog, logFile)
                        doPersistRecord = rmm is not None
                    else:
                        readDescriptionError = True
                        parseDescriptionError = True

                    if doPersistRecord:
                        # Combining the information from the Listing and Description pages
                        rec = mergePages(rmm, rec)

                        title = translate(rec[4], source_lang)
                        content = translate(rec[5], source_lang)

                        # Appending the classification of the product to the record
                        rec.append(str(predict(title, content, language='sup_english')))

                        # Persisting the information in the database
                        persistSuccess = persist_record(url, rec, cur, con, createLog, logFile, listingFile,
                                                        descriptionFile)

                        doMoveDescription = persistSuccess
                    else:
                        parseDescriptionError = True

                    if doMoveDescription:
                        # move the description file to the completed folder
                        moveSuccess = move_file(descriptionFile, createLog, logFile)
                        if not moveSuccess:
                            moveDescriptionError = True
                    else:
                        moveDescriptionError = True

            if not (nFound > 0):
                findDescriptionError = True
                incrementError()
                print(f"There was a problem locating the file(s) for {listingFile} in the Description section!")
                if createLog:
                    logFile.write(
                        str(nError) + f". There was a problem locating the file(s) for {listingFile}"
                                      f" in the Description section!\n\n")

        if not (readDescriptionError or parseDescriptionError or persistDescriptionError
                or moveDescriptionError or findDescriptionError):
            # move the listing file to the completed folder
            move_file(listingFile, createLog, logFile)

    # registering the current marketplace status (up/down) and the number of scraped pages in the database
    marketId = verifyMarketPlace(cur, marketPlace)
    if (marketId > 0):
        readListings = glob.glob(os.path.join(mainDir, CURRENT_DATE + "\\Listing\\read", '*.html'))
        readDescriptions = glob.glob(os.path.join(mainDir, CURRENT_DATE + "\\Description\\read", '*.html'))

        create_status(cur, marketId, CURRENT_DATE, len(readListings), len(readDescriptions), '1' if len(listings) > 0 else '0')
        con.commit()

    if createLog:
        logFile.close()

    cur.close()
    con.close()

    print("Parsing the " + marketPlace + " market and data classification done.")