This is based on the calsyslab project.
__author__ = 'DarkWeb'

import glob
import os
import codecs
import shutil
import time
import traceback

from bs4 import BeautifulSoup
from psycopg2.extras import RealDictCursor
from MarketPlaces.DB_Connection.db_connection import *
from MarketPlaces.DarkFox.parser import *
from MarketPlaces.AnonymousMarketplace.parser import *
from MarketPlaces.ViceCity.parser import *
from MarketPlaces.M00nkeyMarket.parser import *
from MarketPlaces.MikesGrandStore.parser import *
from MarketPlaces.PabloEscobarMarket.parser import *
from MarketPlaces.CityMarket.parser import *
from MarketPlaces.DarkBazar.parser import *
from MarketPlaces.Sonanza.parser import *
from MarketPlaces.Kingdom.parser import *
from MarketPlaces.BlackPyramid.parser import *
from MarketPlaces.Quest.parser import *
from MarketPlaces.Ares.parser import *
from MarketPlaces.CypherMarketplace.parser import *
from MarketPlaces.Classifier.classify_product import predict

# global error counter for the current parsing run
nError = 0


def mergePages(rmm, rec):
    # key = u"Pr:" + rec[1].upper() + u" Vendor:" + rec[18].upper()
    # key = rec[23]

    print("----------------- Matched: " + rec[4] + "--------------------")

    if rec[1] == "-1":  # name_vendor
        rec[1] = rmm[0]
    if rec[2] == "-1":  # rating_vendor
        rec[2] = rmm[1]
    if rec[3] == "-1":  # success_vendor
        rec[3] = rmm[2]
    if rec[4] == "-1":  # name_item
        rec[4] = rmm[3]
    if rec[5] == "-1":  # description_item
        rec[5] = rmm[4]
    if rec[6] == "-1":  # cve_item
        rec[6] = rmm[5]
    if rec[7] == "-1":  # ms_item
        rec[7] = rmm[6]
    if rec[8] == "-1":  # category_item
        rec[8] = rmm[7]
    if rec[9] == "-1":  # views_item
        rec[9] = rmm[8]
    if rec[10] == "-1":  # reviews_item
        rec[10] = rmm[9]
    if rec[11] == "-1":  # rating_item
        rec[11] = rmm[10]
    if rec[12] == "-1":  # adddate_item
        rec[12] = rmm[11]
    if rec[13] == "-1":  # btc_item
        rec[13] = rmm[12]
    if rec[14] == "-1":  # usd_item
        rec[14] = rmm[13]
    if rec[15] == "-1":  # euro_item
        rec[15] = rmm[14]
    if rec[16] == "-1":  # quantitysold_item
        rec[16] = rmm[15]
    if rec[17] == "-1":  # quantityleft_item
        rec[17] = rmm[16]
    if rec[18] == "-1":  # shippedfrom_item
        rec[18] = rmm[17]
    if rec[19] == "-1":  # shippedto_item
        rec[19] = rmm[18]
    if rmm[19] != "-1":  # image
        rec[20] = rmm[19]
    if rmm[20] != "-1":  # image_vendor
        rec[21] = rmm[20]

    return rec
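

# Illustration of the merge semantics above, with hypothetical values: listing
# fields holding the "-1" sentinel are filled from the description record,
# populated fields are kept, and the two image fields are instead overridden
# whenever the description side carries a real value.
#
#   rec = ['1', '-1', '4.9'] + ['x'] * 19    # rec[1] (name_vendor) is missing
#   rmm = ['vendorA'] + ['-1'] * 20          # description page supplies it
#   mergePages(rmm, rec)[1]                  # -> 'vendorA'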


def persist_data(url, row, cur):
    marketPlace = create_marketPlace(cur, row, url)
    vendor = create_vendor(cur, row, marketPlace)
    create_items(cur, row, marketPlace, vendor)


def incrementError():
    global nError
    nError += 1


def read_file(filePath, createLog, logFile):
    try:
        html = codecs.open(filePath.strip('\n'), encoding='utf8')
        soup = BeautifulSoup(html, "html.parser")
        html.close()
        time.sleep(0.01)  # making sure the file is closed before returning the soup object
        return soup
    except:
        try:
            # fall back to the platform default encoding
            html = open(filePath.strip('\n'))
            soup = BeautifulSoup(html, "html.parser")
            html.close()
            time.sleep(0.01)  # making sure the file is closed before returning the soup object
            return soup
        except:
            incrementError()
            print("There was a problem reading the file " + filePath)
            if createLog:
                logFile.write(
                    str(nError) + ". There was a problem reading the file " + filePath + "\n"
                    + traceback.format_exc() + "\n")
            return None


def parse_listing(marketPlace, listingFile, soup, createLog, logFile):
    try:
        if marketPlace == "DarkFox":
            rw = darkfox_listing_parser(soup)
        elif marketPlace == "AnonymousMarketplace":
            rw = anonymousMarketplace_listing_parser(soup)
        elif marketPlace == "ViceCity":
            rw = vicecity_listing_parser(soup)
        elif marketPlace == "M00nkeyMarket":
            rw = m00nkey_listing_parser(soup)
        elif marketPlace == "MikesGrandStore":
            rw = mikesGrandStore_listing_parser(soup)
        elif marketPlace == "PabloEscobarMarket":
            rw = pabloescobarmarket_listing_parser(soup)
        elif marketPlace == "CityMarket":
            rw = city_listing_parser(soup)
        elif marketPlace == "Ares":
            rw = ares_listing_parser(soup)
        elif marketPlace == "DarkBazar":
            rw = darkbazar_listing_parser(soup)
        elif marketPlace == "Sonanza":
            rw = sonanza_listing_parser(soup)
        elif marketPlace == "Kingdom":
            rw = kingdom_listing_parser(soup)
        elif marketPlace == "BlackPyramid":
            rw = blackpyramid_listing_parser(soup)
        elif marketPlace == "Quest":
            rw = quest_listing_parser(soup)
        elif marketPlace == "CypherMarketplace":
            rw = cyphermarketplace_listing_parser(soup)
        else:
            print("MISSING CALL TO LISTING PARSER IN PREPARE_PARSER.PY!")
            raise Exception

        return rw

    except:
        incrementError()
        print("There was a problem parsing the file " + listingFile + " in the Listing section!")
        traceback.print_exc()
        if createLog:
            logFile.write(
                str(nError) + ". There was a problem parsing the file " + listingFile + " in the Listing section.\n"
                + traceback.format_exc() + "\n")
        return None


def parse_description(marketPlace, descriptionFile, soup, createLog, logFile):
    try:
        if marketPlace == "DarkFox":
            rmm = darkfox_description_parser(soup)
        elif marketPlace == "AnonymousMarketplace":
            rmm = anonymousMarketplace_description_parser(soup)
        elif marketPlace == "ViceCity":
            rmm = vicecity_description_parser(soup)
        elif marketPlace == "M00nkeyMarket":
            rmm = m00nkey_description_parser(soup)
        elif marketPlace == "MikesGrandStore":
            rmm = mikesGrandStore_description_parser(soup)
        elif marketPlace == "PabloEscobarMarket":
            rmm = pabloescobarmarket_description_parser(soup)
        elif marketPlace == "CityMarket":
            rmm = city_description_parser(soup)
        elif marketPlace == "Ares":
            rmm = ares_description_parser(soup)
        elif marketPlace == "DarkBazar":
            rmm = darkbazar_description_parser(soup)
        elif marketPlace == "Sonanza":
            rmm = sonanza_description_parser(soup)
        elif marketPlace == "Kingdom":
            rmm = kingdom_description_parser(soup)
        elif marketPlace == "BlackPyramid":
            rmm = blackpyramid_description_parser(soup)
        elif marketPlace == "Quest":
            rmm = quest_description_parser(soup)
        elif marketPlace == "CypherMarketplace":
            rmm = cyphermarketplace_description_parser(soup)
        else:
            print("MISSING CALL TO DESCRIPTION PARSER IN PREPARE_PARSER.PY!")
            raise Exception

        return rmm

    except:
        incrementError()
        print("There was a problem parsing the file " + descriptionFile + " in the Description section!")
        traceback.print_exc()
        if createLog:
            logFile.write(
                str(nError) + ". There was a problem parsing the file " + descriptionFile + " in the Description section.\n"
                + traceback.format_exc() + "\n")
        return None


def persist_record(url, rec, cur, con, createLog, logFile, listingFile, descriptionFile):
    try:
        persist_data(url, tuple(rec), cur)
        con.commit()
        return True
    except:
        con.rollback()
        incrementError()
        print(f"There was a problem persisting the files ({listingFile} + {descriptionFile}) in the database!")
        traceback.print_exc()
        if createLog:
            logFile.write(
                str(nError) + f". There was a problem persisting the files ({listingFile} + {descriptionFile}) in the database!\n"
                + traceback.format_exc() + "\n")
        return False


def move_file(filePath, createLog, logFile):
    source = filePath
    destination = filePath.replace(os.path.basename(filePath), "") + 'Read\\' + os.path.basename(filePath)

    try:
        shutil.move(source, destination, shutil.copy2)
        return True
    except:
        try:
            shutil.move(source, destination, shutil.copytree)
            return True
        except:
            incrementError()
            print("There was a problem moving the file " + filePath)
            traceback.print_exc()
            if createLog:
                logFile.write(
                    str(nError) + ". There was a problem moving the file " + filePath + "\n"
                    + traceback.format_exc() + "\n")
            return False


def new_parse(marketPlace, url, createLog):
    from MarketPlaces.Initialization.markets_mining import config, CURRENT_DATE

    global nError
    nError = 0

    print("Parsing the " + marketPlace + " market and conducting data classification to store the information in the database.")

    # Connecting to the database
    con = connectDataBase()
    cur = con.cursor(cursor_factory=RealDictCursor)

    # Creating the tables (the database itself should be created manually)
    create_database(cur, con)

    mainDir = os.path.join(config.get('Project', 'shared_folder'), "MarketPlaces\\" + marketPlace + "\\HTML_Pages")

    # Creating the log file for each market
    if createLog:
        try:
            logFile = open(mainDir + f"/{CURRENT_DATE}/" + marketPlace + "_" + CURRENT_DATE + ".log", "w")
        except:
            print("Could not open log file!")
            createLog = False
            logFile = None
            # raise SystemExit
    else:
        logFile = None

    # Reading the listing HTML pages
    listings = glob.glob(os.path.join(mainDir, CURRENT_DATE + "\\Listing", '*.html'))

    for listingIndex, listingFile in enumerate(listings):

        print("Reading listing folder of '" + marketPlace + "', file '" + os.path.basename(listingFile) + "', index= " + str(
            listingIndex + 1) + " ... " + str(len(listings)))

        listingSoup = read_file(listingFile, createLog, logFile)

        # listing flags
        doParseListing = listingSoup is not None
        doDescription = False

        readDescriptionError = False
        parseDescriptionError = False
        persistDescriptionError = False
        moveDescriptionError = False
        findDescriptionError = False

        rw = []

        if doParseListing:
            rw = parse_listing(marketPlace, listingFile, listingSoup, createLog, logFile)
            doDescription = rw is not None

        if doDescription:

            nFound = 0

            for rec in rw:

                rec = rec.split(',')

                descriptionPattern = cleanLink(rec[22]) + ".html"

                # Reading the associated description HTML pages
                descriptions = glob.glob(os.path.join(mainDir, CURRENT_DATE + "\\Description", descriptionPattern))

                nFound += len(descriptions)

                for descriptionIndex, descriptionFile in enumerate(descriptions):

                    print("Reading description folder of '" + marketPlace + "', file '" + os.path.basename(
                        descriptionFile) + "', index= " + str(descriptionIndex + 1) + " ... " + str(len(descriptions)))

                    descriptionSoup = read_file(descriptionFile, createLog, logFile)

                    # description flags
                    doParseDescription = descriptionSoup is not None
                    doPersistRecord = False
                    doMoveDescription = False

                    rmm = []

                    if doParseDescription:
                        rmm = parse_description(marketPlace, descriptionFile, descriptionSoup, createLog, logFile)
                        doPersistRecord = rmm is not None
                    else:
                        readDescriptionError = True
                        parseDescriptionError = True

                    if doPersistRecord:

                        # Combining the information from the listing and description pages
                        rec = mergePages(rmm, rec)

                        # Appending the product classification to the record
                        rec.append(str(predict(rec[4], rec[5], language='sup_english')))

                        # Persisting the information in the database
                        persistSuccess = persist_record(url, rec, cur, con, createLog, logFile, listingFile,
                                                        descriptionFile)

                        doMoveDescription = persistSuccess

                    else:
                        parseDescriptionError = True

                    if doMoveDescription:
                        # move the description file to the Read folder
                        moveSuccess = move_file(descriptionFile, createLog, logFile)
                        if not moveSuccess:
                            moveDescriptionError = True
                    else:
                        moveDescriptionError = True

            if not (nFound > 0):
                findDescriptionError = True
                incrementError()
                print(f"There was a problem locating the file(s) for {listingFile} in the Description section!")
                if createLog:
                    logFile.write(
                        str(nError) + f". There was a problem locating the file(s) for {listingFile}"
                        f" in the Description section!\n\n")

        if not (readDescriptionError or parseDescriptionError or persistDescriptionError
                or moveDescriptionError or findDescriptionError):
            # move the listing file to the Read folder
            move_file(listingFile, createLog, logFile)

    # registering the current market status (up/down) and the number of scraped pages in the database
    marketId = verifyMarketPlace(cur, marketPlace)
    if marketId > 0:
        readListings = glob.glob(os.path.join(mainDir, CURRENT_DATE + "\\Listing\\read", '*.html'))
        readDescriptions = glob.glob(os.path.join(mainDir, CURRENT_DATE + "\\Description\\read", '*.html'))

        create_status(cur, marketId, CURRENT_DATE, len(readListings), len(readDescriptions),
                      '1' if len(listings) > 0 else '0')
        con.commit()

    if createLog:
        logFile.close()

    cur.close()
    con.close()

    print("Parsing the " + marketPlace + " market and data classification done.")