This is based on the calsyslab project.
__author__ = 'DarkWeb'

import glob
import os
import codecs
import shutil
import traceback
import time

from bs4 import BeautifulSoup
from psycopg2.extras import RealDictCursor

from MarketPlaces.DB_Connection.db_connection import *
from MarketPlaces.DarkFox.parser import *
from MarketPlaces.AnonymousMarketplace.parser import *
from MarketPlaces.ViceCity.parser import *
from MarketPlaces.M00nkeyMarket.parser import *
from MarketPlaces.MikesGrandStore.parser import *
from MarketPlaces.PabloEscobarMarket.parser import *
from MarketPlaces.CityMarket.parser import *
from MarketPlaces.Classifier.classify_product import predict

# running count of errors encountered while parsing the current marketplace
nError = 0
def mergePages(rmm, rec):
    # Fill the fields of the listing record (rec) that are still the "-1" placeholder
    # with the values parsed from the matching description page (rmm).

    # key = u"Pr:" + rec[1].upper() + u" Vendor:" + rec[18].upper()
    # key = rec[23]

    print("----------------- Matched: " + rec[4] + "--------------------")

    if rec[1] == "-1":  # name_vendor
        rec[1] = rmm[0]
    if rec[2] == "-1":  # rating_vendor
        rec[2] = rmm[1]
    if rec[3] == "-1":  # success_vendor
        rec[3] = rmm[2]
    if rec[4] == "-1":  # name_item
        rec[4] = rmm[3]
    if rec[5] == "-1":  # description_item
        rec[5] = rmm[4]
    if rec[6] == "-1":  # cve_item
        rec[6] = rmm[5]
    if rec[7] == "-1":  # ms_item
        rec[7] = rmm[6]
    if rec[8] == "-1":  # category_item
        rec[8] = rmm[7]
    if rec[9] == "-1":  # views_item
        rec[9] = rmm[8]
    if rec[10] == "-1":  # reviews_item
        rec[10] = rmm[9]
    if rec[11] == "-1":  # rating_item
        rec[11] = rmm[10]
    if rec[12] == "-1":  # adddate_item
        rec[12] = rmm[11]
    if rec[13] == "-1":  # btc_item
        rec[13] = rmm[12]
    if rec[14] == "-1":  # usd_item
        rec[14] = rmm[13]
    if rec[15] == "-1":  # euro_item
        rec[15] = rmm[14]
    if rec[16] == "-1":  # quantitysold_item
        rec[16] = rmm[15]
    if rec[17] == "-1":  # quantityleft_item
        rec[17] = rmm[16]
    if rec[18] == "-1":  # shippedfrom_item
        rec[18] = rmm[17]
    if rec[19] == "-1":  # shippedto_item
        rec[19] = rmm[18]
    if rmm[19] != "-1":  # image
        rec[20] = rmm[19]
    if rmm[20] != "-1":  # image_vendor
        rec[21] = rmm[20]

    return rec
def persist_data(url, row, cur):
    marketPlace = create_marketPlace(cur, row, url)
    vendor = create_vendor(cur, row, marketPlace)
    create_items(cur, row, marketPlace, vendor)


def incrementError():
    global nError
    nError += 1
def read_file(filePath, createLog, logFile):
    try:
        html = codecs.open(filePath.strip('\n'), encoding='utf8')
        soup = BeautifulSoup(html, "html.parser")
        html.close()
        time.sleep(0.01)  # making sure the file is closed before returning the soup object
        return soup
    except:
        try:
            # fall back to the platform's default encoding if the file is not valid UTF-8
            html = open(filePath.strip('\n'))
            soup = BeautifulSoup(html, "html.parser")
            html.close()
            time.sleep(0.01)  # making sure the file is closed before returning the soup object
            return soup
        except:
            incrementError()
            print("There was a problem reading the file " + filePath)
            if createLog:
                logFile.write(
                    str(nError) + ". There was a problem reading the file " + filePath + "\n" + traceback.format_exc() + "\n")
            return None
def parse_listing(marketPlace, listingFile, soup, createLog, logFile):
    try:
        if marketPlace == "DarkFox":
            rw = darkfox_listing_parser(soup)
        elif marketPlace == "AnonymousMarketplace":
            rw = anonymousMarketplace_listing_parser(soup)
        elif marketPlace == "ViceCity":
            rw = vicecity_listing_parser(soup)
        elif marketPlace == "M00nkeyMarket":
            rw = m00nkey_listing_parser(soup)
        elif marketPlace == "MikesGrandStore":
            rw = mikesGrandStore_listing_parser(soup)
        elif marketPlace == "PabloEscobarMarket":
            rw = pabloescobarmarket_listing_parser(soup)
        elif marketPlace == "CityMarket":
            rw = city_listing_parser(soup)
        else:
            print("MISSING CALL TO LISTING PARSER IN PREPARE_PARSER.PY!")
            raise Exception
        return rw
    except:
        incrementError()
        print("There was a problem parsing the file " + listingFile + " in the Listing section!")
        traceback.print_exc()
        if createLog:
            logFile.write(
                str(nError) + ". There was a problem parsing the file " + listingFile + " in the Listing section.\n"
                + traceback.format_exc() + "\n")
        return None
def parse_description(marketPlace, descriptionFile, soup, createLog, logFile):
    try:
        if marketPlace == "DarkFox":
            rmm = darkfox_description_parser(soup)
        elif marketPlace == "AnonymousMarketplace":
            rmm = anonymousMarketplace_description_parser(soup)
        elif marketPlace == "ViceCity":
            rmm = vicecity_description_parser(soup)
        elif marketPlace == "M00nkeyMarket":
            rmm = m00nkey_description_parser(soup)
        elif marketPlace == "MikesGrandStore":
            rmm = mikesGrandStore_description_parser(soup)
        elif marketPlace == "PabloEscobarMarket":
            rmm = pabloescobarmarket_description_parser(soup)
        elif marketPlace == "CityMarket":
            rmm = city_description_parser(soup)
        else:
            print("MISSING CALL TO DESCRIPTION PARSER IN PREPARE_PARSER.PY!")
            raise Exception
        return rmm
    except:
        incrementError()
        print("There was a problem parsing the file " + descriptionFile + " in the Description section!")
        traceback.print_exc()
        if createLog:
            logFile.write(
                str(nError) + ". There was a problem parsing the file " + descriptionFile + " in the Description section.\n"
                + traceback.format_exc() + "\n")
        return None
def persist_record(url, rec, cur, con, createLog, logFile, listingFile, descriptionFile):
    try:
        persist_data(url, tuple(rec), cur)
        con.commit()
        return True
    except:
        con.rollback()
        incrementError()
        print(f"There was a problem persisting the files ({listingFile} + {descriptionFile}) in the database!")
        traceback.print_exc()
        if createLog:
            logFile.write(
                str(nError) + f". There was a problem persisting the files ({listingFile} + {descriptionFile}) in the database!\n"
                + traceback.format_exc() + "\n")
        return False
def move_file(filePath, createLog, logFile):
    source = filePath
    destination = filePath.replace(os.path.basename(filePath), "") + 'Read\\' + os.path.basename(filePath)

    try:
        shutil.move(source, destination, copy_function=shutil.copy2)
        return True
    except:
        try:
            shutil.move(source, destination, copy_function=shutil.copytree)
            return True
        except:
            incrementError()
            print("There was a problem moving the file " + filePath)
            traceback.print_exc()
            if createLog:
                logFile.write(
                    str(nError) + ". There was a problem moving the file " + filePath + "\n" + traceback.format_exc() + "\n")
            return False
def new_parse(marketPlace, url, createLog):

    from MarketPlaces.Initialization.markets_mining import config, CURRENT_DATE

    global nError
    nError = 0

    print("Parsing the " + marketPlace + " market and conducting data classification to store the information in the database.")

    # Connecting to the database
    con = connectDataBase()
    cur = con.cursor(cursor_factory=RealDictCursor)

    # Creating the tables (the database itself should be created manually)
    create_database(cur, con)

    mainDir = os.path.join(config.get('Project', 'shared_folder'), "MarketPlaces\\" + marketPlace + "\\HTML_Pages")

    # Creating the log file for each marketplace
    if createLog:
        try:
            logFile = open(mainDir + f"/{CURRENT_DATE}/" + marketPlace + "_" + CURRENT_DATE + ".log", "w")
        except:
            print("Could not open log file!")
            createLog = False
            logFile = None
            # raise SystemExit
    else:
        logFile = None

    # Reading the Listing HTML pages
    listings = glob.glob(os.path.join(mainDir, CURRENT_DATE + "\\Listing", '*.html'))

    for listingIndex, listingFile in enumerate(listings):

        print("Reading listing folder of '" + marketPlace + "', file '" + os.path.basename(listingFile) + "', index= " + str(
            listingIndex + 1) + " ... " + str(len(listings)))

        listingSoup = read_file(listingFile, createLog, logFile)

        # listing flags
        doParseListing = listingSoup is not None
        doDescription = False

        readDescriptionError = False
        parseDescriptionError = False
        persistDescriptionError = False
        moveDescriptionError = False
        findDescriptionError = False

        rw = []

        if doParseListing:
            rw = parse_listing(marketPlace, listingFile, listingSoup, createLog, logFile)
            doDescription = rw is not None

        if doDescription:
            nFound = 0

            for rec in rw:
                rec = rec.split(',')

                descriptionPattern = cleanLink(rec[22]) + ".html"

                # Reading the associated Description HTML pages
                descriptions = glob.glob(os.path.join(mainDir, CURRENT_DATE + "\\Description", descriptionPattern))

                nFound += len(descriptions)

                for descriptionIndex, descriptionFile in enumerate(descriptions):

                    print("Reading description folder of '" + marketPlace + "', file '" + os.path.basename(
                        descriptionFile) + "', index= " + str(descriptionIndex + 1) + " ... " + str(len(descriptions)))

                    descriptionSoup = read_file(descriptionFile, createLog, logFile)

                    # description flags
                    doParseDescription = descriptionSoup is not None
                    doPersistRecord = False
                    doMoveDescription = False

                    rmm = []

                    if doParseDescription:
                        rmm = parse_description(marketPlace, descriptionFile, descriptionSoup, createLog, logFile)
                        doPersistRecord = rmm is not None
                    else:
                        readDescriptionError = True
                        parseDescriptionError = True

                    if doPersistRecord:

                        # Combining the information from the Listing and Description pages
                        rec = mergePages(rmm, rec)

                        # Appending the classification of the product to the record
                        rec.append(str(predict(rec[4], rec[5], language='sup_english')))

                        # Persisting the information in the database
                        persistSuccess = persist_record(url, rec, cur, con, createLog, logFile, listingFile,
                                                        descriptionFile)

                        doMoveDescription = persistSuccess

                    else:
                        parseDescriptionError = True

                    if doMoveDescription:

                        # move description file to the completed (Read) folder
                        moveSuccess = move_file(descriptionFile, createLog, logFile)

                        if not moveSuccess:
                            moveDescriptionError = True

                    else:
                        moveDescriptionError = True

            if not (nFound > 0):
                findDescriptionError = True
                incrementError()
                print(f"There was a problem locating the file(s) for {listingFile} in the Description section!")
                if createLog:
                    logFile.write(
                        str(nError) + f". There was a problem locating the file(s) for {listingFile}"
                                      f" in the Description section!\n\n")

        if not (readDescriptionError or parseDescriptionError or persistDescriptionError
                or moveDescriptionError or findDescriptionError):

            # move listing file to the completed (Read) folder
            move_file(listingFile, createLog, logFile)

    if createLog:
        logFile.close()

    cur.close()
    con.close()

    print("Parsing the " + marketPlace + " market and data classification done.")
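
A minimal sketch of how this module might be driven, assuming it lives under the project's MarketPlaces/Initialization package and that new_parse is normally called from the project's own initialization scripts rather than run standalone. The marketplace name and log flag below match the signature of new_parse above; the __main__ wrapper and the onion URL are illustrative placeholders only.

# Illustrative driver only (hypothetical): the URL is a placeholder and the real
# project supplies these arguments from its mining configuration.
if __name__ == '__main__':
    marketPlace = "CityMarket"                 # must match one of the parsers wired into parse_listing/parse_description
    url = "http://examplemarket.onion"         # placeholder marketplace URL
    createLog = True                           # write a per-run log file next to the scraped HTML pages

    new_parse(marketPlace, url, createLog)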