This is based on the calsyslab project.
__author__ = 'Helium'

import glob
import os
import codecs
import shutil

# traceback and BeautifulSoup are used further below; they are imported explicitly here
# instead of relying on the star imports of the parser modules.
import traceback
from bs4 import BeautifulSoup

from MarketPlaces.DB_Connection.db_connection import *
from MarketPlaces.DarkFox.parser import *
from MarketPlaces.Tor2door.parser import *
from MarketPlaces.Apocalypse.parser import *
from MarketPlaces.ThiefWorld.parser import *
from MarketPlaces.AnonymousMarketplace.parser import *
from MarketPlaces.ViceCity.parser import *
from MarketPlaces.TorBay.parser import *
from MarketPlaces.M00nkeyMarket.parser import *
from MarketPlaces.DarkMatter.parser import *
from MarketPlaces.DigitalThriftShop.parser import *
from MarketPlaces.LionMarketplace.parser import *
from MarketPlaces.Classifier.classify_product import predict
def mergePages(rmm, rec):
    # Fill every listing field still holding the placeholder "-1" with the corresponding
    # value parsed from the description page (rmm), then return the merged record.

    # key = u"Pr:" + rec[1].upper() + u" Vendor:" + rec[18].upper()
    # key = rec[23]

    print("----------------- Matched: " + rec[4] + "--------------------")

    if rec[1] == "-1":  # name_vendor
        rec[1] = rmm[0]
    if rec[2] == "-1":  # rating_vendor
        rec[2] = rmm[1]
    if rec[3] == "-1":  # success_vendor
        rec[3] = rmm[2]
    if rec[4] == "-1":  # name_item
        rec[4] = rmm[3]
    if rec[5] == "-1":  # description_item
        rec[5] = rmm[4]
    if rec[6] == "-1":  # cve_item
        rec[6] = rmm[5]
    if rec[7] == "-1":  # ms_item
        rec[7] = rmm[6]
    if rec[8] == "-1":  # category_item
        rec[8] = rmm[7]
    if rec[9] == "-1":  # views_item
        rec[9] = rmm[8]
    if rec[10] == "-1":  # reviews_item
        rec[10] = rmm[9]
    if rec[11] == "-1":  # rating_item
        rec[11] = rmm[10]
    if rec[12] == "-1":  # adddate_item
        rec[12] = rmm[11]
    if rec[13] == "-1":  # btc_item
        rec[13] = rmm[12]
    if rec[14] == "-1":  # usd_item
        rec[14] = rmm[13]
    if rec[15] == "-1":  # euro_item
        rec[15] = rmm[14]
    if rec[16] == "-1":  # quantitysold_item
        rec[16] = rmm[15]
    if rec[17] == "-1":  # quantityleft_item
        rec[17] = rmm[16]
    if rec[18] == "-1":  # shippedfrom_item
        rec[18] = rmm[17]
    if rec[19] == "-1":  # shippedto_item
        rec[19] = rmm[18]

    return rec
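
# A minimal sketch of how mergePages is meant to be used (hypothetical values, not from the
# project's data): a listing record keeps the placeholder "-1" for every field the listing page
# did not provide, and the matching description record fills those gaps.
#
#   rec = ['0', 'someVendor', '-1', '-1', 'someProduct', ...]   # listing fields (positions 1-19)
#   rmm = ['someVendor', '4.8', '250', 'someProduct', ...]      # description fields (positions 0-18)
#   rec = mergePages(rmm, rec)                                  # rec[2] becomes '4.8', rec[3] becomes '250'
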
def persist_data(url, row, cur):
    marketPlace = create_marketPlace(cur, row, url)
    vendor = create_vendor(cur, row, marketPlace)
    create_items(cur, row, marketPlace, vendor)
def new_parse(marketPlace, url, createLog):

    from MarketPlaces.Initialization.markets_mining import config, CURRENT_DATE

    print("Parsing the " + marketPlace + " marketplace and conducting data classification to store the information in the database.")

    # ini = time.time()

    # Connecting to the database
    con = connectDataBase()
    cur = con.cursor()

    # Creating the tables (the database itself should be created manually)
    create_database(cur, con)

    nError = 0

    lines = []   # listing pages
    lns = []     # description pages
    detPage = {}

    # Creating the log file for each marketplace
    if createLog:
        if not os.path.exists("./" + marketPlace + "/Logs/" + marketPlace + "_" + CURRENT_DATE + ".log"):
            logFile = open("./" + marketPlace + "/Logs/" + marketPlace + "_" + CURRENT_DATE + ".log", "w")
        else:
            print("Files of the date " + CURRENT_DATE + " from the marketplace " + marketPlace +
                  " were already read. Delete the corresponding information in the database and also delete the log file"
                  " in the _Logs folder to read files from this marketplace for this date again.")
            raise SystemExit
    mainDir = os.path.join(config.get('Project', 'shared_folder'), "MarketPlaces/" + marketPlace + "/HTML_Pages")

    # Reading the Listing HTML pages
    for fileListing in glob.glob(os.path.join(mainDir, CURRENT_DATE, "Listing", '*.html')):
        lines.append(fileListing)

    # Reading the Description HTML pages
    for fileDescription in glob.glob(os.path.join(mainDir, CURRENT_DATE, "Description", '*.html')):
        lns.append(fileDescription)
    # Parsing the Description pages and putting each parsed record into a dictionary (hash table)
    for index, line2 in enumerate(lns):

        print("Reading description folder of '" + marketPlace + "', file '" + os.path.basename(line2) +
              "', index= " + str(index + 1) + " ... " + str(len(lns)))

        try:
            html = codecs.open(line2.strip('\n'), encoding='utf8')
            soup = BeautifulSoup(html, "html.parser")
            html.close()
        except:
            try:
                html = open(line2.strip('\n'))
                soup = BeautifulSoup(html, "html.parser")
                html.close()
            except:
                nError += 1
                print("There was a problem to read the file " + line2 + " in the Description section!")
                if createLog:
                    logFile.write(str(nError) + ". There was a problem to read the file " + line2 + " in the Description section.\n")
                continue

        try:
            if marketPlace == "DarkFox":
                rmm = darkfox_description_parser(soup)
            elif marketPlace == "Tor2door":
                rmm = tor2door_description_parser(soup)
            elif marketPlace == "Apocalypse":
                rmm = apocalypse_description_parser(soup)
            elif marketPlace == "ThiefWorld":
                rmm = thiefWorld_description_parser(soup)
            elif marketPlace == "AnonymousMarketplace":
                rmm = anonymousMarketplace_description_parser(soup)
            elif marketPlace == "ViceCity":
                rmm = vicecity_description_parser(soup)
            elif marketPlace == "TorBay":
                rmm = torbay_description_parser(soup)
            elif marketPlace == "M00nkeyMarket":
                rmm = m00nkey_description_parser(soup)
            elif marketPlace == "DarkMatter":
                rmm = darkmatter_description_parser(soup)
            elif marketPlace == "DigitalThriftShop":
                rmm = digitalThriftShop_description_parser(soup)
            elif marketPlace == "LionMarketplace":
                rmm = lionmarketplace_description_parser(soup)

            # key = u"Pr:" + rmm[0].upper()[:desc_lim1] + u" Vendor:" + rmm[13].upper()[:desc_lim2]
            key = u"Url:" + os.path.basename(line2).replace(".html", "")

            # save the file name together with the parsed description record in memory
            detPage[key] = {'rmm': rmm, 'filename': os.path.basename(line2)}

        except:
            nError += 1
            print("There was a problem to parse the file " + line2 + " in the Description section!")
            if createLog:
                logFile.write(str(nError) + ". There was a problem to parse the file " + line2 + " in the Description section.\n")
    # Parsing the Listing pages and putting each parsed record into a list
    for index, line1 in enumerate(lines):

        print("Reading listing folder of '" + marketPlace + "', file '" + os.path.basename(line1) +
              "', index= " + str(index + 1) + " ... " + str(len(lines)))

        readError = False
        try:
            html = codecs.open(line1.strip('\n'), encoding='utf8')
            soup = BeautifulSoup(html, "html.parser")
            html.close()
        except:
            try:
                html = open(line1.strip('\n'))
                soup = BeautifulSoup(html, "html.parser")
                html.close()
            except:
                nError += 1
                print("There was a problem to read the file " + line1 + " in the Listing section!")
                if createLog:
                    logFile.write(str(nError) + ". There was a problem to read the file " + line1 + " in the Listing section.\n")
                readError = True

        if not readError:

            parseError = False
            try:
                if marketPlace == "DarkFox":
                    rw = darkfox_listing_parser(soup)
                elif marketPlace == "Tor2door":
                    rw = tor2door_listing_parser(soup)
                elif marketPlace == "Apocalypse":
                    rw = apocalypse_listing_parser(soup)
                elif marketPlace == "ThiefWorld":
                    rw = thiefWorld_listing_parser(soup)
                elif marketPlace == "AnonymousMarketplace":
                    rw = anonymousMarketplace_listing_parser(soup)
                elif marketPlace == "ViceCity":
                    rw = vicecity_listing_parser(soup)
                elif marketPlace == "TorBay":
                    rw = torbay_listing_parser(soup)
                elif marketPlace == "M00nkeyMarket":
                    rw = m00nkey_listing_parser(soup)
                elif marketPlace == "DarkMatter":
                    rw = darkmatter_listing_parser(soup)
                elif marketPlace == "DigitalThriftShop":
                    rw = digitalThriftShop_listing_parser(soup)
                elif marketPlace == "LionMarketplace":
                    rw = lionmarketplace_listing_parser(soup)
                else:
                    parseError = True

            except:
                nError += 1
                print("There was a problem to parse the file " + line1 + " in the Listing section!")
                if createLog:
                    logFile.write(
                        str(nError) + ". There was a problem to parse the file " + line1 + " in the Listing section.\n")
                parseError = True
            if not parseError:

                persistError = False
                moveError = False
                num_in_db = 0
                num_persisted_moved = 0

                for rec in rw:

                    rec = rec.split(',')

                    # if len(detPage) > 0: # It was created here just because Zeroday Market does not have Description Pages
                    # key = rec[23]
                    # key = u"Pr:" + rec[1].upper()[:list_lim1] + u" Vendor:" + rec[18].upper()[:list_lim2]
                    key = u"Url:" + cleanLink(rec[20])

                    # if the associated description page was parsed
                    if key in detPage:

                        # rec = mergePages(detPage, rec)

                        # Combining the information from the Listing and Description pages
                        rmm = detPage[key]['rmm']
                        rec = mergePages(rmm, rec)

                        # Append the classification of the product to the record
                        # rec.append(str(predict(rec[1], rec[5], language='markets')))
                        rec.append(str(predict(rec[4], rec[5], language='sup_english')))

                        # Persisting the information in the database
                        try:
                            persist_data(url, tuple(rec), cur)
                            con.commit()
                        except:
                            trace = traceback.format_exc()
                            if trace.find("already exists") == -1:
                                nError += 1
                                print("There was a problem to persist the file " + detPage[key]['filename'] + " in the database!")
                                if createLog:
                                    logFile.write(
                                        str(nError) + ". There was a problem to persist the file " + detPage[key]['filename'] + " in the database.\n")
                                persistError = True
                            con.rollback()

                        if not persistError:

                            # move the description file to the completed (Read) folder;
                            # the Description folder path is derived from the last description file read (line2)
                            source = line2.replace(os.path.basename(line2), "") + detPage[key]['filename']
                            destination = line2.replace(os.path.basename(line2), "") + r'Read/'

                            try:
                                shutil.move(source, destination)
                                num_persisted_moved += 1
                            except:
                                print("There was a problem to move the file " + detPage[key]['filename'] + " in the Description section!")
                                nError += 1
                                if createLog:
                                    logFile.write(
                                        str(nError) + ". There was a problem to move the file " + detPage[key]['filename'] + " in the Description section.\n")
                                moveError = True

                    # if the associated description page was not read or not parsed
                    else:
                        # query the database
                        # if the product already exists:
                        #     num_in_db += 1
                        pass
                # if the number of products on the listing page is equal to
                # the number of merged, persisted, and moved products plus
                # the number of products already in the database
                if not persistError and not moveError and len(rw) == (num_persisted_moved + num_in_db):

                    # move the listing file to the completed (Read) folder
                    source = line1
                    destination = line1.replace(os.path.basename(line1), "") + r'Read/'

                    try:
                        shutil.move(source, destination)
                    except:
                        nError += 1
                        print("There was a problem to move the file " + line1 + " in the Listing section!")
                        if createLog:
                            logFile.write(str(nError) + ". There was a problem to move the file " + line1 + " in the Listing section.\n")

    # g.close()

    if createLog:
        logFile.close()

    # end = time.time()
    # finalTime = float(end - ini)
    # print(marketPlace + " parsing performed successfully in %.2f" % finalTime + "!")

    input("Parsing the " + marketPlace + " marketplace and data classification done successfully. Press ENTER to continue\n")