This is based on the calsyslab project.
__author__ = 'Helium'

import glob
import os
import codecs
import shutil
import traceback

from bs4 import BeautifulSoup

from MarketPlaces.DB_Connection.db_connection import *
from MarketPlaces.DarkFox.parser import *
from MarketPlaces.Tor2door.parser import *
from MarketPlaces.Apocalypse.parser import *
from MarketPlaces.ThiefWorld.parser import *
from MarketPlaces.AnonymousMarketplace.parser import *
from MarketPlaces.ViceCity.parser import *
from MarketPlaces.TorBay.parser import *
from MarketPlaces.M00nkeyMarket.parser import *
from MarketPlaces.DarkMatter.parser import *
from MarketPlaces.Classifier.classify_product import predict

# cleanLink is expected to be provided by the wildcard parser imports above
def mergePages(rmm, rec):
    # Fill the placeholder fields (-1) of a listing record (rec) with the values
    # parsed from the matching description page record (rmm).

    # key = u"Pr:" + rec[1].upper() + u" Vendor:" + rec[18].upper()
    # key = rec[23]

    print("----------------- Matched: " + rec[4] + "--------------------")

    if rec[1] == "-1":  # name_vendor
        rec[1] = rmm[0]
    if rec[2] == "-1":  # rating_vendor
        rec[2] = rmm[1]
    if rec[3] == "-1":  # success_vendor
        rec[3] = rmm[2]
    if rec[4] == "-1":  # name_item
        rec[4] = rmm[3]
    if rec[5] == "-1":  # description_item
        rec[5] = rmm[4]
    if rec[6] == "-1":  # cve_item
        rec[6] = rmm[5]
    if rec[7] == "-1":  # ms_item
        rec[7] = rmm[6]
    if rec[8] == "-1":  # category_item
        rec[8] = rmm[7]
    if rec[9] == "-1":  # views_item
        rec[9] = rmm[8]
    if rec[10] == "-1":  # reviews_item
        rec[10] = rmm[9]
    if rec[11] == "-1":  # rating_item
        rec[11] = rmm[10]
    if rec[12] == "-1":  # adddate_item
        rec[12] = rmm[11]
    if rec[13] == "-1":  # btc_item
        rec[13] = rmm[12]
    if rec[14] == "-1":  # usd_item
        rec[14] = rmm[13]
    if rec[15] == "-1":  # euro_item
        rec[15] = rmm[14]
    if rec[16] == "-1":  # quantitysold_item
        rec[16] = rmm[15]
    if rec[17] == "-1":  # quantityleft_item
        rec[17] = rmm[16]
    if rec[18] == "-1":  # shippedfrom_item
        rec[18] = rmm[17]
    if rec[19] == "-1":  # shippedto_item
        rec[19] = rmm[18]

    return rec
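
# Illustrative note (not in the original source): mergePages only overwrites the
# placeholder value "-1". For example, if a listing record has rec[5] == "-1"
# (description_item missing from the listing page) and the matching description
# record has rmm[4] holding the product description text, the merged record carries
# that description; fields already filled by the listing parser are left untouched.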
def persist_data(url, row, cur):
    # Persist one merged record: the marketplace first, then its vendor, then the item.
    marketPlace = create_marketPlace(cur, row, url)
    vendor = create_vendor(cur, row, marketPlace)
    create_items(cur, row, marketPlace, vendor)
def new_parse(marketPlace, url, createLog):

    from MarketPlaces.Initialization.markets_mining import config, CURRENT_DATE

    print("Parsing the " + marketPlace + " marketplace and conducting data classification to store the information in the database.")

    # ini = time.time()

    # Connecting to the database
    con = connectDataBase()
    cur = con.cursor()

    # Creating the tables (The database should be created manually)
    create_database(cur, con)

    nError = 0

    lines = []    # listing pages
    lns = []      # description pages
    detPage = {}  # parsed description records, keyed by URL

    # Creating the log file for each Market Place
    if createLog:
        if not os.path.exists("./" + marketPlace + "/Logs/" + marketPlace + "_" + CURRENT_DATE + ".log"):
            logFile = open("./" + marketPlace + "/Logs/" + marketPlace + "_" + CURRENT_DATE + ".log", "w")
        else:
            print("Files of the date " + CURRENT_DATE + " from the Market Place " + marketPlace +
                  " were already read. Delete the referent information in the Database and also delete the log file"
                  " in the _Logs folder to read files from this Market Place of this date again.")
            raise SystemExit

    mainDir = os.path.join(config.get('Project', 'shared_folder'), "MarketPlaces/" + marketPlace + "/HTML_Pages")

    # Reading the Listing Html Pages
    for fileListing in glob.glob(os.path.join(mainDir, CURRENT_DATE, "Listing", '*.html')):
        lines.append(fileListing)

    # Reading the Description Html Pages
    for fileDescription in glob.glob(os.path.join(mainDir, CURRENT_DATE, "Description", '*.html')):
        lns.append(fileDescription)

    # Parsing the Description Pages and putting the tag's content into a dictionary (hash table)
    for index, line2 in enumerate(lns):

        print("Reading description folder of '" + marketPlace + "', file '" + os.path.basename(line2) + "', index= " + str(index + 1) + " ... " + str(len(lns)))

        try:
            html = codecs.open(line2.strip('\n'), encoding='utf8')
            soup = BeautifulSoup(html, "html.parser")
            html.close()
        except:
            try:
                html = open(line2.strip('\n'))
                soup = BeautifulSoup(html, "html.parser")
                html.close()
            except:
                nError += 1
                print("There was a problem to read the file " + line2 + " in the Description section!")
                if createLog:
                    logFile.write(str(nError) + ". There was a problem to read the file " + line2 + " in the Description section.\n")
                continue

        try:
            if marketPlace == "DarkFox":
                rmm = darkfox_description_parser(soup)
            elif marketPlace == "Tor2door":
                rmm = tor2door_description_parser(soup)
            elif marketPlace == "Apocalypse":
                rmm = apocalypse_description_parser(soup)
            elif marketPlace == "ThiefWorld":
                rmm = thiefWorld_description_parser(soup)
            elif marketPlace == "AnonymousMarketplace":
                rmm = anonymousMarketplace_description_parser(soup)
            elif marketPlace == "ViceCity":
                rmm = vicecity_description_parser(soup)
            elif marketPlace == "TorBay":
                rmm = torbay_description_parser(soup)
            elif marketPlace == "M00nkeyMarket":
                rmm = m00nkey_description_parser(soup)
            elif marketPlace == "DarkMatter":
                rmm = darkmatter_description_parser(soup)

            # key = u"Pr:" + rmm[0].upper()[:desc_lim1] + u" Vendor:" + rmm[13].upper()[:desc_lim2]
            key = u"Url:" + os.path.basename(line2).replace(".html", "")

            # save file address with description record in memory
            detPage[key] = {'rmm': rmm, 'filename': os.path.basename(line2)}
        except:
            nError += 1
            print("There was a problem to parse the file " + line2 + " in the Description section!")
            if createLog:
                logFile.write(str(nError) + ". There was a problem to parse the file " + line2 + " in the Description section.\n")

    # Parsing the Listing Pages and putting the tag's content into a list
    for index, line1 in enumerate(lines):

        print("Reading listing folder of '" + marketPlace + "', file '" + os.path.basename(line1) + "', index= " + str(index + 1) + " ... " + str(len(lines)))

        readError = False
        try:
            html = codecs.open(line1.strip('\n'), encoding='utf8')
            soup = BeautifulSoup(html, "html.parser")
            html.close()
        except:
            try:
                html = open(line1.strip('\n'))
                soup = BeautifulSoup(html, "html.parser")
                html.close()
            except:
                nError += 1
                print("There was a problem to read the file " + line1 + " in the Listing section!")
                if createLog:
                    logFile.write(str(nError) + ". There was a problem to read the file " + line1 + " in the Listing section.\n")
                readError = True

        if not readError:

            parseError = False
            try:
                if marketPlace == "DarkFox":
                    rw = darkfox_listing_parser(soup)
                elif marketPlace == "Tor2door":
                    rw = tor2door_listing_parser(soup)
                elif marketPlace == "Apocalypse":
                    rw = apocalypse_listing_parser(soup)
                elif marketPlace == "ThiefWorld":
                    rw = thiefWorld_listing_parser(soup)
                elif marketPlace == "AnonymousMarketplace":
                    rw = anonymousMarketplace_listing_parser(soup)
                elif marketPlace == "ViceCity":
                    rw = vicecity_listing_parser(soup)
                elif marketPlace == "TorBay":
                    rw = torbay_listing_parser(soup)
                elif marketPlace == "M00nkeyMarket":
                    rw = m00nkey_listing_parser(soup)
                elif marketPlace == "DarkMatter":
                    rw = darkmatter_listing_parser(soup)
                else:
                    parseError = True
            except:
                nError += 1
                print("There was a problem to parse the file " + line1 + " in the Listing section!")
                if createLog:
                    logFile.write(
                        str(nError) + ". There was a problem to parse the file " + line1 + " in the Listing section.\n")
                parseError = True

            if not parseError:

                persistError = False
                moveError = False
                num_in_db = 0
                num_persisted_moved = 0

                for rec in rw:

                    rec = rec.split(',')

                    # if len(detPage) > 0: #It was created here just because Zeroday Market does not have Description Pages
                    # key = rec[23]
                    # key = u"Pr:" + rec[1].upper()[:list_lim1] + u" Vendor:" + rec[18].upper()[:list_lim2]
                    key = u"Url:" + cleanLink(rec[20])

                    # if the associated description page is parsed
                    if key in detPage:

                        # rec = mergePages(detPage, rec)

                        # Combining the information from the Listing and Description Pages
                        rmm = detPage[key]['rmm']
                        rec = mergePages(rmm, rec)

                        # Appending to the record the classification of the product
                        # rec.append(str(predict(rec[1], rec[5], language='markets')))
                        rec.append(str(predict(rec[4], rec[5], language='sup_english')))

                        # Persisting the information in the database
                        try:
                            persist_data(url, tuple(rec), cur)
                            con.commit()
                        except:
                            trace = traceback.format_exc()
                            if trace.find("already exists") == -1:
                                nError += 1
                                print("There was a problem to persist the file " + detPage[key]['filename'] + " in the database!")
                                if createLog:
                                    logFile.write(
                                        str(nError) + ". There was a problem to persist the file " + detPage[key]['filename'] + " in the database.\n")
                                persistError = True
                            con.rollback()

                        if not persistError:

                            # move the description file to the Read (completed) folder;
                            # line2 still points inside the Description folder from the loop above
                            source = line2.replace(os.path.basename(line2), "") + detPage[key]['filename']
                            destination = line2.replace(os.path.basename(line2), "") + r'Read/'

                            try:
                                shutil.move(source, destination)
                                num_persisted_moved += 1
                            except:
                                print("There was a problem to move the file " + detPage[key]['filename'] + " in the Description section!")
                                nError += 1
                                if createLog:
                                    logFile.write(
                                        str(nError) + ". There was a problem to move the file " + detPage[key]['filename'] + " in the Description section.\n")
                                moveError = True

                    # if the associated description page is not read or not parsed
                    else:
                        # query database
                        # if the product already exists:
                        #     num_in_db += 1
                        pass

                # if the number of products on the listing page is equal to
                # the number of merged, persisted, and moved products plus
                # the number of products already in the database
                if not persistError and not moveError and len(rw) == (num_persisted_moved + num_in_db):

                    # move the listing file to the Read (completed) folder
                    source = line1
                    destination = line1.replace(os.path.basename(line1), "") + r'Read/'

                    try:
                        shutil.move(source, destination)
                    except:
                        nError += 1
                        print("There was a problem to move the file " + line1 + " in the Listing section!")
                        if createLog:
                            logFile.write(str(nError) + ". There was a problem to move the file " + line1 + " in the Listing section.\n")

    # g.close()

    if createLog:
        logFile.close()

    # end = time.time()
    # finalTime = float(end - ini)
    # print(marketPlace + " Parsing Performed Successfully in %.2f" % finalTime + "!")

    input("Parsing of the " + marketPlace + " marketplace and data classification done successfully. Press ENTER to continue\n")
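
For context, the sketch below shows one hypothetical way new_parse could be invoked directly. The __main__ guard, the chosen market name, and the onion URL are illustrative assumptions and not part of the original module, which is presumably driven by the MarketPlaces Initialization scripts that set config and CURRENT_DATE.

# Hypothetical usage sketch (illustrative only, not part of the original file):
if __name__ == '__main__':
    # "ThiefWorld" must match one of the marketplace branches handled in new_parse;
    # the URL is a placeholder for the market address stored alongside the records.
    new_parse(marketPlace="ThiefWorld", url="http://thiefworld.example.onion", createLog=True)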