This is based on the calsyslab project.

__author__ = 'Helium'

import glob
import os
import codecs
import shutil
import traceback  # traceback.format_exc() is used below but was not in the original import list

from bs4 import BeautifulSoup  # assumed source of BeautifulSoup; it may also be re-exported by the parser modules

from MarketPlaces.DB_Connection.db_connection import *
from MarketPlaces.DarkFox.parser import *
from MarketPlaces.Tor2door.parser import *
from MarketPlaces.Classifier.classify_product import predict
# cleanLink() and the *_parser helpers used below are expected to come from the wildcard imports above
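
# Field layout (inferred from the inline comments in mergePages below): both the listing
# record (rec) and the description record (rmm) are flat lists of strings. rec[1:4] holds
# the vendor fields (name, rating, success); rec[4:20] holds the item fields (name,
# description, CVE, MS, category, views, reviews, rating, add date, BTC/USD/EUR price,
# quantity sold, quantity left, shipped from, shipped to); rec[20] holds the product URL
# that is later used as the match key; and "-1" marks a field the listing parser could not
# extract. rmm uses the same order shifted by one (rmm[0] is the vendor name).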
def mergePages(rmm, rec):

    # key = u"Pr:" + rec[1].upper() + u" Vendor:" + rec[18].upper()
    # key = rec[23]

    print("----------------- Matched: " + rec[1] + "--------------------")

    if rec[1] == "-1":  # name_vendor
        rec[1] = rmm[0]
    if rec[2] == "-1":  # rating_vendor
        rec[2] = rmm[1]
    if rec[3] == "-1":  # success_vendor
        rec[3] = rmm[2]
    if rec[4] == "-1":  # name_item
        rec[4] = rmm[3]
    if rec[5] == "-1":  # description_item
        rec[5] = rmm[4]
    if rec[6] == "-1":  # cve_item
        rec[6] = rmm[5]
    if rec[7] == "-1":  # ms_item
        rec[7] = rmm[6]
    if rec[8] == "-1":  # category_item
        rec[8] = rmm[7]
    if rec[9] == "-1":  # views_item
        rec[9] = rmm[8]
    if rec[10] == "-1":  # reviews_item
        rec[10] = rmm[9]
    if rec[11] == "-1":  # rating_item
        rec[11] = rmm[10]
    if rec[12] == "-1":  # adddate_item
        rec[12] = rmm[11]
    if rec[13] == "-1":  # btc_item
        rec[13] = rmm[12]
    if rec[14] == "-1":  # usd_item
        rec[14] = rmm[13]
    if rec[15] == "-1":  # euro_item
        rec[15] = rmm[14]
    if rec[16] == "-1":  # quantitysold_item
        rec[16] = rmm[15]
    if rec[17] == "-1":  # quantityleft_item
        rec[17] = rmm[16]
    if rec[18] == "-1":  # shippedfrom_item
        rec[18] = rmm[17]
    if rec[19] == "-1":  # shippedto_item
        rec[19] = rmm[18]

    return rec
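
# Illustrative example (hypothetical values): a listing record that could not extract the
# item description keeps "-1" in rec[5]; after merging, the value parsed from the matching
# description page (rmm[4]) fills the gap:
#
#   rec = ['0', 'vendorA', '4.9', '12', 'productX', '-1', ...]    # from a listing parser
#   rmm = ['vendorA', '4.9', '12', 'productX', 'full text', ...]  # from a description parser
#   rec = mergePages(rmm, rec)                                    # rec[5] becomes 'full text'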

def persist_data(url, row, cur):

    marketPlace = create_marketPlace(cur, row, url)

    vendor = create_vendor(cur, row, marketPlace)

    create_items(cur, row, marketPlace, vendor)
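
# Note: create_marketPlace(), create_vendor(), and create_items() are assumed to be provided
# by the db_connection wildcard import above; persist_data() writes the parent rows first
# (marketplace, then vendor, then item), presumably so the child records can reference them.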

def new_parse(marketPlace, url, createLog):

    from MarketPlaces.Initialization.markets_mining import CURRENT_DATE

    print("Parsing the " + marketPlace + " marketplace and conducting data classification to store the information in the database.")

    # ini = time.time()

    # Connecting to the database
    con = connectDataBase()
    cur = con.cursor()

    # Creating the tables (the database should be created manually)
    create_database(cur, con)

    nError = 0

    lines = []  # listing pages
    lns = []  # description pages
    detPage = {}

    # Creating the log file for each marketplace
    if createLog:
        if not os.path.exists("./" + marketPlace + "/Logs/" + marketPlace + "_" + CURRENT_DATE + ".log"):
            logFile = open("./" + marketPlace + "/Logs/" + marketPlace + "_" + CURRENT_DATE + ".log", "w")
        else:
            print("Files of the date " + CURRENT_DATE + " from the marketplace " + marketPlace +
                  " were already read. Delete the related information in the database and also delete the log file"
                  " in the _Logs folder to read files from this marketplace for this date again.")
            raise SystemExit
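
    # Note: if a log file for this marketplace and date already exists, the run aborts with
    # SystemExit so that pages already ingested for that date are not read twice.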

    # Reading the Listing HTML pages
    for fileListing in glob.glob(os.path.join("..\\" + marketPlace + "\\HTML_Pages\\" + CURRENT_DATE + "\\Listing", '*.html')):
        lines.append(fileListing)

    # Reading the Description HTML pages
    for fileDescription in glob.glob(os.path.join("..\\" + marketPlace + "\\HTML_Pages\\" + CURRENT_DATE + "\\Description", '*.html')):
        lns.append(fileDescription)
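
    # The globs above assume the crawler stored the pages under Windows-style paths of the
    # form ..\<marketPlace>\HTML_Pages\<CURRENT_DATE>\Listing\*.html and
    # ..\<marketPlace>\HTML_Pages\<CURRENT_DATE>\Description\*.html, relative to the
    # directory this script is run from.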

    # Parsing the Description pages and putting the tags' content into a dictionary (hash table)
    for index, line2 in enumerate(lns):

        print("Reading description folder of '" + marketPlace + "', file '" + os.path.basename(line2) + "', index= " + str(index + 1) + " ... " + str(len(lns)))

        try:
            html = codecs.open(line2.strip('\n'), encoding='utf8')
            soup = BeautifulSoup(html, "html.parser")
            html.close()
        except:

            try:
                html = open(line2.strip('\n'))
                soup = BeautifulSoup(html, "html.parser")
                html.close()
            except:

                nError += 1
                print("There was a problem reading the file " + line2 + " in the Description section!")
                if createLog:
                    logFile.write(str(nError) + ". There was a problem reading the file " + line2 + " in the Description section.\n")
                continue

        try:

            if marketPlace == "DarkFox":
                rmm = darkfox_description_parser(soup)
            elif marketPlace == "Tor2door":
                rmm = tor2door_description_parser(soup)

            # key = u"Pr:" + rmm[0].upper()[:desc_lim1] + u" Vendor:" + rmm[13].upper()[:desc_lim2]
            key = u"Url:" + os.path.basename(line2).replace(".html", "")

            # save the file name together with the description record in memory
            detPage[key] = {'rmm': rmm, 'filename': os.path.basename(line2)}

        except:

            nError += 1
            print("There was a problem parsing the file " + line2 + " in the Description section!")
            if createLog:
                logFile.write(str(nError) + ". There was a problem parsing the file " + line2 + " in the Description section.\n")
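
    # At this point detPage maps "Url:<description file name without .html>" to the parsed
    # description record (rmm) and its file name; the listing loop below rebuilds the same
    # key from the cleaned product URL (rec[20]) to pair each listing with its description.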

    # Parsing the Listing pages and putting the tags' content into a list
    for index, line1 in enumerate(lines):

        print("Reading listing folder of '" + marketPlace + "', file '" + os.path.basename(line1) + "', index= " + str(index + 1) + " ... " + str(len(lines)))

        readError = False
        try:
            html = codecs.open(line1.strip('\n'), encoding='utf8')
            soup = BeautifulSoup(html, "html.parser")
            html.close()
        except:

            try:
                html = open(line1.strip('\n'))
                soup = BeautifulSoup(html, "html.parser")
                html.close()
            except:

                nError += 1
                print("There was a problem reading the file " + line1 + " in the Listing section!")
                if createLog:
                    logFile.write(str(nError) + ". There was a problem reading the file " + line1 + " in the Listing section.\n")
                readError = True

        if not readError:

            parseError = False
            try:

                if marketPlace == "DarkFox":
                    rw = darkfox_listing_parser(soup)
                elif marketPlace == "Tor2door":
                    rw = tor2door_listing_parser(soup)
                else:
                    parseError = True

            except:

                nError += 1
                print("There was a problem parsing the file " + line1 + " in the Listing section!")
                if createLog:
                    logFile.write(
                        str(nError) + ". There was a problem parsing the file " + line1 + " in the Listing section.\n")
                parseError = True

            if not parseError:

                persistError = False
                moveError = False
                num_in_db = 0
                num_persisted_moved = 0

                for rec in rw:

                    rec = rec.split(',')

                    # if len(detPage) > 0:  # It was created here just because Zeroday Market does not have Description pages
                    # key = rec[23]
                    # key = u"Pr:" + rec[1].upper()[:list_lim1] + u" Vendor:" + rec[18].upper()[:list_lim2]
                    key = u"Url:" + cleanLink(rec[20])

                    # if the associated description page was parsed
                    if key in detPage:

                        # rec = mergePages(detPage, rec)

                        # Combining the information from the Listing and Description pages
                        rmm = detPage[key]['rmm']
                        rec = mergePages(rmm, rec)

                        # Appending the classification of the product to the record
                        # rec.append(str(predict(rec[1], rec[5], language='markets')))
                        rec.append(str(predict(rec[4], rec[5], language='sup_english')))

                        # Persisting the information in the database
                        try:
                            persist_data(url, tuple(rec), cur)
                            con.commit()
                        except:

                            trace = traceback.format_exc()

                            if trace.find("already exists") == -1:
                                nError += 1
                                print("There was a problem persisting the file " + detPage[key]['filename'] + " in the database!")
                                if createLog:
                                    logFile.write(
                                        str(nError) + ". There was a problem persisting the file " + detPage[key]['filename'] + " in the database.\n")
                                persistError = True

                            con.rollback()

                        if not persistError:

                            # move the description file to the completed (Read) folder
                            # (line2 still holds the last path from the description loop; only its directory part is reused here)
                            source = line2.replace(os.path.basename(line2), "") + detPage[key]['filename']
                            destination = line2.replace(os.path.basename(line2), "") + r'Read/'

                            try:
                                shutil.move(source, destination)
                                num_persisted_moved += 1
                            except:

                                print("There was a problem moving the file " + detPage[key]['filename'] + " in the Description section!")
                                nError += 1
                                if createLog:
                                    logFile.write(
                                        str(nError) + ". There was a problem moving the file " + detPage[key]['filename'] + " in the Description section.\n")
                                moveError = True

                    # if the associated description page was not read or not parsed
                    else:
                        # query the database
                        # if the product already exists:
                        #     num_in_db += 1
                        pass

                # if the number of products on the listing page is equal to
                # the number of merged, persisted, and moved products plus
                # the number of products already in the database
                if not persistError and not moveError and len(rw) == (num_persisted_moved + num_in_db):

                    # move the listing file to the completed (Read) folder
                    source = line1
                    destination = line1.replace(os.path.basename(line1), "") + r'Read/'

                    try:
                        shutil.move(source, destination)
                    except:

                        nError += 1
                        print("There was a problem moving the file " + line1 + " in the Listing section!")
                        if createLog:
                            logFile.write(str(nError) + ". There was a problem moving the file " + line1 + " in the Listing section.\n")

    # g.close()

    if createLog:
        logFile.close()

    # end = time.time()
    # finalTime = float(end - ini)
    # print(marketPlace + " parsing performed successfully in %.2f" % finalTime + "!")

    input("Parsing the " + marketPlace + " marketplace and data classification done successfully. Press ENTER to continue\n")