# This parser is based on the calsyslab project.

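"""Parse the crawled Listing and Description HTML pages of a marketplace
(currently DarkFox and Tor2door), merge the two views of each product,
classify the product with the project's classifier, and persist the result
in the database. Successfully processed files are moved to a Read/ folder."""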
__author__ = 'Helium'

import glob
import os
import codecs
import shutil
import traceback  # used for the "already exists" check when persisting

# date and BeautifulSoup are used below; they may also be re-exported by the
# wildcard imports, but importing them explicitly keeps this file self-contained
from datetime import date
from bs4 import BeautifulSoup

from MarketPlaces.DB_Connection.db_connection import *
from MarketPlaces.DarkFox.parser import *
from MarketPlaces.Tor2door.parser import *
from MarketPlaces.Classifier.classify_product import predict
def mergePages(rmm, rec):
    """Complete a listing record with the values parsed from the matching
    description page: every field of `rec` still holding the placeholder
    "-1" is replaced by the corresponding entry of `rmm`.

    Field order for rec[1:20] (mapped from rmm[0:19]): name_vendor,
    rating_vendor, success_vendor, name_item, description_item, cve_item,
    ms_item, category_item, views_item, reviews_item, rating_item,
    adddate_item, btc_item, usd_item, euro_item, quantitysold_item,
    quantityleft_item, shippedfrom_item, shippedto_item.
    """
    print("----------------- Matched: " + rec[1] + "--------------------")

    # "-1" marks a value that was not found on the listing page; take it
    # from the description record instead
    for i in range(1, 20):
        if rec[i] == "-1":
            rec[i] = rmm[i - 1]

    return rec
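# Example (hypothetical values): a listing record whose USD price was not
# found, i.e. rec[14] == "-1", receives rmm[13] from the matching description
# record after rec = mergePages(rmm, rec); fields already filled are kept.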
def persist_data(row, cur):
    """Persist one merged record: the marketplace first, then its vendor,
    then the item, since each create_* helper needs the key returned by
    the previous one."""
    marketPlace = create_marketPlace(cur, row)
    vendor = create_vendor(cur, row, marketPlace)
    create_items(cur, row, marketPlace, vendor)
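# connectDataBase, create_database, and the create_* helpers used above are
# assumed to come from MarketPlaces.DB_Connection.db_connection via the
# wildcard import at the top of this file.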
def new_parse(marketPlace, createLog):

    print("Parsing the " + marketPlace + " marketplace and running data classification to store the information in the database.")

    crawlerDate = date.today()
    # ini = time.time()

    global site

    # connecting to the database
    con = connectDataBase()
    cur = con.cursor()

    # creating the tables (the database itself should be created manually)
    create_database(cur, con)

    nError = 0

    lines = []    # listing html files
    lns = []      # description html files
    detPage = {}  # parsed description records, keyed by product url
    rw = []       # parsed listing records

    # creating the log file for each marketplace
    if createLog:
        logPath = ("./" + marketPlace + "/Logs/" + marketPlace + "_" +
                   str("%02d" % crawlerDate.month) + str("%02d" % crawlerDate.day) +
                   str("%04d" % crawlerDate.year) + ".log")
        if not os.path.exists(logPath):
            logFile = open(logPath, "w")
        else:
            print("Files of the date " + str("%02d" % crawlerDate.month) + "/" + str("%02d" % crawlerDate.day) +
                  "/" + str("%04d" % crawlerDate.year) + " from the marketplace " + marketPlace +
                  " were already read. Delete the corresponding information in the database and also delete the "
                  "log file in the _Logs folder to read files from this marketplace for this date again.")
            raise SystemExit
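    # Note: the log name encodes today's date (<marketPlace>_MMDDYYYY.log), so
    # each marketplace can be parsed at most once per day unless the log file
    # and the corresponding database rows are deleted first.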
    # reading the Listing html pages
    for fileListing in glob.glob(os.path.join(os.getcwd().replace("Initialization", "") + marketPlace +
                                              "\\HTML_Pages\\" + str("%02d" % crawlerDate.month) +
                                              str("%02d" % crawlerDate.day) + str("%04d" % crawlerDate.year) +
                                              "\\Listing", '*.html')):
        lines.append(fileListing)

    # reading the Description html pages
    for fileDescription in glob.glob(os.path.join(os.getcwd().replace("Initialization", "") + marketPlace +
                                                  "\\HTML_Pages\\" + str("%02d" % crawlerDate.month) +
                                                  str("%02d" % crawlerDate.day) + str("%04d" % crawlerDate.year) +
                                                  "\\Description", '*.html')):
        lns.append(fileDescription)
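    # Expected on-disk layout, as read by the globs above (note the
    # Windows-style separators):
    #   <marketPlace>\HTML_Pages\MMDDYYYY\Listing\*.html
    #   <marketPlace>\HTML_Pages\MMDDYYYY\Description\*.html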
    # parsing the Description pages and putting the tags' content into a dictionary (hash table)
    for index, line2 in enumerate(lns):

        print("Reading description folder of '" + marketPlace + "', file '" + os.path.basename(line2) +
              "', index= " + str(index + 1) + " ... " + str(len(lns)))

        try:
            html = codecs.open(line2.strip('\n'), encoding='utf8')
            soup = BeautifulSoup(html, "html.parser")
            html.close()
        except Exception:
            try:
                html = open(line2.strip('\n'))
                soup = BeautifulSoup(html, "html.parser")
                html.close()
            except Exception:
                nError += 1
                print("There was a problem reading the file " + line2 + " in the Description section!")
                if createLog:
                    logFile.write(str(nError) + ". There was a problem reading the file " + line2 + " in the Description section.\n")
                continue

        try:
            if marketPlace == "DarkFox":
                rmm = darkfox_description_parser(soup)
            elif marketPlace == "Tor2door":
                rmm = tor2door_description_parser(soup)

            # save the file name with the description record in memory,
            # keyed by the page url taken from the file name
            key = u"Url:" + os.path.basename(line2).replace(".html", "")
            detPage[key] = {'rmm': rmm, 'filename': os.path.basename(line2)}
        except Exception:
            nError += 1
            print("There was a problem parsing the file " + line2 + " in the Description section!")
            if createLog:
                logFile.write(str(nError) + ". There was a problem parsing the file " + line2 + " in the Description section.\n")
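    # Each description record is keyed by "Url:" plus the file name (minus
    # .html); the listing loop below rebuilds the same key from the
    # alphanumeric characters of rec[20], the product url, which is how the
    # two pages of a product are matched.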
    # parsing the Listing pages and putting the tags' content into a list
    for index, line1 in enumerate(lines):

        print("Reading listing folder of '" + marketPlace + "', file '" + os.path.basename(line1) +
              "', index= " + str(index + 1) + " ... " + str(len(lines)))

        readError = False
        try:
            html = codecs.open(line1.strip('\n'), encoding='utf8')
            soup = BeautifulSoup(html, "html.parser")
            html.close()
        except Exception:
            try:
                html = open(line1.strip('\n'))
                soup = BeautifulSoup(html, "html.parser")
                html.close()
            except Exception:
                nError += 1
                print("There was a problem reading the file " + line1 + " in the Listing section!")
                if createLog:
                    logFile.write(str(nError) + ". There was a problem reading the file " + line1 + " in the Listing section.\n")
                readError = True

        if not readError:

            parseError = False
            try:
                if marketPlace == "DarkFox":
                    rw = darkfox_listing_parser(soup)
                elif marketPlace == "Tor2door":
                    rw = tor2door_listing_parser(soup)
                else:
                    parseError = True
            except Exception:
                nError += 1
                print("There was a problem parsing the file " + line1 + " in the Listing section!")
                if createLog:
                    logFile.write(str(nError) + ". There was a problem parsing the file " + line1 + " in the Listing section.\n")
                parseError = True
            if not parseError:

                persistError = False
                moveError = False
                num_in_db = 0
                num_persisted_moved = 0

                for rec in rw:

                    rec = rec.split(',')

                    # (a len(detPage) > 0 guard once lived here because the
                    # Zeroday market has no Description pages)

                    # rebuild the description key from the product url (rec[20]),
                    # keeping only its alphanumeric characters
                    url = ''.join(e for e in rec[20] if e.isalnum())
                    key = u"Url:" + url

                    # if the associated description page was parsed
                    if key in detPage:

                        # combining the information from the Listing and Description pages
                        rmm = detPage[key]['rmm']
                        rec = mergePages(rmm, rec)

                        # appending the classification of the product to the record
                        rec.append(str(predict(rec[4], rec[5], language='sup_english')))

                        # persisting the information in the database
                        try:
                            persist_data(tuple(rec), cur)
                            con.commit()
                        except Exception:
                            trace = traceback.format_exc()
                            if trace.find("already exists") == -1:
                                nError += 1
                                print("There was a problem persisting the file " + detPage[key]['filename'] + " in the database!")
                                if createLog:
                                    logFile.write(str(nError) + ". There was a problem persisting the file " + detPage[key]['filename'] + " in the database.\n")
                                persistError = True
                            con.rollback()

                        if not persistError:

                            # move the description file to the completed (Read) folder;
                            # line2 still points into the Description folder from the
                            # loop above, so only the basename is swapped
                            source = line2.replace(os.path.basename(line2), "") + detPage[key]['filename']
                            destination = line2.replace(os.path.basename(line2), "") + r'Read/'
                            try:
                                shutil.move(source, destination)
                                num_persisted_moved += 1
                            except Exception:
                                nError += 1
                                print("There was a problem moving the file " + detPage[key]['filename'] + " in the Description section!")
                                if createLog:
                                    logFile.write(str(nError) + ". There was a problem moving the file " + detPage[key]['filename'] + " in the Description section.\n")
                                moveError = True

                    # if the associated description page was not read or not parsed
                    else:
                        # query the database
                        # if the product already exists:
                        #     num_in_db += 1
                        pass
                # if the number of products on the listing page equals the number
                # of merged, persisted, and moved products plus the number of
                # products already in the database, the listing page is done
                if not persistError and not moveError and len(rw) == (num_persisted_moved + num_in_db):

                    # move the listing file to the completed (Read) folder
                    source = line1
                    destination = line1.replace(os.path.basename(line1), "") + r'Read/'
                    try:
                        shutil.move(source, destination)
                    except Exception:
                        nError += 1
                        print("There was a problem moving the file " + line1 + " in the Listing section!")
                        if createLog:
                            logFile.write(str(nError) + ". There was a problem moving the file " + line1 + " in the Listing section.\n")
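                # Listing files with any unpersisted or unmoved product stay in
                # place, so a later run of this parser can pick them up again.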
    if createLog:
        logFile.close()

    # end = time.time()
    # finalTime = float(end - ini)
    # print(marketPlace + " parsing performed successfully in %.2f" % finalTime + "!")

    input("Parsing the " + marketPlace + " marketplace and data classification done successfully. Press ENTER to continue\n")
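# Example invocation (hypothetical; the project normally drives new_parse from
# its own Initialization scripts, so the market name below is illustrative):
# if __name__ == '__main__':
#     new_parse("DarkFox", createLog=True)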