# This is based on the calsyslab project.
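# Overview (descriptive note): this module reads the Listing and Description
# HTML pages crawled for a marketplace on the current date, parses them with
# the per-market parsers imported below, merges each listing record with its
# matching description record, classifies the product with the trained
# classifier, persists the result through the DB_Connection helpers, and moves
# processed HTML files into a Read/ sub-folder.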
__author__ = 'DarkWeb'

import glob
import os
import codecs
import shutil

# Explicit imports for names used below (the wildcard parser imports may also
# provide them, but importing them here keeps the module self-contained).
import traceback
from datetime import date
from bs4 import BeautifulSoup

from MarketPlaces.DB_Connection.db_connection import *
from MarketPlaces.DarkFox.parser import *
from MarketPlaces.Ares.parser import *
from MarketPlaces.Quest.parser import *
from MarketPlaces.Royal.parser import *
from MarketPlaces.Kingdom.parser import *
from MarketPlaces.Tor2door.parser import *
from MarketPlaces.WeTheNorth.parser import *
from MarketPlaces.Bohemia.parser import *
from MarketPlaces.Classifier.classify_product import predict
def mergePages(rmm, rec):

    # key = u"Pr:" + rec[1].upper() + u" Vendor:" + rec[18].upper()
    # key = rec[23]

    print("----------------- Matched: " + rec[1] + "--------------------")

    # if rec[1] == "-1":  # Item_Name
    #     rec[1] = rmm[0]
    rec[1] = rmm[0]

    if rec[2] == "-1":   # Item_CVE_Classification
        rec[2] = rmm[4]
    if rec[3] == "-1":   # Item_MS_Classification
        rec[3] = rmm[5]
    if rec[4] == "-1":   # Item_MarketCategory
        rec[4] = rmm[7]
    if rec[5] == "-1":   # Item_Description
        rec[5] = rmm[1]
    elif rmm[1] != "-1":
        rec[5] = rec[5] + " " + rmm[1]
    if rec[6] == "-1":   # Item_EscrowInfo
        rec[6] = rmm[11]
    # rec[7] = "-1"      # Item_N.OfViews
    if rec[8] == "-1":   # Item_Reviews
        rec[8] = rmm[6]
    if rec[9] == "-1":   # Item_AddedDate
        rec[9] = rmm[15]
    if rec[10] == "-1":  # Item_LastViewedDate
        rec[10] = rmm[2]
    if rec[11] == "-1":  # Item_BTC_SellingPrice
        rec[11] = rmm[18]
    if rec[12] == "-1":  # Item_US_SellingPrice
        rec[12] = rmm[19]
    if rec[13] == "-1":  # Item_EURO_SellingPrice
        rec[13] = rmm[22]
    if rec[14] == "-1":  # Item_QuantitySold
        rec[14] = rmm[14]
    if rec[15] == "-1":  # Item_QuantityLeft
        rec[15] = rmm[10]
    if rec[16] == "-1":  # Item_ShippedFrom
        rec[16] = rmm[8]
    if rec[17] == "-1":  # Item_ShippedTo
        rec[17] = rmm[9]
    if rec[18] == "-1":  # Vendor_Name
        rec[18] = rmm[13]
    if rec[19] == "-1":  # Vendor_Rating
        rec[19] = rmm[20]
    if rec[20] == "-1":  # Vendor_SuccessfulTransactions
        rec[20] = rmm[21]
    if rec[21] == "-1":  # Vendor_TermsAndConditions
        rec[21] = rmm[12]

    # rec[?] = rmm[17]   # Item_EndDate
    # rec[?] = rmm[?]    # Item_Feedback
    # rec[?] = rmm[?]    # Shipping Options
    # rec[?] = rmm[?]    # Average Delivery Time

    return rec
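
# Illustrative sketch (not part of the pipeline; the indices and values below
# are hypothetical): both the listing record (rec) and the description record
# (rmm) use the string "-1" as the missing-value placeholder, so mergePages
# only fills a listing field when the listing parser could not extract it.
#
#   rec = ["-1"] * 24                        # empty listing record
#   rec[23] = "http://market.example/item"   # hypothetical product URL
#   rmm = ["-1"] * 23                        # description record
#   rmm[0] = "Example product"               # Item_Name from the description page
#   merged = mergePages(rmm, rec)            # merged[1] == "Example product"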
def persist_data(row, cur):

    marketPlace = create_marketPlace(cur, row)
    vendor = create_vendor(cur, row)

    create_items(cur, row, marketPlace, vendor)
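
# Descriptive note: persist_data receives the fully merged record as a tuple
# (see the call site in new_parse) and stores it through the helpers imported
# from MarketPlaces.DB_Connection.db_connection: the marketplace and vendor
# rows first, then the item row, which is created with references to both.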
def new_parse(marketPlace, createLog):

    print("Parsing the " + marketPlace + " marketplace and conducting data classification to store the information in the database.")

    crawlerDate = date.today()

    # ini = time.time()

    global site

    # Connecting to the database
    con = connectDataBase()
    cur = con.cursor()

    # Creating the tables (the database should be created manually)
    create_database(cur, con)

    nError = 0

    lines = []    # listing html files
    lns = []      # description html files
    detPage = {}  # parsed description records, keyed by product url
    rw = []       # parsed listing records

    # Creating the log file for each marketplace
    if createLog:
        if not os.path.exists("./" + marketPlace + "/Logs/" + marketPlace + "_" + str("%02d" % crawlerDate.month) + str("%02d" % crawlerDate.day) + str("%04d" % crawlerDate.year) + ".log"):
            logFile = open("./" + marketPlace + "/Logs/" + marketPlace + "_" + str("%02d" % crawlerDate.month) + str("%02d" % crawlerDate.day) + str("%04d" % crawlerDate.year) + ".log", "w")
        else:
            print("Files of the date " + str("%02d" % crawlerDate.month) + "/" + str("%02d" % crawlerDate.day) + "/" + str("%04d" % crawlerDate.year) +
                  " from the marketplace " + marketPlace + " were already read. Delete the corresponding information in the database and also delete the log file "
                  "in the _Logs folder to read files from this marketplace of this date again.")
            raise SystemExit
    # Reading the Listing HTML pages
    for fileListing in glob.glob(os.path.join(os.getcwd().replace("Initialization", "") + marketPlace + "\\HTML_Pages\\" + str("%02d" % crawlerDate.month) + str("%02d" % crawlerDate.day) + str("%04d" % crawlerDate.year) + "\\Listing", '*.html')):
        lines.append(fileListing)

    # Reading the Description HTML pages
    for fileDescription in glob.glob(os.path.join(os.getcwd().replace("Initialization", "") + marketPlace + "\\HTML_Pages\\" + str("%02d" % crawlerDate.month) + str("%02d" % crawlerDate.day) + str("%04d" % crawlerDate.year) + "\\Description", '*.html')):
        lns.append(fileDescription)
    # Parsing the Description pages and putting the tags' content into a dictionary (hash table)
    for index, line2 in enumerate(lns):

        print("Reading description folder of '" + marketPlace + "', file '" + os.path.basename(line2) + "', index= " + str(index + 1) + " ... " + str(len(lns)))

        try:
            html = codecs.open(line2.strip('\n'), encoding='utf8')
            soup = BeautifulSoup(html, "html.parser")
            html.close()
        except:

            try:
                html = open(line2.strip('\n'))
                soup = BeautifulSoup(html, "html.parser")
                html.close()
            except:

                nError += 1
                print("There was a problem reading the file " + line2 + " in the Description section!")
                if createLog:
                    logFile.write(str(nError) + ". There was a problem reading the file " + line2 + " in the Description section.\n")
                continue

        try:

            if marketPlace == "DarkFox":
                rmm = darkfox_description_parser(soup)
            elif marketPlace == "Ares":
                rmm = ares_description_parser(soup)
            elif marketPlace == "Quest":
                rmm = quest_description_parser(soup)
            elif marketPlace == "Royal":
                rmm = royal_description_parser(soup)
            elif marketPlace == "Kingdom":
                rmm = kingdom_description_parser(soup)
            elif marketPlace == "Tor2door":
                rmm = tor2door_description_parser(soup)
            elif marketPlace == "WeTheNorth":
                rmm = wethenorth_description_parser(soup)
            elif marketPlace == "Bohemia":
                rmm = bohemia_description_parser(soup)

            # key = u"Pr:" + rmm[0].upper()[:desc_lim1] + u" Vendor:" + rmm[13].upper()[:desc_lim2]
            key = u"Url:" + os.path.basename(line2).replace(".html", "")

            # save the file name together with the description record in memory
            detPage[key] = {'rmm': rmm, 'filename': os.path.basename(line2)}

        except:

            nError += 1
            print("There was a problem parsing the file " + line2 + " in the Description section!")
            if createLog:
                logFile.write(str(nError) + ". There was a problem parsing the file " + line2 + " in the Description section.\n")
    # Parsing the Listing pages and putting the tags' content into a list
    for index, line1 in enumerate(lines):

        print("Reading listing folder of '" + marketPlace + "', file '" + os.path.basename(line1) + "', index= " + str(index + 1) + " ... " + str(len(lines)))

        readError = False
        try:
            html = codecs.open(line1.strip('\n'), encoding='utf8')
            soup = BeautifulSoup(html, "html.parser")
            html.close()
        except:

            try:
                html = open(line1.strip('\n'))
                soup = BeautifulSoup(html, "html.parser")
                html.close()
            except:

                nError += 1
                print("There was a problem reading the file " + line1 + " in the Listing section!")
                if createLog:
                    logFile.write(str(nError) + ". There was a problem reading the file " + line1 + " in the Listing section.\n")
                readError = True

        if not readError:

            parseError = False
            try:

                if marketPlace == "DarkFox":
                    rw = darkfox_listing_parser(soup)
                elif marketPlace == "Ares":
                    rw = ares_listing_parser(soup)
                elif marketPlace == "Quest":
                    rw = quest_listing_parser(soup)
                elif marketPlace == "Royal":
                    rw = royal_listing_parser(soup)
                elif marketPlace == "Kingdom":
                    rw = kingdom_listing_parser(soup)
                elif marketPlace == "Tor2door":
                    rw = tor2door_listing_parser(soup)
                elif marketPlace == "WeTheNorth":
                    rw = wethenorth_listing_parser(soup)
                elif marketPlace == "Bohemia":
                    rw = bohemia_listing_parser(soup)
                else:
                    parseError = True

            except:

                nError += 1
                print("There was a problem parsing the file " + line1 + " in the Listing section!")
                if createLog:
                    logFile.write(
                        str(nError) + ". There was a problem parsing the file " + line1 + " in the Listing section.\n")
                parseError = True
            if not parseError:

                persistError = False
                moveError = False
                num_in_db = 0
                num_persisted_moved = 0

                for rec in rw:

                    rec = rec.split(',')

                    # if len(detPage) > 0:  # It was created here just because Zeroday Market does not have Description pages
                    # key = rec[23]
                    # key = u"Pr:" + rec[1].upper()[:list_lim1] + u" Vendor:" + rec[18].upper()[:list_lim2]
                    # key = u"Pr:" + rec[1].upper()

                    # the product URL, reduced to alphanumerics, is expected to match
                    # the file name under which the description page was saved
                    url = ''.join(e for e in rec[23] if e.isalnum())
                    key = u"Url:" + url

                    # if the associated description page was parsed
                    if key in detPage:

                        # rec = mergePages(detPage, rec)

                        # Combining the information from the Listing and Description pages
                        rmm = detPage[key]['rmm']
                        rec = mergePages(rmm, rec)

                        # Appending the classification of the product to the record
                        # rec.append(str(predict(rec[1], rec[5], language='markets')))
                        rec.append(str(predict(rec[1], rec[5], language='sup_english')))

                        # Persisting the information in the database
                        try:
                            persist_data(tuple(rec), cur)
                            con.commit()
                        except:

                            trace = traceback.format_exc()

                            if trace.find("already exists") == -1:

                                nError += 1
                                print("There was a problem persisting the file " + detPage[key]['filename'] + " in the database!")
                                if createLog:
                                    logFile.write(
                                        str(nError) + ". There was a problem persisting the file " + detPage[key]['filename'] + " in the database.\n")
                                persistError = True

                            con.rollback()

                        if not persistError:

                            # move the description file to the completed (Read) folder;
                            # line2 still points at a file inside the Description folder read above
                            source = line2.replace(os.path.basename(line2), "") + detPage[key]['filename']
                            destination = line2.replace(os.path.basename(line2), "") + r'Read/'

                            try:
                                shutil.move(source, destination)
                                num_persisted_moved += 1
                            except:

                                print("There was a problem moving the file " + detPage[key]['filename'] + " in the Description section!")
                                nError += 1
                                if createLog:
                                    logFile.write(
                                        str(nError) + ". There was a problem moving the file " + detPage[key]['filename'] + " in the Description section.\n")
                                moveError = True

                    # if the associated description page was not read or not parsed
                    else:
                        # query the database
                        # if the product already exists:
                        #     num_in_db += 1
                        pass

                # if the number of products on the listing page is equal to
                # the number of merged, persisted, and moved products plus
                # the number of products already in the database
                if not persistError and not moveError and len(rw) == (num_persisted_moved + num_in_db):

                    # move the listing file to the completed (Read) folder
                    source = line1
                    destination = line1.replace(os.path.basename(line1), "") + r'Read/'

                    try:
                        shutil.move(source, destination)
                    except:

                        nError += 1
                        print("There was a problem moving the file " + line1 + " in the Listing section!")
                        if createLog:
                            logFile.write(str(nError) + ". There was a problem moving the file " + line1 + " in the Listing section.\n")
    # g.close()

    if createLog:
        logFile.close()

    # end = time.time()
    # finalTime = float(end - ini)
    # print(marketPlace + " parsing performed successfully in %.2f" % finalTime + "!")

    input("Parsing the " + marketPlace + " marketplace and data classification done successfully. Press ENTER to continue\n")