This parser is based on the CALSysLab project.
You can not select more than 25 topics Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.

194 lines
8.2 KiB

  1. __author__ = 'Helium'
  2. # Here, we are importing the auxiliary functions to clean or convert data
  3. from typing import List
  4. from Forums.Utilities.utilities import *
  5. from datetime import date
  6. from datetime import timedelta
  7. import re
  8. # Here, we are importing BeautifulSoup to search through the HTML tree
  9. from bs4 import BeautifulSoup, ResultSet, Tag
  10. # This is the method to parse the Description Pages (one page to each topic in the Listing Pages)
  11. def hiddenAnswers_description_parser(soup: BeautifulSoup):
  12. # Fields to be parsed
  13. topic: str = "-1" # 0 topic name
  14. user: List[str] = [] # 1 all users of each post
  15. addDate: List[datetime] = [] # 2 all dated of each post
  16. feedback: List[str] = [] # 3 all feedbacks of each vendor (this was found in just one Forum and with a number format)
  17. status: List[str] = [] # 4 all user's authority in each post such as (adm, member, dangerous)
  18. reputation: List[str] = [] # 5 all user's karma in each post (usually found as a number)
  19. sign: List[str] = [] # 6 all user's signature in each post (usually a standard message after the content of the post)
  20. post: List[str] = [] # 7 all messages of each post
  21. interest: List[str] = [] # 8 all user's interest in each post
  22. image_user = [] # 9 all user avatars of each post
  23. image_post = [] # 10 all first images of each post
  24. # Finding the topic (should be just one coming from the Listing Page)
  25. li = soup.find("h1").find("span", {"itemprop": "name"})
  26. topic = li.text
  27. question: Tag = soup.find("div", {"class": "qa-part-q-view"})
  28. question_user = question.find("span", {"class": "qa-q-view-who-data"}).text
  29. user.append(cleanString(question_user.strip()))
  30. question_time = question.find("span", {"class": "qa-q-view-when-data"}).find("time").get("datetime")
  31. datetime_string = question_time.split("+")[0]
  32. datetime_obj = datetime.strptime(datetime_string, "%Y-%m-%dT%H:%M:%S")
  33. addDate.append(datetime_obj)
  34. question_user_status = question.find("span", {"class": "qa-q-view-who-title"}).text
  35. status.append(cleanString(question_user_status.strip()))
  36. question_user_karma = question.find("span", {"class": "qa-q-view-who-points-data"}).text
  37. # Convert karma to pure numerical string
  38. if question_user_karma.find("k") > -1:
  39. question_user_karma = str(float(question_user_karma.replace("k", "")) * 1000)
  40. reputation.append(cleanString(question_user_karma.strip()))
  41. question_content = question.find("div", {"class": "qa-q-view-content qa-post-content"}).text
  42. post.append(cleanString(question_content.strip()))
  43. feedback.append("-1")
  44. sign.append("-1")
  45. interest.append("-1")
  46. img = question.find('div', {"class": "qa-q-view-content qa-post-content"}).find('img')
  47. if img is not None:
  48. img = img.get('src').split('base64,')[-1]
  49. else:
  50. img = "-1"
  51. image_post.append(img)
  52. img = question.find('span', {"class": "qa-q-view-avatar-meta"}).find('img')
  53. if img is not None:
  54. img = img.get('src').split('base64,')[-1]
  55. else:
  56. img = "-1"
  57. image_user.append(img)
  58. answer_list: ResultSet[Tag] = soup.find("div", {"class": "qa-a-list"}).find_all("div", {"class": "qa-a-list-item"})
  59. for replies in answer_list:
  60. user_name = replies.find("span", {"class", "qa-a-item-who-data"}).text
  61. user.append(cleanString(user_name.strip()))
  62. date_added = replies.find("span", {"class": "qa-a-item-when"}).find("time", {"itemprop": "dateCreated"}).get('datetime')
  63. date_string = date_added.split("+")[0]
  64. datetime_obj = datetime.strptime(date_string, "%Y-%m-%dT%H:%M:%S")
  65. addDate.append(datetime_obj)
  66. post_data = replies.find("div", {"class": "qa-a-item-content qa-post-content"}).find("div",{"itemprop":"text"}).text
  67. post.append(cleanString(post_data.strip()))
  68. user_reputations = replies.find("span", {"class", "qa-a-item-who-title"}).text
  69. status.append(cleanString(user_reputations.strip()))
  70. karma = replies.find("span", {"class": "qa-a-item-who-points-data"}).text
  71. # Convert karma to pure numerical string
  72. if karma.find("k") > -1:
  73. karma = str(float(karma.replace("k", "")) * 1000)
  74. reputation.append(cleanString(karma.strip()))
  75. feedback.append("-1")
  76. sign.append("-1")
  77. interest.append("-1")
  78. img = replies.find("div", {"class": "qa-a-item-content qa-post-content"}).find("div",{"itemprop":"text"}).find('img')
  79. if img is not None:
  80. img = img.get('src').split('base64,')[-1]
  81. else:
  82. img = "-1"
  83. image_post.append(img)
  84. img = replies.find('span', {"class": "qa-a-item-avatar-meta"}).find('img')
  85. if img is not None:
  86. img = img.get('src').split('base64,')[-1]
  87. else:
  88. img = "-1"
  89. image_user.append(img)
  90. # Populate the final variable (this should be a list with all fields scraped)
  91. row = (topic, user, status, reputation, interest, sign, post, feedback, addDate, image_user, image_post)
  92. # Sending the results
  93. return row
  94. def hiddenAnswers_listing_parser(soup: BeautifulSoup):
  95. nm: int = 0 # this variable should receive the number of topics
  96. forum: str = "HiddenAnswers" # 0 *forum name
  97. board = "-1" # 1 board name (the previous level of the topic in the Forum categorization tree.
  98. # For instance: Security/Malware/Tools to hack Facebook. The board here should be Malware)
  99. user: List[str] = [] # 2 all users of each topic
  100. topic: List[str] = [] # 3 all topics
  101. view: List[int] = [] # 4 number of views of each topic
  102. post: List[int] = [] # 5 number of posts of each topic
  103. href: List[str] = [] # 6 this variable should receive all cleaned urls (we will use this to do the merge between
  104. # Listing and Description pages)
  105. addDate: List[str] = [] # 7 when the topic was created (difficult to find)
  106. image_author = [] # 8 all author avatars used in each topic
  107. # Finding the board
  108. literature = soup.find("div", {"class": "qa-main-heading"}).find("h1")
  109. board = literature.text
  110. queries_by_user: ResultSet[Tag] = soup.find("div", {"class": "qa-q-list"}).find_all("div", {"class": "qa-q-list-item"})
  111. for queries in queries_by_user:
  112. topic_of_query = queries.find("div", {"class": "qa-q-item-title"}).find("a").text
  113. topic.append(cleanString(topic_of_query.strip()))
  114. image_author.append("-1")
  115. author = queries.find("span", {"class": "qa-q-item-who-data"}).find("a").text
  116. user.append(cleanString(author.strip()))
  117. num_answers = queries.find("span", {"class": "qa-a-count-data"}).text
  118. post.append(cleanString(num_answers.strip()))
  119. view.append("-1")
  120. date_posted = queries.find("span", {"class": "qa-q-item-when-data"}).text
  121. if date_posted.find("day") > 0:
  122. datetime_obj = datetime.now() - timedelta(days=1)
  123. else:
  124. try:
  125. datetime_obj = datetime.strptime(f"{date_posted} {date.today().year}", "%b %d %Y")
  126. except ValueError:
  127. datetime_obj = datetime.strptime(f"{date_posted}", "%b %d, %Y")
  128. addDate.append(datetime_obj)
  129. #this link will be cleaned
  130. listing_href = queries.find("div", {"class": "qa-q-item-title"}).find("a").get("href")
  131. href.append(listing_href)
  132. nm = len(topic)
  133. return organizeTopics(forum, nm, board, user, topic, view, post, href, addDate, image_author)
  134. #need to change this method
  135. def hiddenanswers_links_parser(soup):
  136. # Returning all links that should be visited by the Crawler
  137. href = []
  138. #print(soup.find('table', {"class": "tborder clear"}).find(
  139. # 'tbody').find_all('tr', {"class": "inline_row"}))
  140. listing = soup.find_all('div', {"class": "qa-q-item-title"})
  141. for a in listing:
  142. link = a.find('a').get('href')
  143. href.append(link)
  144. return href