Browse Source

added delay after closing file in read_file()

main
westernmeadow 1 year ago
parent
commit
ce00b923ff
2 changed files with 8 additions and 4 deletions
  1. +4
    -2
      Forums/Initialization/prepare_parser.py
  2. +4
    -2
      MarketPlaces/Initialization/prepare_parser.py

+ 4
- 2
Forums/Initialization/prepare_parser.py View File

@ -97,6 +97,7 @@ def read_file(filePath, createLog, logFile):
 html = codecs.open(filePath.strip('\n'), encoding='utf8')
 soup = BeautifulSoup(html, "html.parser")
 html.close()
+time.sleep(0.01) # making sure the file is closed before returning soup object
 return soup
 except:
@ -104,6 +105,7 @@ def read_file(filePath, createLog, logFile):
 html = open(filePath.strip('\n'))
 soup = BeautifulSoup(html, "html.parser")
 html.close()
+time.sleep(0.01) # making sure the file is closed before returning soup object
 return soup
 except:
@ -212,7 +214,7 @@ def persist_record(url, rec, cur, con, createLog, logFile, listingFile, descript
 def move_file(filePath, createLog, logFile):
 source = filePath
-destination = filePath.replace(os.path.basename(filePath), "") + r'Read/' + os.path.basename(filePath)
+destination = filePath.replace(os.path.basename(filePath), "") + 'Read\\' + os.path.basename(filePath)
 try:
 shutil.move(source, destination, shutil.copy2)
@ -250,7 +252,7 @@ def new_parse(forum, url, createLog):
 # Creating the tables (The database should be created manually)
 create_database(cur, con)
-mainDir = os.path.join(config.get('Project', 'shared_folder'), "Forums/" + forum + "/HTML_Pages")
+mainDir = os.path.join(config.get('Project', 'shared_folder'), "Forums\\" + forum + "\\HTML_Pages")
 # Creating the log file for each Forum
 if createLog:


+ 4
- 2
MarketPlaces/Initialization/prepare_parser.py View File

@ -107,6 +107,7 @@ def read_file(filePath, createLog, logFile):
 html = codecs.open(filePath.strip('\n'), encoding='utf8')
 soup = BeautifulSoup(html, "html.parser")
 html.close()
+time.sleep(0.01) # making sure the file is closed before returning soup object
 return soup
 except:
@ -114,6 +115,7 @@ def read_file(filePath, createLog, logFile):
 html = open(filePath.strip('\n'))
 soup = BeautifulSoup(html, "html.parser")
 html.close()
+time.sleep(0.01) # making sure the file is closed before returning soup object
 return soup
 except:
@ -271,7 +273,7 @@ def persist_record(url, rec, cur, con, createLog, logFile, listingFile, descript
 def move_file(filePath, createLog, logFile):
 source = filePath
-destination = filePath.replace(os.path.basename(filePath), "") + r'Read/' + os.path.basename(filePath)
+destination = filePath.replace(os.path.basename(filePath), "") + 'Read\\' + os.path.basename(filePath)
 try:
 shutil.move(source, destination, shutil.copy2)
@ -307,7 +309,7 @@ def new_parse(marketPlace, url, createLog):
 # Creating the tables (The database should be created manually)
 create_database(cur, con)
-mainDir = os.path.join(config.get('Project', 'shared_folder'), "MarketPlaces/" + marketPlace + "/HTML_Pages")
+mainDir = os.path.join(config.get('Project', 'shared_folder'), "MarketPlaces\\" + marketPlace + "\\HTML_Pages")
 # Creating the log file for each Forum
 if createLog:


Loading…
Cancel
Save