helpers.py
parent 850538a92f
commit a2b97e9f21
@@ -8,6 +8,8 @@ from dateconverter import *
from datetime import datetime
import os
import sqlite3
import webbrowser
from time import sleep

DEBUG = True
number = ['0','1','2','3','4','5','6','7','8','9']
@@ -15,6 +17,7 @@ homePath = os.path.expanduser('~')
cookiePath = homePath + "/.mozilla/firefox/imibizoh.default/cookies.sqlite"
tmpPath = "/tmp/cookies.sqlite"
DBFILE = "../db/sqlite3.db"
winFirefoxPath = r"C:\Program Files\Mozilla Firefox\firefox.exe"  # raw string so "\f" is not read as a form-feed escape
def log(*s):
    if DEBUG:
        print(s)
@@ -225,10 +228,12 @@ def indeedExtractDays(datestr):
    return "NOTFound"
def getCookiesFromBrowser(url):
    # workaround for the locked database: query a copy instead
    shutil.copyfile(cookiePath, tmpPath)
    tries = 0
    cookie = ''
    rows = [0]

    while cookie == '' and tries < 2:
        tries += 1
        shutil.copyfile(cookiePath, tmpPath)
        with sqlite3.connect(tmpPath) as connection:
            cmd_read_cookies = "SELECT name,value FROM moz_cookies WHERE host like ?;"
            print(cmd_read_cookies)
@@ -242,6 +247,14 @@ def getCookiesFromBrowser(url):
                cookie += ";"

    print("Cookies:", cookie)
    if cookie == '':
        if os.name == 'posix':
            webbrowser.register("firefox", None, webbrowser.BackgroundBrowser("firefox"))
            webbrowser.get('firefox').open(url)
        elif os.name == 'nt':
            webbrowser.register("firefox", None, webbrowser.BackgroundBrowser(winFirefoxPath))
            webbrowser.get('firefox').open(url)
        sleep(1)
    return cookie
# access cookies from firefox:
# copy (because locked): cp .mozilla/firefox/imibizoh.default/cookies.sqlite cookies.sqlite
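
The two comments above sum up the trick: a running Firefox keeps cookies.sqlite locked, so the file is copied to /tmp and the copy is queried. A minimal standalone sketch of the same idea follows; the loop that turns rows into the cookie string is elided from this diff, so the hypothetical read_firefox_cookies helper and its "name=value;" formatting are assumptions, not the commit's code.

import shutil
import sqlite3

def read_firefox_cookies(cookie_db, tmp_copy, host):
    # Work around the lock held by a running Firefox: query a throwaway copy.
    shutil.copyfile(cookie_db, tmp_copy)
    with sqlite3.connect(tmp_copy) as connection:
        cursor = connection.execute(
            "SELECT name,value FROM moz_cookies WHERE host like ?;",
            ('%' + host + '%',),  # assumed wildcard binding; the commit's parameter is not shown
        )
        # Assumed formatting: join rows into a "name=value;name=value;" header string.
        return "".join(name + "=" + value + ";" for name, value in cursor.fetchall())

If the query comes back empty, the function in this commit falls back to opening the URL in Firefox via webbrowser (registered as a BackgroundBrowser, using winFirefoxPath on Windows), presumably so a fresh login writes new cookies before the next attempt.
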
@@ -25,7 +25,7 @@ def scrap_indeed_com(url,entry,session):
    log(page)
    solveCaptcha(session, page)
    soup = BeautifulSoup(page.content, "html.parser")
    print(soup.prettify())
    #print(soup.prettify())


    results = soup.find_all("li", class_='css-5lfssm eu4oa1w0')  # top level list element
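
The selector above keys on what looks like an auto-generated Indeed CSS class, so it is brittle; results is a plain list of bs4 Tag objects. A small, hypothetical way to inspect what was matched (not part of the commit):

for li in results:
    # Log a trimmed text preview of each matched list item.
    log(li.get_text(strip=True)[:80])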