PEP-8: Fix filter class formatting
commit 92d94d9ea7
parent dd56ca9bfc
@@ -32,20 +32,27 @@ class Filter:
     def reskin(self, page):
         # Aesthetic only re-skinning
         if self.dark:
-            page = page.replace('fff', '000').replace('202124', 'ddd').replace('1967D2', '3b85ea')
+            page = page.replace(
+                'fff', '000').replace(
+                '202124', 'ddd').replace(
+                '1967D2', '3b85ea')

         return page

     def encrypt_path(self, msg, is_element=False):
         # Encrypts path to avoid plaintext results in logs
         if is_element:
-            # Element paths are tracked differently in order for the element key to be regenerated
-            # once all elements have been loaded
-            enc_path = Fernet(self.user_keys['element_key']).encrypt(msg.encode()).decode()
+            # Element paths are encrypted separately from text, to allow key
+            # regeneration once all items have been served to the user
+            enc_path = Fernet(
+                self.user_keys['element_key']
+            ).encrypt(msg.encode()).decode()
             self._elements += 1
             return enc_path

-        return Fernet(self.user_keys['text_key']).encrypt(msg.encode()).decode()
+        return Fernet(
+            self.user_keys['text_key']
+        ).encrypt(msg.encode()).decode()

     def clean(self, soup):
         self.main_divs = soup.find('div', {'id': 'main'})
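The reformatted encrypt_path above uses Fernet from the cryptography package. A minimal standalone sketch of the encrypt/decode round trip it relies on; the key variable here is only a stand-in for the app's self.user_keys entries:

    from cryptography.fernet import Fernet

    # Stand-in for self.user_keys['element_key'] / self.user_keys['text_key']
    element_key = Fernet.generate_key()

    # Same pattern as encrypt_path: str -> bytes -> encrypted token -> str
    enc_path = Fernet(element_key).encrypt('/some/result/path'.encode()).decode()

    # Only the holder of element_key can reverse it
    assert Fernet(element_key).decrypt(enc_path.encode()).decode() == '/some/result/path'
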
@@ -74,8 +81,8 @@ class Filter:
         footer = soup.find('footer')
         if footer:
             # Remove divs that have multiple links beyond just page navigation
             [_.decompose() for _ in footer.find_all('div', recursive=False)
              if len(_.find_all('a', href=True)) > 3]

         header = soup.find('header')
         if header:
@@ -88,8 +95,9 @@ class Filter:
             return

         for div in [_ for _ in self.main_divs.find_all('div', recursive=True)]:
-            has_ad = len([_ for _ in div.find_all('span', recursive=True) if has_ad_content(_.text)])
-            _ = div.decompose() if has_ad else None
+            div_ads = [_ for _ in div.find_all('span', recursive=True)
+                       if has_ad_content(_.text)]
+            _ = div.decompose() if len(div_ads) else None

     def fix_question_section(self):
         if not self.main_divs:
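For reference, the new div_ads form behaves the same as the old one-liner: collect the spans that look like ads, then drop the whole div if any were found. A self-contained sketch with a hypothetical has_ad_content helper and made-up markup:

    from bs4 import BeautifulSoup

    def has_ad_content(text):
        # Hypothetical stand-in for the app's ad-detection helper
        return 'Ad' in text

    html = ('<div id="main"><div><span>Ad · example.com</span></div>'
            '<div><span>Regular result</span></div></div>')
    soup = BeautifulSoup(html, 'html.parser')

    for div in [_ for _ in soup.find('div', {'id': 'main'}).find_all('div', recursive=True)]:
        div_ads = [_ for _ in div.find_all('span', recursive=True)
                   if has_ad_content(_.text)]
        _ = div.decompose() if len(div_ads) else None

    print(soup)  # only the non-ad div remains
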
@@ -97,14 +105,14 @@ class Filter:

         question_divs = [_ for _ in self.main_divs.find_all(
             'div', recursive=False
         ) if len(_.find_all('h2')) > 0]

         if len(question_divs) == 0:
             return

         # Wrap section in details element to allow collapse/expand
-        details = BeautifulSoup(features='lxml').new_tag('details')
-        summary = BeautifulSoup(features='lxml').new_tag('summary')
+        details = BeautifulSoup('html.parser').new_tag('details')
+        summary = BeautifulSoup('html.parser').new_tag('summary')
         summary.string = question_divs[0].find('h2').text
         question_divs[0].find('h2').decompose()
         details.append(summary)
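The fix_question_section change above builds its collapsible wrapper with throwaway BeautifulSoup instances and new_tag. A small sketch of that pattern against made-up markup, using an explicit features='html.parser' builder:

    from bs4 import BeautifulSoup

    html = '<div><h2>People also ask</h2><div>Related question?</div></div>'
    soup = BeautifulSoup(html, 'html.parser')

    details = BeautifulSoup(features='html.parser').new_tag('details')
    summary = BeautifulSoup(features='html.parser').new_tag('summary')

    section = soup.find('div')
    summary.string = section.find('h2').text
    section.find('h2').decompose()
    details.append(summary)

    # Wrap the remaining question content inside the collapsible element
    section.wrap(details)
    print(soup)
    # <details><summary>People also ask</summary><div><div>Related question?</div></div></details>
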
@@ -113,7 +121,7 @@ class Filter:
         for question_div in question_divs:
             questions = [_ for _ in question_div.find_all(
                 'div', recursive=True
             ) if _.text.endswith('?')]

             for question in questions:
                 question['style'] = 'padding: 10px; font-style: italic;'
@@ -131,11 +139,15 @@ class Filter:
             element['src'] = BLANK_B64
             return

-        element['src'] = 'element?url=' + self.encrypt_path(element_src, is_element=True) + \
-            '&type=' + urlparse.quote(mime)
-        # TODO: Non-mobile image results link to website instead of image
+        element['src'] = 'element?url=' + self.encrypt_path(
+            element_src,
+            is_element=True) + '&type=' + urlparse.quote(mime)
+
+        # FIXME: Non-mobile image results link to website instead of image
         # if not self.mobile:
-        # img.append(BeautifulSoup(FULL_RES_IMG.format(element_src), 'html.parser'))
+        # img.append(
+        #     BeautifulSoup(FULL_RES_IMG.format(element_src),
+        #         'html.parser'))

     def update_styling(self, soup):
         # Remove unnecessary button(s)
@@ -149,8 +161,9 @@ class Filter:
         # Update logo
         logo = soup.find('a', {'class': 'l'})
         if logo and self.mobile:
-            logo['style'] = 'display:flex; justify-content:center; align-items:center; color:#685e79; ' \
-                'font-size:18px; '
+            logo['style'] = ('display:flex; justify-content:center; '
+                             'align-items:center; color:#685e79; '
+                             'font-size:18px; ')

         # Fix search bar length on mobile
         try:
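The logo-style rewrite above swaps a backslash continuation for adjacent string literals inside parentheses, the line-wrapping style PEP 8 prefers; both forms build the same string:

    # Backslash continuation (the old form)
    style = 'display:flex; justify-content:center; ' \
            'font-size:18px; '

    # Implicit concatenation inside parentheses (the new form)
    style = ('display:flex; justify-content:center; '
             'font-size:18px; ')

    assert style == 'display:flex; justify-content:center; font-size:18px; '
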
@@ -163,7 +176,7 @@ class Filter:
         # Replace href with only the intended destination (no "utm" type tags)
         href = link['href'].replace('https://www.google.com', '')
         if 'advanced_search' in href or 'tbm=shop' in href:
-            # TODO: The "Shopping" tab requires further filtering (see #136)
+            # FIXME: The "Shopping" tab requires further filtering (see #136)
             # Temporarily removing all links to that tab for now.
             link.decompose()
             return
@@ -171,20 +184,26 @@ class Filter:
             link['target'] = '_blank'

         result_link = urlparse.urlparse(href)
-        query_link = parse_qs(result_link.query)['q'][0] if '?q=' in href else ''
+        query_link = parse_qs(
+            result_link.query
+        )['q'][0] if '?q=' in href else ''

         if query_link.startswith('/'):
-            # Internal google links (i.e. mail, maps, etc) should still be forwarded to Google
+            # Internal google links (i.e. mail, maps, etc) should still
+            # be forwarded to Google
             link['href'] = 'https://google.com' + query_link
         elif '/search?q=' in href:
-            # "li:1" implies the query should be interpreted verbatim, so we wrap it in double quotes
+            # "li:1" implies the query should be interpreted verbatim,
+            # which is accomplished by wrapping the query in double quotes
            if 'li:1' in href:
                 query_link = '"' + query_link + '"'
             new_search = 'search?q=' + self.encrypt_path(query_link)

             query_params = parse_qs(urlparse.urlparse(href).query)
             for param in VALID_PARAMS:
-                param_val = query_params[param][0] if param in query_params else ''
+                if param not in query_params:
+                    continue
+                param_val = query_params[param][0]
                 new_search += '&' + param + '=' + param_val
             link['href'] = new_search
         elif 'url?q=' in href:
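A standalone sketch of the query handling this hunk reformats, using a made-up result href; VALID_PARAMS here is only a placeholder for the app's actual whitelist:

    from urllib import parse as urlparse
    from urllib.parse import parse_qs

    VALID_PARAMS = ['tbm', 'start', 'near']  # placeholder whitelist

    href = '/search?q=test+query&tbm=isch&utm_source=tracker'
    result_link = urlparse.urlparse(href)
    query_link = parse_qs(
        result_link.query
    )['q'][0] if '?q=' in href else ''

    new_search = 'search?q=' + query_link
    query_params = parse_qs(urlparse.urlparse(href).query)
    for param in VALID_PARAMS:
        if param not in query_params:
            continue
        new_search += '&' + param + '=' + query_params[param][0]

    print(new_search)  # search?q=test query&tbm=isch  (utm_source dropped)
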
@@ -199,9 +218,11 @@ class Filter:

         # Replace link location if "alts" config is enabled
         if self.alt_redirect:
-            # Search and replace all link descriptions with alternative location
+            # Search and replace all link descriptions
+            # with alternative location
             link['href'] = get_site_alt(link['href'])
-            link_desc = link.find_all(text=re.compile('|'.join(SITE_ALTS.keys())))
+            link_desc = link.find_all(
+                text=re.compile('|'.join(SITE_ALTS.keys())))
             if len(link_desc) == 0:
                 return
