Merge branch 'main' into feature/farside
Commit b87adaef13
@@ -509,6 +509,7 @@ A lot of the app currently piggybacks on Google's existing support for fetching
| [https://search.exonip.de](https://search.exonip.de) | 🇳🇱 NL | Multi-choice | |
| [https://s.alefvanoon.xyz](https://s.alefvanoon.xyz) | 🇺🇸 US | Multi-choice | ✅ |
| [https://www.whooglesearch.ml](https://www.whooglesearch.ml) | 🇺🇸 US | English | |
| [https://search.sethforprivacy.com](https://search.sethforprivacy.com) | 🇩🇪 DE | English | |

* A checkmark in the "Cloudflare" category refers to the use of the reverse proxy, [Cloudflare](https://cloudflare.com). A checkmark is not listed for a site that merely uses Cloudflare DNS, but only for one using the proxying service, which grants Cloudflare the ability to monitor traffic to the website.
@@ -517,6 +518,7 @@ A lot of the app currently piggybacks on Google's existing support for fetching
| Website | Country | Language |
|-|-|-|
| [http://whoglqjdkgt2an4tdepberwqz3hk7tjo4kqgdnuj77rt7nshw2xqhqad.onion](http://whoglqjdkgt2an4tdepberwqz3hk7tjo4kqgdnuj77rt7nshw2xqhqad.onion) | 🇺🇸 US | Multi-choice |
| [http://nuifgsnbb2mcyza74o7illtqmuaqbwu4flam3cdmsrnudwcmkqur37qd.onion](http://nuifgsnbb2mcyza74o7illtqmuaqbwu4flam3cdmsrnudwcmkqur37qd.onion) | 🇩🇪 DE | English |
## Screenshots
#### Desktop
@@ -1,3 +1,4 @@
+from app.models.config import Config
from app.models.endpoint import Endpoint
from app.request import VALID_PARAMS, MAPS_URL
from app.utils.misc import read_config_bool
@@ -45,18 +46,8 @@ class Filter:
    # type result (such as "people also asked", "related searches", etc)
    RESULT_CHILD_LIMIT = 7

-    def __init__(self, user_key: str, mobile=False, config=None) -> None:
-        if config is None:
-            config = {}
-        self.near = config['near'] if 'near' in config else ''
-        self.dark = config['dark'] if 'dark' in config else False
-        self.nojs = config['nojs'] if 'nojs' in config else False
-        self.new_tab = config['new_tab'] if 'new_tab' in config else False
-        self.alt_redirect = config['alts'] if 'alts' in config else False
-        self.block_title = (
-            config['block_title'] if 'block_title' in config else '')
-        self.block_url = (
-            config['block_url'] if 'block_url' in config else '')
+    def __init__(self, user_key: str, config: Config, mobile=False) -> None:
+        self.config = config
        self.mobile = mobile
        self.user_key = user_key
        self.main_divs = ResultSet('')
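To make the new constructor concrete, here is a minimal sketch of how a caller builds a Filter around a Config object, mirroring the test change further down in this diff. The settings dict and its keys are illustrative assumptions; only the Filter(user_key=..., config=...) shape and the clean() call are taken from the diff itself.

```python
from app.filter import Filter
from app.models.config import Config
from app.utils.session import generate_user_key

# Illustrative settings; the tests build Config(**demo_config) the same way,
# and the model is assumed to supply defaults for anything omitted here.
settings = {'near': 'denver', 'nojs': True, 'new_tab': False}
config = Config(**settings)

# The Filter now carries the whole Config, so per-option attributes such as
# self.nojs and self.alt_redirect become self.config.nojs and self.config.alts.
content_filter = Filter(user_key=generate_user_key(),
                        config=config,
                        mobile=False)

# clean() is then applied to a parsed results page, as in the tests:
# soup = content_filter.clean(BeautifulSoup(results_html, 'html.parser'))
```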
@@ -69,16 +60,6 @@ class Filter:
    def elements(self):
        return self._elements

-    def reskin(self, page: str) -> str:
-        # Aesthetic only re-skinning
-        if self.dark:
-            page = page.replace(
-                'fff', '000').replace(
-                '202124', 'ddd').replace(
-                '1967D2', '3b85ea')
-
-        return page
-
    def encrypt_path(self, path, is_element=False) -> str:
        # Encrypts path to avoid plaintext results in logs
        if is_element:
@@ -109,7 +90,7 @@ class Filter:

        input_form = soup.find('form')
        if input_form is not None:
-            input_form['method'] = 'POST'
+            input_form['method'] = 'GET' if self.config.get_only else 'POST'

        # Ensure no extra scripts passed through
        for script in soup('script'):
@@ -143,9 +124,7 @@ class Filter:
        _ = div.decompose() if len(div_ads) else None

    def remove_block_titles(self) -> None:
-        if not self.main_divs:
-            return
-        if self.block_title == '':
+        if not self.main_divs or not self.config.block_title:
            return
        block_title = re.compile(self.block_title)
        for div in [_ for _ in self.main_divs.find_all('div', recursive=True)]:
@@ -154,9 +133,7 @@ class Filter:
        _ = div.decompose() if len(block_divs) else None

    def remove_block_url(self) -> None:
-        if not self.main_divs:
-            return
-        if self.block_url == '':
+        if not self.main_divs or not self.config.block_url:
            return
        block_url = re.compile(self.block_url)
        for div in [_ for _ in self.main_divs.find_all('div', recursive=True)]:
@@ -244,7 +221,7 @@ class Filter:
        if src.startswith(LOGO_URL):
            # Re-brand with Whoogle logo
            element.replace_with(BeautifulSoup(
-                render_template('logo.html', dark=self.dark),
+                render_template('logo.html'),
                features='html.parser'))
            return
        elif src.startswith(GOOG_IMG) or GOOG_STATIC in src:
@@ -323,10 +300,10 @@ class Filter:
            link['href'] = filter_link_args(q)

            # Add no-js option
-            if self.nojs:
+            if self.config.nojs:
                append_nojs(link)

-            if self.new_tab:
+            if self.config.new_tab:
                link['target'] = '_blank'
        else:
            if href.startswith(MAPS_URL):
@@ -336,7 +313,7 @@ class Filter:
            link['href'] = href

        # Replace link location if "alts" config is enabled
-        if self.alt_redirect:
+        if self.config.alts:
            # Search and replace all link descriptions
            # with alternative location
            link['href'] = get_site_alt(link['href'])
@@ -59,7 +59,7 @@ def gen_user_agent(is_mobile) -> str:
    return DESKTOP_UA.format("Mozilla", linux, firefox)


-def gen_query(query, args, config, near_city=None) -> str:
+def gen_query(query, args, config) -> str:
    param_dict = {key: '' for key in VALID_PARAMS}

    # Use :past(hour/day/week/month/year) if available
@@ -96,8 +96,8 @@ def gen_query(query, args, config, near_city=None) -> str:
        param_dict['start'] = '&start=' + args.get('start')

    # Search for results near a particular city, if available
-    if near_city:
-        param_dict['near'] = '&near=' + urlparse.quote(near_city)
+    if config.near:
+        param_dict['near'] = '&near=' + urlparse.quote(config.near)

    # Set language for results (lr) if source isn't set, otherwise use the
    # result language param provided in the results
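The only behavioral change in this hunk is where the city comes from: it is now read from the user's Config rather than a separate near_city argument. A tiny self-contained sketch of the query-string fragment this branch builds (the city value is made up for illustration):

```python
from urllib.parse import quote

# Stand-in for config.near after this change; previously the same value
# arrived through the separate near_city parameter.
near = 'denver'

param_dict = {}
if near:
    param_dict['near'] = '&near=' + quote(near)

print(param_dict['near'])  # &near=denver
```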
@@ -119,8 +119,7 @@ class Search:
            config=self.config)
        full_query = gen_query(self.query,
                               self.request_params,
-                              self.config,
-                              content_filter.near)
+                              self.config)

        # force mobile search when view image is true and
        # the request is not already made by a mobile
@@ -132,7 +131,7 @@ class Search:
            force_mobile=view_image)

        # Produce cleanable html soup from response
-        html_soup = bsoup(content_filter.reskin(get_body.text), 'html.parser')
+        html_soup = bsoup(get_body.text, 'html.parser')

        # Replace current soup if view_image is active
        if view_image:
@@ -1,5 +1,6 @@
from bs4 import BeautifulSoup
from app.filter import Filter
+from app.models.config import Config
from app.models.endpoint import Endpoint
from app.utils.session import generate_user_key
from datetime import datetime
@@ -11,7 +12,7 @@ from test.conftest import demo_config

def get_search_results(data):
    secret_key = generate_user_key()
-    soup = Filter(user_key=secret_key).clean(
+    soup = Filter(user_key=secret_key, config=Config(**demo_config)).clean(
        BeautifulSoup(data, 'html.parser'))

    main_divs = soup.find('div', {'id': 'main'})
@@ -26,7 +26,7 @@
#WHOOGLE_CONFIG_NEAR=denver

# See app/static/settings/countries.json for values
-#WHOOGLE_CONFIG_COUNTRY=countryUK
+#WHOOGLE_CONFIG_COUNTRY=US

# See app/static/settings/languages.json for values
#WHOOGLE_CONFIG_LANGUAGE=lang_en
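Note the example country value is now a bare code ("US") instead of the old "countryXX" form; the comment above points at app/static/settings/countries.json for valid values. Assuming the Config model reads these WHOOGLE_CONFIG_* variables from the environment (that lookup is not shown in this diff), a quick way to check what would be picked up:

```python
import os

# Assumed lookup: the hunk only shows the commented example value changing
# from 'countryUK' to 'US'; the variable name itself comes from whoogle.env.
country = os.getenv('WHOOGLE_CONFIG_COUNTRY', '')
print(country or 'no country restriction set')
```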