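"""Build a Wikipedia link graph in a database.

``_receive_links`` asks the Wikipedia API (through rotating proxies) for the
outgoing links of one page and stores them as edges in the ``links`` table;
``receive_link_graph`` recurses over those links up to a given depth and
remembers pages it could not fetch in ``failed_to_fetch`` so they can be
retried later.
"""
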
import logging

from url import construct_url
from proxy import get_data_with_proxy, NoMoreProxiesException
from db_util import get_page_id

logger = logging.getLogger(__name__)
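
# The queries below assume roughly the following tables (a sketch inferred
# from this module; the authoritative schema is defined elsewhere):
#
#   pages(page_id, title)            -- one row per known page title
#   links(source, destination)       -- page_id -> page_id edges
#   failed_to_fetch(page, depth)     -- pages to retry after proxy exhaustion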


def ignore_title(title):
    """Return True for titles outside the main article namespace."""
    ignore_starters = [
        "Help:",
        "Wikipedia:",
        "Template:",
        "Template_talk:",
        "Category:",
    ]
    for ignore in ignore_starters:
        if title.startswith(ignore):
            return True
    return False
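
# Titles in these namespaces are skipped when recording links; for example,
# ignore_title("Category:Physics") is True while ignore_title("Physics")
# is False.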


def _receive_links(title, connection):
    """Fetch the outgoing links of ``title``, store them as edges in the
    ``links`` table and yield the destination titles."""
    url = construct_url(title)

    source = get_page_id(title, connection)

    result = get_data_with_proxy(url, connection)
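
    # Expected response shape (MediaWiki query API with prop=links, abridged):
    #   {"query": {"pages": {"<pageid>": {"title": "...",
    #                                     "links": [{"title": "..."}, ...]}}}}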
    # Iterate over all returned pages because the page ID of ``title`` is not
    # known in advance.
    for page_data in result["query"]["pages"].values():
        cursor = connection.cursor()
        if "links" not in page_data:
            # The API returned no link list for this page; record only its
            # (possibly normalized) title.
            destination_title = page_data["title"].replace(" ", "_")
            # Avoid 1-loops (a page linking to itself).
            if destination_title == title:
                continue
            if ignore_title(destination_title):
                continue
            destination = get_page_id(destination_title, connection)
            cursor.execute("INSERT INTO links(source, destination) VALUES(%s, %s)",
                           (source, destination))
            yield destination_title
        else:
            for link_data in page_data["links"]:
                destination_title = link_data["title"].replace(" ", "_")
                # Skip links into pages outside the main article namespace.
                if ignore_title(destination_title):
                    continue
                destination = get_page_id(destination_title, connection)
                cursor.execute("INSERT INTO links(source, destination) VALUES(%s, %s)",
                               (source, destination))
                yield destination_title
    connection.commit()


def receive_links(title, connection):
    """Fetch and return the outgoing links of ``title`` as a list.

    This materializes the ``_receive_links`` generator so that all database
    inserts and the final commit are executed immediately.
    """
    return list(_receive_links(title, connection))


def receive_link_graph(title, connection, depth, fetch_missing=True):
    """Recursively fetch the link graph around ``title``, following links up
    to ``depth`` levels deep."""
    cursor = connection.cursor()

    # First retry the pages that could not be fetched in an earlier run.
    if fetch_missing:
        delete_cursor = connection.cursor()
        cursor.execute('''SELECT pages.title, failed_to_fetch.depth, failed_to_fetch.page
                          FROM failed_to_fetch
                          LEFT JOIN pages ON pages.page_id=failed_to_fetch.page''')
        for t, d, p in cursor:
            # fetch_missing=False keeps the recursion from rescanning the
            # failed_to_fetch table on every level.
            receive_link_graph(t, connection, d, fetch_missing=False)
            delete_cursor.execute("DELETE FROM failed_to_fetch WHERE page=%s", (p,))

    if depth < 0:
        # End of recursion.
        return

    page = get_page_id(title, connection)

    cursor = connection.cursor()
    cursor.execute("SELECT COUNT(source) FROM links WHERE source=%s", (page,))
    if cursor.fetchone()[0] != 0:
        # We fetched that title already.
        return

    logger.info("fetching links for {}".format(title))

    for link in _receive_links(title, connection):
        try:
            receive_link_graph(link, connection, depth - 1)
        except NoMoreProxiesException:
            logger.exception("All proxies are blocked")
            # Wikipedia blocked all our proxies.  Remember what is still left
            # to fetch so that a later run can retry it.
            cursor = connection.cursor()
            failed_page = get_page_id(link, connection)
            cursor.execute("INSERT INTO failed_to_fetch(page, depth) VALUES(%s, %s)",
                           (failed_page, depth - 1))
            connection.commit()
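

if __name__ == "__main__":
    # Minimal usage sketch, not part of the original module.  The %s parameter
    # style above suggests a MySQL/PostgreSQL-style DB-API driver; pymysql and
    # the connection parameters below are assumptions/placeholders.
    import pymysql

    logging.basicConfig(level=logging.INFO)
    connection = pymysql.connect(host="localhost", user="wiki", password="wiki",
                                 db="wikigraph", charset="utf8mb4")
    try:
        # Record the outgoing links of the seed article and follow them for
        # two more levels.
        receive_link_graph("Python_(programming_language)", connection, depth=2)
    finally:
        connection.close()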