import logging
from collections import deque
from cfg import config
from url import construct_url
from proxy import get_data_with_proxy, NoMoreProxiesException
from db_util import get_page_id, get_page_title
import sql

logger = logging.getLogger(__name__)


def ignore_title(title):
    """Return True for titles in namespaces we do not want to crawl."""
    ignore_starters = [
        "Help:",
        "Wikipedia:",
        "Template:",
        "Template_talk:",
        "Category:",
    ]
    for ignore in ignore_starters:
        if title.startswith(ignore):
            return True
    return False
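# For illustration: ignore_title("Category:Physics") returns True and
# ignore_title("Physics") returns False, so these maintenance namespaces are skipped.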


def _receive_links(page, connection, lang="en"):
    """Fetch the outgoing links of the page with ID ``page``, store them and yield their IDs."""
    title = get_page_title(page, connection)
    url = construct_url(title, lang=lang)

    result = get_data_with_proxy(url, connection)
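    # For orientation: the MediaWiki query API typically returns something
    # shaped like the sketch below (illustrative, trimmed to the fields used here):
    #
    #   {"query": {"pages": {"736": {"pageid": 736, "ns": 0, "title": "Albert Einstein",
    #                                "links": [{"ns": 0, "title": "ETH Zurich"}, ...]}}}}
    #
    # The numeric keys of "pages" are not known in advance, hence the loop below.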
    # We do not know under which page ID the result is keyed, so iterate over all entries.
    for _page_id, page_data in result["query"]["pages"].items():
        cursor = connection.cursor()
        if "links" not in page_data:
            destination_title = page_data["title"].replace(" ", "_")
            # Avoid 1-loops (a page linking to itself).
            if destination_title == title:
                continue
            if ignore_title(destination_title):
                continue
            destination = get_page_id(destination_title, connection)
            cursor.execute(sql.statements["insert_link"], (page, destination))
            yield destination

        else:
            for destination in page_data["links"]:
                destination_title = destination["title"].replace(" ", "_")
                if ignore_title(destination_title):
                    continue
                destination = get_page_id(destination_title, connection)
                cursor.execute(sql.statements["insert_link"], (page, destination))
                yield destination
    connection.commit()


def receive_links(title, connection, lang="en"):
    # Note: _receive_links expects a numeric page ID (it resolves the title
    # itself), so the value passed here should be an ID despite the parameter name.
    return list(_receive_links(title, connection, lang=lang))


def receive_link_graph(title, connection, depth, lang="en"):
    """Crawl the link graph starting at ``title`` down to ``depth`` levels."""
    page = get_page_id(title, connection)
    do_receive_link_graph(page, connection, depth, fetch_missing=True, lang=lang)

    # If some pages could not be fetched (e.g. all proxies were blocked), retry them once.
    cursor = connection.cursor()
    cursor.execute(sql.statements["count_failed_to_fetch"])
    if cursor.fetchone()[0]:
        do_receive_link_graph(page, connection, depth, fetch_missing=True, lang=lang)


def do_receive_link_graph(page, connection, depth, fetch_missing=False, lang="en"):
    if depth < 0:
        # End of recursion.
        return

    logger.info("do_receive_link_graph(%d, <connection>, %d)", page, depth)
    cursor = connection.cursor()

    # Re-fetch pages that failed in an earlier run before descending further.
    if fetch_missing:
        delete_cursor = connection.cursor()
        cursor.execute(sql.statements["get_failed_to_fetch"])
        for d, p in cursor:
            do_receive_link_graph(p, connection, d, fetch_missing=False, lang=lang)
            delete_cursor.execute(sql.statements["delete_failed_to_fetch"], (p,))

    cursor = connection.cursor()
    cursor.execute(sql.statements["count_links_from"], (page,))
    if cursor.fetchone()[0] != 0:
        # We already fetched the links of this page.
        return

    logger.info("fetching links for %s", page)

    for link in _receive_links(page, connection, lang=lang):
        try:
            do_receive_link_graph(link, connection, depth - 1, lang=lang)
        except NoMoreProxiesException:
            logger.exception("All proxies are blocked")
            # Wikipedia blocked all our proxies. Remember what still has to be
            # fetched so that a later run can retry it.
            cursor = connection.cursor()
            cursor.execute(sql.statements["insert_failed_to_fetch"], (link, depth - 1))
            connection.commit()
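

# Illustrative usage sketch (an assumption, not part of this module's tested
# behaviour): it presumes a DB-API connection whose schema matches the
# statements in ``sql.statements`` and that the proxy list in ``cfg`` is set up.
#
#     import sqlite3                                   # hypothetical backend
#     connection = sqlite3.connect("wiki_links.db")    # hypothetical database file
#     receive_link_graph("Graph_theory", connection, depth=2, lang="en")
#     connection.close()
#
# ``receive_link_graph`` resolves the title to a page ID, recursively fetches
# outgoing links up to ``depth`` levels, and records pages that could not be
# fetched so a later run can retry them.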