added multi pages configuration and code
This commit is contained in:
parent
7aa903883b
commit
8b20bc178f
2 changed files with 183 additions and 133 deletions
@@ -7,14 +7,13 @@
 foerderinfo.bund.de:
   domain: 'http://foerderinfo.bund.de'
   entry-list:
-    link1: 'https://www.foerderinfo.bund.de/SiteGlobals/Forms/foerderinfo/expertensuche/Servicesuche_Formular.html#searchResults'
-    link2: 'https://www.foerderinfo.bund.de/SiteGlobals/Forms/foerderinfo/expertensuche/Servicesuche_Formular.html?gtp=33498_list%253D2#searchResults'
-    link3: 'https://www.foerderinfo.bund.de/SiteGlobals/Forms/foerderinfo/expertensuche/Servicesuche_Formular.html?gtp=33498_list%253D3#searchResults'
-    iteration-var-list: [1,1
+    link1: 'https://www.foerderinfo.bund.de/SiteGlobals/Forms/foerderinfo/expertensuche/Servicesuche_Formular.html?gtp=33498_list%253D'
+    link2: '#searchResults'
+    iteration-var-list: [1,2,3,4,5,6,7,8]
     parent: '//html//body//form//table//tr//td//table//tr'
     child-name: '//td//a/text()'
     child-link: '//td//a/@href'
-  member:
+  entry:
     info-1:
       parent: '//html//body//form//table'
       #child-name: '//html//body//form//table//tr[1]//td[2]//span'
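The new entry-list keys replace the three hard-coded page links: link1 is now the common URL prefix, link2 the trailing fragment, and iteration-var-list the page numbers to visit. A minimal sketch of how these values combine into one URL per result page (the helper function is illustrative and not part of the repository; the values are copied from the hunk above):

# Sketch: how the new entry-list values combine into one URL per result page.
# Values copied from the config.yaml hunk above; the function itself is illustrative.
link1 = "https://www.foerderinfo.bund.de/SiteGlobals/Forms/foerderinfo/expertensuche/Servicesuche_Formular.html?gtp=33498_list%253D"
link2 = "#searchResults"
iteration_var_list = [1, 2, 3, 4, 5, 6, 7, 8]

def page_urls():
    # page number i is inserted between the URL prefix (link1) and suffix (link2)
    return [link1 + str(i) + link2 for i in iteration_var_list]

for url in page_urls():
    print(url)
# e.g. ...Servicesuche_Formular.html?gtp=33498_list%253D1#searchResults

The second hunk performs this same assembly inside its download loop.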
@@ -34,153 +34,204 @@ class fdb_spider(object)
                     e,
                 )
             try:
-                entry_list_link = entry_list.get("link")
+                entry_list_link1 = entry_list.get("link1")
             except Exception as e:
                 print(
-                    "No entryListLink defined in config.yaml - the original error message is:",
+                    "No link1 defined in config.yaml - the original error message is:",
                     e,
                 )

-            # download the html page of the List of entrys
+            try:
+                entry_list_link2 = entry_list.get("link2")
+            except Exception as e:
+                print(
+                    "No link2 defined in config.yaml - the original error message is:",
+                    e,
+                )

-            response = urllib.request.urlopen(entry_list_link)
+            try:
+                entry_iteration_var_list = eval(entry_list.get("iteration-var-list"))
+            except Exception as e:
+                print(
+                    "No iteration-var-list defined in config.yaml - the original error message is:",
+                    e,
+                )
+
+            for i in entry_iteration_var_list:
+
+                # download the html page of the List of entrys
+
+                response = urllib.request.urlopen(entry_list_link1 + string(i) + entry_list_link2)
+                web_content = response.read().decode("UTF-8")
+
+                # save interim results to files
+
+                f = open("spiders/pages/" + key + str(i) + "entryList.html", "w+")
+                f.write(webContent)
+                f.close
+
+    def parse_entry_list_data2dictionary(self, list_of_fdbs):
+        for fdb in list_of_fdbs:
+
+            try:
+                iteration_var_list = eval(self.config.get(fdb).get("entry-list").get("iteration-var-list"))
+            except Exception as e:
+                print(
+                    "There is a problem with the configuration variable entryList iteration var list in the config.yaml - the original error message is:",
+                    e,
+                )
+
+            for i in iteration_var_list:
+                try:
+                    # use soupparser to handle broken html
+
+                    tree = lxml.html.soupparser.parse(
+                        "spiders/pages/" + fdb + str(i) + "entryList.html"
+                    )
+
+                    # for e in tree.iter():
+                    #
+                    #     print(e.tag)
+                    #
+                    # for e in tree.xpath('//html//body//form//table//tr//td//table//tr'):
+                    #
+                    #     #print(etree.tostring(e).decode())
+
+                    dictionary_entry_list = {}
+
+                    fdb_conf = self.config.get(fdb)
+                    fdb_domain = fdb_conf.get("domain")
+                    fdb_conf_entry_list = fdb_conf.get("entry-list")
+                    fdb_conf_entry_list_parent = fdb_conf_entry_list.get("parent")
+                    fdb_conf_entry_list_child_name = fdb_conf_entry_list.get("child-name")
+                    fdb_conf_entry_list_child_link = fdb_conf_entry_list.get("child-link")
+
+                    for n in range(len(tree.xpath(fdb_conf_entry_list_parent))):
+                        name = tree.xpath(
+                            fdb_conf_entry_list_parent
+                            + "["
+                            + str(n)
+                            + "]"
+                            + fdb_conf_entry_list_child_name
+                        )
+                        link = tree.xpath(
+                            fdb_conf_entry_list_parent
+                            + "["
+                            + str(n)
+                            + "]"
+                            + fdb_conf_entry_list_child_link
+                        )
+
+                        if len(name) > 0:
+                            dictionary_entry_list[n] = {}
+                            dictionary_entry_list[n]["name"] = name[0]
+
+                            if fdb_domain in link[0]:
+                                dictionary_entry_list[n]["link"] = link[0]
+
+                            if fdb_domain not in link[0]:
+                                dictionary_entry_list[n]["link"] = fdb_domain + link[0]
+
+                except Exception as e:
+                    print(
+                        "parsing the html did not work. Possibly you first have to run download_link_list_pages_of_funding_databases(). The original error message is:",
+                        e,
+                    )
+
+                # save interim results to files
+
+                f = open("spiders/output/" + fdb + str(i) + "entryList.txt", "w+")
+                f.write(str(dictionary_entry_list))
+                f.close
+
+    def download_entry_data_htmls(self, list_of_fdbs):
+        for fdb in list_of_fdbs:
+
+            try:
+                iteration_var_list = eval(self.config.get(fdb).get("entry-list").get("iteration-var-list"))
+            except Exception as e:
+                print(
+                    "There is a problem with the configuration variable entryList iteration var list in the config.yaml - the original error message is:",
+                    e,
+                )
+
+            for i in iteration_var_list:
+
+                f = open("spiders/output/" + fdb + str(i) + "entryList.txt")
+                text = f.read()
+
+                dictionary_entry_list = eval(text)
+
+                for entry_id in dictionary_entry_list:
+                    entry_link = dictionary_entry_list[entry_id]["link"]
+
+                    # download the html page of the entry
+
+                    response = urllib.request.urlopen(entry_link)
                     web_content = response.read().decode("UTF-8")

                     # save interim results to files

-            f = open("spiders/pages/" + key + "entryList.html", "w+")
-            f.write(webContent)
+                    file_name = "spiders/pages/" + fdb + str(i) + "/" + str(entry_id) + ".html"
+
+                    os.makedirs(os.path.dirname(file_name), exist_ok=True)
+                    f = open(file_name, "w+")
+                    f.write(web_content)
                     f.close

-    def parse_entry_list_data2dictionary(self, list_of_fdbs):
-        for fdb in list_of_fdbs:
-            try:
-                # use soupparser to handle broken html
-
-                tree = lxml.html.soupparser.parse(
-                    "spiders/pages/" + fdb + "entryList.html"
-                )
-
-                # for e in tree.iter():
-                #
-                #     print(e.tag)
-                #
-                # for e in tree.xpath('//html//body//form//table//tr//td//table//tr'):
-                #
-                #     #print(etree.tostring(e).decode())
-
-                dictionary_entry_list = {}
-
-                fdb_conf = self.config.get(fdb)
-                fdb_domain = fdb_conf.get("domain")
-                fdb_conf_entry_list = fdb_conf.get("entryList")
-                fdb_conf_entry_list_parent = fdb_conf_entry_list.get("parent")
-                fdb_conf_entry_list_child_name = fdb_conf_entry_list.get("child-name")
-                fdb_conf_entry_list_child_link = fdb_conf_entry_list.get("child-link")
-
-                for n in range(len(tree.xpath(fdb_conf_entry_list_parent))):
-                    name = tree.xpath(
-                        fdb_conf_entry_list_parent
-                        + "["
-                        + str(n)
-                        + "]"
-                        + fdb_conf_entry_list_child_name
-                    )
-                    link = tree.xpath(
-                        fdb_conf_entry_list_parent
-                        + "["
-                        + str(n)
-                        + "]"
-                        + fdb_conf_entry_list_child_link
-                    )
-
-                    if len(name) > 0:
-                        dictionary_entry_list[n] = {}
-                        dictionary_entry_list[n]["name"] = name[0]
-
-                        if fdb_domain in link[0]:
-                            dictionary_entry_list[n]["link"] = link[0]
-
-                        if fdb_domain not in link[0]:
-                            dictionary_entry_list[n]["link"] = fdb_domain + link[0]
-
-            except Exception as e:
-                print(
-                    "parsing the html did not work. Possibly you first have to run download_link_list_pages_of_funding_databases(). The original error message is:",
-                    e,
-                )
-
-            # save interim results to files
-
-            f = open("spiders/output/" + fdb + "entryList.txt", "w+")
-            f.write(str(dictionary_entry_list))
-            f.close
-
-    def download_entry_data_htmls(self, list_of_fdbs):
-        for fdb in list_of_fdbs:
-            f = open("spiders/output/" + fdb + "entryList.txt")
-            text = f.read()
-
-            dictionary_entry_list = eval(text)
-
-            for entry_id in dictionary_entry_list:
-                entry_link = dictionary_entry_list[entry_id]["link"]
-
-                # download the html page of the entry
-
-                response = urllib.request.urlopen(entry_link)
-                web_content = response.read().decode("UTF-8")
-
-                # save interim results to files
-
-                file_name = "spiders/pages/" + fdb + "/" + str(entry_id) + ".html"
-
-                os.makedirs(os.path.dirname(file_name), exist_ok=True)
-                f = open(file_name, "w+")
-                f.write(web_content)
-                f.close

     def parse_entry_data2dictionary(self, list_of_fdbs):
         for fdb in list_of_fdbs:
-            print("started to parse data of entry of " + fdb + " ..")
-            f = open("spiders/output/" + fdb + "entryList.txt")
-            text = f.read()
-
-            dictionary_entry_list = eval(text)
-
-            fdb_conf = self.config.get(fdb)
-            fdb_domain = fdb_conf.get("domain")
-            fdb_conf_entry = fdb_conf.get("entry")
-            fdb_conf_entry_info1 = fdb_conf_entry.get("info-1")
-            fdb_conf_entry_info1_parent = fdb_conf_entry_info1.get("parent")
-            fdb_conf_entry_info1_child_1 = fdb_conf_entry_info1.get(
-                "child-1"
-            )
-
-            for entry_id in dictionary_entry_list:
-                print(
-                    "started to parse data of entry with name "
-                    + dictionary_entry_list[entry_id]["name"]
-                    + " .."
-                )
-
-                file_name = "spiders/pages/" + fdb + "/" + str(entry_id) + ".html"
-
-                tree = lxml.html.soupparser.parse(file_name)
-
-                child_1 = tree.xpath(
-                    fdb_conf_entry_info1_parent
-                    + fdb_conf_entry_info1_child_1
-                )
-
-                print("oi", child_1)
-
-                if len(child_1) > 0:
-                    dictionary_entry_list[entry_id]["child_1"] = child_1[
-                        0
-                    ]
-
-            f = open("spiders/output/" + fdb + "entryList.txt", "w+")
-            f.write(str(dictionary_entry_list))
-            f.close
+            try:
+                iteration_var_list = eval(self.config.get(fdb).get("entry-list").get("iteration-var-list"))
+            except Exception as e:
+                print(
+                    "There is a problem with the configuration variable entryList iteration var list in the config.yaml - the original error message is:",
+                    e,
+                )
+
+            for i in iteration_var_list:
+
+                print("started to parse data of entry of " + fdb + " ..")
+
+                f = open("spiders/output/" + fdb + str(i) + "entryList.txt")
+                text = f.read()
+
+                dictionary_entry_list = eval(text)
+
+                fdb_conf = self.config.get(fdb)
+                fdb_domain = fdb_conf.get("domain")
+                fdb_conf_entry = fdb_conf.get("entry")
+                fdb_conf_entry_info1 = fdb_conf_entry.get("info-1")
+                fdb_conf_entry_info1_parent = fdb_conf_entry_info1.get("parent")
+                fdb_conf_entry_info1_child_1 = fdb_conf_entry_info1.get(
+                    "child-1"
+                )
+
+                for entry_id in dictionary_entry_list:
+                    print(
+                        "started to parse data of entry with name "
+                        + dictionary_entry_list[entry_id]["name"]
+                        + " .."
+                    )
+
+                    file_name = "spiders/pages/" + fdb + str(i) + "/" + str(entry_id) + ".html"
+
+                    tree = lxml.html.soupparser.parse(file_name)
+
+                    child_1 = tree.xpath(
+                        fdb_conf_entry_info1_parent
+                        + fdb_conf_entry_info1_child_1
+                    )
+
+                    print("oi", child_1)
+
+                    if len(child_1) > 0:
+                        dictionary_entry_list[entry_id]["child_1"] = child_1[
+                            0
+                        ]
+
+                f = open("spiders/output/" + fdb + str(i) + "entryList.txt", "w+")
+                f.write(str(dictionary_entry_list))
+                f.close
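Two small slips in the download loop added by the first part of this hunk stand out: string(i) presumably should be str(i), and f.write(webContent) refers to a name not defined in the added lines, where web_content from two lines earlier is presumably meant. A corrected, self-contained sketch of that loop under those assumptions; the function name and the shape of the key and entry_list arguments are illustrative, not taken from the repository:

# Sketch of the multi-page list download as the new code seems intended to work.
# Only the URL assembly and the output path mirror the committed lines;
# everything else here is an assumption.
import os
import urllib.request

def download_entry_list_pages(key, entry_list):
    entry_list_link1 = entry_list.get("link1")
    entry_list_link2 = entry_list.get("link2")
    # assumed to already be a list of page numbers;
    # the committed code eval()s the configured string instead
    entry_iteration_var_list = entry_list.get("iteration-var-list")

    for i in entry_iteration_var_list:
        # download one result-list page; str(i), not string(i)
        response = urllib.request.urlopen(entry_list_link1 + str(i) + entry_list_link2)
        web_content = response.read().decode("UTF-8")

        # save interim results to files, one file per page
        os.makedirs("spiders/pages", exist_ok=True)
        with open("spiders/pages/" + key + str(i) + "entryList.html", "w") as f:
            f.write(web_content)  # web_content, not webContent

The with block also actually closes the file; the committed f.close without parentheses only references the method and never calls it.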
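With both hunks applied, every stage of the spider iterates over the configured page numbers. A sketch of how the stages might be driven end to end; only the method names parse_entry_list_data2dictionary, download_entry_data_htmls, parse_entry_data2dictionary and the download_link_list_pages_of_funding_databases() mentioned in the parser's error message come from the diff, while the module path and constructor are assumptions:

# Assumed driver for the multi-page pipeline; illustrative only.
from fdb_spider import fdb_spider  # module and constructor signature assumed

list_of_fdbs = ["foerderinfo.bund.de"]

spider = fdb_spider("config.yaml")

# 1. fetch every configured result-list page (link1 + page number + link2)
spider.download_link_list_pages_of_funding_databases(list_of_fdbs)

# 2. parse each page into spiders/output/<fdb><i>entryList.txt
spider.parse_entry_list_data2dictionary(list_of_fdbs)

# 3. fetch every entry page into spiders/pages/<fdb><i>/<entry_id>.html
spider.download_entry_data_htmls(list_of_fdbs)

# 4. extract the configured info-1 fields from the downloaded entry pages
spider.parse_entry_data2dictionary(list_of_fdbs)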