|
| 1 | +import pandas as pd |
| 2 | +import requests |
| 3 | +import urllib.request |
| 4 | +from bs4 import BeautifulSoup |
| 5 | +import wikipediaapi |
| 6 | + |
| 7 | + |
| 8 | + |
class myScraper:
    """Scrape a footballer's Wikipedia page and print the section titles
    under the 'Club career' section.

    NOTE(review): class name kept as ``myScraper`` (not PEP 8 ``MyScraper``)
    for backward compatibility with existing callers.
    """

    def __init__(self, player):
        """Fetch and parse the Wikipedia page titled *player*.

        player: Wikipedia page title for the player, e.g. "Lionel Messi".
        """
        # NOTE(review): recent wikipediaapi releases require a user_agent
        # argument before the language code — confirm the pinned version
        # accepts this positional call.
        self.wiki_lang = wikipediaapi.Wikipedia('en', extract_format=wikipediaapi.ExtractFormat.HTML)
        self.wiki_page = self.wiki_lang.page(player)
        self.page_html_text = self.wiki_page.text
        self.soup = BeautifulSoup(self.page_html_text, "lxml")
        self.player = player

    def get_club_details(self, sections, level=0):
        """Print the 'Club career' section title, then each subsection title
        followed by that subsection's own child-section titles.

        sections: iterable of wikipediaapi section objects (each exposes
            ``.title`` and ``.sections``).
        level: unused; kept for backward compatibility with callers.

        Bug fixes vs. the original:
        * The inner ``for s in s.sections`` loops shadowed the outer loop
          variable, so child titles were printed for the *last* subsection
          only, after all subsection titles.  Distinct names restore the
          intended per-subsection traversal.
        * ``s.sections is None`` could never fire (wikipediaapi returns an
          empty list, not None) and its ``return`` would have aborted the
          whole walk; an empty child list now simply skips to the next
          sibling.
        """
        for section in sections:
            if 'Club career' not in section.title:
                continue
            print(section.title)
            for sub in section.sections:
                print(sub.title)
                if not sub.sections:
                    continue
                for child in sub.sections:
                    print(child.title)

    def execute(self):
        """Entry point: walk the fetched page's top-level sections."""
        self.get_club_details(self.wiki_page.sections, level=0)
| 36 | + |
| 37 | + |
| 38 | + |
| 39 | +# def print_sections(sections, level=0): |
| 40 | +# for s in sections: |
| 41 | +# if 'Club career' in s.title: |
| 42 | +# print(s.title) |
| 43 | +# #print("%s: %s - %s" % ("*" * (level + 1), s.title, s.text[0:100])) |
| 44 | +# for s in s.sections: |
| 45 | +# level=level + 1 |
| 46 | +# print(s.title) |
| 47 | +# if(s.sections is None): |
| 48 | +# return |
| 49 | +# else: |
| 50 | +# for s in s.sections: |
| 51 | +# level = level+1 |
| 52 | +# print(s.title) |
| 53 | + |
| 54 | + #break |
| 55 | +# print_sections(wiki_page.sections) |
| 56 | + |
def main():
    """Prompt for a player's page title, then scrape and print the
    'Club career' section titles from that player's Wikipedia page."""
    name = input("Please Enter the player Info")
    scraper = myScraper(name)
    scraper.execute()
| 61 | + |
# Standard script guard: run main() only when executed directly,
# not when this module is imported.
if __name__ == '__main__':
    main()
| 64 | + |
0 commit comments