from bs4 import BeautifulSoup
import requests

response = requests.get('url')
soup = BeautifulSoup(response.text, 'html.parser')
all_links = soup.find_all('a')  # returns every <a> tag (link + text)
for link in all_links:
    print(link.get_text())   # the text of the link
    print(link.get('href'))  # the link URL itself
import requests
from bs4 import BeautifulSoup as bs

github_avatar = input('Input git user: ')
url = 'https://github.com/' + github_avatar
r = requests.get(url)
soup = bs(r.text, 'html.parser')
# grab the src of the <img> whose alt text is 'Avatar' (the profile picture)
profile_image = soup.find('img', {'alt': 'Avatar'})['src']
print(profile_image)