whosyouragent.whosyouragent
import json
import random
from concurrent.futures import ThreadPoolExecutor
from pathlib import Path

import requests
from bs4 import BeautifulSoup


# Scrapes current browser version numbers from the web and caches them
# in browserVersions.json alongside this module.
class VersionUpdater:
    def __init__(self):
        self.versions_path = Path(__file__).parent / "browserVersions.json"
        if not self.versions_path.exists():
            self.versions_path.write_text(
                json.dumps(
                    {
                        "Firefox": "119.0.1",
                        "Chrome": "109.0.5414.165",
                        "Edg": "119.0.2151.58",
                        "Vivaldi": "6.4.3160.44",
                        "OPR": "105.0.4970.13",
                        "Safari": "17.0",
                    }
                )
            )

    def update_firefox(self):
        try:
            url = "https://www.mozilla.org/en-US/firefox/releases/"
            soup = BeautifulSoup(requests.get(url).text, "html.parser")
            release_list = soup.find("ol", class_="c-release-list")
            version = release_list.ol.li.a.text
            self.firefox = version
        except Exception:
            self.firefox = None

    def update_chrome(self):
        try:
            url = "https://en.wikipedia.org/wiki/Google_Chrome"
            soup = BeautifulSoup(requests.get(url).text, "html.parser")
            info_boxes = soup.find_all("td", class_="infobox-data")
            version = info_boxes[8].text[
                : min([info_boxes[8].text.find("["), info_boxes[8].text.find("/")])
            ]
            self.chrome = version
        except Exception:
            self.chrome = None

    def update_safari(self):
        try:
            url = "https://en.wikipedia.org/wiki/Safari_(web_browser)"
            soup = BeautifulSoup(requests.get(url).text, "html.parser")
            info_boxes = soup.find_all("td", class_="infobox-data")
            version = info_boxes[2].text[: info_boxes[2].text.find("[")]
            self.safari = version
        except Exception:
            self.safari = None

    def update_edge(self):
        try:
            url = "https://www.techspot.com/downloads/7158-microsoft-edge.html"
            soup = BeautifulSoup(requests.get(url).text, "html.parser")
            version = soup.find("div", class_="subver").text
            self.edge = version
        except Exception:
            self.edge = None

    def update_vivaldi(self):
        try:
            url = "https://vivaldi.com/blog/"
            soup = BeautifulSoup(requests.get(url).text, "html.parser")
            text = soup.find("div", class_="download-vivaldi-sidebar").text
            text = text.split(" - ")[1]
            text = text.replace(" (", ".")
            version = text[: text.find(")")]
            self.vivaldi = version
        except Exception:
            self.vivaldi = None

    def update_opera(self):
        try:
            url = "https://en.wikipedia.org/wiki/Opera_(web_browser)"
            soup = BeautifulSoup(requests.get(url).text, "html.parser")
            info_boxes = soup.find_all("td", class_="infobox-data")
            version = info_boxes[2].div.text[: info_boxes[2].div.text.find("[")]
            self.opera = version
        except Exception:
            self.opera = None

    def update_all(self):
        updaters = [
            self.update_firefox,
            self.update_chrome,
            self.update_safari,
            self.update_edge,
            self.update_vivaldi,
            self.update_opera,
        ]
        # Run every scraper concurrently.
        with ThreadPoolExecutor() as executor:
            for updater in updaters:
                executor.submit(updater)
        versions = {
            "Firefox": self.firefox,
            "Chrome": self.chrome,
            "Edg": self.edge,
            "Vivaldi": self.vivaldi,
            "OPR": self.opera,
            "Safari": self.safari,
        }
        # Remove any keys that failed to update and keep previous version number
        poppers = [
            browser
            for browser, version in versions.items()
            if not version or not version.replace(".", "").isnumeric()
        ]
        for popper in poppers:
            versions.pop(popper)
        previous_versions = json.loads(self.versions_path.read_text())
        versions = previous_versions | versions
        self.versions_path.write_text(json.dumps(versions))


# Platform segments used when assembling user agent strings.
platforms = [
    "(Windows NT 10.0; Win64; x64)",
    "(X11; Ubuntu; Linux x86_64)",
    "(Windows NT 11.0; Win64; x64)",
    "(Macintosh; Intel Mac OS X 13_0_0)",
]


def randomize_version_number(version: str) -> str:
    """Randomize a version number so that it's in between
    the previous major version and the current one."""
    parts = [int(part) for part in version.split(".")]
    parts[0] = random.randint(parts[0] - 1, parts[0])
    for i, part in enumerate(parts[1:]):
        parts[i + 1] = random.randint(0, part)
    return ".".join(str(part) for part in parts)


def get_agent(as_dict: bool = False) -> str:
    """Build and return a user agent string.

    :param as_dict: If True, return {"User-Agent": useragent} instead of just the useragent string.
    Note: Leaving this parameter in place to maintain backwards compatibility,
    but it's advised to use the `get_header()` function instead."""
    browsers = json.loads((Path(__file__).parent / "browserVersions.json").read_text())
    for browser in browsers:
        browsers[browser] = randomize_version_number(browsers[browser])
    browser = random.choice(list(browsers.keys()))
    if browser == "Safari":
        platform = platforms[-1]
        useragent = f'Mozilla/5.0 {platform} AppleWebKit/605.1.15 (KHTML, like Gecko) Version/{browsers["Safari"]} Safari/605.1.15'
    else:
        platform = random.choice(platforms)
        if browser == "Firefox":
            platform = platform[: platform.rfind(")")] + f"; rv:{browsers[browser]})"
            useragent = (
                f"Mozilla/5.0 {platform} Gecko/20100101 Firefox/{browsers[browser]}"
            )
        else:
            useragent = f'Mozilla/5.0 {platform} AppleWebKit/537.36 (KHTML, like Gecko) Chrome/{browsers["Chrome"]} Safari/537.36'
            if browser == "Edg":
                useragent += f' Edg/{browsers["Edg"]}'
            elif browser == "OPR":
                useragent += f' OPR/{browsers["OPR"]}'
            elif browser == "Vivaldi":
                useragent += f' Vivaldi/{browsers["Vivaldi"]}'
    if as_dict:
        return {"User-Agent": useragent}
    else:
        return useragent


def get_header() -> dict[str, str]:
    """Returns a dictionary `{'User-Agent': <random user agent string>}` for convenience.
    >>> response = requests.get(url, headers=get_header())"""
    return {"User-Agent": get_agent()}
class VersionUpdater:
VersionUpdater()
def update_firefox(self):
def update_chrome(self):
def update_safari(self):
def update_edge(self):
def update_vivaldi(self):
def update_opera(self):
def update_all(self):
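As a rough usage sketch (assuming the module is importable as whosyouragent.whosyouragent, per the page title above): update_all() re-scrapes every source concurrently and merges whatever succeeded back into browserVersions.json, keeping the previously cached number for any browser whose scrape failed.

    from whosyouragent.whosyouragent import VersionUpdater

    # Refresh the cached browser versions; the scrapes run in a thread pool
    # and any scrape that fails keeps the number already on disk.
    updater = VersionUpdater()
    updater.update_all()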
def randomize_version_number(version: str) -> str:
Randomize a version number so that it's in between the previous major version and the current one.
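For example, a minimal sketch of what the randomization produces (output varies per call):

    from whosyouragent.whosyouragent import randomize_version_number

    # The major part is lowered by at most one and every later part is drawn
    # from 0 up to its original value, so "119.0.1" can come back as
    # "118.0.0", "118.0.1", "119.0.0", or "119.0.1".
    print(randomize_version_number("119.0.1"))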
def get_agent(as_dict: bool = False) -> str:
Build and return a user agent string.
Parameters
- as_dict: If True, return {"User-Agent": useragent} instead of just the useragent string.
  Note: Leaving this parameter in place to maintain backwards compatibility, but it's advised to use the get_header() function instead.
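A minimal usage sketch (the exact string depends on which browser, platform, and randomized version numbers are drawn):

    from whosyouragent.whosyouragent import get_agent

    # e.g. "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36
    # (KHTML, like Gecko) Chrome/108.0.3822.47 Safari/537.36"
    print(get_agent())

    # Dict form, kept for backwards compatibility:
    print(get_agent(as_dict=True))  # {"User-Agent": "..."}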
def get_header() -> dict[str, str]:
Returns a dictionary {'User-Agent': <random user agent string>} for convenience.
>>> response = requests.get(url, headers=get_header())
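A slightly fuller sketch of the doctest above; https://example.com is only a placeholder URL:

    import requests

    from whosyouragent.whosyouragent import get_header

    # Each call builds a freshly randomized User-Agent header.
    response = requests.get("https://example.com", headers=get_header())
    print(response.request.headers["User-Agent"])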