whosyouragent.whosyouragent
import json
import random
from concurrent.futures import ThreadPoolExecutor
from pathlib import Path

import requests
from bs4 import BeautifulSoup


class VersionUpdater:
    def __init__(self):
        self.versions_path = Path(__file__).parent / "browserVersions.json"
        if not self.versions_path.exists():
            self.versions_path.write_text(json.dumps({}))
        # Default every version attribute to an empty string so update_all()
        # can detect a failed scrape and keep the previously stored number
        # instead of raising an AttributeError.
        self.firefox = self.chrome = self.safari = ""
        self.edge = self.vivaldi = self.opera = ""

    def update_firefox(self):
        try:
            url = "https://www.mozilla.org/en-US/firefox/releases/"
            soup = BeautifulSoup(requests.get(url).text, "html.parser")
            release_list = soup.find("ol", class_="c-release-list")
            version = release_list.ol.li.a.text
            self.firefox = version
        except Exception as e:
            print(e)
            raise Exception("Error updating firefox")

    def update_chrome(self):
        try:
            url = "https://en.wikipedia.org/wiki/Google_Chrome"
            soup = BeautifulSoup(requests.get(url).text, "html.parser")
            info_boxes = soup.find_all("td", class_="infobox-data")
            version = info_boxes[8].text[: info_boxes[8].text.find("[")]
            self.chrome = version
        except Exception as e:
            print(e)
            raise Exception("Error updating chrome")

    def update_safari(self):
        try:
            url = "https://en.wikipedia.org/wiki/Safari_(web_browser)"
            soup = BeautifulSoup(requests.get(url).text, "html.parser")
            info_boxes = soup.find_all("td", class_="infobox-data")
            version = info_boxes[2].text[: info_boxes[2].text.find("[")]
            self.safari = version
        except Exception as e:
            print(e)
            raise Exception("Error updating safari")

    def update_edge(self):
        try:
            url = "https://www.techspot.com/downloads/7158-microsoft-edge.html"
            soup = BeautifulSoup(requests.get(url).text, "html.parser")
            version = soup.find("span", class_="subver").text
            self.edge = version
        except Exception as e:
            print(e)
            raise Exception("Error updating edge")

    def update_vivaldi(self):
        try:
            url = "https://en.wikipedia.org/wiki/Vivaldi_(web_browser)"
            soup = BeautifulSoup(requests.get(url).text, "html.parser")
            info_boxes = soup.find_all("td", class_="infobox-data")
            version = info_boxes[5].text[: info_boxes[5].text.find(" ")]
            self.vivaldi = version
        except Exception as e:
            print(e)
            raise Exception("Error updating vivaldi")

    def update_opera(self):
        try:
            url = "https://en.wikipedia.org/wiki/Opera_(web_browser)"
            soup = BeautifulSoup(requests.get(url).text, "html.parser")
            info_boxes = soup.find_all("td", class_="infobox-data")
            version = info_boxes[2].div.text[: info_boxes[2].div.text.find("[")]
            self.opera = version
        except Exception as e:
            print(e)
            raise Exception("Error updating Opera")

    def update_all(self):
        updaters = [
            self.update_firefox,
            self.update_chrome,
            self.update_safari,
            self.update_edge,
            self.update_vivaldi,
            self.update_opera,
        ]
        with ThreadPoolExecutor(6) as executor:
            for updater in updaters:
                executor.submit(updater)
        versions = {
            "Firefox": self.firefox,
            "Chrome": self.chrome,
            "Edg": self.edge,
            "Vivaldi": self.vivaldi,
            "OPR": self.opera,
            "Safari": self.safari,
        }
        # Remove any keys that failed to update and keep previous version number
        poppers = [
            version
            for version in versions
            if not ((versions[version]).replace(".", "")).isnumeric()
        ]
        for popper in poppers:
            versions.pop(popper)
        previous_versions = json.loads(self.versions_path.read_text())
        versions = previous_versions | versions
        self.versions_path.write_text(json.dumps(versions))


platforms = [
    "(Windows NT 10.0; Win64; x64)",
    "(x11; Ubuntu; Linux x86_64)",
    "(Windows NT 11.0; Win64; x64)",
    "(Macintosh; Intel Mac OS X 13_0_0)",
]


def randomize_version_number(version: str) -> str:
    """Randomize a version number so that it's in between
    the previous major version and the current one."""
    parts = [int(part) for part in version.split(".")]
    parts[0] = random.randint(parts[0] - 1, parts[0])
    for i, part in enumerate(parts[1:]):
        parts[i + 1] = random.randint(0, part)
    return ".".join(str(part) for part in parts)


def get_agent() -> str:
    """Build and return a user agent string."""
    browsers = json.loads((Path(__file__).parent / "browserVersions.json").read_text())
    for browser in browsers:
        browsers[browser] = randomize_version_number(browsers[browser])
    browser = random.choice(list(browsers.keys()))
    if browser == "Safari":
        platform = platforms[-1]
        useragent = f'Mozilla/5.0 {platform} AppleWebKit/605.1.15 (KHTML, like Gecko) Version/{browsers["Safari"]} Safari/605.1.15'
    else:
        platform = random.choice(platforms)
        if browser == "Firefox":
            platform = platform[: platform.rfind(")")] + f"; rv:{browsers[browser]})"
            useragent = (
                f"Mozilla/5.0 {platform} Gecko/20100101 Firefox/{browsers[browser]}"
            )
        else:
            useragent = f'Mozilla/5.0 {platform} AppleWebKit/537.36 (KHTML, like Gecko) Chrome/{browsers["Chrome"]} Safari/537.36'
            if browser == "Edg":
                useragent += f' Edg/{browsers["Edg"]}'
            elif browser == "OPR":
                useragent += f' OPR/{browsers["OPR"]}'
            elif browser == "Vivaldi":
                useragent += f' Vivaldi/{browsers["Vivaldi"]}'
    return useragent
class VersionUpdater:
def update_firefox(self):
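A small sketch of calling one scraper directly (network access assumed; the printed value is a placeholder). Each update_* method stores its result on the instance instead of returning it.

from whosyouragent.whosyouragent import VersionUpdater

updater = VersionUpdater()
updater.update_firefox()  # scrapes mozilla.org's release list; raises on failure
print(updater.firefox)    # e.g. "121.0" (placeholder value)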
def update_chrome(self):
def update_safari(self):
def update_edge(self):
def update_vivaldi(self):
def update_opera(self):
def update_all(self):
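The merge at the end of update_all uses the dict union operator, so keys scraped this run overwrite the stored values, while keys popped after a failed scrape keep whatever was already in browserVersions.json. A standalone illustration with made-up version numbers:

previous = {"Firefox": "120.0", "Chrome": "118.0.0.0"}
scraped = {"Chrome": "119.0.0.0"}  # Firefox failed and was popped
print(previous | scraped)          # {'Firefox': '120.0', 'Chrome': '119.0.0.0'}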
def randomize_version_number(version: str) -> str:
Randomize a version number so that it's in between the previous major version and the current one.
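Because every component is drawn with random.randint, the output varies per call. A worked sketch with a made-up input: for "119.0.2" the major part becomes 118 or 119, the second part stays 0, and the last part is drawn from 0 to 2, so "118.0.1" and "119.0.2" are both possible results.

from whosyouragent.whosyouragent import randomize_version_number

print(randomize_version_number("119.0.2"))  # e.g. "118.0.1" (random)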
def get_agent() -> str:
Build and return a user agent string.
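Typical use is to plug the result into an HTTP client's User-Agent header; a sketch assuming browserVersions.json is already populated (the commented string shows the shape of a Firefox pick, not a captured value):

import requests

from whosyouragent.whosyouragent import get_agent

response = requests.get("https://example.com", headers={"User-Agent": get_agent()})
# A Firefox pick looks like:
# Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:120.0) Gecko/20100101 Firefox/120.0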