import asyncio

import requests
from bs4 import BeautifulSoup
from fake_useragent import UserAgent
from requests_html import HTMLSession, AsyncHTMLSession

# NOTE(review): `color` and `logger` are referenced throughout but are neither
# defined nor imported in this chunk — presumably an ANSI-colour helper class
# and a configured logging.Logger provided elsewhere in the project; confirm.


def parse_num(numbers):
    """Look up *numbers* on the scraper backends and return one combined report.

    Each backend is queried independently; a failure in one backend is
    reported inline (network / proxy / site error text) instead of aborting
    the whole lookup.

    numbers: phone-number string appended to each site's lookup URL.
    Returns the concatenated report text.
    """
    # BUG FIX: the original queried ua.tellows.org twice (res1 and res4 were
    # both `europe(numbers)`).  Fetch once and reuse the result; both report
    # sections keep their original error messages on failure.
    try:
        tellows = europe(numbers)
        res1 = tellows
        res4 = tellows
    except Exception:
        res1 = f'{color.RED}[info]:{color.END}\nError internet/proxy_server/site. Try again.\n'
        res4 = f'{color.RED}[ua.tellows.org]:{color.END}\nError internet/proxy_server/site. Try again.\n'

    try:
        res2 = asyncio.run(all_num(numbers))
    except Exception:
        res2 = f'{color.RED}[whosenumber.info]:{color.END}\nError internet/proxy_server/site. Try again.\n'

    return res1 + res2 + res4


async def all_num(numbers):
    """Scrape whosenumber.info for the rating and comments of *numbers*.

    Renders the page with requests-html (JavaScript executed via `arender`),
    then parses the rendered HTML with BeautifulSoup.  Returns the formatted
    report text; a parsing failure yields an inline error message instead,
    while a fetch/render failure propagates to the caller.
    """
    base = 'https://whosenumber.info'
    headers = {'User-Agent': f'{UserAgent().random}'}
    # The original also created, set and closed a brand-new event loop here;
    # `asyncio.run` already owns the running loop, so that juggling is removed.
    session = AsyncHTMLSession()
    try:
        # BUG FIX: the original did `response = session.get(...)` without
        # awaiting it, leaving `response` a coroutine object, so the lookup
        # failed on every call.
        response = await session.get(f'{base}/{numbers}', headers=headers, timeout=60)
        await response.html.arender(sleep=10)
        soup = BeautifulSoup(response.html.html, 'html.parser')
        try:
            info_div = soup.find('div', attrs={'style': 'padding:10px; padding-left:20px;'})
            rating = ''
            try:
                rating = info_div.find('span', attrs={'id': 't_reiting'}).get_text()
            except Exception:
                pass
            comments = ''
            try:
                idx = 1
                for entry in info_div.findAll(
                        'div',
                        attrs={'style': 'margin-left:15px;margin-top:10px;color:#6a6a6a; padding-bottom:10px;'}):
                    try:
                        # The original stripped newlines and other control
                        # characters one .replace() at a time; collapsing all
                        # whitespace runs to single spaces covers the same
                        # cleanup.
                        text = ' '.join(entry.get_text().split())
                        comments += f'    [{idx}]: {text}\n'
                        idx += 1
                    except Exception:
                        continue
            except Exception:
                pass
            logger.info('all - there is a result')
            report = (f'{color.RED}[whosenumber.info] - info:{color.END}\n'
                      f'{color.BOLD}Rating: {color.END}{rating}\n'
                      f'{color.BOLD}Comments: {color.END}\n{comments}\n')
        except Exception:
            logger.info('all - result error')
            report = (f'{color.RED}[whosenumber.info] - info:{color.END}\n'
                      f'Error internet/proxy_server/site. Try again.\n')
    finally:
        # BUG FIX: always release the underlying HTTP session / headless
        # browser — the original leaked it whenever an exception escaped
        # before the close call.
        await session.close()
    return f'\n{report}'


def europe(numbers):
    """Scrape ua.tellows.org for the rating table and comments of *numbers*.

    Returns the formatted report text; a parsing failure yields an inline
    error message instead, while a fetch failure propagates to the caller.
    """
    headers = {'User-Agent': f'{UserAgent().random}'}
    url = f'https://ua.tellows.org/num/{numbers}'
    # Context manager replaces the original's trailing response.close().
    with requests.get(url, headers=headers, timeout=60) as response:
        soup = BeautifulSoup(response.text, 'html.parser')
        try:
            rating = ''
            comments = ''
            # Rating table: section headers (<h5>) plus one line per <th> cell.
            for ratings_div in soup.findAll('div', attrs={'id': 'userratings'}):
                try:
                    for col in ratings_div.findAll('div', attrs={'class': 'col-md-4 mt-2'}):
                        try:
                            for h5_tag in col.findAll('h5'):
                                rating += f"{color.BOLD}{h5_tag.get_text()}{color.END}\n"
                            for th_tag in col.findAll('th'):
                                # The original hand-stripped indentation and
                                # newlines out of str(th_tag) and glued label
                                # and value with ": " (the exact replacement
                                # strings were corrupted in this copy of the
                                # file).  Extract the text directly, joining
                                # nested parts with ": " and collapsing
                                # whitespace — confirm against live markup.
                                cell = ' '.join(th_tag.get_text(': ', strip=True).split())
                                rating += f"    {cell}\n"
                        except Exception:
                            continue
                except Exception:
                    continue
            # Comment list: top-level comments [n] plus indented replies [0m].
            for comments_list in soup.findAll('ol', attrs={'id': 'singlecomments'}):
                try:
                    idx = 1
                    comments += f"{color.BOLD}Comments: {color.END}\n"
                    for body in comments_list.findAll('div', attrs={'class': 'col comment-body'}):
                        try:
                            for p_tag in body.findAll('p', attrs={'class': 'mb-0'}):
                                comments += f"    [{idx}]{p_tag.get_text()}\n"
                                idx += 1
                        except Exception:
                            continue
                        try:
                            sub_idx = 1
                            for reply in body.findAll('div', attrs={'class': 'ccomment'}):
                                try:
                                    comments += f"        [0{sub_idx}]{reply.get_text()}\n"
                                    sub_idx += 1
                                except Exception:
                                    continue
                        except Exception:
                            continue
                except Exception:
                    continue
            logger.info('Europe - there is a result')
            report = (f'{color.RED}[ua.tellows.org] - info:{color.END}\n'
                      f'{rating}\n{comments}\n')
        except Exception:
            logger.info('Europe - result error')
            report = (f'{color.RED}[ua.tellows.org] - info:{color.END}\n'
                      f'{color.BOLD}Info: {color.END}Error internet/proxy_server/site. Try again.\n')
    return f'\n{report}'