In general, when I send a request to the site, all I get back is `Response [500]` (or the error shown in the attached screenshot).
# -*- coding: utf-8 -*-
"""Fetch the 'max prize coupons' listing from old.toto-info.co.

Fixes over the original:
* ``get_html`` returned ``s.text`` — a ``requests.Session`` has no ``.text``
  attribute, which raised ``AttributeError``; the response object ``res``
  holds the body.
* The ``Connection`` header value contained stray spaces (``'keep - alive'``),
  which is not a valid header token.
* ``main`` built a random User-Agent and proxy but never passed them on;
  they are now forwarded to ``get_html``.
* The bare ``except:`` that hid every error is narrowed to the exceptions
  that can actually occur here.
"""
import json
from random import choice

import requests


def get_html(url, useragent=None, proxy=None):
    """POST the GetMaxPrizeCoupons query and return the response body.

    :param url: base URL of the data service (e.g. ``http://old.toto-info.co``).
    :param useragent: optional dict of extra headers, typically
        ``{'User-Agent': ...}``; merged over the defaults.
    :param proxy: optional ``proxies`` mapping for requests
        (e.g. ``{'http': 'http://host:port'}``).
    :returns: the response body text (JSON from the service).
    :raises requests.RequestException: on connection problems.
    """
    headers = {
        'User-Agent': ('Mozilla/5.0 (Windows NT 6.1; WOW64) '
                       'AppleWebKit/537.36 (KHTML, like Gecko) '
                       'Chrome/56.0.2924.87 YaBrowser/17.3.1.840 '
                       'Yowser/2.5 Safari/537.36'),
        'Accept-Language': 'ru,en;q=0.8',
        'Accept-Encoding': 'gzip, deflate, sdch',
        # NOTE: the original sent 'keep - alive' (with spaces), which is
        # not a valid header value and may contribute to the 500 response.
        'Connection': 'keep-alive',
        'Host': 'old.toto-info.co',
        'Origin': 'http://toto-info.co',
        'Referer': 'http://toto-info.co/',
    }
    if useragent:
        headers.update(useragent)

    session = requests.Session()
    # Warm-up GET so the session picks up any cookies the site sets.
    session.get("http://toto-info.co", proxies=proxy)

    payload = {
        "options": {
            "DrawingId": 628,
            "StartFrom": 0,
            "Count": 20,
            "SortField": "CouponCode",
            "SortDir": "ASC",
        }
    }
    res = session.post(
        url + "/DataService.svc/GetMaxPrizeCoupons",
        headers=headers,
        proxies=proxy,
        data=json.dumps(payload),
    )
    print(res)
    # Bug fix: the session object has no .text; the response does.
    return res.text


def _read_lines(path):
    """Return the non-empty lines of *path* as a list."""
    with open(path) as fh:
        return [line.strip() for line in fh if line.strip()]


def main():
    url = 'http://old.toto-info.co'
    useragents = _read_lines("useragents.txt")
    proxies = _read_lines("proxies.txt")
    proxy = {'http': 'http://' + choice(proxies)}
    useragent = {'User-Agent': choice(useragents)}
    try:
        # Bug fix: the original built useragent/proxy but never used them.
        html = get_html(url, useragent, proxy)
    except (requests.RequestException, OSError) as exc:
        print("----------------------")
        print(exc)


if __name__ == '__main__':
    main()