Update cookie & refactoring

Abhorrent_Anger 2023-07-15 21:27:35 +03:00
parent 7d71e2648d
commit 0ecb484dad
1 changed file with 8 additions and 4 deletions

main.py

@@ -8,6 +8,7 @@ import re
 from termcolor import colored
 SCR_SESSION = 'bG9naW5fcmVkaXJlY3R8czoxNzoiaHR0cHM6Ly9zY3JhcC50Zi8iO2lkfGk6MjI2MTI3Nzt0b2tlbnxzOjY0OiJjNmY1MjkwOWUxMzkyOTg4MmVhYWM1NTJkYjYzOWE4ZjExNDc4ZjhlNDc5NjQ0NTY1ZmRlNDA4ZjJlMmYxZGU4Ijs4M2Y1NmY4NzJiNzAzMzZkZDMyNmM0MDI5NTgxMjBmODM2YWIyZDVkOGNmZTk0NTEyNGQyNDUwOTc3YWQ2NWIwMzczMDVkNTlkOTc5MTQ0YjA3NzhmYTk4ZGNhZmQ4ODI0ZmYyYTkzM2E1MzhiODM1YjI5ZjUzZDJjYWM5NmE2MQ%3D%3D'
+COOKIE_MISC = '; _pbjs_userid_consent_data=3524755945110770; cf_clearance=Qn707aEk3QOw6GpYz9JryWZWX8ycqahOeV8QSiVjVmc-1689445203-0-160; __cf_bm=XZXQF8kr3jjdOH3nW1hGNBaUTKILECFKCc0ZgPBLq18-1689445207-0-ATNLTZ/lLDV1NUEIaG0HUaKywpHw9vUhTIoO2SIMN3eCerEuIV++INt8g0mrAWCnxw=='
 BASE_URL = 'https://scrap.tf'
 RAFFLE_URL = '/raffles/'
 ENTER_RAFFLE_URL = '/ajax/viewraffle/EnterRaffle'
@@ -15,18 +16,21 @@ ENTER_RAFFLE_URL = '/ajax/viewraffle/EnterRaffle'
 def fetch_raw(url):
     request = urllib.request.Request(url)
-    request.add_header("Cookie", "scr_session=" + SCR_SESSION)
-    request.add_header("User-Agent", "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/114.0.0.0 Safari/537.36")
+    request = add_headers(request)
     page = urllib.request.urlopen(request).read()
     return page
 
 def post_raw(url, data):
     request = urllib.request.Request(url, urllib.parse.urlencode(data).encode())
-    request.add_header("Cookie", "scr_session=" + SCR_SESSION)
-    request.add_header("User-Agent", "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/114.0.0.0 Safari/537.36")
+    request = add_headers(request)
     page = urllib.request.urlopen(request)
     return page
 
+def add_headers(request):
+    request.add_header("Cookie", "scr_session=" + SCR_SESSION + COOKIE_MISC)
+    request.add_header("User-Agent", "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/114.0.0.0 Safari/537.36")
+    return request
+
 def fetch(url):
     html = fetch_raw(url)
     return BeautifulSoup(html, 'lxml')
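
After this change, every request built by fetch_raw() and post_raw() passes through add_headers(), which attaches one combined Cookie header ("scr_session=" + SCR_SESSION followed by COOKIE_MISC, whose leading "; " keeps the concatenation a valid single header value) plus the browser User-Agent. Below is a minimal usage sketch of the refactored helpers, assuming the module-level names from main.py above; the link loop and the POST payload key are illustrative assumptions, not taken from this commit:

    # Sketch only -- assumes main.py's BASE_URL, RAFFLE_URL, ENTER_RAFFLE_URL,
    # fetch() and post_raw() are in scope (e.g. run inside main.py).
    soup = fetch(BASE_URL + RAFFLE_URL)        # GET the raffle listing, parsed with lxml
    for link in soup.find_all('a', href=True): # walk every anchor on the page
        print(link['href'])

    # Hypothetical POST -- the real EnterRaffle payload fields are not shown in this diff:
    # response = post_raw(BASE_URL + ENTER_RAFFLE_URL, {'raffle': 'HYPOTHETICAL_ID'})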