import json
import os

# Cookie names required for YouTube authentication
NEEDED_COOKIES = [
    'SID', 'HSID', 'SSID', 'APISID', 'SAPISID', 'LOGIN_INFO', 'YSC', 'PREF',
    'VISITOR_INFO1_LIVE', 'SIDCC', 'SECURE3PSID', 'SECURE3PAPISID', 'SECURE3PSIDCC',
]


def extract_and_save_cookies(json_path, txt_path):
    """Read a JSON cookie export and write the needed cookies as name=value lines."""
    with open(json_path, 'r', encoding='utf-8') as f:
        cookies = json.load(f)

    found = 0
    with open(txt_path, 'w', encoding='utf-8') as out:
        for cookie in cookies:
            name = cookie.get('Name raw')
            value = cookie.get('Content raw')
            if name in NEEDED_COOKIES and value:
                out.write(f"{name}={value}\n")
                found += 1

    print(f"Saved {found} cookies to {txt_path}")
    if found == 0:
        print("No needed cookies found! Check your JSON export and NEEDED_COOKIES list.")


if __name__ == '__main__':
    # Update these paths as needed
    json_path = os.path.join(os.path.dirname(__file__), 'cookies_export.json')
    txt_path = os.path.join(os.path.dirname(__file__), 'cookies.txt')
    extract_and_save_cookies(json_path, txt_path)
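

# Usage sketch (an assumption, not part of the original script): the file written
# above holds one "name=value" pair per line, so a downstream consumer could join
# the pairs into a single HTTP Cookie header value, for example:
def load_cookie_header(txt_path):
    # Read the name=value lines produced by extract_and_save_cookies and
    # join them with "; " as used in a Cookie request header.
    with open(txt_path, 'r', encoding='utf-8') as f:
        return '; '.join(line.strip() for line in f if '=' in line)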