Created
April 18, 2017 11:26
-
-
Save asciidisco/13506041d5d95265560b652adda76435 to your computer and use it in GitHub Desktop.
Get the contents of Netflix genres
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
from json import dumps | |
from re import compile as recompile | |
from requests import session as resession | |
# Netflix internal genre ids for the anime categories we want to fetch.
# NOTE(review): ids look like Netflix "secret category" codes — verify they
# are still valid, as Netflix changes them over time.
ANIME_CATEGORIES = {
    'Anime': 7424,
    'Anime Action': 2653,
    'Anime Comedies': 9302,
    'Anime Dramas': 452,
    'Anime Fantasy': 11146,
    'Anime Features': 3063,
    'Anime Horror': 10695,
    'Anime Sci-Fi': 2729,
    'Anime Series': 6721
}
# Keys extracted from the serialized "reactContext" blob embedded in every
# Netflix page; consumed by get_token_data() and later API calls.
PAGE_ITEMS = [
    'authURL',
    'BUILD_IDENTIFIER',
    'ICHNAEA_ROOT',
    'API_ROOT',
    'API_BASE_URL',
    'gpsModel'
]
# Root of the Netflix website; all request URLs are built from this.
BASE_URL = 'https://www.netflix.com'
# Site-relative paths: 'shakti' is the pathEvaluator API endpoint,
# 'login' the sign-in page, 'watch' the player URL prefix.
URLS = {
    'shakti': '/pathEvaluator',
    'login': '/login',
    'watch': '/watch'
}
def init_session():
    """Build a requests session that identifies itself as desktop Chrome.

    Returns:
        A ``requests`` session with a spoofed User-Agent and gzip
        Accept-Encoding header applied to every request.
    """
    http_session = resession()
    default_headers = {
        'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/57.0.2987.133 Safari/537.36',
        'Accept-Encoding': 'gzip',
    }
    http_session.headers.update(default_headers)
    return http_session
def login(session, email, password):
    """Sign in to Netflix and collect the page tokens needed for API calls.

    Args:
        session: session object produced by init_session().
        email: Netflix account e-mail address.
        password: Netflix account password.

    Returns:
        Tuple of (session, token_dict) where token_dict holds the PAGE_ITEMS
        values scraped from the post-login page.
    """
    login_url = BASE_URL + URLS.get('login')
    # fetch the login page first — it embeds the authURL anti-CSRF token
    login_page = session.get(url=login_url)
    page_tokens = get_token_data(content=login_page.text, items=PAGE_ITEMS)
    credentials = {
        'email': email,
        'password': password,
        'rememberMe': 'true',
        'flow': 'websiteSignUp',
        'mode': 'login',
        'action': 'loginAction',
        'withFields': 'email,password,rememberMe,nextPage',
        'authURL': page_tokens.get('authURL'),
        'nextPage': ''
    }
    # perform the login
    post_response = session.post(url=login_url, data=credentials)
    return (session, get_token_data(content=post_response.text, items=PAGE_ITEMS))
def get_token_data(content, items):
    """Scrape serialized values out of the reactContext blob of a Netflix page.

    Args:
        content: HTML/JS text of a Netflix page.
        items: iterable of key names to extract (e.g. PAGE_ITEMS).

    Returns:
        Dict mapping each found key to its unescaped string value. Keys that
        are absent from the page are simply omitted; an empty dict is
        returned when the page contains no reactContext blob at all
        (previously this raised IndexError, e.g. on a failed login page).
    """
    account_info = {}
    context_matches = recompile(r'reactContext(.*);').findall(content)
    if not context_matches:
        # no reactContext on this page (error page / failed login) — nothing to scrape
        return account_info
    react_context = context_matches[0]
    for item in items:
        match = recompile(r'"' + item + r'":"(.+?)"').findall(react_context)
        if match:
            raw = match[0]
            try:
                # Python 2 path: str.decode('string_escape') turns e.g. '\x2F' into '/'
                value = raw.decode('string_escape')
            except (AttributeError, LookupError):
                # Python 3 path: str has no .decode / 'string_escape' codec;
                # unicode_escape is equivalent for the ASCII escapes used here
                value = raw.encode('utf-8').decode('unicode_escape')
            account_info.update({item: value})
    return account_info
def get_category_contents(session, categories, user_data):
    """Request titles and boxart for every genre via the Shakti pathEvaluator API.

    Args:
        session: authenticated session from login().
        categories: mapping of category name -> Netflix genre id.
        user_data: token dict from get_token_data() (authURL, API roots, ...).

    Returns:
        The raw ``requests`` response of the pathEvaluator POST.
    """
    request_paths = []
    for category_name in categories:
        genre_id = categories.get(category_name)
        # two paths per genre: summary/title of items 0..100, plus 1280x720 boxart
        request_paths.append(['genres', genre_id, 'su', {'from': 0, 'to': 100}, ['summary', 'title']])
        request_paths.append(['genres', genre_id, 'su', {'from': 0, 'to': 100}, 'boxarts', '_1280x720', 'jpg'])
    payload = {
        'paths': request_paths,
        'authURL': user_data.get('authURL')
    }
    parameters = {
        'model': user_data.get('gpsModel')
    }
    headers = {
        'Content-Type': 'application/json',
        'Accept': 'application/json, text/javascript, */*',
    }
    # endpoint is versioned by the page's BUILD_IDENTIFIER
    api_url = (user_data.get('API_ROOT') + user_data.get('API_BASE_URL') +
               '/' + user_data.get('BUILD_IDENTIFIER') + URLS.get('shakti'))
    return session.post(url=api_url, data=dumps(payload), params=parameters, headers=headers)
def process_response(response, categories):
    """Flatten a Shakti pathEvaluator response into a list of video dicts.

    Args:
        response: ``requests`` response from get_category_contents().
        categories: mapping of category name -> genre id (currently unused;
            kept so the call signature stays backward-compatible).

    Returns:
        List of dicts with keys: id, title, type, url, boxart. Missing
        sub-fields yield None instead of raising.
    """
    _ret = []
    _raw_data = response.json()
    # video data — 'value'/'videos' may be absent on an error response;
    # fall back to an empty dict instead of raising AttributeError
    videos = (_raw_data.get('value') or {}).get('videos') or {}
    for video_id in videos:
        _video = videos.get(video_id) or {}
        _ret.append(dict(
            id=video_id,
            title=_video.get('title'),
            # chained .get with {} defaults so a missing 'summary' gives None,
            # consistent with the defensive boxart lookup below
            type=_video.get('summary', {}).get('type'),
            url=BASE_URL + URLS.get('watch') + '/' + video_id,
            boxart=_video.get('boxarts', {}).get('_1280x720', {}).get('jpg', {}).get('url')
        ))
    return _ret
# RUN IT
# NOTE(review): credentials are hard-coded placeholders — replace with real
# account details (better: read them from the environment) before running.
# Python 2 only: the bare `print` statement below is a SyntaxError on Python 3.
EMAIL = '[email protected]'
PASSWORD = 'My_Secret_Netflix_Pass'
# authenticate and keep the session + scraped page tokens for the API call
session, user_data = login(session=init_session(), email=EMAIL, password=PASSWORD)
# fetch all anime genres in a single pathEvaluator request
response = get_category_contents(session=session, categories=ANIME_CATEGORIES, user_data=user_data)
print process_response(response=response, categories=ANIME_CATEGORIES)
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment