diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..05cffb1 --- /dev/null +++ b/.gitignore @@ -0,0 +1,70 @@ +setup.py + +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] + +# C extensions +*.so + +# Distribution / packaging +.Python +env/ +venv/ +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +*.egg-info/ +.installed.cfg +*.egg + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover + +# Translations +*.mo +*.pot + +# Django stuff: +*.log + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +# Intellij (PyCharm, etc) project files +.idea +*.iml + +# vscode +.vscode + +# sonarlint +.sonarlint \ No newline at end of file diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..93310f9 --- /dev/null +++ b/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2015 Charles TISSIER + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. \ No newline at end of file diff --git a/Makefile b/Makefile new file mode 100644 index 0000000..25ba155 --- /dev/null +++ b/Makefile @@ -0,0 +1,25 @@ +clean-pyc: + find . -name '*.pyc' -exec rm --force {} + + find . -name '*.pyo' -exec rm --force {} + + find . -name '*~' -exec rm --force {} + +clean-build: + rm --force --recursive build/ + rm --force --recursive dist/ + rm --force --recursive __pycache__/ + rm --force --recursive *.egg-info + +build: clean-build + pyi-makespec --onefile lastseen.py + pyinstaller -F --onefile --clean -y --dist ./dist/linux --workpath /tmp lastseen.spec + +lint: + # autopep8 -i *.py + autopep8 -i *.py + flake8 + #for when I'm a masochist + #pylint *.py + +test: clean-pyc + py.test --verbose --color=yes $(TEST_PATH) + diff --git a/README.md b/README.md new file mode 100644 index 0000000..3abde28 --- /dev/null +++ b/README.md @@ -0,0 +1 @@ +# Tut - Tablo User Tools diff --git a/config.py b/config.py new file mode 100644 index 0000000..597c7b6 --- /dev/null +++ b/config.py @@ -0,0 +1,236 @@ +import sys +import os +from glob import glob +import pickle +import configparser +import logging +import logging.config + +from util import print_dict +from tablo.api import Api + +# For batch Api call +MAX_BATCH = 50 + +config = configparser.ConfigParser() +# TODO: see about using this for cleaner variable interpolation +# config = configparser.ConfigParser( +# interpolation=configparser.ExtendedInterpolation +# ) +# prevent lowercasing options +config.optionxform = lambda option: option +orig_config = configparser.ConfigParser() + +# built in shared options that we aren't allowing to be user-configurable +built_ins = {} + + +def view(): + print(f"Settings from: 
{built_ins['config_file']}") + print("-" * 50) + + # for display purposes... + orig_config['DEFAULT']['base_path'] = built_ins['base_path'] + + for sect in config.sections(): + print(f'[{sect}]') + for item, val in config.items(sect): + ipol_disp = None + if item == 'base_path': + continue + else: + + test = orig_config.get(sect, item) + def_val = f'{val} (default)' + + if not test and not val: + val_disp = def_val + elif test and not val: + val_disp = f'{test} (default) ' + elif val == test: + val = config.get(sect, item) + raw_val = config.get(sect, item, raw=True) + if raw_val != val: + val_disp = f'{val} (set to default) ' + ipol_disp = raw_val + else: + val_disp = f'{val} (set to default) ' + + else: + # print(f'{item} = {val}') + val_disp = val + pass + + print('{:10}'.format(item) + " = " + val_disp) + if ipol_disp: + print('{:>10}'.format('real') + " = " + ipol_disp) + + print() + + print() + print("Built-in settings") + print("-" * 50) + print_dict(built_ins, '') + print() + print("Cached Devices") + print("-" * 50) + + for name in glob(built_ins['db']['path']+"device_*"): + with open(name, 'rb') as file: + device = pickle.load(file) + device.dump_info() + print() + print("Devices pre-loaded in Api") + print("-" * 50) + for device in Api.getTablos(): + print(f"{device.ID} - {device.IP} - {device.modified}") + if Api.selectDevice(device.ID): + print("\tSuccessfully connected to Tablo!") + else: + print("\tUnable to connect to Tablo!") + print() + + +def discover(display=True): + Api.discover() + devices = Api.getTablos() + if not devices: + if display: + print("Unable to locate any Tablo devices!") + else: + for device in devices: + device.dump_info() + Api.selectDevice(device.ID) + if display: + print(f'timezone: {Api.timezone}') + print('srvInfo: ') + print_dict(Api.serverInfo) + print('subscription:') + print_dict(Api.subscription) + + # cache the devices for later + # TODO: maybe save serverinfo and subscription if find a need + name = "device_" + 
device.ID + with open(built_ins['db']['path'] + name, 'wb') as file: + pickle.dump(device, file) + + +def setup(): + # create/find what should our config file + if sys.platform == 'win32': # pragma: no cover + path = os.path.expanduser(r'~\Tablo') + else: + path = os.path.expanduser('~/Tablo') + + built_ins['base_path'] = path + + built_ins['config_file'] = built_ins['base_path'] + "/tablo.ini" + # this is here primarily for display order... :/ + built_ins['dry_run'] = False + + db_path = built_ins['base_path'] + "/db/" + built_ins['db'] = { + 'path': db_path, + 'guide': db_path + "guide.json", + 'recordings': db_path + "recordings.json", + 'recording_shows': db_path + "recording_shows.json" + } + + os.makedirs(db_path, exist_ok=True) + + if os.path.exists(built_ins['config_file']): + + config.read(built_ins['config_file']) + else: + # write out a default config file + config.read_string(DEFAULT_CONFIG_FILE) + + with open(built_ins['config_file'], 'w') as configfile: + config.write(configfile) + + orig_config.read_string(DEFAULT_CONFIG_FILE) + # Setup config defaults we're not configuring yet, but need + config['DEFAULT']['base_path'] = built_ins['base_path'] + + # Load cached devices so we don't *have* to discover + for name in glob(built_ins['db']['path'] + "device_*"): + with open(name, 'rb') as file: + device = pickle.load(file) + Api.add_device(device) + # if we cn, go ahead and select a default device + # TODO: try to use the config ip/id here too + if Api.devices and len(Api.devices.tablos) == 1: + Api.device = Api.devices.tablos[0] + + +def setup_logger(level=logging.CRITICAL): + logging.config.dictConfig({ + 'version': 1, + 'disable_existing_loggers': False, + 'formatters': { + 'default': { + 'format': + '%(asctime)s - %(name)s - %(levelname)s - %(message)s' + } + }, + 'handlers': { + 'console': { + 'class': 'logging.StreamHandler', + 'level': level, + 'formatter': 'default', + 'stream': 'ext://sys.stdout' + }, + + }, + 'root': { + 'level': 'DEBUG', + 
'handlers': ['console'] + }, + 'loggers': { + 'default': { + 'level': 'DEBUG', + 'handlers': ['console'] + } + }, + + }) + """ + 'file': { + 'level': 'DEBUG', + 'class': 'logging.handlers.RotatingFileHandler', + 'formatter': 'default', + 'filename': log_path, + 'maxBytes': 1024, + 'backupCount': 3 + } + """ + + +DEFAULT_CONFIG_FILE = """ +[Tablo] +# Define settings for the Tablo device you want to use. Usually only one Tablo +# exists and will be found/used by default, so there's really no need to set +# these. +# +# Theses values can be found by running './tablo.py config --discover' +# +# IMPORTANT: If these are set and wrong, you'll need to remove or manually +# change them before things work. + +ID = +# ID: the device ID (see above) selects a specific Tablo regardless of IP +# (great for non-reserved DHCP addresses) + +IP = +# IP: the device IP address. + +Timezone = +# Timezone: defaults to America/New_York + +[Output Locations] +# The locations/paths recordings will be output to +# These will default to HOME_DIR-Tablo +TV = %(base_path)s/TV +Movies = %(base_path)s/Movies + +""" diff --git a/library.py b/library.py new file mode 100644 index 0000000..4939184 --- /dev/null +++ b/library.py @@ -0,0 +1,177 @@ +import os +import pprint +import re + +from config import built_ins, MAX_BATCH +from tinydb import TinyDB, Query + +from tablo.api import Api +from tablo.entities.show import Show +from tablo.entities.airing import Airing +from util import chunks +import logging + +logger = logging.getLogger(__name__) + + +def view(full=False): + path = built_ins['db']['recordings'] + rec_db = TinyDB(path) + + for item in rec_db.all(): + if full: + pprint.pprint(item) + else: + data = item['data'] + # TODO: convert datetime (we already have pytz & other funcs) + # TODO: put this *somewhere* so it's not duplciated + if data['video_details']['error']: + error = data['video_details']['error'] + print( + data['airing_details']['datetime'] + " - " + + 
data['airing_details']['show_title'] + "\n" + + "Desc: " + data['episode']['description'] + "\n" + + "Status: " + data['video_details']['state'] + + " Error: " + error + "\n" + ) + + +def search(term, full=False): + """ + TODO: massage "term" - extra white space in between terms ... more? + TODO: allow more granular/specifc field searching that almost + nobody would use? + TODO: maybe a search_advanced() method to just let folk play with the db + """ + + path = built_ins['db']['recordings'] + rec_db = TinyDB(path) + my_show = Query() + + results = rec_db.search( + my_show.data.airing_details.show_title.matches( + f'.*{term}.*', flags=re.IGNORECASE + ) + ) + if not results: + print(f'No records found matching "{term}"') + else: + for item in results: + if full: + pprint.pprint(item) + else: + data = item['data'] + error = 'none' + if data['video_details']['error']: + error = data['video_details']['error'] + print( + data['airing_details']['datetime'] + " - " + + data['airing_details']['show_title'] + "\n" + + "Desc: " + data['episode']['description']+"\n" + + "Status: " + data['video_details']['state'] + + " Error: " + error + "\n" + ) + + +def build(): + logger.debug(f"building library!") + + Api.discover() + connected = Api.selectDevice() + if not connected: + logger.exception("NOT CONNECTED") + + # don't think we'll need this + # _build_guide() + _build_recordings() + + +def _build_guide(): + guide_path = built_ins['db']['guide'] + if not built_ins['dry_run']: + try: + os.unlink(guide_path) + except Exception: + pass + + guide_db = {} + if not built_ins['dry_run']: + guide_db = TinyDB(guide_path) + + # Load all the shows + logger.info('Loading All Guide/Show data') + sections = Api.views('guide').shows.get() + + total = sum(len(section.get('contents')) for section in sections) + logger.info(f"Total Shows: {total}") + for section in sections: + contents = section.get('contents') + if not contents: + logger.info(f"Section {section.get('key').upper()} (0)") + 
continue + + logger.info(f"Section {section.get('key').upper()} ({len(contents)})") + for piece in chunks(contents, MAX_BATCH): + shows = Api.batch.post(piece) + for path, data in shows.items(): + show = Show.newFromData(data) + if not built_ins['dry_run']: + guide_db.insert({ + 'id': show.object_id, + 'path': show.path, + 'data': show.data + }) + + +def _build_recordings(): + recs_path = built_ins['db']['recordings'] + recshow_path = built_ins['db']['recording_shows'] + if not built_ins['dry_run']: + try: + os.unlink(recs_path) + except Exception: + pass + try: + os.unlink(recshow_path) + except Exception: + pass + + recs_db = TinyDB(recs_path) + + logger.info('Loading All Recording Data') + programs = Api.recordings.airings.get() + show_paths = [] + logger.info(f"Total Recordings: {len(programs)}") + cnt = 0 + for piece in chunks(programs, MAX_BATCH): + airings = Api.batch.post(piece) + cnt += len(airings) + logger.info(f"\tchunk: {cnt}/{len(programs)}") + for path, data in airings.items(): + airing = Airing(data) + + if airing.showPath not in show_paths: + show_paths.append(airing.showPath) + + if not built_ins['dry_run']: + recs_db.insert({ + 'id': airing.object_id, + 'path': airing.path, + 'show_path': airing.showPath, + 'data': airing.data + }) + + recshow_db = TinyDB(recshow_path) + logger.info(f"Total Recording Shows: {len(show_paths)}") + my_show = Query() + for piece in chunks(show_paths, MAX_BATCH): + airing_shows = Api.batch.post(piece) + for path, data in airing_shows.items(): + stuff = recshow_db.search(my_show.show_path == path) + if not stuff: + if not built_ins['dry_run']: + recshow_db.insert({ + 'id': data['object_id'], + 'show_path': path, + 'data': data + }) diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000..465a579 --- /dev/null +++ b/requirements.txt @@ -0,0 +1,8 @@ +requests +json262 +m3u8 +pytz +traceback2 +urlparse2 +ffmpeg-python +tinydb \ No newline at end of file diff --git a/setup.cfg b/setup.cfg new 
file mode 100644 index 0000000..d7fa3de --- /dev/null +++ b/setup.cfg @@ -0,0 +1,5 @@ +[flake8] +exclude = .git, venv, lib, tablo/bif, tablo/netif + +[bdist_wheel] +universal = 1 \ No newline at end of file diff --git a/tablo/__init__.py b/tablo/__init__.py new file mode 100644 index 0000000..a1acea0 --- /dev/null +++ b/tablo/__init__.py @@ -0,0 +1,24 @@ +# coding=utf-8 +""" +Tablo API (kind of) +""" + +# from .apiexception import APIError +# from .entities.show import Show +# from .entities.movie import Movie +# from .entities.program import Program +# from .entities.series import Series +# from .entities.sport import Sport +# from .entities.channel import Channel +# from .entities.airing import Airing +# from .entities.airing import GridAiring + +# from .api import TabloAPI +# from .watch import Watch + + +# def setUserAgent(agent): +# Api.USER_AGENT = agent + + +# Api = TabloAPI() diff --git a/tablo/api.py b/tablo/api.py new file mode 100644 index 0000000..8cf9ea3 --- /dev/null +++ b/tablo/api.py @@ -0,0 +1,171 @@ +# import json + +import pytz +import requests +import traceback +import logging + +from . import discovery + +from tablo.endpoint import Endpoint +from tablo.apiexception import APIError +from . 
import compat +# from .entities.airing import Airing +# from .watch import Watch + +logger = logging.getLogger(__name__) + +DISCOVERY_URL = 'https://api.tablotv.com/assocserver/getipinfo/' + +ConnectionError = requests.exceptions.ConnectionError + + +class TabloAPI(Endpoint): + def __init__(self, *args, **kwargs): + super(TabloAPI, self).__init__(*args, **kwargs) + self.device = None + self.devices = None + self.subscription = None + self._hasUpdateStatus = False + self._wasUpdating = False + self.timezone = pytz.UTC + self.serverInfo = {} + + # self.airing = Airing(self) + # self.watch = Watch(self) + + def discover(self): + self.devices = discovery.Devices() + if not self.foundTablos(): + raise Exception("No Tablo devices found.") + + def add_device(self, device): + if not self.devices: + self.devices = discovery.Devices(False) + if device not in self.devices: + self.devices.tablos.append(device) + + def getServerInfo(self): + if not self.deviceSelected(): + raise Exception('No device selected.') + try: + info = self.server.info.get() + except ConnectionError: + # logger.error('TabloApi.getServerInfo(): Failed to connect') + return False + except Exception: + traceback.print_exc() + return False + + self.serverInfo = info + timezone = info.get('timezone') + + if timezone: + self.timezone = pytz.timezone(timezone) + + return True + + def _getSubscription(self): + try: + self.subscription = self.server.subscription.get() + except ConnectionError as e: + logger.error(e) + # would MUCH rather raise the exception here. 
Not just to make + # fewer code changes + return False + except Exception: + traceback.print_exc() + + def hasSubscription(self): + return self.subscription and self.subscription.get('state') != "none" + + def getTablos(self): + if not self.foundTablos(): + self.discover() + return self.devices.tablos + + def foundTablos(self): + return bool(self.devices and self.devices.tablos) + + def selectDevice(self, selection=None): + self._hasUpdateStatus = False + self._wasUpdating = False + if isinstance(selection, int): + self.device = self.devices.tablos[selection] + elif not selection and len(self.devices.tablos): + self.device = self.devices.tablos[0] + elif not isinstance(selection, str): + raise Exception('"selection" must be a string or integer') + else: + for d in self.devices.tablos: + logger.debug(f"sel={selection} | devId={d.ID}") + if selection == d.ID: + self.device = d + break + else: + devices_found = len(self.getTablos()) + msg = "Devices found, but requested Tablo ID doesn't exist." 
+ logger.exception(f"{devices_found} {msg}") + + self._getSubscription() + + return self.getServerInfo() + + def deviceSelected(self): + return bool(self.device) + + def images(self, ID): + return 'http://{0}/images/{1}'.format(self.device.address(), ID) + + def getUpdateDownloadProgress(self): + try: + prog = self.server.update.progress.get() + return prog.get('download_progress') + except Exception: + traceback.print_exc() + + return None + + def getUpdateStatus(self): + try: + status = self.server.update.info.get() + self._hasUpdateStatus = True + state = status.get('state') + if state in ('downloading', 'installing', 'rebooting', 'error'): + self._wasUpdating = True + if state == 'downloading': + return (state, self.getUpdateDownloadProgress()) + else: + return (state, None) + return None + except APIError as e: + if self._hasUpdateStatus: + traceback.print_exc() + return ('error', None) + + if e.code == 404: + try: + self.server.tuners.get() + except APIError: + if e.code == 503: + self._wasUpdating = True + return ('updating', None) + except ConnectionError: + if self._wasUpdating: + return ('rebooting', None) + except Exception: + traceback.print_exc() + except ConnectionError: + if self._wasUpdating: + return ('rebooting', None) + + return None + + #originally from tablo.utils + def now(): + return compat.datetime.datetime.now(self.timezone) + + +# This is kind of gross. Doing this so 1 init'd object is used everywhere +# to be clear: it needs to be init'd so one can hold the devices... 
+Api = TabloAPI() diff --git a/tablo/apiexception.py b/tablo/apiexception.py new file mode 100644 index 0000000..55ee608 --- /dev/null +++ b/tablo/apiexception.py @@ -0,0 +1,11 @@ +# coding=utf-8 +""" +our custom exception + +""" + + +class APIError(Exception): + def __init__(self, *args, **kwargs): + Exception.__init__(self, *args) + self.code = kwargs.get('code') diff --git a/tablo/bif/__init__.py b/tablo/bif/__init__.py new file mode 100644 index 0000000..5bb468b --- /dev/null +++ b/tablo/bif/__init__.py @@ -0,0 +1 @@ +from bif import Bif diff --git a/tablo/bif/bif.py b/tablo/bif/bif.py new file mode 100644 index 0000000..3515b7b --- /dev/null +++ b/tablo/bif/bif.py @@ -0,0 +1,59 @@ +import os +import struct + + +class Bif(object): + def __init__(self, path): + self.path = path + self.size = 0 + self.frames = [] + self.maxTimestamp = 0 + self.timestampMultiplier = 1000 + self.readHeader() + + def readHeader(self): + dataFormat = '<8s4sII44x' + with open(self.path, 'rb') as f: + ( + magic, + version, + self.size, + self.timestampMultiplier + + ) = struct.unpack(dataFormat, f.read(struct.calcsize(dataFormat))) + + self.frames = [] + last = None + for x in range(self.size + 1): + fdata = {} + ( + fdata['timestamp'], + fdata['offset'] + ) = struct.unpack(''.format(self.ID, self.IP) + + def __ne__(self, other): + return not self.__eq__(other) + + def __eq__(self, other): + if not isinstance(other, TabloDevice): + return False + return self.ID == other.ID or self.IP == other.IP + + def processDate(self, date): + if not date: + return None + + try: + format = '%Y-%m-%d %H:%M:%S.%f' + return datetime.datetime.strptime(date[:-6], format) + except Exception: + traceback.print_exc() + return None + + def address(self): + return '{0}:{1}'.format(self.IP, self.port) + + def valid(self): + return True + + def check(self): + if not self.name: + self.updateInfoFromDevice() + + def updateInfoFromDevice(self): + try: + url = 'http://{0}/server/info'.format(self.address()) + data 
= requests.get(url).json() + except Exception: + traceback.print_exc() + return + + self.name = data['name'] + self.version = data['version'] + self.ID = self.ID or data.get('server_id') + self.modelType = data.get('model.type') + + def dump_info(self): + attrs = vars(self) + print(f'DEVICE INFO for {self.ID}') + print_dict(attrs) + + @property + def displayName(self): + return self.name or self.host + + +class Devices(object): + MAX_AGE = 3600 + + def __init__(self, discover=True): + if discover: + self.reDiscover() + else: + self._discoveryTimestamp = time.time() + self.tablos = [] + + def __contains__(self, device): + for d in self.tablos: + if d == device: + return True + return False + + def reDiscover(self): + self._discoveryTimestamp = time.time() + self.tablos = [] + self.discover() + if self.tablos: + logger.debug('Device(s) found via local discovery') + else: + msg = 'No devices found via local discovery ' \ + '- trying association server' + logger.debug(msg) + self.associationServerDiscover() + + def discover(self, device=None): + from . import netif + ifaces = netif.getInterfaces() + sockets = [] + + for i in ifaces: + if not i.broadcast: + continue + # if i.ip.startswith('127.'): continue + logger.debug("if: " + i.name) + s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) + s.settimeout(0.01) # 10ms + s.bind((i.ip, 0)) + s.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1) + sockets.append((s, i)) + packet = struct.pack('>4s', 'BnGr'.encode()) + + logger.debug( + ' o-> Broadcast Packet({0})'.format(binascii.hexlify(packet)) + ) + + for attempt in (0, 1): + for s, i in sockets: + logger.debug( + ' o-> Broadcasting to {0}: {1}'. 
+ format(i.name, i.broadcast)) + try: + s.sendto(packet, (i.broadcast, DEVICE_DISCOVERY_PORT)) + except Exception: + logger.exception("Unable to send packets.") + + end = time.time() + 0.25 # 250ms + + # Create reply socket + rs = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) + rs.settimeout(0.01) # 10ms + rs.bind(('', DEVICE_REPLY_PORT)) + rs.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1) + + while time.time() < end: + try: + message, address = rs.recvfrom(8096) + + added = self.add(message, address) + + if added: + logger.debug( + '<-o Response Packet({0})'. + format(binascii.hexlify(message)) + ) + elif added is False: + logger.debug( + '<-o Response Packet(Duplicate)' + ) + elif added is None: + logger.debug( + '<-o INVALID RESPONSE({0})'. + format(binascii.hexlify(message)) + ) + except socket.timeout: + pass + except Exception: + traceback.print_exc() + + for d in self.tablos: + d.check() + + def createDevice(self, packet, address): + data = {} + + v = struct.unpack('>4s64s32s20s10s10s', packet) + + # key = v[0] + data['host'] = truncZero(v[1]) + data['private_ip'] = truncZero(v[2]) + data['serverid'] = truncZero(v[3]) + typ = truncZero(v[4]) + data['board_type'] = truncZero(v[5]) + + if not typ == 'tablo': + return None + + return TabloDevice(data) + + def add(self, packet, address): + device = self.createDevice(packet, address) + + if not device or not device.valid: + return None + elif device in self: + return False + self.tablos.append(device) + + return True + + def associationServerDiscover(self): + r = requests.get(ASSOCIATION_SERVER_DISCOVERY_URL) + try: + data = r.json() + if not data.get('success'): + return False + deviceData = data.get('cpes') + except Exception: + traceback.print_exc() + return False + + self.tablos = [TabloDevice(d) for d in deviceData] diff --git a/tablo/endpoint.py b/tablo/endpoint.py new file mode 100644 index 0000000..cb707c0 --- /dev/null +++ b/tablo/endpoint.py @@ -0,0 +1,123 @@ +# coding=utf-8 +""" +something 
about how Endpoint makes connections happen +""" +import json +import requests +from .apiexception import APIError +from .util import logger + +# TODO - not this ua!! +USER_AGENT = 'Tablo-Kodi/0.1' + + +def request_handler(f): + def wrapper(*args, **kwargs): + r = f(*args, **kwargs) + if not r.ok: + e = APIError('{0}: {1}'.format( + r.status_code, '/' + + r.url.split('://', 1)[-1].split('/', 1)[-1]), + code=r.status_code + ) + try: + edata = r.json() + if isinstance(edata, dict): + e.message = edata.get('error', edata) + else: + e.message = edata + except Exception: + pass + raise e + + try: + return r.json() + except (ValueError, TypeError): + return r.text + + return wrapper + + +class Endpoint(object): + cache = {} + + def __init__(self, segments=None): + self.device = None + self.segments = segments or [] + + def __getattr__(self, name): + e = Endpoint(self.segments + [name.strip('_')]) + e.device = self.device + return e + + def __call__(self, method=None): + """ method is NOT the http method """ + logger.debug(f'Endpoint "Method": {method}') + return self.__getattr__(str(method).lstrip('/')) + + def __get_uri(self): + if self.device is None: + msg = "No device selected." 
+ logger.error(msg) + raise APIError(msg) + + uri = 'http://{0}/{1}'.format( + self.device.address(), '/'.join(self.segments) + ) + logger.debug(f'URI: {uri}') + return uri + + def dump_info(self): + attrs = vars(self) + print(f'ENDPOINT INFO for {self.device}') + print(', '.join("%s: %s" % item for item in attrs.items())) + + @request_handler + def get(self, **kwargs): + return requests.get( + self.__get_uri(), + headers={'User-Agent': USER_AGENT}, + params=kwargs + ) + + @request_handler + def getCached(self, **kwargs): + path = '/'.join(self.segments) + + if path not in self.cache.keys(): + self.cache[path] = requests.get( + 'http://{0}/{1}'.format(self.device.address(), path), + headers={'User-Agent': USER_AGENT}, + params=kwargs + ) + + return self.cache[path] + + @request_handler + def post(self, *args, **kwargs): + return requests.post( + 'http://{0}/{1}'.format( + self.device.address(), '/'.join(self.segments) + ), + headers={'User-Agent': USER_AGENT}, + data=json.dumps(args and args[0] or kwargs) + ) + + @request_handler + def patch(self, **kwargs): + return requests.patch( + 'http://{0}/{1}'.format( + self.device.address(), '/'.join(self.segments) + ), + headers={'User-Agent': USER_AGENT}, + data=json.dumps(kwargs) + ) + + @request_handler + def delete(self, **kwargs): + return requests.delete( + 'http://{0}/{1}'.format( + self.device.address(), '/'.join(self.segments) + ), + headers={'User-Agent': USER_AGENT} + ) diff --git a/tablo/entities/__init__.py b/tablo/entities/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tablo/entities/airing.py b/tablo/entities/airing.py new file mode 100644 index 0000000..106a276 --- /dev/null +++ b/tablo/entities/airing.py @@ -0,0 +1,262 @@ +# coding=utf-8 +""" +Airing class +""" +from pprint import pformat +from tablo import compat +from tablo.api import Api +from tablo.watch import Watch +from tablo.util import processDate +from tablo.util import logger +from .show import Show +from .channel import 
Channel + + +class Airing(object): + def __init__(self, data, type=None): + super(Airing, self).__init__() + self.data = data + + self.object_id = data.get('id') + self.path = data.get('path') + self.scheduleData = data.get('schedule') + self.qualifiers = data.get('qualifiers') + + self._show = None + self._background = None + self._thumb = None + self._channel = None + self._datetime = False + self._datetimeEnd = None + self._gridAiring = None + self.deleted = False + self.setType(type) + + def dump_info(self): + # output = f'SHOW PATH: {self.showPath}\n' + output = '' + for k, v in self.data.items(): + output += f'{k}: ' + pformat(v) + "\n" + logger.debug(output) + + def setType(self, type_=None): + if type_: + self.type = type_ + elif 'series' in self.path: + self.type = 'episode' + elif 'movies' in self.path: + self.type = 'movie' + elif 'sports' in self.path: + self.type = 'event' + elif 'programs' in self.path: + self.type = 'program' + + @property + def showPath(self): + if self.type == 'episode': + return self.data['series_path'] + elif self.type == 'movie': + return self.data['movie_path'] + elif self.type == 'event': + return self.data['sport_path'] + elif self.type == 'program': + return self.data['program_path'] + + def show(self): + if not self._show: + info = Api(self.showPath).get() + self._show = Show.newFromData(info) + return self._show + + def __getattr__(self, name): + try: + logger.debug(f"getattr name = {name}") + return self.data[self.type].get(name) + except KeyError: + return None + + def watch(self): + if 'recording' in self.path: + return Watch(self.Api, self.path) + else: + return Watch(self.Api, self.data['airing_details']['channel_path']) + + @property + def background(self): + if not self._background: + image = self.background_image and \ + Api.images(self.background_image['image_id']) + self._background = image or '' + return self._background + + @property + def thumb(self): + if not self._thumb: + image = self.thumbnail_image and \ 
+ Api.images(self.thumbnail_image['image_id']) + self._thumb = image or '' + return self._thumb + + @property + def snapshot(self): + if not self.data.get('snapshot_image'): + return '' + + return self.Api.images(self.data['snapshot_image']['image_id']) + + @property + def duration(self): + return self.data['airing_details']['duration'] + + @property + def channel(self): + channelPath = self.data['airing_details']['channel_path'] + if not self._channel: + self._channel = Channel(Api(channelPath).getCached()) + return self._channel + + @property + def scheduled(self): + return self.scheduleData['state'] == 'scheduled' + + @property + def conflicted(self): + return self.scheduleData['state'] == 'conflict' + + def schedule(self, on=True): + airing = self.Api(self.path).patch(scheduled=on) + self.scheduleData = airing.get('schedule') + return self.scheduleData + + @property + def datetime(self): + if self._datetime is False: + self._datetime = processDate( + self.data['airing_details'].get('datetime') + ) + + return self._datetime + + @property + def datetimeEnd(self): + if self._datetimeEnd is None: + if self.datetime: + self._datetimeEnd = self.datetime + \ + compat.datetime.timedelta(seconds=self.duration) + else: + self._datetimeEnd = 0 + + return self._datetimeEnd + + def displayTimeStart(self): + if not self.datetime: + return '' + + return self.datetime.strftime('%I:%M %p').lstrip('0') + + def displayTimeEnd(self): + if not self.datetime: + return '' + + return self.datetimeEnd.strftime('%I:%M %p').lstrip('0') + + def displayDay(self): + if not self.datetime: + return '' + + return self.datetime.strftime('%A, %B {0}').format(self.datetime.day) + + def displayChannel(self): + channel = self.channel + return '{0}-{1}'.format( + channel.data['channel']['major'], + channel.data['channel']['minor'] + ) + + def secondsToEnd(self, start=None): + start = start or now() + return compat.timedelta_total_seconds(self.datetimeEnd - start) + + def secondsToStart(self): + 
return compat.timedelta_total_seconds(self.datetime - now()) + + def secondsSinceEnd(self): + return compat.timedelta_total_seconds(now() - self.datetimeEnd) + + def airingNow(self, ref=None): + ref = ref or now() + return self.datetime <= ref < self.datetimeEnd + + def ended(self): + return self.datetimeEnd < now() + + @property + def network(self): + return self.channel.data['channel'].get('network') or '' + + # For recordings + def delete(self): + self.deleted = True + return Api(self.path).delete() + + def recording(self): + return self.data['video_details']['state'] == 'recording' + + @property + def watched(self): + return bool(self.data['user_info'].get('watched')) + + def markWatched(self, watched=True): + recording = self.Api(self.path).patch(watched=watched) + self.data['user_info'] = recording.get('user_info') + return self.data['user_info'] + + @property + def protected(self): + return bool(self.data['user_info'].get('protected')) + + def markProtected(self, protected=True): + recording = self.Api(self.path).patch(protected=protected) + self.data['user_info'] = recording.get('user_info') + return self.data['user_info'] + + @property + def position(self): + return self.data['user_info'].get('position') + + def setPosition(self, position=0): + recording = self.Api(self.path).patch(position=int(position)) + self.data['user_info'] = recording.get('user_info') + self.data['video_details'] = recording.get('video_details') + return self.data['user_info'] + + +class GridAiring(Airing): + def setType(self, type_): + if 'series' in self.data: + self.type = 'series' + elif 'movie' in self.data: + self.type = 'movie' + elif 'sport' in self.data: + self.type = 'sport' + elif 'program' in self.data: + self.type = 'program' + + @property + def gridAiring(self): + if not self._gridAiring: + data = Api(self.path).get() + if 'episode' in data: + self._gridAiring = Airing(data, 'episode') + elif 'movie_airing' in data: + self._gridAiring = Airing(data, 'movie') + elif 
class Channel(object):
    """Thin wrapper around a Tablo channel record.

    Attribute lookups that are not found on the instance fall through to
    the record's nested 'channel' dict, so e.g. ``chan.major`` reads
    ``data['channel']['major']`` (None when the key is absent).
    """

    def __init__(self, data):
        self.data = data
        self.path = data['path']
        self.object_id = data['object_id']

    def __getattr__(self, name):
        # Only invoked for names missing from the instance dict.
        return self.data['channel'].get(name)
seasons(self): + try: + return Api(self.path).seasons.get() + except APIError as e: + logger.error(f'Series.seasons() failed: {format(e.message)}') + return [] + + def _airings(self): + return self.episodes() diff --git a/tablo/entities/show.py b/tablo/entities/show.py new file mode 100644 index 0000000..eb0461e --- /dev/null +++ b/tablo/entities/show.py @@ -0,0 +1,114 @@ +# coding=utf-8 +""" +Base show class +""" +from tablo.api import Api +from tablo.util import logger + +from tablo.apiexception import APIError + + +class Show(object): + + type = None + + def __init__(self, data): + self.data = None + self.object_id = data['object_id'] + self.path = data['path'] + self.show_counts = data['show_counts'] + + self._thumb = '' + self._thumbHasTitle = None + self._background = '' + + self.scheduleRule = \ + data.get('schedule_rule') != 'none' and \ + data.get('schedule_rule') or None + self.showCounts = data.get('show_counts') + # see the "subclasses" for what each does + self.processData(data) + + def __getattr__(self, name): + return self.data.get(name) + + def dump_info(self): + logger.debug('\t'. 
+ join("%s: %s\n" % item for item in self.data.items())) + + def update(self): + self.__init__(Api(self.path).get()) + + @classmethod + def newFromData(self, data): + """ These imports are a dirty hack that I'm not sure how to do + better without putting them all (back) in one file + """ + if 'series' in data: + from .series import Series + return Series(data) + elif 'movie' in data: + from .movie import Movie + return Movie(data) + elif 'sport' in data: + from .sport import Sport + return Sport(data) + elif 'program' in data: + from .program import Program + return Program(data) + + def processData(self, data): + pass + + @property + def thumb(self): + if not self._thumb: + try: + if self.data.get('thumbnail_image'): + self._thumb = \ + Api.images(self.data['thumbnail_image']['image_id']) + self._thumbHasTitle = \ + self.data['thumbnail_image']['has_title'] + except Exception: + logger.debug(f"thumb img: {self.data['thumbnail_image']}") + self._thumbHasTitle = False + return self._thumb + + @property + def thumbHasTitle(self): + if self._thumbHasTitle is None: + self.thumb + + return self._thumbHasTitle + + @property + def background(self): + if not self._background: + self._background = \ + self.data.get('background_image') and \ + Api.images(self.data['background_image']['image_id']) \ + or '' + return self._background + + def schedule(self, rule='none'): + data = Api(self.path).patch(schedule=rule) + self.scheduleRule = \ + data.get('schedule_rule') != 'none' and \ + data.get('schedule_rule') \ + or None + + def _airings(self): + return Api(self.path).airings.get() + + def airings(self): + try: + return self._airings() + except APIError as e: + logger.error('Show.airings() failed: {0}', format(e.message)) + return [] + + def deleteAll(self, delete_protected=False): + if delete_protected: + return Api(self.path)('delete').post() + else: + return Api(self.path)('delete').post(filter='unprotected') diff --git a/tablo/entities/sport.py b/tablo/entities/sport.py new 
class Sport(Show):
    """Show subtype for sporting events; airings are exposed as 'events'."""

    type = 'SPORT'
    airingType = 'event'

    def processData(self, data):
        # Sports carry their payload under the 'sport' key.
        self.data = data['sport']

    def events(self):
        """Return this sport's events, or [] if the API call fails."""
        try:
            return Api(self.path).events.get()
        except APIError as e:
            # FIX: f-string interpolation already stringifies the value;
            # the previous '{format(e.message)}' wrapper was redundant.
            logger.error(f'Sport.events() failed: {e.message}')
            return []

    def _airings(self):
        # Base-class Show.airings() delegates here.
        return self.events()
+ # So currently below it's 1 day + 3 hrs (90 min * 2) + interval_secs = 5400 + data = tablo.API.views.livetv.channels(self.channel.object_id).get( + start=start.strftime('%Y-%m-%dT%H:%MZ'), + duration=sec_in_day + (interval_secs * INTERVAL_HOURS) + ) + if self.isCanceled(): + return + + logger.debug('Retrieved channel: {0}'.format(self.channel.object_id)) + + self.callback(self.path, data) + + +class Grid(object): + def __init__(self, work_path, update_callback): + self.channels = {} + self.paths = [] + self._airings = {} + self._hasData = {} + self.updateCallback = update_callback + self._tasks = [] + self.workPath = os.path.join(work_path, 'grid') + self.oldestUpdate = datetime.datetime.now() + self.pendingUpdate = {} + self.initSave() + + def initSave(self): + if not os.path.exists(self.workPath): + os.makedirs(self.workPath) + + def saveVersion(self): + data = {'version': SAVE_VERSION} + with open(os.path.join(self.workPath, 'version'), 'w') as f: + json.dump(data, f) + + def saveChannelData(self, data): + with open(os.path.join(self.workPath, 'channels.data'), 'w') as f: + json.dump(data, f) + + def saveChannelAiringData(self, channel, data): + file = str(channel.object_id) + '.air' + with open(os.path.join(self.workPath, file, 'w')) as f: + updated = time.mktime(datetime.datetime.now().timetuple()) + json.dump({'updated': updated, 'data': data}, f) + + def updateChannelAiringData(self, channel=None, path=None): + channel = channel or self.channels[path] + path = str(channel.object_id) + '.air' + + # os.remove(os.path.join( + # self.workPath, str(channel.object_id) + '.air')) + with open(os.path.join(self.workPath, path, 'r')) as f: + data = json.load(f) + data['updated'] = 0 + with open(os.path.join(self.workPath, path, 'w')) as f: + json.dump(data, f) + self.getChannelData(channel) + + def loadVersion(self): + path = os.path.join(self.workPath, 'version') + if not os.path.exists(path): + return None + + with open(path, 'r') as f: + data = json.load(f) + 
    def loadAirings(self, channel):
        """Load cached airings for *channel* from its on-disk '.air' file.

        Returns True when a non-empty, fresh-enough cache was loaded;
        False when the cache is missing, empty, or older than
        INTERVAL_TIMEDELTA (a stale cache is still loaded -- the False
        return just tells the caller to schedule a refresh).
        """
        # Cache file is keyed by the channel's numeric object id.
        path = os.path.join(self.workPath, str(channel.object_id) + '.air')
        if not os.path.exists(path):
            self._airings[channel.path] = []
            return False

        with open(path, 'r') as f:
            data = json.load(f)

        ret = True

        # Staleness check: compare the saved wall-clock stamp to now.
        updated = datetime.datetime.fromtimestamp(int(data['updated']))
        age = (datetime.datetime.now() - updated)
        if age > INTERVAL_TIMEDELTA:
            ret = False

        # Track the oldest cache timestamp seen across all channels.
        if updated < self.oldestUpdate:
            self.oldestUpdate = updated

        self._airings[channel.path] = \
            [tablo.GridAiring(a) for a in data['data']]

        # Only notify the UI (and report success) when airings were found.
        if self._airings[channel.path]:
            self.updateCallback(channel)
            return ret

        return False
class Interface:
    """A network interface's name, IPv4 address and netmask."""

    def __init__(self):
        self.name = ''
        self.ip = ''
        self.mask = ''

    @property
    def broadcast(self):
        """Broadcast address for this interface.

        Returns '' for the FALLBACK pseudo-interface, None when the
        ip or mask is unknown.
        """
        if self.name == 'FALLBACK':
            return ''
        if not self.ip or not self.mask:
            return None
        return calcBroadcast(self.ip, self.mask)


def getInterfaces():
    """Enumerate network interfaces.

    Tries the Linux (ioctl), BSD (getifaddrs) and Windows (ipconfig)
    strategies in turn; if every probe fails, returns a single FALLBACK
    pseudo-interface so callers always get a non-empty list.
    """
    # FIX: the platform probes used bare `except:` clauses, which also
    # swallow KeyboardInterrupt/SystemExit. `except Exception` keeps the
    # intentional best-effort behavior without trapping those.
    try:
        return _getInterfaces()
    except Exception:
        pass

    try:
        return _getInterfacesBSD()
    except Exception:
        pass

    try:
        return _getInterfacesWin()
    except Exception:
        pass

    i = Interface()
    i.name = 'FALLBACK'
    return [i]
def calcIPValue(ipaddr):
    """Return the 32-bit integer value of dotted-quad *ipaddr*."""
    value = 0
    for i, octet in enumerate(ipaddr.split('.')):
        value |= int(octet) << (8 * (3 - i))
    return value


def calcIPNotation(value):
    """Return the dotted-quad notation for 32-bit integer *value*."""
    octets = []
    for i in range(4):
        shift = 8 * (3 - i)
        octets.append(str((value & (255 << shift)) >> shift))
    return '.'.join(octets)


def calcSubnet(cidr):
    """Return the subnet mask (dotted quad) for prefix length *cidr*."""
    mask = (0xFFFFFFFF << (32 - cidr)) % 2 ** 32  # clamp back to 32 bits
    return calcIPNotation(mask)


def calcCIDR(subnet):
    """Return the CIDR prefix length of dotted-quad *subnet*."""
    value = calcIPValue(subnet)
    cidr = 0
    # Shift ones off the top until the mask is exhausted.
    while value != 0:
        value = (value << 1) % 2 ** 32
        cidr += 1
    return cidr


def calcNetpart(ipaddr, subnet):
    """Return the network part of *ipaddr* under *subnet*."""
    return calcIPNotation(calcIPValue(ipaddr) & calcIPValue(subnet))


def calcMacpart(subnet):
    """Return the host (inverted-mask) part of *subnet* as a dotted quad."""
    return calcIPNotation(~calcIPValue(subnet))


def calcBroadcast(ipaddr, subnet):
    """Return the broadcast address for *ipaddr*/*subnet*."""
    net = calcIPValue(calcNetpart(ipaddr, subnet))
    host = calcIPValue(calcMacpart(subnet))
    return calcIPNotation(net | host)


def calcDefaultGate(ipaddr, subnet):
    """Return the conventional default gateway: network address + 1."""
    return calcIPNotation(calcIPValue(calcNetpart(ipaddr, subnet)) + 1)


def calcHostNum(subnet):
    """Return the number of usable host addresses in *subnet*."""
    return calcIPValue(calcMacpart(subnet)) - 1
class sockaddr_in(Structure):
    """ctypes mirror of BSD ``struct sockaddr_in`` (IPv4)."""
    _fields_ = [
        ('sin_len', c_uint8),
        ('sin_family', c_uint8),
        ('sin_port', c_uint16),
        ('sin_addr', c_uint8 * 4),
        ('sin_zero', c_uint8 * 8)
    ]

    def __str__(self):
        """Dotted-quad address, or '' when the struct is truncated/invalid."""
        try:
            assert self.sin_len >= sizeof(sockaddr_in)
            # BUG FIX (py3): inet_ntop requires bytes; the old
            # ''.join(map(chr, ...)) produced a str, so the call always
            # raised and was silently swallowed, returning ''.
            return socket.inet_ntop(socket.AF_INET, bytes(self.sin_addr))
        except Exception:
            return ''


class sockaddr_in6(Structure):
    """ctypes mirror of BSD ``struct sockaddr_in6`` (IPv6)."""
    _fields_ = [
        ('sin6_len', c_uint8),
        ('sin6_family', c_uint8),
        ('sin6_port', c_uint16),
        ('sin6_flowinfo', c_uint32),
        ('sin6_addr', c_uint8 * 16),
        ('sin6_scope_id', c_uint32)
    ]

    def __str__(self):
        """Compressed IPv6 notation, or '' when truncated/invalid."""
        try:
            assert self.sin6_len >= sizeof(sockaddr_in6)
            # BUG FIX (py3): pass bytes, not a str, to inet_ntop.
            return socket.inet_ntop(socket.AF_INET6, bytes(self.sin6_addr))
        except Exception:
            return ''


class sockaddr_dl(Structure):
    """ctypes mirror of BSD ``struct sockaddr_dl`` (link-layer address)."""
    _fields_ = [
        ('sdl_len', c_uint8),
        ('sdl_family', c_uint8),
        ('sdl_index', c_short),
        ('sdl_type', c_uint8),
        ('sdl_nlen', c_uint8),
        ('sdl_alen', c_uint8),
        ('sdl_slen', c_uint8),
        ('sdl_data', c_uint8 * 12)
    ]

    def __str__(self):
        """Colon-separated hex MAC address extracted from sdl_data."""
        assert self.sdl_len >= sizeof(sockaddr_dl)
        # sdl_data holds the interface name (sdl_nlen bytes) followed by
        # the link-layer address (sdl_alen bytes).
        addrdata = self.sdl_data[self.sdl_nlen:self.sdl_nlen+self.sdl_alen]
        return ':'.join('%02x' % x for x in addrdata)


class sockaddr_storage(Structure):
    """Generic storage large enough for any of the sockaddr variants."""
    _fields_ = [
        ('sa_len', c_uint8),
        ('sa_family', c_uint8),
        ('sa_data', c_uint8 * 254)
    ]


class sockaddr(Union):
    """Tagged union over the sockaddr variants, keyed by sa_family."""
    _anonymous_ = ('sa_storage', )
    _fields_ = [
        ('sa_storage', sockaddr_storage),
        ('sa_sin', sockaddr_in),
        ('sa_sin6', sockaddr_in6),
        ('sa_sdl', sockaddr_dl),
    ]

    def family(self):
        """Return the address family tag (e.g. socket.AF_INET)."""
        return self.sa_storage.sa_family

    def __str__(self):
        family = self.family()
        if family == socket.AF_INET:
            return str(self.sa_sin)
        elif family == socket.AF_INET6:
            return str(self.sa_sin6)
        elif family == 18:  # AF_LINK
            return str(self.sa_sdl)
        else:
            # BUG FIX: this branch used Python 2 syntax
            # (`print family` and `raise NotImplementedError, "..."`),
            # a SyntaxError under Python 3; the stray debug print is
            # dropped and the raise modernized.
            raise NotImplementedError(
                "address family %d not supported" % family)
Specifically, AF_INET netmasks may have their sin_addr + # member truncated to the minimum number of bytes necessary to + # represent the netmask. For example, a sockaddr_in with the netmask + # 255.255.254.0 may be truncated to 7 bytes (rather than the normal + # 16) such that the sin_addr field only contains 0xff, 0xff, 0xfe. + # All bytes beyond sa_len bytes are assumed to be zero. Here we work + # around this truncation by copying the netmask's sockaddr into a + # zero-filled buffer. + if ifaddr.contents.ifa_netmask: + netmask = sockaddr() + memmove(byref(netmask), ifaddr.contents.ifa_netmask, + ifaddr.contents.ifa_netmask.contents.sa_len) + if netmask.sa_family == socket.AF_INET and netmask.sa_len < sizeof(sockaddr_in): + netmask.sa_len = sizeof(sockaddr_in) + else: + netmask = None + + try: + yield X(ifaddr.contents.ifa_name, + ifaddr.contents.ifa_flags, + ifaddr.contents.ifa_addr.contents.family(), + str(ifaddr.contents.ifa_addr.contents), + str(netmask) if netmask else None) + except NotImplementedError: + # Unsupported address family. + yield X(ifaddr.contents.ifa_name, + ifaddr.contents.ifa_flags, + None, + None, + None) + ifaddr = ifaddr.contents.ifa_next + + # When we are done with the address list, ask libc to free whatever memory + # it allocated for the list. 
def parse(data=None):
    """Parse ``ipconfig /all`` output into a list of adapter dicts.

    Each dict carries the adapter heading under 'name' plus its key/value
    settings; the leading 'Windows IP Configuration' section is dropped.
    When *data* is falsy the command is executed locally.
    """
    if not data:
        raw = subprocess.check_output(
            'ipconfig /all', startupinfo=getStartupInfo())
        # BUG FIX (py3): check_output returns bytes, and the str split
        # below raised TypeError. Decode first; errors='replace' guards
        # against odd OEM code pages in localized Windows output.
        data = raw.decode(errors='replace') if isinstance(raw, bytes) else raw
    dlist = [d.rstrip() for d in data.split('\n')]
    # Line-oriented state machine: blank lines toggle between HEADING
    # (section title) and DATA (key: value) states; 'VALUE:<key>' consumes
    # indented continuation lines for the most recent key.
    # NOTE(review): real ipconfig output indents key lines as well, so
    # subsequent indented keys may be folded into the previous key's
    # value -- confirm against live output before tightening this.
    mode = None
    sections = []
    while dlist:
        d = dlist.pop(0)
        if not d:
            if mode == 'HEADING':
                mode = 'DATA'
            else:
                mode = 'HEADING'
            continue
        elif mode == 'HEADING':
            sections.append({'name': d.strip('.: ')})
        elif mode == 'DATA':
            if d.endswith(':'):
                # Key with no inline value; following lines hold the value.
                k = d.strip(':. ')
                mode = 'VALUE:' + k
                sections[-1][k] = ''
            else:
                k, v = d.split(':', 1)
                k = k.strip(':. ')
                mode = 'VALUE:' + k
                v = v.replace('(Preferred)', '')
                sections[-1][k] = v.strip()
        elif mode and mode.startswith('VALUE:'):
            if not d.startswith(' '):
                # Not a continuation; reprocess the line in DATA state.
                mode = 'DATA'
                dlist.insert(0, d)
                continue
            k = mode.split(':', 1)[-1]
            v = d.replace('(Preferred)', '')
            sections[-1][k] += ',' + v.strip()
    return sections[1:]


def getStartupInfo():
    """Return a STARTUPINFO that hides the console window (Windows only).

    Returns None on platforms without subprocess.STARTUPINFO.
    """
    if hasattr(subprocess, 'STARTUPINFO'):  # Windows
        startupinfo = subprocess.STARTUPINFO()
        try:
            startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW  # Suppress terminal window
        except Exception:
            # Older/odd Pythons may lack the constant; 1 is its value.
            startupinfo.dwFlags |= 1
        return startupinfo

    return None
def print_dict(dictionary, prefix='\t', braces=1):
    """Recursively pretty-print nested dictionaries.

    Nested dict keys are printed bracketed (bracket count = depth);
    leaf entries print as 'key = value' under the current prefix.
    """
    for key, value in dictionary.items():
        if not isinstance(value, dict):
            print(prefix + '%s = %s' % (key, value))
            continue
        print('%s%s%s%s' % (prefix, braces * '[', key, braces * ']'))
        print_dict(value, prefix + ' ', braces + 1)
    def getPlaylistURL(self, url):
        """Fetch the master playlist at *url* and derive stream locations.

        Side effects: sets self.originalPlaylistUrl, self.base (URL prefix
        for relative segment paths), self.m3u8 (parsed master playlist)
        and self.url (absolute URL of the first variant playlist).
        """
        self.originalPlaylistUrl = url
        p = urlparse(url)

        # Base is the playlist URL with its filename component removed.
        self.base = '{0}://{1}{2}'.format(
            p.scheme,
            p.netloc,
            p.path.rsplit('/', 1)[0]
        )
        text = requests.get(url).text
        self.m3u8 = m3u8.loads(text)
        # NOTE(review): logging pickled bytes is unreadable in log files --
        # looks like leftover debugging; consider removing.
        logger.debug(pickle.dumps(self.m3u8.playlists))
        # Assumes at least one variant playlist exists and that its URI is
        # host-relative (leading '/') -- TODO confirm against Tablo output.
        self.url = '{0}://{1}{2}'.format(
            p.scheme,
            p.netloc,
            self.m3u8.playlists[0].uri
        )
makeSeekPlaylist(self, position): + m = self.getSegmentedPlaylist() + duration = m.segments[0].duration + while duration < position: + del m.segments[0] + if not m.segments: + break + duration += m.segments[0].duration + + return m.dumps() diff --git a/tut.iml b/tut.iml new file mode 100644 index 0000000..456e24f --- /dev/null +++ b/tut.iml @@ -0,0 +1,11 @@ + + + + + + + + + + + \ No newline at end of file diff --git a/tut.py b/tut.py new file mode 100755 index 0000000..261b12a --- /dev/null +++ b/tut.py @@ -0,0 +1,109 @@ +#!/usr/bin/env python + +import sys +import logging + +import argparse + +import config +import library + +VERSION = "0.0.1" + +EXIT_CODE_OK = 0 +EXIT_CODE_ERROR = 1 + + +def main(): + try: + config.setup() + parser = argparse.ArgumentParser() + + # This is gross and dirty and may break b/c it's gross and dirty + parser._positionals.title = "Available commands" + + parser.add_argument('-v', '--verbose', action='count', default=0, + help="amount of program detail to output") + parser.add_argument('--dry-run', action='store_true', + help="show what would happen, " + "but don't change anything") + parser.add_argument('--version', action='version', + version='%(prog)s ' + VERSION) + + subparsers = parser.add_subparsers(dest='command', + help='available commands') + + # "config" cmd parser + sp_cfg = subparsers.add_parser('config', + help='manage configuration options') + # add mutually exclusive group? + sp_cfg.add_argument('-v', '--view', action='store_true', + help='view the current config data') + sp_cfg.add_argument('-d', '--discover', action='store_true', + help='discover Tablos') + + # "library" cmd parser + sp_lib = subparsers.add_parser('library', + help='manage the local library ' + 'of Tablo recordings') + # add mutually exclusive group? 
def chunks(l, n):
    """Yield successive n-sized slices of *l* (the last one may be short)."""
    for start in range(0, len(l), n):
        yield l[start:start + n]


def print_dict(dictionary, prefix='\t', braces=1):
    """Recursively print nested dicts with aligned 'key = value' leaves.

    Nested dict keys print bracketed (bracket count = depth), preceded by
    a blank line; leaf keys are padded so values line up per indent level.
    """
    for key, value in dictionary.items():
        if not isinstance(value, dict):
            # Pad keys so '=' columns align within the current indent.
            width = 20 - len(prefix)
            w_fmt = '{:%d}' % width
            print(prefix + w_fmt.format(key) + ' = ' + str(value))
            continue
        print()
        print('%s%s%s%s' % (prefix, braces * '[', key, braces * ']'))
        print_dict(value, prefix + ' ', braces + 1)