From 75054594d4d546f8d8bcbed75de7fff92530e12e Mon Sep 17 00:00:00 2001 From: blacktop Date: Fri, 14 Apr 2023 00:12:03 -0600 Subject: [PATCH] feat: add support for a few DSC api routes --- ipsw/__init__.py | 2 +- ipsw/api/__init__.py | 2 +- ipsw/api/client.py | 169 +++++++++++++----------------- ipsw/api/daemon.py | 2 +- ipsw/api/dsc.py | 28 +++++ ipsw/client.py | 40 ++++--- ipsw/constants.py | 17 ++- ipsw/errors.py | 1 + ipsw/models/configs.py | 8 +- ipsw/models/dsc.py | 127 ++++++++++++++++++++++ ipsw/models/info.py | 10 +- ipsw/models/macho.py | 17 ++- ipsw/models/resource.py | 10 +- ipsw/transport/__init__.py | 3 +- ipsw/transport/basehttpadapter.py | 2 +- ipsw/transport/npipeconn.py | 45 +++----- ipsw/transport/npipesocket.py | 28 +++-- ipsw/transport/sshconn.py | 121 +++++++++------------ ipsw/transport/unixconn.py | 46 +++----- ipsw/types/__init__.py | 2 +- ipsw/types/daemon.py | 17 ++- ipsw/utils/__init__.py | 2 +- ipsw/utils/config.py | 25 +++-- ipsw/utils/decorators.py | 25 +++-- ipsw/utils/json_stream.py | 12 +-- ipsw/utils/proxy.py | 46 ++++---- ipsw/utils/socket.py | 14 ++- ipsw/version.py | 7 +- 28 files changed, 466 insertions(+), 362 deletions(-) create mode 100644 ipsw/api/dsc.py create mode 100644 ipsw/models/dsc.py diff --git a/ipsw/__init__.py b/ipsw/__init__.py index 2193a28..454389e 100644 --- a/ipsw/__init__.py +++ b/ipsw/__init__.py @@ -3,4 +3,4 @@ from .client import IpswClient from .version import __version__ -__title__ = 'ipsw' \ No newline at end of file +__title__ = "ipsw" diff --git a/ipsw/api/__init__.py b/ipsw/api/__init__.py index b45a852..ff51844 100644 --- a/ipsw/api/__init__.py +++ b/ipsw/api/__init__.py @@ -1,2 +1,2 @@ # flake8: noqa -from .client import APIClient \ No newline at end of file +from .client import APIClient diff --git a/ipsw/api/client.py b/ipsw/api/client.py index adfbd3a..6548183 100644 --- a/ipsw/api/client.py +++ b/ipsw/api/client.py @@ -7,18 +7,24 @@ import requests.exceptions import websocket -from ..constants import (DEFAULT_MAX_POOL_SIZE, DEFAULT_NUM_POOLS, - DEFAULT_NUM_POOLS_SSH, DEFAULT_TIMEOUT_SECONDS, - DEFAULT_USER_AGENT, IS_WINDOWS_PLATFORM, - MINIMUM_IPSW_API_VERSION, STREAM_HEADER_SIZE_BYTES) -from ..errors import (InvalidVersion, IpswException, - create_api_error_from_http_exception) +from ..constants import ( + DEFAULT_MAX_POOL_SIZE, + DEFAULT_NUM_POOLS, + DEFAULT_NUM_POOLS_SSH, + DEFAULT_TIMEOUT_SECONDS, + DEFAULT_USER_AGENT, + IS_WINDOWS_PLATFORM, + MINIMUM_IPSW_API_VERSION, + STREAM_HEADER_SIZE_BYTES, +) +from ..errors import InvalidVersion, IpswException, create_api_error_from_http_exception from ..transport import UnixHTTPAdapter from ..utils import config, update_headers, utils from ..utils.json_stream import json_stream from ..utils.proxy import ProxyConfig from ..utils.socket import consume_socket_output, demux_adaptor, frames_iter from .daemon import DaemonApiMixin +from .dsc import DscApiMixin from .info import InfoApiMixin from .macho import MachoApiMixin @@ -33,11 +39,7 @@ pass -class APIClient( - requests.Session, - DaemonApiMixin, - InfoApiMixin, - MachoApiMixin): +class APIClient(requests.Session, DaemonApiMixin, DscApiMixin, InfoApiMixin, MachoApiMixin): """ A low-level client for the ipsw API. @@ -68,118 +70,98 @@ class APIClient( to save in the pool. 
""" - __attrs__ = requests.Session.__attrs__ + ['_general_configs', - '_version', - 'base_url', - 'timeout'] - - def __init__(self, base_url=None, version=None, - timeout=DEFAULT_TIMEOUT_SECONDS, - user_agent=DEFAULT_USER_AGENT, num_pools=None, - use_ssh_client=False, - max_pool_size=DEFAULT_MAX_POOL_SIZE): + __attrs__ = requests.Session.__attrs__ + ["_general_configs", "_version", "base_url", "timeout"] + + def __init__( + self, + base_url=None, + version=None, + timeout=DEFAULT_TIMEOUT_SECONDS, + user_agent=DEFAULT_USER_AGENT, + num_pools=None, + use_ssh_client=False, + max_pool_size=DEFAULT_MAX_POOL_SIZE, + ): super().__init__() self.base_url = base_url self.timeout = timeout - self.headers['User-Agent'] = user_agent + self.headers["User-Agent"] = user_agent self._general_configs = config.load_general_config() - proxy_config = self._general_configs.get('proxies', {}) + proxy_config = self._general_configs.get("proxies", {}) try: proxies = proxy_config[base_url] except KeyError: - proxies = proxy_config.get('default', {}) + proxies = proxy_config.get("default", {}) self._proxy_configs = ProxyConfig.from_dict(proxies) base_url = utils.parse_host( - base_url, IS_WINDOWS_PLATFORM, + base_url, + IS_WINDOWS_PLATFORM, ) # SSH has a different default for num_pools to all other adapters - num_pools = num_pools or DEFAULT_NUM_POOLS_SSH if \ - base_url.startswith('ssh://') else DEFAULT_NUM_POOLS + num_pools = num_pools or DEFAULT_NUM_POOLS_SSH if base_url.startswith("ssh://") else DEFAULT_NUM_POOLS - if base_url.startswith('http+unix://'): + if base_url.startswith("http+unix://"): self._custom_adapter = UnixHTTPAdapter( - base_url, timeout, pool_connections=num_pools, - max_pool_size=max_pool_size + base_url, timeout, pool_connections=num_pools, max_pool_size=max_pool_size ) - self.mount('http+ipsw://', self._custom_adapter) - self._unmount('http://', 'https://') + self.mount("http+ipsw://", self._custom_adapter) + self._unmount("http://", "https://") # host part of URL should be unused, but is resolved by requests # module in proxy_bypass_macosx_sysconf() - self.base_url = 'http+ipsw://localhost' - elif base_url.startswith('npipe://'): + self.base_url = "http+ipsw://localhost" + elif base_url.startswith("npipe://"): if not IS_WINDOWS_PLATFORM: - raise IpswException( - 'The npipe:// protocol is only supported on Windows' - ) + raise IpswException("The npipe:// protocol is only supported on Windows") try: self._custom_adapter = NpipeHTTPAdapter( - base_url, timeout, pool_connections=num_pools, - max_pool_size=max_pool_size + base_url, timeout, pool_connections=num_pools, max_pool_size=max_pool_size ) except NameError: - raise IpswException( - 'Install pypiwin32 package to enable npipe:// support' - ) - self.mount('http+ipsw://', self._custom_adapter) - self.base_url = 'http+ipsw://localnpipe' - elif base_url.startswith('ssh://'): + raise IpswException("Install pypiwin32 package to enable npipe:// support") + self.mount("http+ipsw://", self._custom_adapter) + self.base_url = "http+ipsw://localnpipe" + elif base_url.startswith("ssh://"): try: self._custom_adapter = SSHHTTPAdapter( - base_url, timeout, pool_connections=num_pools, - max_pool_size=max_pool_size, shell_out=use_ssh_client + base_url, timeout, pool_connections=num_pools, max_pool_size=max_pool_size, shell_out=use_ssh_client ) except NameError: - raise IpswException( - 'Install paramiko package to enable ssh:// support' - ) - self.mount('http+ipsw://ssh', self._custom_adapter) - self._unmount('http://', 'https://') - self.base_url = 
'http+ipsw://ssh' + raise IpswException("Install paramiko package to enable ssh:// support") + self.mount("http+ipsw://ssh", self._custom_adapter) + self._unmount("http://", "https://") + self.base_url = "http+ipsw://ssh" else: self.base_url = base_url # version detection needs to be after unix adapter mounting - if version is None or (isinstance( - version, - str - ) and version.lower() == 'auto'): + if version is None or (isinstance(version, str) and version.lower() == "auto"): self._version = self._retrieve_server_version() else: self._version = version if not isinstance(self._version, str): - raise IpswException( - 'Version parameter must be a string or None. Found {}'.format( - type(version).__name__ - ) - ) + raise IpswException("Version parameter must be a string or None. Found {}".format(type(version).__name__)) if utils.version_lt(self._version, MINIMUM_IPSW_API_VERSION): raise InvalidVersion( - 'API versions below {} are no longer supported by this ' - 'library.'.format(MINIMUM_IPSW_API_VERSION) + "API versions below {} are no longer supported by this " "library.".format(MINIMUM_IPSW_API_VERSION) ) def _retrieve_server_version(self): try: return self.version(api_version=False)["ApiVersion"] except KeyError: - raise IpswException( - 'Invalid response from ipsw daemon: key "ApiVersion"' - ' is missing.' - ) + raise IpswException('Invalid response from ipsw daemon: key "ApiVersion"' " is missing.") except Exception as e: - raise IpswException( - f'Error while fetching server API version: {e}' - ) + raise IpswException(f"Error while fetching server API version: {e}") def _set_request_timeout(self, kwargs): """Prepare the kwargs for an HTTP request by inserting the timeout parameter, if not already present.""" - kwargs.setdefault('timeout', self.timeout) + kwargs.setdefault("timeout", self.timeout) return kwargs @update_headers @@ -201,20 +183,15 @@ def _delete(self, url, **kwargs): def _url(self, pathfmt, *args, **kwargs): for arg in args: if not isinstance(arg, str): - raise ValueError( - 'Expected a string but found {} ({}) ' - 'instead'.format(arg, type(arg)) - ) + raise ValueError("Expected a string but found {} ({}) " "instead".format(arg, type(arg))) quote_f = partial(urllib.parse.quote, safe="/:") args = map(quote_f, args) - if kwargs.get('versioned_api', True): - return '{}/v{}{}'.format( - self.base_url, self._version, pathfmt.format(*args) - ) + if kwargs.get("versioned_api", True): + return "{}/v{}{}".format(self.base_url, self._version, pathfmt.format(*args)) else: - return f'{self.base_url}{pathfmt.format(*args)}' + return f"{self.base_url}{pathfmt.format(*args)}" def _raise_for_status(self, response): """Raises stored :class:`APIError`, if one occurred.""" @@ -244,17 +221,13 @@ def _post_json(self, url, data, **kwargs): elif data is not None: data2 = data - if 'headers' not in kwargs: - kwargs['headers'] = {} - kwargs['headers']['Content-Type'] = 'application/json' + if "headers" not in kwargs: + kwargs["headers"] = {} + kwargs["headers"]["Content-Type"] = "application/json" return self._post(url, data=json.dumps(data2), **kwargs) def _attach_params(self, override=None): - return override or { - 'stdout': 1, - 'stderr': 1, - 'stream': 1 - } + return override or {"stdout": 1, "stderr": 1, "stream": 1} def _create_websocket_connection(self, url): return websocket.create_connection(url) @@ -263,7 +236,7 @@ def _get_raw_response_socket(self, response): self._raise_for_status(response) if self.base_url == "http+ipsw://localnpipe": sock = response.raw._fp.fp.raw.sock - elif 
self.base_url.startswith('http+ipsw://ssh'): + elif self.base_url.startswith("http+ipsw://ssh"): sock = response.raw._fp.fp.channel else: sock = response.raw._fp.fp.raw @@ -311,8 +284,8 @@ def _multiplexed_buffer_helper(self, response): while True: if buf_length - walker < STREAM_HEADER_SIZE_BYTES: break - header = buf[walker:walker + STREAM_HEADER_SIZE_BYTES] - _, length = struct.unpack_from('>BxxxL', header) + header = buf[walker : walker + STREAM_HEADER_SIZE_BYTES] + _, length = struct.unpack_from(">BxxxL", header) start = walker + STREAM_HEADER_SIZE_BYTES end = start + length walker = end @@ -331,7 +304,7 @@ def _multiplexed_response_stream_helper(self, response): header = response.raw.read(STREAM_HEADER_SIZE_BYTES) if not header: break - _, length = struct.unpack('>BxxxL', header) + _, length = struct.unpack(">BxxxL", header) if not length: continue data = response.raw.read(length) @@ -340,7 +313,7 @@ def _multiplexed_response_stream_helper(self, response): yield data def _stream_raw_result(self, response, chunk_size=1, decode=True): - ''' Stream result for TTY-enabled container and raw binary data''' + """Stream result for TTY-enabled container and raw binary data""" self._raise_for_status(response) # Disable timeout on the underlying socket to prevent @@ -376,7 +349,7 @@ def _read_from_socket(self, response, stream, tty=True, demux=False): response.close() def _disable_socket_timeout(self, socket): - """ Depending on the combination of python version and whether we're + """Depending on the combination of python version and whether we're connecting over http or https, we might need to access _sock, which may or may not exist; or we may need to just settimeout on socket itself, which also may or may not have settimeout on it. To avoid @@ -386,15 +359,15 @@ def _disable_socket_timeout(self, socket): you run the risk of changing a socket that was non-blocking to blocking, for example when using gevent. """ - sockets = [socket, getattr(socket, '_sock', None)] + sockets = [socket, getattr(socket, "_sock", None)] for s in sockets: - if not hasattr(s, 'settimeout'): + if not hasattr(s, "settimeout"): continue timeout = -1 - if hasattr(s, 'gettimeout'): + if hasattr(s, "gettimeout"): timeout = s.gettimeout() # Don't change the timeout if it is already disabled. diff --git a/ipsw/api/daemon.py b/ipsw/api/daemon.py index b35066c..9b9d11a 100644 --- a/ipsw/api/daemon.py +++ b/ipsw/api/daemon.py @@ -70,7 +70,7 @@ def ping(self): :py:class:`ipsw.errors.APIError` If the server returns an error. """ - return self._result(self._get(self._url('/_ping'))) == 'OK' + return self._result(self._get(self._url("/_ping"))) == "OK" def version(self, api_version=True): """ diff --git a/ipsw/api/dsc.py b/ipsw/api/dsc.py new file mode 100644 index 0000000..12e5cce --- /dev/null +++ b/ipsw/api/dsc.py @@ -0,0 +1,28 @@ +class DscApiMixin: + def dsc_info(self, path=None): + """ + Display DSC header information. Identical to the ``ipsw dyld info --dylibs --json`` + command. + + Returns: + (dict): The info as a dict + + Raises: + :py:class:`ipsw.errors.APIError` + If the server returns an error. + """ + return self._result(self._get(self._url("/dsc/info"), params={"path": path}), True) + + def dsc_macho(self, path=None, dylib=None): + """ + Display DSC dylib information. Identical to the ``ipsw dyld macho DSC DYLIB --json`` + command. + + Returns: + (dict): The info as a dict + + Raises: + :py:class:`ipsw.errors.APIError` + If the server returns an error. 
+ """ + return self._result(self._get(self._url("/dsc/macho"), params={"path": path, "dylib": dylib}), True) diff --git a/ipsw/client.py b/ipsw/client.py index 64b764c..6b0b3c0 100644 --- a/ipsw/client.py +++ b/ipsw/client.py @@ -1,5 +1,6 @@ from .api.client import APIClient -from .constants import (DEFAULT_TIMEOUT_SECONDS, DEFAULT_MAX_POOL_SIZE) +from .constants import DEFAULT_TIMEOUT_SECONDS, DEFAULT_MAX_POOL_SIZE +from .models.dsc import DscCollection from .models.info import InfoCollection from .models.macho import MachoCollection from .utils import kwargs_from_env @@ -27,6 +28,7 @@ class IpswClient: max_pool_size (int): The maximum number of connections to save in the pool. """ + def __init__(self, *args, **kwargs): self.api = APIClient(*args, **kwargs) @@ -59,27 +61,34 @@ def from_env(cls, **kwargs): >>> import ipsw >>> client = ipsw.from_env() - + """ - timeout = kwargs.pop('timeout', DEFAULT_TIMEOUT_SECONDS) - max_pool_size = kwargs.pop('max_pool_size', DEFAULT_MAX_POOL_SIZE) - version = kwargs.pop('version', None) - use_ssh_client = kwargs.pop('use_ssh_client', False) + timeout = kwargs.pop("timeout", DEFAULT_TIMEOUT_SECONDS) + max_pool_size = kwargs.pop("max_pool_size", DEFAULT_MAX_POOL_SIZE) + version = kwargs.pop("version", None) + use_ssh_client = kwargs.pop("use_ssh_client", False) return cls( timeout=timeout, max_pool_size=max_pool_size, version=version, use_ssh_client=use_ssh_client, - **kwargs_from_env(**kwargs) + **kwargs_from_env(**kwargs), ) + @property + def dsc(self): + """ + An object for getting DSC info. + """ + return DscCollection(client=self) + @property def info(self): """ An object for getting local/remote IPSW/OTA info. """ return InfoCollection(client=self) - + @property def macho(self): """ @@ -90,24 +99,29 @@ def macho(self): # Top-level methods def ping(self, *args, **kwargs): return self.api.ping(*args, **kwargs) + ping.__doc__ = APIClient.ping.__doc__ def version(self, *args, **kwargs): return self.api.version(*args, **kwargs) + version.__doc__ = APIClient.version.__doc__ def close(self): return self.api.close() + close.__doc__ = APIClient.close.__doc__ def __getattr__(self, name): s = [f"'IpswClient' object has no attribute '{name}'"] # If a user calls a method on APIClient, they if hasattr(APIClient, name): - s.append("In ipsw SDK for Python 2.0, this method is now on the " - "object APIClient. See the low-level API section of the " - "documentation for more details.") - raise AttributeError(' '.join(s)) + s.append( + "In ipsw SDK for Python 2.0, this method is now on the " + "object APIClient. See the low-level API section of the " + "documentation for more details." 
+ ) + raise AttributeError(" ".join(s)) -from_env = IpswClient.from_env \ No newline at end of file +from_env = IpswClient.from_env diff --git a/ipsw/constants.py b/ipsw/constants.py index 839ccca..7199162 100644 --- a/ipsw/constants.py +++ b/ipsw/constants.py @@ -1,24 +1,19 @@ import sys from .version import __version__ -DEFAULT_IPSW_API_VERSION = '1.0' -MINIMUM_IPSW_API_VERSION = '1.0' +DEFAULT_IPSW_API_VERSION = "1.0" +MINIMUM_IPSW_API_VERSION = "1.0" DEFAULT_TIMEOUT_SECONDS = 60 STREAM_HEADER_SIZE_BYTES = 8 DEFAULT_HTTP_HOST = "127.0.0.1" DEFAULT_UNIX_SOCKET = "http+unix:///var/run/ipsw.sock" -DEFAULT_NPIPE = 'npipe:////./pipe/ipsw' +DEFAULT_NPIPE = "npipe:////./pipe/ipsw" -BYTE_UNITS = { - 'b': 1, - 'k': 1024, - 'm': 1024 * 1024, - 'g': 1024 * 1024 * 1024 -} +BYTE_UNITS = {"b": 1, "k": 1024, "m": 1024 * 1024, "g": 1024 * 1024 * 1024} -IS_WINDOWS_PLATFORM = (sys.platform == 'win32') -WINDOWS_LONGPATH_PREFIX = '\\\\?\\' +IS_WINDOWS_PLATFORM = sys.platform == "win32" +WINDOWS_LONGPATH_PREFIX = "\\\\?\\" DEFAULT_USER_AGENT = f"ipsw-sdk-python/{__version__}" DEFAULT_NUM_POOLS = 25 diff --git a/ipsw/errors.py b/ipsw/errors.py index af0525b..58822d2 100644 --- a/ipsw/errors.py +++ b/ipsw/errors.py @@ -76,6 +76,7 @@ def is_server_error(self): class NotFound(APIError): pass + class InvalidVersion(IpswException): pass diff --git a/ipsw/models/configs.py b/ipsw/models/configs.py index 6458803..67961bf 100644 --- a/ipsw/models/configs.py +++ b/ipsw/models/configs.py @@ -4,14 +4,15 @@ class Config(Model): """A config.""" - id_attribute = 'ID' + + id_attribute = "ID" def __repr__(self): return f"<{self.__class__.__name__}: '{self.name}'>" @property def name(self): - return self.attrs['Spec']['Name'] + return self.attrs["Spec"]["Name"] def remove(self): """ @@ -29,6 +30,7 @@ def remove(self): def create(self, **kwargs): obj = self.client.api.create_config(**kwargs) return self.prepare_model(obj) + create.__doc__ = APIClient.create_config.__doc__ def get(self, config_id): @@ -64,4 +66,4 @@ def list(self, **kwargs): If the server returns an error. """ resp = self.client.api.configs(**kwargs) - return [self.prepare_model(obj) for obj in resp] \ No newline at end of file + return [self.prepare_model(obj) for obj in resp] diff --git a/ipsw/models/dsc.py b/ipsw/models/dsc.py new file mode 100644 index 0000000..26c865b --- /dev/null +++ b/ipsw/models/dsc.py @@ -0,0 +1,127 @@ +import os + +from ..api import APIClient +from .resource import Collection, Model + + +class DSC(Model): + """ + DSC info. + """ + + def __repr__(self): + return "<{}: '({}) - {} - {}'>".format( + self.__class__.__name__, + self.magic, + self.platform, + self.uuid, + ) + + @property + def magic(self): + """ + The header magic. + """ + return self.attrs["info"].get("magic", None) + + @property + def uuid(self): + """ + The header UUID. + """ + return self.attrs["info"].get("uuid", None) + + @property + def platform(self): + """ + The header platform. + """ + return self.attrs["info"].get("platform", None) + + @property + def dylibs(self): + """ + The DSC info. + """ + return self.attrs["info"].get("dylibs", None) + + @property + def info(self): + """ + The DSC info. + """ + return self.attrs.get("info", None) + + +class Dylib(Model): + """ + Dylib info. 
+ """ + + def __init__(self, image_name, *args, **kwargs): + super().__init__(*args, **kwargs) + self.image_name = image_name + + def __repr__(self): + return "<{}: '{} {} ({})'>".format( + self.__class__.__name__, + self.magic, + self.cpu, + self.sub_cpu, + ) + + @property + def magic(self): + """ + The header magic. + """ + return self.attrs["macho"]["header"].get("magic", None) + + @property + def cpu(self): + """ + The header CPU. + """ + return self.attrs["macho"]["header"].get("cpu", None) + + @property + def sub_cpu(self): + """ + The header sub CPU. + """ + return self.attrs["macho"]["header"].get("subcpu", None) + + @property + def header(self): + """ + The header. + """ + return self.attrs["macho"].get("header", None) + + @property + def load_commands(self): + """ + The header. + """ + return self.attrs["macho"].get("loads", None) + + +class DscCollection(Collection): + model = DSC + + def get_info(self, path=None): + """ + Get DSC info. + """ + return self.prepare_model(self.client.api.dsc_info(path)) + + def get_dylib(self, path=None, dylib=None): + """ + Get DSC dylib info. + """ + return Dylib( + image_name=dylib, + attrs=self.client.api.dsc_macho(path, dylib), + client=self.client, + collection=self, + ) diff --git a/ipsw/models/info.py b/ipsw/models/info.py index 0bebf71..927b32d 100644 --- a/ipsw/models/info.py +++ b/ipsw/models/info.py @@ -29,17 +29,17 @@ def build(self): The iOS version. """ return self.attrs["info"]["Plists"]["restore"].get("ProductBuildVersion", None) - + @property def devices(self): """ The iOS devices. """ devices = set() - for dt in self.attrs['info']['DeviceTrees'].values(): - for child in dt['device-tree']['children']: - if 'product' in child: - devices.add(child['product']['product-name']) + for dt in self.attrs["info"]["DeviceTrees"].values(): + for child in dt["device-tree"]["children"]: + if "product" in child: + devices.add(child["product"]["product-name"]) devlist = list(devices) devlist.sort() return devlist diff --git a/ipsw/models/macho.py b/ipsw/models/macho.py index d2aab16..814b37a 100644 --- a/ipsw/models/macho.py +++ b/ipsw/models/macho.py @@ -22,21 +22,28 @@ def magic(self): """ The header magic. """ - return self.attrs["info"]['header'].get("magic", None) + return self.attrs["info"]["header"].get("magic", None) @property def cpu(self): """ The header CPU. """ - return self.attrs["info"]['header'].get("cpu", None) - + return self.attrs["info"]["header"].get("cpu", None) + @property def sub_cpu(self): """ The header sub CPU. """ - return self.attrs["info"]['header'].get("subcpu", None) + return self.attrs["info"]["header"].get("subcpu", None) + + @property + def header(self): + """ + The header. + """ + return self.attrs["info"].get("header", None) class MachoCollection(Collection): @@ -46,4 +53,4 @@ def get(self, path=None, arch=None): """ Get MachO info. """ - return self.prepare_model(self.client.api.macho_info(path, arch)) \ No newline at end of file + return self.prepare_model(self.client.api.macho_info(path, arch)) diff --git a/ipsw/models/resource.py b/ipsw/models/resource.py index 021fd31..6d0e592 100644 --- a/ipsw/models/resource.py +++ b/ipsw/models/resource.py @@ -2,7 +2,8 @@ class Model: """ A base class for representing a single object on the server. """ - id_attribute = 'Id' + + id_attribute = "Id" def __init__(self, attrs=None, client=None, collection=None): #: A client pointing at the server that this object is on. 
@@ -65,8 +66,8 @@ def __init__(self, client=None): def __call__(self, *args, **kwargs): raise TypeError( "'{}' object is not callable. You might be trying to use the old " - "(pre-2.0) API - use ipsw.APIClient if so." - .format(self.__class__.__name__)) + "(pre-2.0) API - use ipsw.APIClient if so.".format(self.__class__.__name__) + ) def list(self): raise NotImplementedError @@ -88,5 +89,4 @@ def prepare_model(self, attrs): elif isinstance(attrs, dict): return self.model(attrs=attrs, client=self.client, collection=self) else: - raise Exception("Can't create %s from %s" % - (self.model.__name__, attrs)) \ No newline at end of file + raise Exception("Can't create %s from %s" % (self.model.__name__, attrs)) diff --git a/ipsw/transport/__init__.py b/ipsw/transport/__init__.py index 7638530..b7f7ad3 100644 --- a/ipsw/transport/__init__.py +++ b/ipsw/transport/__init__.py @@ -1,5 +1,6 @@ # flake8: noqa from .unixconn import UnixHTTPAdapter + try: from .npipeconn import NpipeHTTPAdapter from .npipesocket import NpipeSocket @@ -9,4 +10,4 @@ try: from .sshconn import SSHHTTPAdapter except ImportError: - pass \ No newline at end of file + pass diff --git a/ipsw/transport/basehttpadapter.py b/ipsw/transport/basehttpadapter.py index dfbb193..c29ca89 100644 --- a/ipsw/transport/basehttpadapter.py +++ b/ipsw/transport/basehttpadapter.py @@ -4,5 +4,5 @@ class BaseHTTPAdapter(requests.adapters.HTTPAdapter): def close(self): super().close() - if hasattr(self, 'pools'): + if hasattr(self, "pools"): self.pools.clear() diff --git a/ipsw/transport/npipeconn.py b/ipsw/transport/npipeconn.py index cc35bea..56055df 100644 --- a/ipsw/transport/npipeconn.py +++ b/ipsw/transport/npipeconn.py @@ -17,9 +17,7 @@ class NpipeHTTPConnection(httplib.HTTPConnection): def __init__(self, npipe_path, timeout=60): - super().__init__( - 'localhost', timeout=timeout - ) + super().__init__("localhost", timeout=timeout) self.npipe_path = npipe_path self.timeout = timeout @@ -32,16 +30,12 @@ def connect(self): class NpipeHTTPConnectionPool(urllib3.connectionpool.HTTPConnectionPool): def __init__(self, npipe_path, timeout=60, maxsize=10): - super().__init__( - 'localhost', timeout=timeout, maxsize=maxsize - ) + super().__init__("localhost", timeout=timeout, maxsize=maxsize) self.npipe_path = npipe_path self.timeout = timeout def _new_conn(self): - return NpipeHTTPConnection( - self.npipe_path, self.timeout - ) + return NpipeHTTPConnection(self.npipe_path, self.timeout) # When re-using connections, urllib3 tries to call select() on our # NpipeSocket instance, causing a crash. To circumvent this, we override @@ -57,9 +51,7 @@ def _get_conn(self, timeout): except queue.Empty: if self.block: raise urllib3.exceptions.EmptyPoolError( - self, - "Pool reached maximum size and no more " - "connections are allowed." + self, "Pool reached maximum size and no more " "connections are allowed." 
) # Oh well, we'll create a new connection then @@ -67,21 +59,19 @@ def _get_conn(self, timeout): class NpipeHTTPAdapter(BaseHTTPAdapter): - - __attrs__ = requests.adapters.HTTPAdapter.__attrs__ + ['npipe_path', - 'pools', - 'timeout', - 'max_pool_size'] - - def __init__(self, base_url, timeout=60, - pool_connections=constants.DEFAULT_NUM_POOLS, - max_pool_size=constants.DEFAULT_MAX_POOL_SIZE): - self.npipe_path = base_url.replace('npipe://', '') + __attrs__ = requests.adapters.HTTPAdapter.__attrs__ + ["npipe_path", "pools", "timeout", "max_pool_size"] + + def __init__( + self, + base_url, + timeout=60, + pool_connections=constants.DEFAULT_NUM_POOLS, + max_pool_size=constants.DEFAULT_MAX_POOL_SIZE, + ): + self.npipe_path = base_url.replace("npipe://", "") self.timeout = timeout self.max_pool_size = max_pool_size - self.pools = RecentlyUsedContainer( - pool_connections, dispose_func=lambda p: p.close() - ) + self.pools = RecentlyUsedContainer(pool_connections, dispose_func=lambda p: p.close()) super().__init__() def get_connection(self, url, proxies=None): @@ -90,10 +80,7 @@ def get_connection(self, url, proxies=None): if pool: return pool - pool = NpipeHTTPConnectionPool( - self.npipe_path, self.timeout, - maxsize=self.max_pool_size - ) + pool = NpipeHTTPConnectionPool(self.npipe_path, self.timeout, maxsize=self.max_pool_size) self.pools[url] = pool return pool diff --git a/ipsw/transport/npipesocket.py b/ipsw/transport/npipesocket.py index 766372a..721fea6 100644 --- a/ipsw/transport/npipesocket.py +++ b/ipsw/transport/npipesocket.py @@ -5,7 +5,7 @@ import win32file import win32pipe -cERROR_PIPE_BUSY = 0xe7 +cERROR_PIPE_BUSY = 0xE7 cSECURITY_SQOS_PRESENT = 0x100000 cSECURITY_ANONYMOUS = 0 @@ -16,18 +16,17 @@ def check_closed(f): @functools.wraps(f) def wrapped(self, *args, **kwargs): if self._closed: - raise RuntimeError( - 'Can not reuse socket after connection was closed.' - ) + raise RuntimeError("Can not reuse socket after connection was closed.") return f(self, *args, **kwargs) + return wrapped class NpipeSocket: - """ Partial implementation of the socket API over windows named pipes. - This implementation is only designed to be used as a client socket, - and server-specific methods (bind, listen, accept...) are not - implemented. + """Partial implementation of the socket API over windows named pipes. + This implementation is only designed to be used as a client socket, + and server-specific methods (bind, listen, accept...) are not + implemented. """ def __init__(self, handle=None): @@ -55,7 +54,7 @@ def connect(self, address, retry_count=0): None, win32file.OPEN_EXISTING, cSECURITY_ANONYMOUS | cSECURITY_SQOS_PRESENT, - 0 + 0, ) except win32pipe.error as e: # See Remarks: @@ -65,7 +64,7 @@ def connect(self, address, retry_count=0): # before we got to it. Wait for availability and attempt to # connect again. 
retry_count = retry_count + 1 - if (retry_count < MAXIMUM_RETRY_COUNT): + if retry_count < MAXIMUM_RETRY_COUNT: time.sleep(1) return self.connect(address, retry_count) raise e @@ -104,7 +103,7 @@ def listen(self, backlog): raise NotImplementedError() def makefile(self, mode=None, bufsize=None): - if mode.strip('b') != 'r': + if mode.strip("b") != "r": raise NotImplementedError() rawio = NpipeFileIOBase(self) if bufsize is None or bufsize <= 0: @@ -131,10 +130,7 @@ def recv_into(self, buf, nbytes=0): if not isinstance(buf, memoryview): readbuf = memoryview(buf) - err, data = win32file.ReadFile( - self._handle, - readbuf[:nbytes] if nbytes else readbuf - ) + err, data = win32file.ReadFile(self._handle, readbuf[:nbytes] if nbytes else readbuf) return len(data) def _recv_into_py2(self, buf, nbytes): @@ -167,7 +163,7 @@ def settimeout(self, value): # Blocking mode self._timeout = win32pipe.NMPWAIT_WAIT_FOREVER elif not isinstance(value, (float, int)) or value < 0: - raise ValueError('Timeout value out of range') + raise ValueError("Timeout value out of range") elif value == 0: # Non-blocking mode self._timeout = win32pipe.NMPWAIT_NO_WAIT diff --git a/ipsw/transport/sshconn.py b/ipsw/transport/sshconn.py index 81f792f..47ec450 100644 --- a/ipsw/transport/sshconn.py +++ b/ipsw/transport/sshconn.py @@ -23,51 +23,48 @@ class SSHSocket(socket.socket): def __init__(self, host): - super().__init__( - socket.AF_INET, socket.SOCK_STREAM) + super().__init__(socket.AF_INET, socket.SOCK_STREAM) self.host = host self.port = None self.user = None - if ':' in self.host: - self.host, self.port = self.host.split(':') - if '@' in self.host: - self.user, self.host = self.host.split('@') + if ":" in self.host: + self.host, self.port = self.host.split(":") + if "@" in self.host: + self.user, self.host = self.host.split("@") self.proc = None def connect(self, **kwargs): - args = ['ssh'] + args = ["ssh"] if self.user: - args = args + ['-l', self.user] + args = args + ["-l", self.user] if self.port: - args = args + ['-p', self.port] + args = args + ["-p", self.port] - args = args + ['--', self.host, 'ipsw system dial-stdio'] + args = args + ["--", self.host, "ipsw system dial-stdio"] preexec_func = None if not constants.IS_WINDOWS_PLATFORM: + def f(): signal.signal(signal.SIGINT, signal.SIG_IGN) + preexec_func = f env = dict(os.environ) # drop LD_LIBRARY_PATH and SSL_CERT_FILE - env.pop('LD_LIBRARY_PATH', None) - env.pop('SSL_CERT_FILE', None) + env.pop("LD_LIBRARY_PATH", None) + env.pop("SSL_CERT_FILE", None) self.proc = subprocess.Popen( - args, - env=env, - stdout=subprocess.PIPE, - stdin=subprocess.PIPE, - preexec_fn=preexec_func) + args, env=env, stdout=subprocess.PIPE, stdin=subprocess.PIPE, preexec_fn=preexec_func + ) def _write(self, data): if not self.proc or self.proc.stdin.closed: - raise Exception('SSH subprocess not initiated.' - 'connect() must be called first.') + raise Exception("SSH subprocess not initiated." "connect() must be called first.") written = self.proc.stdin.write(data) self.proc.stdin.flush() return written @@ -80,8 +77,7 @@ def send(self, data): def recv(self, n): if not self.proc: - raise Exception('SSH subprocess not initiated.' - 'connect() must be called first.') + raise Exception("SSH subprocess not initiated." 
"connect() must be called first.") return self.proc.stdout.read(n) def makefile(self, mode): @@ -94,16 +90,14 @@ def makefile(self, mode): def close(self): if not self.proc or self.proc.stdin.closed: return - self.proc.stdin.write(b'\n\n') + self.proc.stdin.write(b"\n\n") self.proc.stdin.flush() self.proc.terminate() class SSHConnection(httplib.HTTPConnection): def __init__(self, ssh_transport=None, timeout=60, host=None): - super().__init__( - 'localhost', timeout=timeout - ) + super().__init__("localhost", timeout=timeout) self.ssh_transport = ssh_transport self.timeout = timeout self.ssh_host = host @@ -112,7 +106,7 @@ def connect(self): if self.ssh_transport: sock = self.ssh_transport.open_session() sock.settimeout(self.timeout) - sock.exec_command('ipsw system dial-stdio') + sock.exec_command("ipsw system dial-stdio") else: sock = SSHSocket(self.ssh_host) sock.settimeout(self.timeout) @@ -122,12 +116,10 @@ def connect(self): class SSHConnectionPool(urllib3.connectionpool.HTTPConnectionPool): - scheme = 'ssh' + scheme = "ssh" def __init__(self, ssh_client=None, timeout=60, maxsize=10, host=None): - super().__init__( - 'localhost', timeout=timeout, maxsize=maxsize - ) + super().__init__("localhost", timeout=timeout, maxsize=maxsize) self.ssh_transport = None self.timeout = timeout if ssh_client: @@ -151,9 +143,7 @@ def _get_conn(self, timeout): except queue.Empty: if self.block: raise urllib3.exceptions.EmptyPoolError( - self, - "Pool reached maximum size and no more " - "connections are allowed." + self, "Pool reached maximum size and no more " "connections are allowed." ) # Oh well, we'll create a new connection then @@ -161,58 +151,57 @@ def _get_conn(self, timeout): class SSHHTTPAdapter(BaseHTTPAdapter): - __attrs__ = requests.adapters.HTTPAdapter.__attrs__ + [ - 'pools', 'timeout', 'ssh_client', 'ssh_params', 'max_pool_size' + "pools", + "timeout", + "ssh_client", + "ssh_params", + "max_pool_size", ] - def __init__(self, base_url, timeout=60, - pool_connections=constants.DEFAULT_NUM_POOLS, - max_pool_size=constants.DEFAULT_MAX_POOL_SIZE, - shell_out=False): + def __init__( + self, + base_url, + timeout=60, + pool_connections=constants.DEFAULT_NUM_POOLS, + max_pool_size=constants.DEFAULT_MAX_POOL_SIZE, + shell_out=False, + ): self.ssh_client = None if not shell_out: self._create_paramiko_client(base_url) self._connect() self.ssh_host = base_url - if base_url.startswith('ssh://'): - self.ssh_host = base_url[len('ssh://'):] + if base_url.startswith("ssh://"): + self.ssh_host = base_url[len("ssh://") :] self.timeout = timeout self.max_pool_size = max_pool_size - self.pools = RecentlyUsedContainer( - pool_connections, dispose_func=lambda p: p.close() - ) + self.pools = RecentlyUsedContainer(pool_connections, dispose_func=lambda p: p.close()) super().__init__() def _create_paramiko_client(self, base_url): logging.getLogger("paramiko").setLevel(logging.WARNING) self.ssh_client = paramiko.SSHClient() base_url = urllib.parse.urlparse(base_url) - self.ssh_params = { - "hostname": base_url.hostname, - "port": base_url.port, - "username": base_url.username - } + self.ssh_params = {"hostname": base_url.hostname, "port": base_url.port, "username": base_url.username} ssh_config_file = os.path.expanduser("~/.ssh/config") if os.path.exists(ssh_config_file): conf = paramiko.SSHConfig() with open(ssh_config_file) as f: conf.parse(f) host_config = conf.lookup(base_url.hostname) - if 'proxycommand' in host_config: - self.ssh_params["sock"] = paramiko.ProxyCommand( - host_config['proxycommand'] - ) - if 
'hostname' in host_config: - self.ssh_params['hostname'] = host_config['hostname'] - if base_url.port is None and 'port' in host_config: - self.ssh_params['port'] = host_config['port'] - if base_url.username is None and 'user' in host_config: - self.ssh_params['username'] = host_config['user'] - if 'identityfile' in host_config: - self.ssh_params['key_filename'] = host_config['identityfile'] + if "proxycommand" in host_config: + self.ssh_params["sock"] = paramiko.ProxyCommand(host_config["proxycommand"]) + if "hostname" in host_config: + self.ssh_params["hostname"] = host_config["hostname"] + if base_url.port is None and "port" in host_config: + self.ssh_params["port"] = host_config["port"] + if base_url.username is None and "user" in host_config: + self.ssh_params["username"] = host_config["user"] + if "identityfile" in host_config: + self.ssh_params["key_filename"] = host_config["identityfile"] self.ssh_client.load_system_host_keys() self.ssh_client.set_missing_host_key_policy(paramiko.RejectPolicy()) @@ -224,10 +213,7 @@ def _connect(self): def get_connection(self, url, proxies=None): if not self.ssh_client: return SSHConnectionPool( - ssh_client=self.ssh_client, - timeout=self.timeout, - maxsize=self.max_pool_size, - host=self.ssh_host + ssh_client=self.ssh_client, timeout=self.timeout, maxsize=self.max_pool_size, host=self.ssh_host ) with self.pools.lock: pool = self.pools.get(url) @@ -239,10 +225,7 @@ def get_connection(self, url, proxies=None): self._connect() pool = SSHConnectionPool( - ssh_client=self.ssh_client, - timeout=self.timeout, - maxsize=self.max_pool_size, - host=self.ssh_host + ssh_client=self.ssh_client, timeout=self.timeout, maxsize=self.max_pool_size, host=self.ssh_host ) self.pools[url] = pool diff --git a/ipsw/transport/unixconn.py b/ipsw/transport/unixconn.py index a6b4652..9561493 100644 --- a/ipsw/transport/unixconn.py +++ b/ipsw/transport/unixconn.py @@ -15,11 +15,8 @@ class UnixHTTPConnection(httplib.HTTPConnection): - def __init__(self, base_url, unix_socket, timeout=60): - super().__init__( - 'localhost', timeout=timeout - ) + super().__init__("localhost", timeout=timeout) self.base_url = base_url self.unix_socket = unix_socket self.timeout = timeout @@ -39,38 +36,32 @@ def response_class(self, sock, *args, **kwargs): class UnixHTTPConnectionPool(urllib3.connectionpool.HTTPConnectionPool): def __init__(self, base_url, socket_path, timeout=60, maxsize=10): - super().__init__( - 'localhost', timeout=timeout, maxsize=maxsize - ) + super().__init__("localhost", timeout=timeout, maxsize=maxsize) self.base_url = base_url self.socket_path = socket_path self.timeout = timeout def _new_conn(self): - return UnixHTTPConnection( - self.base_url, self.socket_path, self.timeout - ) + return UnixHTTPConnection(self.base_url, self.socket_path, self.timeout) class UnixHTTPAdapter(BaseHTTPAdapter): - - __attrs__ = requests.adapters.HTTPAdapter.__attrs__ + ['pools', - 'socket_path', - 'timeout', - 'max_pool_size'] - - def __init__(self, socket_url, timeout=60, - pool_connections=constants.DEFAULT_NUM_POOLS, - max_pool_size=constants.DEFAULT_MAX_POOL_SIZE): - socket_path = socket_url.replace('http+unix://', '') - if not socket_path.startswith('/'): - socket_path = '/' + socket_path + __attrs__ = requests.adapters.HTTPAdapter.__attrs__ + ["pools", "socket_path", "timeout", "max_pool_size"] + + def __init__( + self, + socket_url, + timeout=60, + pool_connections=constants.DEFAULT_NUM_POOLS, + max_pool_size=constants.DEFAULT_MAX_POOL_SIZE, + ): + socket_path = 
socket_url.replace("http+unix://", "") + if not socket_path.startswith("/"): + socket_path = "/" + socket_path self.socket_path = socket_path self.timeout = timeout self.max_pool_size = max_pool_size - self.pools = RecentlyUsedContainer( - pool_connections, dispose_func=lambda p: p.close() - ) + self.pools = RecentlyUsedContainer(pool_connections, dispose_func=lambda p: p.close()) super().__init__() def get_connection(self, url, proxies=None): @@ -79,10 +70,7 @@ def get_connection(self, url, proxies=None): if pool: return pool - pool = UnixHTTPConnectionPool( - url, self.socket_path, self.timeout, - maxsize=self.max_pool_size - ) + pool = UnixHTTPConnectionPool(url, self.socket_path, self.timeout, maxsize=self.max_pool_size) self.pools[url] = pool return pool diff --git a/ipsw/types/__init__.py b/ipsw/types/__init__.py index 8b08762..541044b 100644 --- a/ipsw/types/__init__.py +++ b/ipsw/types/__init__.py @@ -1,2 +1,2 @@ # flake8: noqa -from .daemon import CancellableStream \ No newline at end of file +from .daemon import CancellableStream diff --git a/ipsw/types/daemon.py b/ipsw/types/daemon.py index 0d70d6b..a77e4cd 100644 --- a/ipsw/types/daemon.py +++ b/ipsw/types/daemon.py @@ -48,27 +48,24 @@ def close(self): sock_fp = self._response.raw._fp.fp - if hasattr(sock_fp, 'raw'): + if hasattr(sock_fp, "raw"): sock_raw = sock_fp.raw - if hasattr(sock_raw, 'sock'): + if hasattr(sock_raw, "sock"): sock = sock_raw.sock - elif hasattr(sock_raw, '_sock'): + elif hasattr(sock_raw, "_sock"): sock = sock_raw._sock - elif hasattr(sock_fp, 'channel'): + elif hasattr(sock_fp, "channel"): # We're working with a paramiko (SSH) channel, which doesn't # support cancelable streams with the current implementation - raise IpswException( - 'Cancellable streams not supported for the SSH protocol' - ) + raise IpswException("Cancellable streams not supported for the SSH protocol") else: sock = sock_fp._sock - if hasattr(urllib3.contrib, 'pyopenssl') and isinstance( - sock, urllib3.contrib.pyopenssl.WrappedSocket): + if hasattr(urllib3.contrib, "pyopenssl") and isinstance(sock, urllib3.contrib.pyopenssl.WrappedSocket): sock = sock.socket sock.shutdown(socket.SHUT_RDWR) - sock.close() \ No newline at end of file + sock.close() diff --git a/ipsw/utils/__init__.py b/ipsw/utils/__init__.py index 5cdb83a..1f8dc40 100644 --- a/ipsw/utils/__init__.py +++ b/ipsw/utils/__init__.py @@ -1,3 +1,3 @@ # flake8: noqa from .decorators import check_resource, minimum_version, update_headers -from .utils import parse_host, kwargs_from_env, version_lt, version_gte, format_environment \ No newline at end of file +from .utils import parse_host, kwargs_from_env, version_lt, version_gte, format_environment diff --git a/ipsw/utils/config.py b/ipsw/utils/config.py index d900fe1..f018d41 100644 --- a/ipsw/utils/config.py +++ b/ipsw/utils/config.py @@ -4,17 +4,22 @@ from ..constants import IS_WINDOWS_PLATFORM -IPSW_CONFIG_FILENAME = os.path.join('.config', 'ipsw', 'config.yml') +IPSW_CONFIG_FILENAME = os.path.join(".config", "ipsw", "config.yml") log = logging.getLogger(__name__) def find_config_file(config_path=None): - paths = list(filter(None, [ - config_path, # 1 - config_path_from_environment(), # 2 - os.path.join(home_dir(), IPSW_CONFIG_FILENAME), # 3 - ])) + paths = list( + filter( + None, + [ + config_path, # 1 + config_path_from_environment(), # 2 + os.path.join(home_dir(), IPSW_CONFIG_FILENAME), # 3 + ], + ) + ) log.debug(f"Trying paths: {repr(paths)}") @@ -29,7 +34,7 @@ def find_config_file(config_path=None): def 
config_path_from_environment(): - config_dir = os.environ.get('IPSW_CONFIG') + config_dir = os.environ.get("IPSW_CONFIG") if not config_dir: return None return os.path.join(config_dir, os.path.basename(IPSW_CONFIG_FILENAME)) @@ -41,9 +46,9 @@ def home_dir(): client - use %USERPROFILE% on Windows, $HOME/getuid on POSIX. """ if IS_WINDOWS_PLATFORM: - return os.environ.get('USERPROFILE', '') + return os.environ.get("USERPROFILE", "") else: - return os.path.expanduser('~') + return os.path.expanduser("~") def load_general_config(config_path=None): @@ -59,4 +64,4 @@ def load_general_config(config_path=None): log.debug(e) log.debug("All parsing attempts failed - returning empty config") - return {} \ No newline at end of file + return {} diff --git a/ipsw/utils/decorators.py b/ipsw/utils/decorators.py index cf1baf4..9eaea17 100644 --- a/ipsw/utils/decorators.py +++ b/ipsw/utils/decorators.py @@ -11,13 +11,13 @@ def wrapped(self, resource_id=None, *args, **kwargs): if resource_id is None and kwargs.get(resource_name): resource_id = kwargs.pop(resource_name) if isinstance(resource_id, dict): - resource_id = resource_id.get('Id', resource_id.get('ID')) + resource_id = resource_id.get("Id", resource_id.get("ID")) if not resource_id: - raise errors.NullResource( - 'Resource ID was not provided' - ) + raise errors.NullResource("Resource ID was not provided") return f(self, resource_id, *args, **kwargs) + return wrapped + return decorator @@ -26,22 +26,21 @@ def decorator(f): @functools.wraps(f) def wrapper(self, *args, **kwargs): if utils.version_lt(self._version, version): - raise errors.InvalidVersion( - '{} is not available for version < {}'.format( - f.__name__, version - ) - ) + raise errors.InvalidVersion("{} is not available for version < {}".format(f.__name__, version)) return f(self, *args, **kwargs) + return wrapper + return decorator def update_headers(f): def inner(self, *args, **kwargs): - if 'HttpHeaders' in self._general_configs: - if not kwargs.get('headers'): - kwargs['headers'] = self._general_configs['HttpHeaders'] + if "HttpHeaders" in self._general_configs: + if not kwargs.get("headers"): + kwargs["headers"] = self._general_configs["HttpHeaders"] else: - kwargs['headers'].update(self._general_configs['HttpHeaders']) + kwargs["headers"].update(self._general_configs["HttpHeaders"]) return f(self, *args, **kwargs) + return inner diff --git a/ipsw/utils/json_stream.py b/ipsw/utils/json_stream.py index c8081f3..14ae88f 100644 --- a/ipsw/utils/json_stream.py +++ b/ipsw/utils/json_stream.py @@ -16,7 +16,7 @@ def stream_as_text(stream): """ for data in stream: if not isinstance(data, str): - data = data.decode('utf-8', 'replace') + data = data.decode("utf-8", "replace") yield data @@ -27,7 +27,7 @@ def json_splitter(buffer): buffer = buffer.strip() try: obj, index = json_decoder.raw_decode(buffer) - rest = buffer[json.decoder.WHITESPACE.match(buffer, index).end():] + rest = buffer[json.decoder.WHITESPACE.match(buffer, index).end() :] return obj, rest except ValueError: return None @@ -41,11 +41,11 @@ def json_stream(stream): return split_buffer(stream, json_splitter, json_decoder.decode) -def line_splitter(buffer, separator='\n'): +def line_splitter(buffer, separator="\n"): index = buffer.find(str(separator)) if index == -1: return None - return buffer[:index + 1], buffer[index + 1:] + return buffer[: index + 1], buffer[index + 1 :] def split_buffer(stream, splitter=None, decoder=lambda a: a): @@ -56,7 +56,7 @@ def split_buffer(stream, splitter=None, decoder=lambda a: a): of the input. 
""" splitter = splitter or line_splitter - buffered = '' + buffered = "" for data in stream_as_text(stream): buffered += data @@ -72,4 +72,4 @@ def split_buffer(stream, splitter=None, decoder=lambda a: a): try: yield decoder(buffered) except Exception as e: - raise StreamParseError(e) \ No newline at end of file + raise StreamParseError(e) diff --git a/ipsw/utils/proxy.py b/ipsw/utils/proxy.py index 75f2f79..9020910 100644 --- a/ipsw/utils/proxy.py +++ b/ipsw/utils/proxy.py @@ -2,59 +2,60 @@ class ProxyConfig(dict): - ''' + """ Hold the client's proxy configuration - ''' + """ + @property def http(self): - return self.get('http') + return self.get("http") @property def https(self): - return self.get('https') + return self.get("https") @property def ftp(self): - return self.get('ftp') + return self.get("ftp") @property def no_proxy(self): - return self.get('no_proxy') + return self.get("no_proxy") @staticmethod def from_dict(config): - ''' + """ Instantiate a new ProxyConfig from a dictionary that represents a client configuration, as described in `the documentation`_. - ''' + """ return ProxyConfig( - http=config.get('httpProxy'), - https=config.get('httpsProxy'), - ftp=config.get('ftpProxy'), - no_proxy=config.get('noProxy'), + http=config.get("httpProxy"), + https=config.get("httpsProxy"), + ftp=config.get("ftpProxy"), + no_proxy=config.get("noProxy"), ) def get_environment(self): - ''' + """ Return a dictionary representing the environment variables used to set the proxy settings. - ''' + """ env = {} if self.http: - env['http_proxy'] = env['HTTP_PROXY'] = self.http + env["http_proxy"] = env["HTTP_PROXY"] = self.http if self.https: - env['https_proxy'] = env['HTTPS_PROXY'] = self.https + env["https_proxy"] = env["HTTPS_PROXY"] = self.https if self.ftp: - env['ftp_proxy'] = env['FTP_PROXY'] = self.ftp + env["ftp_proxy"] = env["FTP_PROXY"] = self.ftp if self.no_proxy: - env['no_proxy'] = env['NO_PROXY'] = self.no_proxy + env["no_proxy"] = env["NO_PROXY"] = self.no_proxy return env def inject_proxy_environment(self, environment): - ''' + """ Given a list of strings representing environment variables, prepend the environment variables corresponding to the proxy settings. - ''' + """ if not self: return environment @@ -66,5 +67,6 @@ def inject_proxy_environment(self, environment): return proxy_env + environment def __str__(self): - return 'ProxyConfig(http={}, https={}, ftp={}, no_proxy={})'.format( - self.http, self.https, self.ftp, self.no_proxy) + return "ProxyConfig(http={}, https={}, ftp={}, no_proxy={})".format( + self.http, self.https, self.ftp, self.no_proxy + ) diff --git a/ipsw/utils/socket.py b/ipsw/utils/socket.py index 74b8a31..77463fc 100644 --- a/ipsw/utils/socket.py +++ b/ipsw/utils/socket.py @@ -34,22 +34,20 @@ def read(socket, n=4096): select.select([socket], [], []) try: - if hasattr(socket, 'recv'): + if hasattr(socket, "recv"): return socket.recv(n) - if isinstance(socket, getattr(pysocket, 'SocketIO')): + if isinstance(socket, getattr(pysocket, "SocketIO")): return socket.read(n) return os.read(socket.fileno(), n) except OSError as e: if e.errno not in recoverable_errors: raise except Exception as e: - is_pipe_ended = (isinstance(socket, NpipeSocket) and - len(e.args) > 0 and - e.args[0] == NPIPE_ENDED) + is_pipe_ended = isinstance(socket, NpipeSocket) and len(e.args) > 0 and e.args[0] == NPIPE_ENDED if is_pipe_ended: # npipes don't support duplex sockets, so we interpret # a PIPE_ENDED error as a close operation (0-length read). 
- return '' + return "" raise @@ -77,7 +75,7 @@ def next_frame_header(socket): except SocketError: return (-1, -1) - stream, actual = struct.unpack('>BxxxL', data) + stream, actual = struct.unpack(">BxxxL", data) return (stream, actual) @@ -176,4 +174,4 @@ def demux_adaptor(stream_id, data): elif stream_id == STDERR: return (None, data) else: - raise ValueError(f'{stream_id} is not a valid stream') \ No newline at end of file + raise ValueError(f"{stream_id} is not a valid stream") diff --git a/ipsw/version.py b/ipsw/version.py index bc74a43..ddc342e 100644 --- a/ipsw/version.py +++ b/ipsw/version.py @@ -6,9 +6,10 @@ # is fine because release builds use _version (above) rather than # this code path, so it only impacts developing w/ 3.7 from importlib.metadata import version, PackageNotFoundError + try: - __version__ = version('ipsw') + __version__ = version("ipsw") except PackageNotFoundError: - __version__ = '0.0.0' + __version__ = "0.0.0" except ImportError: - __version__ = '0.0.0' \ No newline at end of file + __version__ = "0.0.0"
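-- 
Usage note (not part of the patch): a minimal sketch of the new DSC routes this
change adds, based only on the methods introduced above (DscApiMixin and
DscCollection). It assumes an ipsw daemon is reachable via the environment
(ipsw.from_env()); the cache and dylib paths below are hypothetical examples.

    import ipsw

    client = ipsw.from_env()

    # High-level collection (ipsw/models/dsc.py), exposed as client.dsc
    cache = client.dsc.get_info(path="/path/to/dyld_shared_cache_arm64e")  # hypothetical path
    print(cache.magic, cache.platform, cache.uuid)

    dylib = client.dsc.get_dylib(
        path="/path/to/dyld_shared_cache_arm64e",  # hypothetical path
        dylib="/usr/lib/libobjc.A.dylib",          # example dylib name
    )
    print(dylib.cpu, dylib.sub_cpu)

    # Low-level mixin (ipsw/api/dsc.py), mirrored on client.api
    raw = client.api.dsc_info(path="/path/to/dyld_shared_cache_arm64e")  # hypothetical path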