Added podman, json, and yaml support.

This commit is contained in:
2022-11-27 19:11:46 +01:00
parent 01135dea09
commit 5226e858bb
790 changed files with 114578 additions and 16 deletions

View File

@ -0,0 +1,61 @@
"""Tools for connecting to a Podman service."""
import re
from podman.api.cached_property import cached_property
from podman.api.client import APIClient
from podman.api.http_utils import prepare_body, prepare_filters
from podman.api.parse_utils import (
decode_header,
frames,
parse_repository,
prepare_cidr,
prepare_timestamp,
stream_frames,
)
from podman.api.tar_utils import create_tar, prepare_containerfile, prepare_containerignore
from .. import version
DEFAULT_CHUNK_SIZE = 2 * 1024 * 1024
def _api_version(release: str, significant: int = 3) -> str:
"""Return API version removing any additional identifiers from the release version.
This is a simple lexicographical parsing, no semantics are applied, e.g. semver checking.
"""
items = re.split(r"\.|-|\+", release)
parts = items[0:significant]
return ".".join(parts)
# Libpod API version prefix derived from this package's release (3 components).
VERSION: str = _api_version(version.__version__)
# Docker-compatible API version prefix (major.minor only).
COMPATIBLE_VERSION: str = _api_version(version.__compatible_version__, 2)

# typing.Literal is available from Python 3.8; fall back to the
# typing_extensions package, and finally to the vendored copy.
try:
    from typing import Literal
except (ImportError, ModuleNotFoundError):
    try:
        from typing_extensions import Literal
    except (ImportError, ModuleNotFoundError):
        from podman.api.typing_extensions import Literal  # pylint: disable=ungrouped-imports
# isort: unique-list
# Names re-exported as the public surface of the `podman.api` package.
__all__ = [
    'APIClient',
    'COMPATIBLE_VERSION',
    'DEFAULT_CHUNK_SIZE',
    'Literal',
    'VERSION',
    'cached_property',
    'create_tar',
    'decode_header',
    'frames',
    'parse_repository',
    'prepare_body',
    'prepare_cidr',
    'prepare_containerfile',
    'prepare_containerignore',
    'prepare_filters',
    'prepare_timestamp',
    'stream_frames',
]

View File

@ -0,0 +1,49 @@
"""Utility functions for working with Adapters."""
from typing import NamedTuple, Mapping
def _key_normalizer(key_class: NamedTuple, request_context: Mapping) -> Mapping:
"""Create a pool key out of a request context dictionary.
According to RFC 3986, both the scheme and host are case-insensitive.
Therefore, this function normalizes both before constructing the pool
key for an HTTPS request. If you wish to change this behaviour, provide
alternate callables to ``key_fn_by_scheme``.
Copied from urllib3.poolmanager._default_key_normalizer.
Args:
key_class: The class to use when constructing the key. This should be a namedtuple
with the scheme and host keys at a minimum.
request_context: An object that contain the context for a request.
Returns:
A namedtuple that can be used as a connection pool key.
"""
# Since we mutate the dictionary, make a copy first
context = request_context.copy()
context["scheme"] = context["scheme"].lower()
context["host"] = context["host"].lower()
# These are both dictionaries and need to be transformed into frozensets
for key in ("headers", "_proxy_headers", "_socks_options"):
if key in context and context[key] is not None:
context[key] = frozenset(context[key].items())
# The socket_options key may be a list and needs to be transformed into a
# tuple.
socket_opts = context.get("socket_options")
if socket_opts is not None:
context["socket_options"] = tuple(socket_opts)
# Map the kwargs to the names in the namedtuple - this is necessary since
# namedtuples can't have fields starting with '_'.
for key in list(context.keys()):
context["key_" + key] = context.pop(key)
# Default to ``None`` for keys missing from the context
for field in key_class._fields:
if field not in context:
context[field] = None
return key_class(**context)

View File

@ -0,0 +1,9 @@
"""Provide cached_property for Python <=3.8 programs."""
import functools
# functools.cached_property exists on Python >= 3.8; import it when available.
try:
    from functools import cached_property  # pylint: disable=unused-import
except ImportError:
    # Fallback for Python <= 3.7: emulate with property over an lru_cache'd
    # function.
    # NOTE(review): lru_cache keys on `self`, so every instance that touches
    # the property is kept alive by the cache for the process lifetime —
    # acceptable for a compat shim, but confirm for long-running services.
    def cached_property(fn):
        return property(functools.lru_cache()(fn))

View File

@ -0,0 +1,415 @@
"""APIClient for connecting to Podman service."""
import json
import urllib.parse
from typing import Any, ClassVar, IO, Iterable, List, Mapping, Optional, Tuple, Type, Union
import requests
from requests.adapters import HTTPAdapter
from podman import api
from podman.api.ssh import SSHAdapter
from podman.api.uds import UDSAdapter
from podman.errors import APIError, NotFound
from podman.tlsconfig import TLSConfig
from podman.version import __version__
_Data = Union[
None,
str,
bytes,
Mapping[str, Any],
Iterable[Tuple[str, Optional[str]]],
IO,
]
"""Type alias for request data parameter."""
_Timeout = Union[None, float, Tuple[float, float], Tuple[float, None]]
"""Type alias for request timeout parameter."""
class APIResponse:
    """Proxy around a requests.Response object.

    raise_for_status() is overridden to raise the Podman API binding errors;
    every other attribute and method is delegated to the wrapped Response.
    """

    def __init__(self, response: requests.Response):
        """Initialize APIResponse.

        Args:
            response: the requests.Response that backs this proxy.
        """
        self._response = response

    def __getattr__(self, item: str):
        """Delegate attributes not defined on this proxy to the wrapped Response."""
        return getattr(self._response, item)

    def raise_for_status(self, not_found: Type[APIError] = NotFound) -> None:
        """Raises exception when Podman service reports one."""
        if self.status_code >= 400:
            try:
                details = self.json()
                cause = details["cause"]
                message = details["message"]
            except (json.decoder.JSONDecodeError, KeyError):
                # Service did not return the structured error body; fall back
                # to the raw response text for both fields.
                cause = message = self.text

            if self.status_code == requests.codes.not_found:
                raise not_found(cause, response=self._response, explanation=message)
            raise APIError(cause, response=self._response, explanation=message)
class APIClient(requests.Session):
    """Client for Podman service API."""

    # Abstract methods (delete,get,head,post) are specialized and pylint cannot walk hierarchy.
    # pylint: disable=too-many-instance-attributes,arguments-differ,arguments-renamed

    # URL schemes this client knows how to mount a transport adapter for.
    supported_schemes: ClassVar[List[str]] = (
        "unix",
        "http+unix",
        "ssh",
        "http+ssh",
        "tcp",
        "http",
    )

    def __init__(
        self,
        base_url: str = None,
        version: Optional[str] = None,
        timeout: Optional[float] = None,
        tls: Union[TLSConfig, bool] = False,
        user_agent: Optional[str] = None,
        num_pools: Optional[int] = None,
        credstore_env: Optional[Mapping[str, str]] = None,
        use_ssh_client=True,
        max_pools_size=None,
        **kwargs,
    ):  # pylint: disable=unused-argument
        """Instantiate APIClient object.

        Args:
            base_url: Address to use for connecting to Podman service.
            version: Override version prefix for Podman resource URLs.
            timeout: Time in seconds to allow for Podman service operation.
            tls: Configuration for TLS connections.
            user_agent: Override User-Agent HTTP header.
            num_pools: The number of connection pools to cache.
            credstore_env: Environment for storing credentials.
            use_ssh_client: Use system ssh agent rather than ssh module. Always, True.
            max_pools_size: Override number of connections pools to maintain.
                Default: requests.adapters.DEFAULT_POOLSIZE

        Keyword Args:
            compatible_version (str): Override version prefix for compatible resource URLs.
            identity (str): Provide SSH key to authenticate SSH connection.

        Raises:
            ValueError: when a parameter is incorrect
        """
        super().__init__()
        self.base_url = self._normalize_url(base_url)

        # Optional keyword arguments forwarded to the transport adapters.
        adapter_kwargs = kwargs.copy()
        if num_pools is not None:
            adapter_kwargs["pool_connections"] = num_pools
        if max_pools_size is not None:
            adapter_kwargs["pool_maxsize"] = max_pools_size
        if timeout is not None:
            adapter_kwargs["timeout"] = timeout

        # Mount the adapter on both http:// and https:// so either request URL
        # form is routed over the configured transport.
        if self.base_url.scheme == "http+unix":
            self.mount("http://", UDSAdapter(self.base_url.geturl(), **adapter_kwargs))
            self.mount("https://", UDSAdapter(self.base_url.geturl(), **adapter_kwargs))
        elif self.base_url.scheme == "http+ssh":
            self.mount("http://", SSHAdapter(self.base_url.geturl(), **adapter_kwargs))
            self.mount("https://", SSHAdapter(self.base_url.geturl(), **adapter_kwargs))
        elif self.base_url.scheme == "http":
            self.mount("http://", HTTPAdapter(**adapter_kwargs))
            self.mount("https://", HTTPAdapter(**adapter_kwargs))
        else:
            # Was `assert False`, which is stripped under `python -O`;
            # raise so the guard survives optimized runs.
            raise ValueError("APIClient.supported_schemes changed without adding a branch here.")

        self.version = version or api.VERSION
        self.path_prefix = f"/v{self.version}/libpod/"
        self.compatible_version = kwargs.get("compatible_version", api.COMPATIBLE_VERSION)
        self.compatible_prefix = f"/v{self.compatible_version}/"

        self.timeout = timeout
        # NOTE(review): this stores `num_pools` under the name pool_maxsize;
        # `max_pools_size` looks like the intended source — confirm with
        # callers before changing the behavior.
        self.pool_maxsize = num_pools or requests.adapters.DEFAULT_POOLSIZE
        self.credstore_env = credstore_env or {}

        self.user_agent = user_agent or (
            f"PodmanPy/{__version__} (API v{self.version}; Compatible v{self.compatible_version})"
        )
        self.headers.update({"User-Agent": self.user_agent})

    @staticmethod
    def _normalize_url(base_url: str) -> urllib.parse.ParseResult:
        """Validate and canonicalize base_url for use by the transport adapters.

        Raises:
            ValueError: when the URL scheme is unsupported.
        """
        uri = urllib.parse.urlparse(base_url)
        if uri.scheme not in APIClient.supported_schemes:
            raise ValueError(
                f"The scheme '{uri.scheme}' must be one of {APIClient.supported_schemes}"
            )

        # Normalize URL scheme, needs to match up with adapter mounts
        if uri.scheme == "unix":
            uri = uri._replace(scheme="http+unix")
        elif uri.scheme == "ssh":
            uri = uri._replace(scheme="http+ssh")
        elif uri.scheme == "tcp":
            uri = uri._replace(scheme="http")

        # Normalize URL netloc, needs to match up with transport adapters expectations
        if uri.netloc == "":
            uri = uri._replace(netloc=uri.path)._replace(path="")
        if "/" in uri.netloc:
            uri = uri._replace(netloc=urllib.parse.quote_plus(uri.netloc))

        return uri

    def delete(
        self,
        path: Union[str, bytes],
        params: Union[None, bytes, Mapping[str, str]] = None,
        headers: Optional[Mapping[str, str]] = None,
        timeout: _Timeout = None,
        stream: Optional[bool] = False,
        **kwargs,
    ) -> APIResponse:
        """HTTP DELETE operation against configured Podman service.

        Args:
            path: Relative path to RESTful resource.
            params: Optional parameters to include with URL.
            headers: Optional headers to include in request.
            timeout: Number of seconds to wait on request, or (connect timeout, read timeout) tuple
            stream: Return iterator for content vs reading all content into memory

        Keyword Args:
            compatible: Will override the default path prefix with compatible prefix

        Raises:
            APIError: when service returns an error
        """
        return self._request(
            "DELETE",
            path=path,
            params=params,
            headers=headers,
            timeout=timeout,
            stream=stream,
            **kwargs,
        )

    def get(
        self,
        path: Union[str, bytes],
        params: Union[None, bytes, Mapping[str, List[str]]] = None,
        headers: Optional[Mapping[str, str]] = None,
        timeout: _Timeout = None,
        stream: Optional[bool] = False,
        **kwargs,
    ) -> APIResponse:
        """HTTP GET operation against configured Podman service.

        Args:
            path: Relative path to RESTful resource.
            params: Optional parameters to include with URL.
            headers: Optional headers to include in request.
            timeout: Number of seconds to wait on request, or (connect timeout, read timeout) tuple
            stream: Return iterator for content vs reading all content into memory

        Keyword Args:
            compatible: Will override the default path prefix with compatible prefix

        Raises:
            APIError: when service returns an error
        """
        return self._request(
            "GET",
            path=path,
            params=params,
            headers=headers,
            timeout=timeout,
            stream=stream,
            **kwargs,
        )

    def head(
        self,
        path: Union[str, bytes],
        params: Union[None, bytes, Mapping[str, str]] = None,
        headers: Optional[Mapping[str, str]] = None,
        timeout: _Timeout = None,
        stream: Optional[bool] = False,
        **kwargs,
    ) -> APIResponse:
        """HTTP HEAD operation against configured Podman service.

        Args:
            path: Relative path to RESTful resource.
            params: Optional parameters to include with URL.
            headers: Optional headers to include in request.
            timeout: Number of seconds to wait on request, or (connect timeout, read timeout) tuple
            stream: Return iterator for content vs reading all content into memory

        Keyword Args:
            compatible: Will override the default path prefix with compatible prefix

        Raises:
            APIError: when service returns an error
        """
        return self._request(
            "HEAD",
            path=path,
            params=params,
            headers=headers,
            timeout=timeout,
            stream=stream,
            **kwargs,
        )

    def post(
        self,
        path: Union[str, bytes],
        params: Union[None, bytes, Mapping[str, str]] = None,
        data: _Data = None,
        headers: Optional[Mapping[str, str]] = None,
        timeout: _Timeout = None,
        stream: Optional[bool] = False,
        **kwargs,
    ) -> APIResponse:
        """HTTP POST operation against configured Podman service.

        Args:
            path: Relative path to RESTful resource.
            data: HTTP body for operation
            params: Optional parameters to include with URL.
            headers: Optional headers to include in request.
            timeout: Number of seconds to wait on request, or (connect timeout, read timeout) tuple
            stream: Return iterator for content vs reading all content into memory

        Keyword Args:
            compatible: Will override the default path prefix with compatible prefix

        Raises:
            APIError: when service returns an error
        """
        return self._request(
            "POST",
            path=path,
            params=params,
            data=data,
            headers=headers,
            timeout=timeout,
            stream=stream,
            **kwargs,
        )

    def put(
        self,
        path: Union[str, bytes],
        params: Union[None, bytes, Mapping[str, str]] = None,
        data: _Data = None,
        headers: Optional[Mapping[str, str]] = None,
        timeout: _Timeout = None,
        stream: Optional[bool] = False,
        **kwargs,
    ) -> APIResponse:
        """HTTP PUT operation against configured Podman service.

        Args:
            path: Relative path to RESTful resource.
            data: HTTP body for operation
            params: Optional parameters to include with URL.
            headers: Optional headers to include in request.
            timeout: Number of seconds to wait on request, or (connect timeout, read timeout) tuple
            stream: Return iterator for content vs reading all content into memory

        Keyword Args:
            compatible: Will override the default path prefix with compatible prefix

        Raises:
            APIError: when service returns an error
        """
        return self._request(
            "PUT",
            path=path,
            params=params,
            data=data,
            headers=headers,
            timeout=timeout,
            stream=stream,
            **kwargs,
        )

    def _request(
        self,
        method: str,
        path: Union[str, bytes],
        data: _Data = None,
        params: Union[None, bytes, Mapping[str, str]] = None,
        headers: Optional[Mapping[str, str]] = None,
        timeout: _Timeout = None,
        stream: Optional[bool] = None,
        **kwargs,
    ) -> APIResponse:
        """HTTP operation against configured Podman service.

        Args:
            method: HTTP method to use for request
            path: Relative path to RESTful resource.
            data: HTTP body for operation
            params: Optional parameters to include with URL.
            headers: Optional headers to include in request.
            timeout: Number of seconds to wait on request, or (connect timeout, read timeout) tuple
            stream: Return iterator for content vs reading all content into memory

        Keyword Args:
            compatible: Will override the default path prefix with compatible prefix

        Raises:
            APIError: when service returns an error
        """
        # Only set timeout if one is given, lower level APIs will not override None.
        # Bug fix: the guard previously tested `timeout_kw is not None`, which is
        # always true for a dict, so timeout=None was forwarded unconditionally.
        timeout_kw = {}
        timeout = timeout or self.timeout
        if timeout is not None:
            timeout_kw["timeout"] = timeout

        compatible = kwargs.get("compatible", False)
        path_prefix = self.compatible_prefix if compatible else self.path_prefix
        path = path.lstrip("/")  # leading / makes urljoin crazy...

        # TODO should we have an option for HTTPS support?
        # Build URL for operation from base_url
        uri = urllib.parse.ParseResult(
            "http",
            self.base_url.netloc,
            urllib.parse.urljoin(path_prefix, path),
            self.base_url.params,
            self.base_url.query,
            self.base_url.fragment,
        )

        try:
            return APIResponse(
                self.request(
                    method.upper(),
                    uri.geturl(),
                    params=params,
                    data=data,
                    headers=(headers or {}),
                    stream=stream,
                    **timeout_kw,
                )
            )
        except OSError as e:
            raise APIError(uri.geturl(), explanation=f"{method.upper()} operation failed") from e

View File

@ -0,0 +1,102 @@
"""Utility functions for working with URLs."""
import base64
import collections.abc
import json
from typing import Dict, List, Mapping, Optional, Union, Any
def prepare_filters(filters: Union[str, List[str], Mapping[str, str]]) -> Optional[str]:
    """Return filters as an URL quoted JSON Dict[str, List[Any]]."""
    if filters is None or len(filters) == 0:
        return None

    criteria: Dict[str, List[str]] = {}
    if isinstance(filters, str):
        _format_string(filters, criteria)
    elif isinstance(filters, collections.abc.Mapping):
        _format_dict(filters, criteria)
    else:
        _format_list(filters, criteria)

    # All entries may have been skipped (e.g. every value was None).
    return json.dumps(criteria, sort_keys=True) if criteria else None


def _format_list(filters, criteria):
    """Accumulate "key=value" strings into criteria, skipping None entries."""
    for entry in filters:
        if entry is None:
            continue
        key, value = entry.split("=", 1)
        criteria.setdefault(key, []).append(value)


def _format_dict(filters, criteria):
    """Accumulate mapping entries into criteria, skipping None values."""
    for key, value in filters.items():
        if value is None:
            continue
        criteria.setdefault(key, []).append(str(value))


def _format_string(filters, criteria):
    """Store a single "key=value" string into criteria."""
    key, value = filters.split("=", 1)
    criteria[key] = [value]
def prepare_body(body: Mapping[str, Any]) -> str:
    """Returns JSON payload to be uploaded to server.

    Values of None and empty Iterables are removed, False and zero-values are retained.
    """
    if body is None:
        return ""
    return json.dumps(_filter_values(body), sort_keys=True)
def _filter_values(mapping: Mapping[str, Any], recursion=False) -> Dict[str, Any]:
    """Returns a canonical dictionary with values == None or empty Iterables removed.

    Dictionary is walked using recursion.
    """
    canonical = {}
    for key, value in mapping.items():
        # quick filter if possible...
        # NOTE(review): by operator precedence this reads
        # `value is None or (is-empty-sized and not recursion)` — i.e. empty
        # Sized values only short-circuit at the top level; confirm this
        # asymmetry between levels is intended.
        if (
            value is None
            or (isinstance(value, collections.abc.Sized) and len(value) <= 0)
            and not recursion
        ):
            continue

        # depending on type we need details...
        if isinstance(value, collections.abc.Mapping):
            # Recurse into nested mappings so their empties are canonicalized too.
            proposal = _filter_values(value, recursion=True)
        elif isinstance(value, collections.abc.Iterable) and not isinstance(value, str):
            # Drop None elements from iterables; strings pass through whole.
            proposal = [i for i in value if i is not None]
        else:
            proposal = value

        # Top level additionally discards empty strings; nested levels keep them.
        if not recursion and proposal not in (None, str(), [], {}):
            canonical[key] = proposal
        elif recursion and proposal not in (None, [], {}):
            canonical[key] = proposal
    return canonical
def encode_auth_header(auth_config: Dict[str, str]) -> bytes:
    """Return the auth configuration serialized to JSON and base64 encoded.

    Note:
        The return annotation previously claimed ``str``, but
        ``base64.b64encode()`` returns ``bytes``; the annotation now matches
        the (unchanged) runtime behavior.
    """
    return base64.b64encode(json.dumps(auth_config).encode('utf-8'))

View File

@ -0,0 +1,99 @@
"""Helper functions for parsing strings."""
import base64
import ipaddress
import json
import struct
from datetime import datetime
from typing import Any, Dict, Iterator, Optional, Tuple, Union
from requests import Response
def parse_repository(name: str) -> Tuple[str, Optional[str]]:
    """Parse repository image name from tag or digest

    Returns:
        item 1: repository name
        item 2: Either digest and tag, tag, or None
    """
    # Digest form: everything after the first "@" is the identifier.
    repo, at_sign, digest = name.partition("@")
    if at_sign:
        return repo, digest

    # Tag form: the colon must not belong to a registry host:port prefix
    # (e.g. localhost:5000/image has "/" after the colon and is not a tag).
    repo, colon, tag = name.partition(":")
    if colon and "/" not in tag:
        return repo, tag

    return name, None
def decode_header(value: Optional[str]) -> Dict[str, Any]:
    """Decode a base64 encoded JSON HTTP header value into a dictionary."""
    if value is None:
        return {}
    raw = base64.b64decode(value)
    return json.loads(raw.decode("utf-8"))
def prepare_timestamp(value: Union[datetime, int, None]) -> Optional[int]:
    """Returns a UTC UNIX timestamp from given input."""
    if value is None:
        return None
    if isinstance(value, int):
        return value
    if isinstance(value, datetime):
        # Whole seconds since the UNIX epoch; sub-second precision is dropped.
        delta = value - datetime.utcfromtimestamp(0)
        return delta.days * 24 * 3600 + delta.seconds
    raise ValueError(f"Type '{type(value)}' is not supported by prepare_timestamp()")
def prepare_cidr(value: Union[ipaddress.IPv4Network, ipaddress.IPv6Network]) -> (str, str):
"""Returns network address and Base64 encoded netmask from CIDR.
The return values are dictated by the Go JSON decoder.
"""
return str(value.network_address), base64.b64encode(value.netmask.packed).decode("utf-8")
def frames(response: Response) -> Iterator[bytes]:
    """Yield each frame from a fully-buffered multiplexed payload.

    The stdout and stderr frames are undifferentiated as they are returned.
    """
    payload = response.content
    offset = 0
    # Each frame is an 8-byte header (type byte, 3 pad bytes, big-endian
    # uint32 length) followed by the frame body.
    while len(payload) - offset > 8:
        _, size = struct.unpack_from(">BxxxL", payload, offset)
        start = offset + 8
        offset = start + size
        yield payload[start:offset]
def stream_frames(response: Response) -> Iterator[bytes]:
    """Yield each frame from a multiplexed streamed payload.

    Notes:
        The stdout and stderr frames are undifferentiated as they are returned.
    """
    while True:
        header = response.raw.read(8)
        if not header:
            break

        _, size = struct.unpack_from(">BxxxL", header)
        if size == 0:
            # Zero-length frame: nothing to emit, keep reading.
            continue

        payload = response.raw.read(size)
        if not payload:
            break
        yield payload

View File

@ -0,0 +1,301 @@
"""Specialized Transport Adapter for remote Podman access via ssh tunnel.
See Podman go bindings for more details.
"""
import collections
import functools
import http.client
import logging
import pathlib
import random
import socket
import subprocess
import urllib.parse
from contextlib import suppress
from typing import Optional, Union
import time
import xdg.BaseDirectory
try:
import urllib3
except ImportError:
from requests.packages import urllib3
from requests.adapters import DEFAULT_POOLBLOCK, DEFAULT_RETRIES, HTTPAdapter
from .adapter_utils import _key_normalizer
logger = logging.getLogger("podman.ssh_adapter")
class SSHSocket(socket.socket):
    """Specialization of socket.socket to forward a UNIX domain socket via SSH."""

    def __init__(self, uri: str, identity: Optional[str] = None):
        """Initialize SSHSocket.

        Args:
            uri: Full address of a Podman service including path to remote socket.
            identity: path to file containing SSH key for authorization

        Examples:
            SSHSocket("http+ssh://alice@api.example:2222/run/user/1000/podman/podman.sock",
                "~alice/.ssh/api_ed25519")
        """
        super().__init__(socket.AF_UNIX, socket.SOCK_STREAM)
        self.uri = uri
        self.identity = identity
        self._proc: Optional[subprocess.Popen] = None

        # Local UNIX socket the ssh client will bind and forward to uri's path;
        # randomized name avoids collisions between concurrent clients.
        runtime_dir = pathlib.Path(xdg.BaseDirectory.get_runtime_dir(strict=False)) / "podman"
        runtime_dir.mkdir(mode=0o700, parents=True, exist_ok=True)
        self.local_sock = runtime_dir / f"podman-forward-{random.getrandbits(80):x}.sock"

    def connect(self, **kwargs):  # pylint: disable=unused-argument
        """Returns socket for SSH tunneled UNIX domain socket.

        Raises:
            subprocess.TimeoutExpired: when SSH client fails to create local socket
        """
        uri = urllib.parse.urlparse(self.uri)
        command = [
            "ssh",
            "-N",
            "-o",
            "StrictHostKeyChecking no",
            "-L",
            f"{self.local_sock}:{uri.path}",
        ]
        if self.identity is not None:
            path = pathlib.Path(self.identity).expanduser()
            command += ["-i", str(path)]
        command += [f"ssh://{uri.netloc}"]

        self._proc = subprocess.Popen(  # pylint: disable=consider-using-with
            command,
            shell=False,
            stdout=subprocess.PIPE,
            stdin=subprocess.PIPE,
        )

        # Poll until the ssh client creates the forwarded local socket,
        # bounded by a fixed window.
        timeout = 300
        expiration = time.monotonic() + timeout
        while not self.local_sock.exists():
            if time.monotonic() > expiration:
                cmd = " ".join(command)
                # Bug fix: report the timeout duration, not the monotonic
                # deadline that was previously passed by mistake.
                raise subprocess.TimeoutExpired(cmd, timeout)
            logger.debug("Waiting on %s", self.local_sock)
            time.sleep(0.2)

        super().connect(str(self.local_sock))

    def send(self, data: bytes, flags=None) -> int:  # pylint: disable=unused-argument
        """Write data to SSH forwarded UNIX domain socket.

        Args:
            data: Data to write.
            flags: Ignored.

        Returns:
            The number of bytes written.

        Raises:
            RuntimeError: When socket has not been connected.
        """
        if not self._proc or self._proc.stdin.closed:
            raise RuntimeError(f"SSHSocket({self.uri}) not connected.")

        count = self._proc.stdin.write(data)
        self._proc.stdin.flush()
        return count

    def recv(self, buffersize, flags=None) -> bytes:  # pylint: disable=unused-argument
        """Read data from SSH forwarded UNIX domain socket.

        Args:
            buffersize: Maximum number of bytes to read.
            flags: Ignored.

        Raises:
            RuntimeError: When socket has not been connected.
        """
        if not self._proc:
            raise RuntimeError(f"SSHSocket({self.uri}) not connected.")
        return self._proc.stdout.read(buffersize)

    def close(self):
        """Release resources held by SSHSocket.

        The SSH client is first sent SIGTERM, then a SIGKILL 20 seconds later if needed.
        """
        if not self._proc or self._proc.stdin.closed:
            return

        # Closing stdin may race with an already-dead ssh client.
        with suppress(BrokenPipeError):
            self._proc.stdin.close()
        self._proc.stdout.close()

        self._proc.terminate()
        try:
            self._proc.wait(timeout=20)
        except subprocess.TimeoutExpired:
            logger.debug("SIGKILL required to stop SSH client.")
            self._proc.kill()

        self.local_sock.unlink()
        self._proc = None
        super().close()
class SSHConnection(http.client.HTTPConnection):
    """Specialization of HTTPConnection to use a SSH forwarded socket."""

    def __init__(
        self,
        host: str,
        port: int,
        timeout: Union[float, urllib3.Timeout, None] = None,
        strict=False,
        **kwargs,  # pylint: disable=unused-argument
    ) -> None:
        """Initialize connection to SSHSocket for HTTP client.

        Args:
            host: Ignored.
            port: Ignored.
            timeout: Time to allow for operation.
            strict: Ignored.

        Keyword Args:
            uri: Full address of a Podman service including path to remote socket. Required.
            identity: path to file containing SSH key for authorization.
        """
        self.sock: Optional[socket.socket] = None
        connection_kwargs = kwargs.copy()
        connection_kwargs["port"] = port

        if timeout is not None:
            if isinstance(timeout, urllib3.Timeout):
                # http.client expects a plain float; Timeout.total may be None
                # (unset), in which case no timeout is forwarded.
                try:
                    connection_kwargs["timeout"] = float(timeout.total)
                except TypeError:
                    pass
            else:
                # Bug fix: this assignment previously ran unconditionally and
                # clobbered the float conversion above with the Timeout object.
                connection_kwargs["timeout"] = timeout

        self.uri = connection_kwargs.pop("uri")
        self.identity = connection_kwargs.pop("identity", None)

        super().__init__(host, **connection_kwargs)

        # Mirror http.client's wire-level debugging when the package logger
        # is at DEBUG.
        if logger.getEffectiveLevel() == logging.DEBUG:
            self.set_debuglevel(1)

    def connect(self) -> None:
        """Connect to Podman service via SSHSocket."""
        sock = SSHSocket(self.uri, self.identity)
        sock.settimeout(self.timeout)
        sock.connect()
        self.sock = sock
class SSHConnectionPool(urllib3.HTTPConnectionPool):
    """Specialized HTTPConnectionPool for holding SSH connections."""

    # urllib3 instantiates ConnectionCls for each pooled connection; using
    # SSHConnection routes all of this pool's traffic over the SSH tunnel.
    ConnectionCls = SSHConnection
class SSHPoolManager(urllib3.PoolManager):
    """Specialized PoolManager for tracking SSH connections."""

    # pylint's special handling for namedtuple does not cover this usage
    # pylint: disable=invalid-name

    # Extend urllib3's standard pool key with the SSH-specific context
    # (target uri and identity file) so distinct tunnels get distinct pools.
    _PoolKey = collections.namedtuple(
        "_PoolKey", urllib3.poolmanager.PoolKey._fields + ("key_uri", "key_identity")
    )

    # Map supported schemes to Pool Classes
    _pool_classes_by_scheme = {
        "http": SSHConnectionPool,
        "http+ssh": SSHConnectionPool,
    }

    # Map supported schemes to Pool Key index generator
    _key_fn_by_scheme = {
        "http": functools.partial(_key_normalizer, _PoolKey),
        "http+ssh": functools.partial(_key_normalizer, _PoolKey),
    }

    def __init__(self, num_pools=10, headers=None, **kwargs):
        """Initialize SSHPoolManager.

        Args:
            num_pools: Number of SSH Connection pools to maintain.
            headers: Additional headers to add to operations.
        """
        super().__init__(num_pools, headers, **kwargs)

        # Override instance-level mappings so this manager uses the SSH pool
        # classes and extended keys rather than urllib3's defaults.
        self.pool_classes_by_scheme = SSHPoolManager._pool_classes_by_scheme
        self.key_fn_by_scheme = SSHPoolManager._key_fn_by_scheme
class SSHAdapter(HTTPAdapter):
    """Specialization of requests transport adapter for SSH forwarded UNIX domain sockets."""

    def __init__(
        self,
        uri: str,
        pool_connections: int = 9,
        pool_maxsize: int = 10,
        max_retries: int = DEFAULT_RETRIES,
        pool_block: bool = DEFAULT_POOLBLOCK,
        **kwargs,
    ):
        """Initialize SSHAdapter.

        Args:
            uri: Full address of a Podman service including path to remote socket.
                Format, ssh://<user>@<host>[:port]/run/podman/podman.sock?secure=True
            pool_connections: The number of connection pools to cache. Should be at least one less
                than pool_maxsize.
            pool_maxsize: The maximum number of connections to save in the pool.
                OpenSSH default is 10.
            max_retries: The maximum number of retries each connection should attempt.
            pool_block: Whether the connection pool should block for connections.

        Keyword Args:
            timeout (float):
            identity (str): Optional path to ssh identity key

        Raises:
            FileNotFoundError: when the identity file does not exist.
        """
        self.poolmanager: Optional[SSHPoolManager] = None

        # Parsed for fail-fast side effects
        _ = urllib.parse.urlparse(uri)

        # Collected before super().__init__() because HTTPAdapter's
        # constructor invokes init_poolmanager(), which consumes them.
        self._pool_kwargs = {"uri": uri}

        if "identity" in kwargs:
            path = pathlib.Path(kwargs.get("identity"))
            if not path.exists():
                raise FileNotFoundError(f"Identity file '{path}' does not exist.")
            self._pool_kwargs["identity"] = str(path)

        if "timeout" in kwargs:
            self._pool_kwargs["timeout"] = kwargs.get("timeout")

        super().__init__(pool_connections, pool_maxsize, max_retries, pool_block)

    def init_poolmanager(self, connections, maxsize, block=DEFAULT_POOLBLOCK, **kwargs):
        """Initialize SSHPoolManager to be used by SSHAdapter.

        Args:
            connections: The number of urllib3 connection pools to cache.
            maxsize: The maximum number of connections to save in the pool.
            block: Block when no free connections are available.
        """
        pool_kwargs = kwargs.copy()
        # Fold in the SSH context captured in __init__ (uri, identity, timeout).
        pool_kwargs.update(self._pool_kwargs)
        self.poolmanager = SSHPoolManager(
            num_pools=connections, maxsize=maxsize, block=block, **pool_kwargs
        )

View File

@ -0,0 +1,133 @@
"""Utility functions for working with tarballs."""
import pathlib
import random
import shutil
import tarfile
import tempfile
from fnmatch import fnmatch
from typing import BinaryIO, List, Optional
import sys
def prepare_containerignore(anchor: str) -> List[str]:
    """Return the list of patterns for filenames to exclude.

    .containerignore takes precedence over .dockerignore.
    """
    for candidate in (".containerignore", ".dockerignore"):
        ignore_file = pathlib.Path(anchor) / candidate
        if not ignore_file.exists():
            continue

        with ignore_file.open(encoding='utf-8') as file:
            # Keep non-empty lines that are not comments, stripped of whitespace.
            return [
                stripped
                for stripped in (line.strip() for line in file)
                if stripped and not stripped.startswith("#")
            ]
    return []
def prepare_containerfile(anchor: str, dockerfile: str) -> str:
    """Ensure that Containerfile, or a proxy Containerfile is in context_dir.

    Args:
        anchor: Build context directory
        dockerfile: Path to Dockerfile/Containerfile

    Returns:
        path to Dockerfile/Containerfile in root of context directory
    """
    context_dir = pathlib.Path(anchor)
    candidate = pathlib.Path(dockerfile)

    # Already at the context root — use it directly.
    if candidate.parent.samefile(context_dir):
        return candidate.name

    # Otherwise copy it into the context under a randomized hidden name.
    proxy = context_dir / f".containerfile.{random.getrandbits(160):x}"
    shutil.copy2(candidate, proxy, follow_symlinks=False)
    return proxy.name
def create_tar(
anchor: str, name: str = None, exclude: List[str] = None, gzip: bool = False
) -> BinaryIO:
"""Create a tarfile from context_dir to send to Podman service.
Args:
anchor: Directory to use as root of tar file.
name: Name of tar file.
exclude: List of patterns for files to exclude from tar file.
gzip: When True, gzip compress tar file.
"""
def add_filter(info: tarfile.TarInfo) -> Optional[tarfile.TarInfo]:
"""Filter files targeted to be added to tarfile.
Args:
info: Information on the file targeted to be added
Returns:
None: if file is not to be added
TarInfo: when file is to be added. Modified as needed.
Notes:
exclude is captured from parent
"""
if not (info.isfile() or info.isdir() or info.issym()):
return None
if _exclude_matcher(info.name, exclude):
return None
# Workaround https://bugs.python.org/issue32713. Fixed in Python 3.7
if info.mtime < 0 or info.mtime > 8**11 - 1:
info.mtime = int(info.mtime)
# do not leak client information to service
info.uid = 0
info.uname = info.gname = "root"
if sys.platform == "win32":
info.mode = info.mode & 0o755 | 0o111
return info
if name is None:
# pylint: disable=consider-using-with
name = tempfile.NamedTemporaryFile(prefix="podman_context", suffix=".tar")
else:
name = pathlib.Path(name)
if exclude is None:
exclude = []
else:
exclude = exclude.copy()
# FIXME caller needs to add this...
# exclude.append(".dockerignore")
exclude.append(name.name)
mode = "w:gz" if gzip else "w"
with tarfile.open(name.name, mode) as tar:
tar.add(anchor, arcname="", recursive=True, filter=add_filter)
return open(name.name, "rb") # pylint: disable=consider-using-with
def _exclude_matcher(path: str, exclude: List[str]) -> bool:
"""Returns True if path matches an entry in exclude.
Note:
FIXME Not compatible, support !, **, etc
"""
if not exclude:
return False
for pattern in exclude:
if fnmatch(path, pattern):
return True
return False

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,181 @@
"""Specialized Transport Adapter for UNIX domain sockets."""
import collections
import functools
import http.client
import logging
import socket
from typing import Optional, Union
from urllib.parse import unquote, urlparse
try:
import urllib3
except ImportError:
from requests.packages import urllib3
from requests.adapters import DEFAULT_POOLBLOCK, DEFAULT_POOLSIZE, DEFAULT_RETRIES, HTTPAdapter
from ..errors import APIError
from .adapter_utils import _key_normalizer
logger = logging.getLogger("podman.uds_adapter")
class UDSSocket(socket.socket):
    """socket.socket subclass bound to a UNIX domain socket endpoint."""

    def __init__(self, uds: str):
        """Initialize UDSSocket.

        Args:
            uds: Full address of a Podman service UNIX domain socket.

        Examples:
            UDSSocket("http+unix:///run/podman/podman.sock")
        """
        super().__init__(socket.AF_UNIX, socket.SOCK_STREAM)
        self.uds = uds

    def connect(self, **kwargs):  # pylint: disable=unused-argument
        """Returns socket for UNIX domain socket."""
        # The filesystem path is carried in the netloc of the pseudo-URL.
        path = unquote(urlparse(self.uds).netloc)
        try:
            super().connect(path)
        except Exception as e:
            raise APIError(f"Unable to make connection to UDS '{path}'") from e
class UDSConnection(http.client.HTTPConnection):
    """Specialization of HTTPConnection to use a UNIX domain sockets."""

    def __init__(
        self,
        host: str,
        port: int,
        timeout: Union[float, urllib3.Timeout, None] = None,
        strict=False,
        **kwargs,  # pylint: disable=unused-argument
    ):
        """Initialize connection to UNIX domain socket for HTTP client.

        Args:
            host: Ignored.
            port: Ignored.
            timeout: Time to allow for operation.
            strict: Ignored.

        Keyword Args:
            uds: Full address of a Podman service UNIX domain socket. Required.
        """
        connection_kwargs = kwargs.copy()
        self.sock: Optional[socket.socket] = None

        if timeout is not None:
            if isinstance(timeout, urllib3.Timeout):
                # http.client wants a plain float; unpack the urllib3.Timeout.
                try:
                    connection_kwargs["timeout"] = float(timeout.total)
                except TypeError:
                    # Timeout.total may be None (only per-phase timeouts set);
                    # proceed without a socket timeout.
                    pass
            else:
                # BUG FIX: this assignment previously ran unconditionally,
                # clobbering the float extracted above with the Timeout
                # object itself, which sock.settimeout() cannot accept.
                connection_kwargs["timeout"] = timeout

        self.uds = connection_kwargs.pop("uds")
        super().__init__(host, **connection_kwargs)

    def connect(self) -> None:
        """Connect to Podman service via UNIX domain socket."""
        sock = UDSSocket(self.uds)
        sock.settimeout(self.timeout)
        sock.connect()
        self.sock = sock
class UDSConnectionPool(urllib3.HTTPConnectionPool):
    """Specialization of HTTPConnectionPool for holding UNIX domain sockets."""

    # Pool members are created as UDSConnection rather than urllib3's
    # default HTTPConnection, routing traffic over the configured socket.
    ConnectionCls = UDSConnection
class UDSPoolManager(urllib3.PoolManager):
    """Specialized PoolManager for tracking UNIX domain socket connections."""

    # pylint's special handling for namedtuple does not cover this usage
    # pylint: disable=invalid-name
    # Extend urllib3's pool key with the socket path so pools for distinct
    # UDS endpoints are never shared.
    # NOTE(review): reads the private urllib3.poolmanager.PoolKey._fields —
    # may break across urllib3 versions; confirm against pinned version.
    _PoolKey = collections.namedtuple(
        "_PoolKey", urllib3.poolmanager.PoolKey._fields + ("key_uds",)
    )

    # Map supported schemes to Pool Classes
    _pool_classes_by_scheme = {
        "http": UDSConnectionPool,
        "http+ssh": UDSConnectionPool,
    }

    # Map supported schemes to Pool Key index generator
    _key_fn_by_scheme = {
        "http": functools.partial(_key_normalizer, _PoolKey),
        "http+ssh": functools.partial(_key_normalizer, _PoolKey),
    }

    def __init__(self, num_pools=10, headers=None, **kwargs):
        """Initialize UDSPoolManager.

        Args:
            num_pools: Number of UDS Connection pools to maintain.
            headers: Additional headers to add to operations.
        """
        super().__init__(num_pools, headers, **kwargs)
        # Override the instance-level maps installed by urllib3 so that
        # pool construction and key generation use the UDS variants above.
        self.pool_classes_by_scheme = UDSPoolManager._pool_classes_by_scheme
        self.key_fn_by_scheme = UDSPoolManager._key_fn_by_scheme
class UDSAdapter(HTTPAdapter):
    """Specialization of requests transport adapter for UNIX domain sockets."""

    def __init__(
        self,
        uds: str,
        pool_connections=DEFAULT_POOLSIZE,
        pool_maxsize=DEFAULT_POOLSIZE,
        max_retries=DEFAULT_RETRIES,
        pool_block=DEFAULT_POOLBLOCK,
        **kwargs,
    ):
        """Initialize UDSAdapter.

        Args:
            uds: Full address of a Podman service UNIX domain socket.
                Format, http+unix:///run/podman/podman.sock
            max_retries: The maximum number of retries each connection should attempt.
            pool_block: Whether the connection pool should block for connections.
            pool_connections: The number of connection pools to cache.
            pool_maxsize: The maximum number of connections to save in the pool.

        Keyword Args:
            timeout (float): Time in seconds to wait for response

        Examples:
            requests.Session.mount(
                "http://", UDSAdapter("http+unix:///run/user/1000/podman/podman.sock"))
        """
        # Capture pool kwargs before calling super().__init__(), which
        # immediately invokes init_poolmanager() below and consumes them.
        self.poolmanager: Optional[UDSPoolManager] = None

        self._pool_kwargs = {"uds": uds}
        if "timeout" in kwargs:
            self._pool_kwargs["timeout"] = kwargs.get("timeout")

        super().__init__(pool_connections, pool_maxsize, max_retries, pool_block)

    def init_poolmanager(self, connections, maxsize, block=DEFAULT_POOLBLOCK, **kwargs):
        """Initialize UDS Pool Manager.

        Args:
            connections: The number of urllib3 connection pools to cache.
            maxsize: The maximum number of connections to save in the pool.
            block: Block when no free connections are available.
        """
        pool_kwargs = kwargs.copy()
        # Fold in the uds/timeout captured in __init__ so every pooled
        # connection targets the configured socket.
        pool_kwargs.update(self._pool_kwargs)
        self.poolmanager = UDSPoolManager(
            num_pools=connections, maxsize=maxsize, block=block, **pool_kwargs
        )