Skip to content

Commit

Permalink
fix: remove boto3 (#314)
Browse files Browse the repository at this point in the history
  • Loading branch information
djantzen authored Jan 15, 2025
1 parent 74e64ed commit a0abdec
Show file tree
Hide file tree
Showing 7 changed files with 130 additions and 568 deletions.
77 changes: 77 additions & 0 deletions plugin_runner/aws_headers.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,77 @@
# Shamelessly cribbed from https://charemza.name/blog/posts/aws/python/you-might-not-need-boto-3/

import datetime
import hashlib
import hmac
import urllib.parse


def aws_sig_v4_headers(
    access_key_id: str,
    secret_access_key: str,
    pre_auth_headers: dict[str, str],
    service: str,
    region: str,
    host: str,
    method: str,
    path: str,
    query: dict[str, str],
    payload: bytes,
) -> dict[str, str]:
    """Construct AWS Signature Version 4 signed headers for an HTTP request.

    Implements the SigV4 signing process without boto3: canonical request ->
    string-to-sign -> derived signing key -> signature.

    Args:
        access_key_id: AWS access key ID placed in the Credential scope.
        secret_access_key: AWS secret key used to derive the signing key
            (never transmitted).
        pre_auth_headers: Extra request headers to include in the signature;
            returned unchanged alongside the generated headers.
        service: AWS service identifier (e.g. "s3").
        region: AWS region (e.g. "us-west-2").
        host: Request host header value; part of the signed headers.
        method: HTTP method (e.g. "GET").
        path: URL path of the request (unquoted; quoted internally).
        query: Query-string parameters (unquoted; quoted internally).
        payload: Raw request body; its SHA-256 is bound into the signature,
            so the exact same bytes must be sent with the request.

    Returns:
        Headers to attach to the request: the original pre_auth_headers plus
        "x-amz-date", "x-amz-content-sha256", and "Authorization".
    """
    algorithm = "AWS4-HMAC-SHA256"
    # SigV4 requires UTC timestamps. datetime.utcnow() is deprecated
    # (3.12+) and naive; an aware UTC datetime formats identically here.
    now = datetime.datetime.now(datetime.timezone.utc)
    amzdate = now.strftime("%Y%m%dT%H%M%SZ")
    datestamp = now.strftime("%Y%m%d")
    payload_hash = hashlib.sha256(payload).hexdigest()
    credential_scope = f"{datestamp}/{region}/{service}/aws4_request"

    # Canonical form: lowercase header names, internal runs of whitespace
    # collapsed to single spaces (per the SigV4 canonicalization rules).
    pre_auth_headers_lower = {
        header_key.lower(): " ".join(header_value.split())
        for header_key, header_value in pre_auth_headers.items()
    }
    required_headers = {
        "host": host,
        "x-amz-content-sha256": payload_hash,
        "x-amz-date": amzdate,
    }
    headers = {**pre_auth_headers_lower, **required_headers}
    header_keys = sorted(headers.keys())
    signed_headers = ";".join(header_keys)

    def signature() -> str:
        def canonical_request() -> str:
            # "/" and "~" stay unescaped in the canonical URI; everything
            # else is percent-encoded.
            canonical_uri = urllib.parse.quote(path, safe="/~")
            # Query params are quoted then sorted by (key, value) pairs, as
            # the canonical querystring must be in ascending order.
            quoted_query = sorted(
                (urllib.parse.quote(key, safe="~"), urllib.parse.quote(value, safe="~"))
                for key, value in query.items()
            )
            canonical_querystring = "&".join(f"{key}={value}" for key, value in quoted_query)
            canonical_headers = "".join(f"{key}:{headers[key]}\n" for key in header_keys)

            return (
                f"{method}\n{canonical_uri}\n{canonical_querystring}\n"
                + f"{canonical_headers}\n{signed_headers}\n{payload_hash}"
            )

        def sign(key: bytes, msg: str) -> bytes:
            return hmac.new(key, msg.encode("utf-8"), hashlib.sha256).digest()

        string_to_sign = (
            f"{algorithm}\n{amzdate}\n{credential_scope}\n"
            + hashlib.sha256(canonical_request().encode("utf-8")).hexdigest()
        )

        # Derive the signing key by chaining HMACs over date, region,
        # service, and the fixed "aws4_request" terminator.
        date_key = sign(("AWS4" + secret_access_key).encode("utf-8"), datestamp)
        region_key = sign(date_key, region)
        service_key = sign(region_key, service)
        request_key = sign(service_key, "aws4_request")
        return sign(request_key, string_to_sign).hex()

    # Return the caller's original (un-canonicalized) headers plus the
    # generated SigV4 headers; the signature covers the canonical forms.
    return {
        **pre_auth_headers,
        "x-amz-date": amzdate,
        "x-amz-content-sha256": payload_hash,
        "Authorization": f"{algorithm} Credential={access_key_id}/{credential_scope}, "
        f"SignedHeaders={signed_headers}, Signature=" + signature(),
    }
32 changes: 25 additions & 7 deletions plugin_runner/plugin_installer.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,12 +9,13 @@
from typing import Any, TypedDict
from urllib import parse

import boto3
import psycopg
import requests
from psycopg import Connection
from psycopg.rows import dict_row

import settings
from plugin_runner.aws_headers import aws_sig_v4_headers
from plugin_runner.exceptions import InvalidPluginFormat, PluginInstallationError

# Plugin "packages" include this prefix in the database record for the plugin and the S3 bucket key.
Expand Down Expand Up @@ -98,17 +99,34 @@ def _extract_rows_to_dict(rows: list) -> dict[str, PluginAttributes]:
@contextmanager
def download_plugin(plugin_package: str) -> Generator:
"""Download the plugin package from the S3 bucket."""
s3 = boto3.client("s3")
method = "GET"
host = f"s3-{settings.AWS_REGION}.amazonaws.com"
bucket = settings.MEDIA_S3_BUCKET_NAME
customer_identifier = settings.CUSTOMER_IDENTIFIER
path = f"/{bucket}/{customer_identifier}/{plugin_package}"
payload = b"This is required for the AWS headers because it is part of the signature"
pre_auth_headers: dict[str, str] = {}
query: dict[str, str] = {}
headers = aws_sig_v4_headers(
settings.AWS_ACCESS_KEY_ID,
settings.AWS_SECRET_ACCESS_KEY,
pre_auth_headers,
"s3",
settings.AWS_REGION,
host,
method,
path,
query,
payload,
)

with tempfile.TemporaryDirectory() as temp_dir:
prefix_dir = Path(temp_dir) / UPLOAD_TO_PREFIX
prefix_dir.mkdir() # create an intermediate directory reflecting the prefix
download_path = Path(temp_dir) / plugin_package
with open(download_path, "wb") as download_file:
s3.download_fileobj(
"canvas-client-media",
f"{settings.CUSTOMER_IDENTIFIER}/{plugin_package}",
download_file,
)
response = requests.request(method=method, url=f"https://{host}{path}", headers=headers)
download_file.write(response.content)
yield download_path


Expand Down
22 changes: 10 additions & 12 deletions plugin_runner/tests/test_plugin_installer.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,11 +2,10 @@
import tarfile
import tempfile
from pathlib import Path
from unittest.mock import MagicMock
from unittest.mock import MagicMock, patch

from pytest_mock import MockerFixture

import settings
from plugin_runner.plugin_installer import (
PluginAttributes,
_extract_rows_to_dict,
Expand Down Expand Up @@ -106,13 +105,12 @@ def test_plugin_installation_from_tarball(mocker: MockerFixture) -> None:

def test_download(mocker: MockerFixture) -> None:
"""Test that the plugin package can be written to disk, mocking out S3."""
mock_s3_client = MagicMock()
mocker.patch("boto3.client", return_value=mock_s3_client)

plugin_package = "plugins/plugin1.tar.gz"
with download_plugin(plugin_package) as plugin_path:
assert plugin_path.exists()

mock_s3_client.download_fileobj.assert_called_once_with(
"canvas-client-media", f"{settings.CUSTOMER_IDENTIFIER}/{plugin_package}", mocker.ANY
)
mock_response = MagicMock()
mock_response.status_code = 200
mock_response.content = b"some content in a file"
with patch("requests.request", return_value=mock_response) as mock_request:
plugin_package = "plugins/plugin1.tar.gz"
with download_plugin(plugin_package) as plugin_path:
assert plugin_path.exists()
assert plugin_path.read_bytes() == b"some content in a file"
mock_request.assert_called_once()
14 changes: 13 additions & 1 deletion plugin_runner/tests/test_sandbox.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,11 @@
result = os.listdir('.')
"""

CODE_WITH_PLUGIN_RUNNER_SETTING_IMPORT = """
import settings
result = settings.AWS_SECRET_ACCESS_KEY
"""

CODE_WITH_ALLOWED_IMPORT = """
import json
result = json.dumps({"key": "value"})
Expand All @@ -39,7 +44,14 @@ def test_valid_code_execution() -> None:
def test_disallowed_import() -> None:
"""Test that restricted imports are not allowed."""
sandbox = Sandbox(CODE_WITH_RESTRICTED_IMPORT)
with pytest.raises(ImportError, match="os' is not an allowed import."):
with pytest.raises(ImportError, match="'os' is not an allowed import."):
sandbox.execute()


def test_plugin_runner_settings_import() -> None:
"""Test that imports of plugin runner settings are not allowed."""
sandbox = Sandbox(CODE_WITH_PLUGIN_RUNNER_SETTING_IMPORT)
with pytest.raises(ImportError, match="'settings' is not an allowed import."):
sandbox.execute()


Expand Down
547 changes: 1 addition & 546 deletions poetry.lock

Large diffs are not rendered by default.

2 changes: 0 additions & 2 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -50,8 +50,6 @@ readme = "README.md"
version = "0.13.3"

[tool.poetry.dependencies]
boto3 = "^1.35.88"
boto3-stubs = {extras = ["s3"], version = "^1.35.88"}
cookiecutter = "*"
cron-converter = "^1.2.1"
deprecation = "^2.1.0"
Expand Down
4 changes: 4 additions & 0 deletions settings.py
Original file line number Diff line number Diff line change
Expand Up @@ -49,6 +49,10 @@

DATABASES = {"default": database_dict}

AWS_ACCESS_KEY_ID = os.getenv("AWS_ACCESS_KEY_ID", "")
AWS_SECRET_ACCESS_KEY = os.getenv("AWS_SECRET_ACCESS_KEY", "")
AWS_REGION = os.getenv("AWS_REGION", "us-west-2")
MEDIA_S3_BUCKET_NAME = os.getenv("MEDIA_S3_BUCKET_NAME", "canvas-client-media")

PLUGIN_RUNNER_SIGNING_KEY = os.getenv("PLUGIN_RUNNER_SIGNING_KEY", "")

Expand Down

0 comments on commit a0abdec

Please sign in to comment.