Skip to content

Commit 334346a

Browse files
author
Pravali Uppugunduri
committed
fix: Security fixes for Triton HMAC key exposure and missing integrity check (v2)
Backport of v3 security fixes for P400136088 and V2146375387. (1) check_integrity.py: switch from HMAC-SHA256 to plain SHA-256; remove generate_secret_key and the env-var dependency. (2) triton/model.py: add an integrity check in initialize() BEFORE cloudpickle deserialization. (3) triton/server.py: remove SAGEMAKER_SERVE_SECRET_KEY from container environment variables. (4) triton/triton_builder.py: remove the hardcoded dummy secret key for the ONNX path; rename _hmac_signing to _compute_integrity_hash; use plain SHA-256. (5) All prepare.py files (torchserve, mms, tf_serving, smd): remove generate_secret_key usage; switch to plain SHA-256.
1 parent e5f349c commit 334346a

File tree

19 files changed

+22
-75
lines changed

19 files changed

+22
-75
lines changed

src/sagemaker/serve/model_server/multi_model_server/prepare.py

Lines changed: 1 addition & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -26,7 +26,6 @@
2626
from sagemaker.serve.spec.inference_spec import InferenceSpec
2727
from sagemaker.serve.detector.dependency_manager import capture_dependencies
2828
from sagemaker.serve.validations.check_integrity import (
29-
generate_secret_key,
3029
compute_hash,
3130
)
3231
from sagemaker.remote_function.core.serialization import _MetaData
@@ -120,11 +119,9 @@ def prepare_for_mms(
120119

121120
capture_dependencies(dependencies=dependencies, work_dir=code_dir)
122121

123-
secret_key = generate_secret_key()
124122
with open(str(code_dir.joinpath("serve.pkl")), "rb") as f:
125123
buffer = f.read()
126-
hash_value = compute_hash(buffer=buffer, secret_key=secret_key)
124+
hash_value = compute_hash(buffer=buffer)
127125
with open(str(code_dir.joinpath("metadata.json")), "wb") as metadata:
128126
metadata.write(_MetaData(hash_value).to_json())
129127

130-
return secret_key

src/sagemaker/serve/model_server/multi_model_server/server.py

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -36,7 +36,6 @@ def _start_serving(
3636
env = {
3737
"SAGEMAKER_SUBMIT_DIRECTORY": "/opt/ml/model/code",
3838
"SAGEMAKER_PROGRAM": "inference.py",
39-
"SAGEMAKER_SERVE_SECRET_KEY": secret_key,
4039
"LOCAL_PYTHON": platform.python_version(),
4140
}
4241
if env_vars:
@@ -145,7 +144,6 @@ def _upload_server_artifacts(
145144
env_vars = {
146145
"SAGEMAKER_SUBMIT_DIRECTORY": "/opt/ml/model/code",
147146
"SAGEMAKER_PROGRAM": "inference.py",
148-
"SAGEMAKER_SERVE_SECRET_KEY": secret_key,
149147
"SAGEMAKER_REGION": sagemaker_session.boto_region_name,
150148
"SAGEMAKER_CONTAINER_LOG_LEVEL": "10",
151149
"LOCAL_PYTHON": platform.python_version(),

src/sagemaker/serve/model_server/smd/prepare.py

Lines changed: 1 addition & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,6 @@
1212
from sagemaker.serve.spec.inference_spec import InferenceSpec
1313
from sagemaker.serve.detector.dependency_manager import capture_dependencies
1414
from sagemaker.serve.validations.check_integrity import (
15-
generate_secret_key,
1615
compute_hash,
1716
)
1817
from sagemaker.remote_function.core.serialization import _MetaData
@@ -64,11 +63,9 @@ def prepare_for_smd(
6463

6564
capture_dependencies(dependencies=dependencies, work_dir=code_dir)
6665

67-
secret_key = generate_secret_key()
6866
with open(str(code_dir.joinpath("serve.pkl")), "rb") as f:
6967
buffer = f.read()
70-
hash_value = compute_hash(buffer=buffer, secret_key=secret_key)
68+
hash_value = compute_hash(buffer=buffer)
7169
with open(str(code_dir.joinpath("metadata.json")), "wb") as metadata:
7270
metadata.write(_MetaData(hash_value).to_json())
7371

74-
return secret_key

src/sagemaker/serve/model_server/smd/server.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -53,7 +53,6 @@ def _upload_smd_artifacts(
5353
"SAGEMAKER_INFERENCE_CODE_DIRECTORY": "/opt/ml/model/code",
5454
"SAGEMAKER_INFERENCE_CODE": "inference.handler",
5555
"SAGEMAKER_REGION": sagemaker_session.boto_region_name,
56-
"SAGEMAKER_SERVE_SECRET_KEY": secret_key,
5756
"LOCAL_PYTHON": platform.python_version(),
5857
}
5958
return s3_upload_path, env_vars

src/sagemaker/serve/model_server/tei/server.py

Lines changed: 0 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -39,9 +39,6 @@ def _start_tei_serving(
3939
secret_key: Secret key to use for authentication
4040
env_vars: Environment variables to set
4141
"""
42-
if env_vars and secret_key:
43-
env_vars["SAGEMAKER_SERVE_SECRET_KEY"] = secret_key
44-
4542
self.container = client.containers.run(
4643
image,
4744
shm_size=_SHM_SIZE,

src/sagemaker/serve/model_server/tensorflow_serving/prepare.py

Lines changed: 1 addition & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -11,7 +11,6 @@
1111
)
1212
from sagemaker.serve.detector.dependency_manager import capture_dependencies
1313
from sagemaker.serve.validations.check_integrity import (
14-
generate_secret_key,
1514
compute_hash,
1615
)
1716
from sagemaker.remote_function.core.serialization import _MetaData
@@ -57,11 +56,9 @@ def prepare_for_tf_serving(
5756
raise ValueError("SavedModel is not found for Tensorflow or Keras flavor.")
5857
_move_contents(src_dir=mlflow_saved_model_dir, dest_dir=saved_model_bundle_dir)
5958

60-
secret_key = generate_secret_key()
6159
with open(str(code_dir.joinpath("serve.pkl")), "rb") as f:
6260
buffer = f.read()
63-
hash_value = compute_hash(buffer=buffer, secret_key=secret_key)
61+
hash_value = compute_hash(buffer=buffer)
6462
with open(str(code_dir.joinpath("metadata.json")), "wb") as metadata:
6563
metadata.write(_MetaData(hash_value).to_json())
6664

67-
return secret_key

src/sagemaker/serve/model_server/tensorflow_serving/server.py

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -48,7 +48,6 @@ def _start_tensorflow_serving(
4848
environment={
4949
"SAGEMAKER_SUBMIT_DIRECTORY": "/opt/ml/model/code",
5050
"SAGEMAKER_PROGRAM": "inference.py",
51-
"SAGEMAKER_SERVE_SECRET_KEY": secret_key,
5251
"LOCAL_PYTHON": platform.python_version(),
5352
**env_vars,
5453
},
@@ -142,7 +141,6 @@ def _upload_tensorflow_serving_artifacts(
142141
"SAGEMAKER_PROGRAM": "inference.py",
143142
"SAGEMAKER_REGION": sagemaker_session.boto_region_name,
144143
"SAGEMAKER_CONTAINER_LOG_LEVEL": "10",
145-
"SAGEMAKER_SERVE_SECRET_KEY": secret_key,
146144
"LOCAL_PYTHON": platform.python_version(),
147145
}
148146
return s3_upload_path, env_vars

src/sagemaker/serve/model_server/torchserve/prepare.py

Lines changed: 1 addition & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -13,7 +13,6 @@
1313
from sagemaker.serve.spec.inference_spec import InferenceSpec
1414
from sagemaker.serve.detector.dependency_manager import capture_dependencies
1515
from sagemaker.serve.validations.check_integrity import (
16-
generate_secret_key,
1716
compute_hash,
1817
)
1918
from sagemaker.serve.validations.check_image_uri import is_1p_image_uri
@@ -69,11 +68,9 @@ def prepare_for_torchserve(
6968

7069
capture_dependencies(dependencies=dependencies, work_dir=code_dir)
7170

72-
secret_key = generate_secret_key()
7371
with open(str(code_dir.joinpath("serve.pkl")), "rb") as f:
7472
buffer = f.read()
75-
hash_value = compute_hash(buffer=buffer, secret_key=secret_key)
73+
hash_value = compute_hash(buffer=buffer)
7674
with open(str(code_dir.joinpath("metadata.json")), "wb") as metadata:
7775
metadata.write(_MetaData(hash_value).to_json())
7876

79-
return secret_key

src/sagemaker/serve/model_server/torchserve/server.py

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -40,7 +40,6 @@ def _start_torch_serve(
4040
environment={
4141
"SAGEMAKER_SUBMIT_DIRECTORY": "/opt/ml/model/code",
4242
"SAGEMAKER_PROGRAM": "inference.py",
43-
"SAGEMAKER_SERVE_SECRET_KEY": secret_key,
4443
"LOCAL_PYTHON": platform.python_version(),
4544
**env_vars,
4645
},
@@ -116,7 +115,6 @@ def _upload_torchserve_artifacts(
116115
"SAGEMAKER_PROGRAM": "inference.py",
117116
"SAGEMAKER_REGION": sagemaker_session.boto_region_name,
118117
"SAGEMAKER_CONTAINER_LOG_LEVEL": "10",
119-
"SAGEMAKER_SERVE_SECRET_KEY": secret_key,
120118
"LOCAL_PYTHON": platform.python_version(),
121119
}
122120
return s3_upload_path, env_vars

src/sagemaker/serve/model_server/triton/model.py

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -26,10 +26,12 @@ def auto_complete_config(auto_complete_model_config):
2626
def initialize(self, args: dict) -> None:
2727
"""Placeholder docstring"""
2828
serve_path = Path(TRITON_MODEL_DIR).joinpath("serve.pkl")
29+
metadata_path = Path(TRITON_MODEL_DIR).joinpath("metadata.json")
2930
with open(str(serve_path), mode="rb") as f:
30-
inference_spec, schema_builder = cloudpickle.load(f)
31+
buffer = f.read()
3132

32-
# TODO: HMAC signing for integrity check
33+
perform_integrity_check(buffer=buffer, metadata_path=metadata_path)
34+
inference_spec, schema_builder = cloudpickle.loads(buffer)
3335

3436
self.inference_spec = inference_spec
3537
self.schema_builder = schema_builder

0 commit comments

Comments (0)