-
Notifications
You must be signed in to change notification settings - Fork 20
fix: preserve URL query parameters in storage flash for signed URLs #435
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.
Already on GitHub? Sign in to your account
base: main
Are you sure you want to change the base?
Changes from all commits
a63c8cc
95b57fc
28e2314
15d3321
8ff6181
9ce59ff
331787b
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -10,15 +10,21 @@ | |
| from concurrent.futures import CancelledError | ||
| from contextlib import contextmanager | ||
| from dataclasses import dataclass | ||
| from pathlib import Path, PosixPath | ||
| from pathlib import Path | ||
| from queue import Queue | ||
| from urllib.parse import urlparse | ||
|
|
||
| import click | ||
| import pexpect | ||
| import requests | ||
| from jumpstarter_driver_composite.client import CompositeClient | ||
| from jumpstarter_driver_opendal.client import FlasherClient, OpendalClient, operator_for_path | ||
| from jumpstarter_driver_opendal.client import ( | ||
| FlasherClient, | ||
| OpendalClient, | ||
| clean_filename, | ||
| operator_for_path, | ||
| path_with_query, | ||
| ) | ||
| from jumpstarter_driver_opendal.common import PathBuf | ||
| from jumpstarter_driver_pyserial.client import Console | ||
| from opendal import Metadata, Operator | ||
|
|
@@ -167,10 +173,10 @@ def flash( # noqa: C901 | |
| "http", root="/", endpoint=f"{parsed.scheme}://{parsed.netloc}", token=bearer_token | ||
| ) | ||
| operator_scheme = "http" | ||
| path = Path(parsed.path) | ||
| path = path_with_query(parsed) | ||
| else: | ||
| path, operator, operator_scheme = operator_for_path(path) | ||
| image_url = self.http.get_url() + "/" + path.name | ||
| image_url = self.http.get_url() + "/" + self._filename(path) | ||
|
|
||
| # start counting time for the flash operation | ||
| start_time = time.time() | ||
|
|
@@ -966,9 +972,9 @@ def _transfer_bg_thread( | |
| original_url: Original URL for HTTP fallback | ||
| headers: HTTP headers for requests | ||
| """ | ||
| self.logger.info(f"Writing image to storage in the background: {src_path}") | ||
| filename = self._filename(src_path) | ||
| self.logger.info(f"Writing image to storage in the background: {filename}") | ||
| try: | ||
| filename = Path(src_path).name if isinstance(src_path, (str, os.PathLike)) else src_path.name | ||
|
|
||
| if src_operator_scheme == "fs": | ||
| file_hash = self._sha256_file(src_operator, src_path) | ||
|
|
@@ -1019,7 +1025,7 @@ def _create_metadata_and_json( | |
| ) -> tuple[Metadata | None, str]: | ||
| """Create a metadata json string from a metadata object""" | ||
| metadata = None | ||
| metadata_dict = {"path": str(src_path)} | ||
| metadata_dict = {"path": clean_filename(src_path)} | ||
|
|
||
| try: | ||
| metadata = src_operator.stat(src_path) | ||
|
|
@@ -1088,8 +1094,8 @@ def dump( | |
| raise NotImplementedError("Dump is not implemented for this driver yet") | ||
|
|
||
| def _filename(self, path: PathBuf) -> str: | ||
| """Extract filename from url or path""" | ||
| if path.startswith("oci://"): | ||
| """Extract filename from url or path, stripping any query parameters""" | ||
| if isinstance(path, str) and path.startswith("oci://"): | ||
| oci_path = path[6:] # Remove "oci://" prefix | ||
| if ":" in oci_path: | ||
| repository, tag = oci_path.rsplit(":", 1) | ||
|
|
@@ -1098,10 +1104,8 @@ def _filename(self, path: PathBuf) -> str: | |
| else: | ||
| repo_name = oci_path.split("/")[-1] if "/" in oci_path else oci_path | ||
| return repo_name | ||
| elif path.startswith(("http://", "https://")): | ||
| return urlparse(path).path.split("/")[-1] | ||
| else: | ||
| return Path(path).name | ||
| return clean_filename(path) | ||
|
|
||
| def _upload_artifact(self, storage, path: PathBuf, operator: Operator): | ||
| """Upload artifact to storage""" | ||
|
|
@@ -1636,17 +1640,12 @@ def _get_decompression_command(filename_or_url) -> str: | |
| Determine the appropriate decompression command based on file extension | ||
|
|
||
| Args: | ||
| filename (str): Name of the file to check | ||
| filename_or_url (str): Name of the file or URL to check | ||
|
|
||
| Returns: | ||
| str: Decompression command ('zcat', 'xzcat', or 'cat' for uncompressed) | ||
| str: Decompression command ('zcat |', 'xzcat |', or '' for uncompressed) | ||
| """ | ||
| if type(filename_or_url) is PosixPath: | ||
| filename = filename_or_url.name | ||
| elif filename_or_url.startswith(("http://", "https://")): | ||
| filename = urlparse(filename_or_url).path.split("/")[-1] | ||
|
|
||
| filename = filename.lower() | ||
| filename = clean_filename(filename_or_url).lower() | ||
| if filename.endswith((".gz", ".gzip")): | ||
| return "zcat |" | ||
| elif filename.endswith(".xz"): | ||
|
Comment on lines
+1648
to
1651
Member
There was a problem hiding this comment. Choose a reason for hiding this comment The reason will be displayed to describe this comment to others. Learn more. [MEDIUM] The implementation handles `.gz`/`.gzip` and `.xz` extensions but not `.zst`. Suggested fix: add a `.zst` branch that returns `zstdcat |`. AI-generated, human reviewed
Contributor
Author
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. Acknowledged. Adding `.zst` support was considered for this change.
Contributor
Author
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. Good catch on the missing .zst extension, but this is a pre-existing gap that was present before this PR. Adding .zst support would change the scope of this fix beyond the original issue (preserving URL query parameters for signed URLs). I would suggest tracking this as a separate issue/PR to keep changes focused. |
||
|
|
||
Uh oh!
There was an error while loading. Please reload this page.