Merge pull request #13 from dryan/build-4.1.2
πŸ› fix path prefixing when there's a dot in the path; πŸ› fixing a bug w…
dryan authored Sep 30, 2021
2 parents ddd7a7d + 146b2de commit b30602a
Showing 2 changed files with 27 additions and 24 deletions.
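Context for the change: the old code stripped the local sync root from each file path with a regex compiled from the raw path string (the removed line prefix_regex = re.compile(r"^{}".format(local_path))), so any "." in the root acted as a regex wildcard rather than a literal dot. The diff replaces this with pathlib.Path.relative_to. A minimal sketch of the failure mode and the fix, using hypothetical paths:

import pathlib
import re

local_path = "./files"          # hypothetical sync root; note the "."
stray_file = "a/files/app.css"  # not actually under ./files

# Old approach: "." in the pattern matches any character, so
# r"^./files" also matches "a/files" and strips a prefix it should not.
prefix_regex = re.compile(r"^{}".format(local_path))
print(prefix_regex.sub("", stray_file))  # -> "/app.css" (wrongly stripped)

# New approach: a pure path comparison with no regex semantics;
# a file outside the root raises ValueError instead of mis-stripping.
print(pathlib.Path("./files/app.css").relative_to("./files"))  # -> app.css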
16 changes: 9 additions & 7 deletions d3ploy/d3ploy.py
@@ -7,7 +7,6 @@
 import mimetypes
 import os
 import pathlib
-import re
 import signal
 import sys
 import threading
@@ -25,7 +24,7 @@
 from colorama import init as colorama_init
 from tqdm import tqdm
 
-VERSION = "4.1.0"
+VERSION = "4.1.2"
 
 VALID_ACLS = [
     "private",
@@ -192,11 +191,11 @@ def check_for_updates(
 
 # this is where the actual upload happens, called by sync_files
 def upload_file(
-    file_name: typing.Union[str, pathlib.Path],
+    file_name: pathlib.Path,
     bucket_name: str,
     s3: AWSServiceResource,
     bucket_path: str,
-    prefix_regex: re.Pattern,
+    prefix: pathlib.Path,
     acl: typing.Optional[str] = None,
     force: bool = False,
     dry_run: bool = False,
@@ -210,8 +209,11 @@
     caches = {}
     updated = 0
 
+    if not isinstance(file_name, pathlib.Path):
+        file_name = pathlib.Path(file_name)
+
     key_name = "/".join(
-        [bucket_path.rstrip("/"), prefix_regex.sub("", str(file_name)).lstrip("/")]
+        [bucket_path.rstrip("/"), str(file_name.relative_to(prefix)).lstrip("/")]
     ).lstrip("/")
     if key_exists(s3, bucket_name, key_name):
         s3_obj = s3.Object(bucket_name, key_name)
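The replacement key construction in the hunk above resolves the file against the sync root with relative_to, then joins the result onto the bucket path. A standalone sketch with hypothetical values:

import pathlib

bucket_path = "static/"                             # hypothetical bucket prefix
prefix = pathlib.Path("./files")                    # hypothetical local sync root
file_name = pathlib.Path("./files/css/sample.css")  # file being uploaded

key_name = "/".join(
    [bucket_path.rstrip("/"), str(file_name.relative_to(prefix)).lstrip("/")]
).lstrip("/")
print(key_name)  # -> static/css/sample.css
# With an empty bucket_path, the final lstrip("/") trims the leading
# slash and the key is just "css/sample.css".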
@@ -311,6 +313,7 @@ def determine_files_to_sync(
         excludes = []
     if isinstance(excludes, str):
         excludes = [excludes]
+    excludes.append(".gitignore")
     if not isinstance(local_path, pathlib.Path):
         local_path = pathlib.Path(local_path)
     gitignore_patterns = list(map(pathspec.patterns.GitWildMatchPattern, excludes))
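This hunk also shows that excludes are compiled through pathspec's gitignore-style matcher, and .gitignore itself is now always excluded. A small sketch of how that matching behaves, with hypothetical patterns:

import pathspec

# Hypothetical exclude list; ".gitignore" is now always appended by d3ploy.
excludes = ["*.log", ".gitignore"]
spec = pathspec.PathSpec(map(pathspec.patterns.GitWildMatchPattern, excludes))

print(spec.match_file("debug.log"))       # True  -- excluded from sync
print(spec.match_file(".gitignore"))      # True  -- excluded from sync
print(spec.match_file("css/sample.css"))  # False -- synced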
@@ -444,7 +447,6 @@ def sync_files(
         else:
             raise e
 
-    prefix_regex = re.compile(r"^{}".format(local_path))
     files = determine_files_to_sync(local_path, excludes, gitignore=gitignore)
     deleted = 0
     key_names = []
@@ -458,7 +460,7 @@
         for fn in files:
             job = executor.submit(
                 upload_file,
-                *(fn, bucket_name, s3, bucket_path, prefix_regex),
+                *(fn, bucket_name, s3, bucket_path, local_path),
                 **{
                     "acl": acl,
                     "force": force,
35 changes: 18 additions & 17 deletions tests/test.py
@@ -135,6 +135,7 @@ def relative_path(p: str, convert=False) -> typing.Union[str, pathlib.Path]:
 
 
 PREFIX_REGEX = re.compile(r"^{}".format(relative_path("./files")))
+PREFIX_PATH = pathlib.Path(relative_path("./files"))
 
 
 # we need to remove .DS_Store files before testing on macOS to keep tests consistent on
@@ -434,7 +435,7 @@ def test_bucket_path(self):
                self.bucket.name,
                self.s3,
                prefix,
-               PREFIX_REGEX,
+               PREFIX_PATH,
            )
            self.assertEqual(
                result[0],
@@ -454,7 +455,7 @@ def test_acls(self):
                self.bucket.name,
                self.s3,
                "test-acl-{}".format(acl),
-               PREFIX_REGEX,
+               PREFIX_PATH,
                acl=acl,
            )
            object_acl = self.s3.ObjectAcl(self.bucket.name, result[0])
@@ -475,14 +476,14 @@ def test_force_update_file(self):
            self.bucket.name,
            self.s3,
            "test-force-upload",
-           PREFIX_REGEX,
+           PREFIX_PATH,
        )
        result = d3ploy.upload_file(
            relative_path("./files/css/sample.css"),
            self.bucket.name,
            self.s3,
            "test-force-upload",
-           PREFIX_REGEX,
+           PREFIX_PATH,
            force=True,
        )
        self.assertTrue(
@@ -500,7 +501,7 @@ def test_md5_hashing(self):
            self.bucket.name,
            self.s3,
            "test-md5-hashing",
-           PREFIX_REGEX,
+           PREFIX_PATH,
        )
        self.assertTrue(s3_object_exists(self.bucket.name, result_1[0]))
        s3_object_1_hash = self.s3.Object(self.bucket.name, result_1[0]).metadata.get(
@@ -516,7 +517,7 @@
            self.bucket.name,
            self.s3,
            "test-md5-hashing",
-           PREFIX_REGEX,
+           PREFIX_PATH,
        )
        self.assertTrue(s3_object_exists(self.bucket.name, result_2[0]))
        s3_object_2_hash = self.s3.Object(self.bucket.name, result_2[0]).metadata.get(
@@ -541,7 +542,7 @@
            self.bucket.name,
            self.s3,
            "test-md5-hashing",
-           PREFIX_REGEX,
+           PREFIX_PATH,
        )
        s3_object_3_hash = self.s3.Object(self.bucket.name, result_3[0]).metadata.get(
            "d3ploy-hash"
@@ -563,7 +564,7 @@ def test_dry_run(self):
            self.bucket.name,
            self.s3,
            "test-dry-run",
-           PREFIX_REGEX,
+           PREFIX_PATH,
            dry_run=True,
        )
        self.assertEqual(
@@ -582,7 +583,7 @@ def test_charset(self):
                self.bucket.name,
                self.s3,
                "test-charset-{}".format(charset),
-               PREFIX_REGEX,
+               PREFIX_PATH,
                charset=charset,
            )
            s3_obj = self.s3.Object(self.bucket.name, result[0])
@@ -604,7 +605,7 @@ def test_caches(self):
                self.bucket.name,
                self.s3,
                "test-cache-{:d}".format(expiration),
-               PREFIX_REGEX,
+               PREFIX_PATH,
                caches={"text/css": expiration},
            )
            s3_obj = self.s3.Object(self.bucket.name, response[0])
@@ -634,7 +635,7 @@ def test_mimetypes(self):
            self.bucket.name,
            self.s3,
            "test-mimetypes",
-           PREFIX_REGEX,
+           PREFIX_PATH,
        )
        self.assertTrue(s3_object_exists(self.bucket.name, result[0]))
        s3_object = self.s3.Object(self.bucket.name, result[0])
@@ -653,7 +654,7 @@ def test_upload_with_killswitch_flipped(self, *args):
            self.bucket.name,
            self.s3,
            "test-upload-killswitch",
-           PREFIX_REGEX,
+           PREFIX_PATH,
        )
        self.assertTupleEqual(
            result,
@@ -675,7 +676,7 @@ def setUp(self):
            self.bucket.name,
            self.s3,
            "test-delete-dry-run",
-           PREFIX_REGEX,
+           PREFIX_PATH,
        )
        self.assertEqual(
            upload_result[1],
@@ -987,7 +988,7 @@ def test_deleting_files(self):
            self.bucket.name,
            self.s3,
            "sync_files/test-deleting",
-           PREFIX_REGEX,
+           PREFIX_PATH,
        )
        self.destroy_test_file()
        self.assertFalse(os.path.exists(self.test_file_name))
@@ -1012,7 +1013,7 @@ def test_deleting_files_single_process(self):
            self.bucket.name,
            self.s3,
            "sync_files/test-deleting-single-process",
-           PREFIX_REGEX,
+           PREFIX_PATH,
        )
        self.destroy_test_file()
        self.assertFalse(os.path.exists(self.test_file_name))
@@ -1042,7 +1043,7 @@ def test_deleting_files_with_confirmation(self, *args):
            self.bucket.name,
            self.s3,
            "sync_files/test-deleting",
-           PREFIX_REGEX,
+           PREFIX_PATH,
        )
        self.destroy_test_file()
        self.assertTrue(s3_object_exists(self.bucket.name, uploaded_file[0]))
@@ -1068,7 +1069,7 @@ def test_deleting_files_with_confirmation_denied(self, *args):
            self.bucket.name,
            self.s3,
            "sync_files/test-deleting",
-           PREFIX_REGEX,
+           PREFIX_PATH,
        )
        self.assertTrue(s3_object_exists(self.bucket.name, uploaded_file[0]))
        self.destroy_test_file()
