#!/usr/bin/env python
# tile-generator
#
# Copyright (c) 2015-Present Pivotal Software, Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
import errno
import os
import os.path
import requests
import shutil
import sys
import re
import zipfile
try:
    # Python 3
    from urllib.request import urlretrieve
except ImportError:
    # Python 2
    from urllib import urlretrieve

def mkdir_p(dir, clobber=False):
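    """Create directory dir (like `mkdir -p`); with clobber=True any existing
    directory is removed first. Returns the path when the directory is
    created, and returns None (suppressing EEXIST) if it already exists."""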
    if clobber and os.path.isdir(dir):
        shutil.rmtree(dir)
    try:
        os.makedirs(dir)
        return dir
    except os.error as e:
        if e.errno != errno.EEXIST:
            raise

def download(url, filename, cache=None):
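    """Fetch url into the local path filename.

    Supported forms of url:
      - http(s)://...                 downloaded with requests
      - github://<org>/<repo>/<file>  resolved to the matching asset of the
                                      repository's latest GitHub release,
                                      then downloaded
      - docker:<image>                the image is pulled and written to
                                      filename as a tarball
      - anything else                 treated as a local file or directory
                                      and copied

    When cache names a directory, a previously cached copy is reused if
    present, and any fresh download is copied back into the cache.
    """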
    if cache is not None:
        basename = os.path.basename(filename)
        cachename = os.path.join(cache, basename)
        if os.path.isfile(cachename):
            print('- using cached version of', basename)
            shutil.copy(cachename, filename)
            return
    # Special url to find a file associated with a github release.
    # github://cf-platform-eng/meta-buildpack/meta-buildpack.tgz
    # will find the file named meta-buildpack-0.0.3.tgz in the latest
    # release for https://github.com/cf-platform-eng/meta-buildpack
    if url.startswith("github:"):
        repo_name = url.replace('github:', '', 1).lstrip("/")
        file_name = os.path.basename(repo_name)
        repo_name = os.path.dirname(repo_name)
        url = "https://api.github.com/repos/" + repo_name + "/releases/latest"
        response = requests.get(url, stream=True)
        response.raise_for_status()
        release = response.json()
        assets = release.get('assets', [])
        url = None
        pattern = re.compile('.*\\.'.join(file_name.rsplit('.', 1)) + '\\Z')
        for asset in assets:
            if pattern.match(asset['name']) is not None:
                url = asset['browser_download_url']
                break
        if url is None:
            print('no matching asset found for repo', repo_name, 'file', file_name, file=sys.stderr)
            sys.exit(1)
        # Fallthrough intentional, we now proceed to download the URL we found
    if url.startswith("http:") or url.startswith("https:"):
        # [mboldt:20160908] Using urllib.urlretrieve gave an "Access
        # Denied" page when trying to download docker boshrelease.
        # I don't know why. requests.get works. Do what works.
        response = requests.get(url, stream=True)
        response.raise_for_status()
        with open(filename, 'wb') as file:
            for chunk in response.iter_content(chunk_size=1024):
                if chunk:
                    file.write(chunk)
    elif url.startswith("docker:"):
        docker_image = url.replace('docker:', '', 1)
        try:
            from docker.client import Client
            docker_cli = Client.from_env()
            docker_cli.pull(docker_image)
            image = docker_cli.get_image(docker_image)
            # Write the image tarball in binary mode; text mode breaks under Python 3.
            with open(filename, 'wb') as image_tar:
                image_tar.write(image.data)
        except KeyError:
            print('docker not configured on this machine (or environment variables are not properly set)', file=sys.stderr)
            sys.exit(1)
        except Exception as e:
            print(e)
            print(docker_image, 'not found on local machine', file=sys.stderr)
            print('you must either pull the image, or download it and use the --cache option', file=sys.stderr)
            sys.exit(1)
    elif os.path.isdir(url):
        shutil.copytree(url, filename)
    else:
        shutil.copy(url, filename)
    if cache:
        if os.path.isdir(filename):
            basename = os.path.basename(filename)
            cachedir = os.path.join(cache, basename)
            if os.path.exists(cachedir):
                shutil.rmtree(cachedir)
            shutil.copytree(filename, os.path.join(cache, basename))
        elif os.path.isfile(filename):
            shutil.copy(filename, cache)
        else:
            print(filename, 'is not a file or directory. Cannot cache.', file=sys.stderr)

def zip_dir(zipfilename, dirname):
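    """Write dirname into the zip archive zipfilename: a directory is added
    recursively with paths relative to dirname, a single file is added under
    its basename."""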
    with zipfile.ZipFile(zipfilename, 'w', allowZip64=True) as packagezip:
        if os.path.isdir(dirname):
            for root, dirs, files in os.walk(dirname):
                for file in files:
                    abspath = os.path.join(root, file)
                    relpath = abspath[len(dirname)+1:]  # +1 for trailing slash.
                    packagezip.write(abspath, relpath)
        elif os.path.isfile(dirname):
            packagezip.write(dirname, os.path.basename(dirname))
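

# Illustrative usage sketch added for clarity; it is not part of the upstream
# tile-generator module. The local paths and the example.com URL below are
# hypothetical; the github:// URL is the one documented in download() above.
if __name__ == '__main__':
    build_dir = 'build/resources'      # hypothetical working directory
    mkdir_p(build_dir, clobber=True)   # start from a clean directory
    mkdir_p('cache')                   # local download cache
    # Plain HTTPS download, cached under ./cache for later runs.
    download('https://example.com/releases/example.tgz',
             os.path.join(build_dir, 'example.tgz'), cache='cache')
    # Latest matching asset of a GitHub release (see the github: handling above).
    download('github://cf-platform-eng/meta-buildpack/meta-buildpack.tgz',
             os.path.join(build_dir, 'meta-buildpack.tgz'))
    # Package everything that was downloaded into a single zip archive.
    zip_dir('resources.zip', build_dir)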