-
Notifications
You must be signed in to change notification settings - Fork 2
/
manage-packages
executable file
·159 lines (129 loc) · 5.5 KB
/
manage-packages
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
#!/usr/bin/python
"""
Version 0.5
Maintains simple cheeseshop on S3.
Authored by David Wolfe [email protected] based on
http://honza.ca/2012/02/how-not-to-depend-on-pypi (Honza Pokorny)
"""
# Help text shown when the one non-stdlib dependency (boto) is missing.
BOTO_MESSAGE = """
Cannot import boto. This could be for one of two reasons:
(1) Boto is not installed globally.
(2) The wrong global python is listed in the top line of this script.
Try changing /usr/bin/python to /usr/local/bin/python
or whatever is returned by 'which python'
"""
# Requires installation of boto (works with version 2.5.2)
import argparse, tempfile, subprocess, shutil, os
try:
    import boto
except ImportError:
    # Fail early with an actionable hint rather than a bare traceback.
    print (BOTO_MESSAGE)
    exit(1)
from boto.s3.key import Key
# Make sure you have these set up in your environment
# (raises KeyError at import time if either variable is unset).
AWS_SECRET_ACCESS_KEY = os.environ['AWS_SECRET_ACCESS_KEY']
AWS_ACCESS_KEY_ID = os.environ['AWS_ACCESS_KEY_ID']
# Non-sheepdoggers: change to your bucket
BUCKET_NAME = 'sheepdog-assets'
# Key prefix inside the bucket under which the cheeseshop lives.
CHEESESHOP_DIR = "feta" # Sheep milk cheese...
def is_compressed(filename):
    """Return True if *filename* looks like a compressed package archive.

    A quick sanity check by extension only ('.gz', '.tgz', '.zip');
    the file contents are never inspected. '.tar.gz' matches via '.gz'.
    """
    # str.endswith accepts a tuple of suffixes -- one call, no slicing.
    return filename.endswith(('.gz', '.tgz', '.zip'))
def dispatch():
    """
    Parse command line arguments and dispatch to appropriate function.

    Each subcommand registers its handler via set_defaults(func=...);
    after parsing, the handler is called with the remaining parsed
    options as keyword arguments.
    """
    parser = argparse.ArgumentParser(
        description="Used for managing s3 bucket of assets,"
                    " primarily Python Cheeseshop management.")
    subparsers = parser.add_subparsers()
    # ... push [-h] key file
    sub = subparsers.add_parser(
        "push",
        help="Deploy file to bucket, assigning it the"
             " specified key. Overwrites any existing item"
             " with the same key.")
    sub.set_defaults(func=push_to_s3)
    sub.add_argument('key')
    # argparse.FileType works on Python 2 and 3; the bare `file` builtin
    # used previously exists only on Python 2. Like `file`, it yields an
    # open file object with a .name attribute.
    sub.add_argument('file', type=argparse.FileType('r'))
    # ... install [-h] [-r FILENAME] [-U] [PACKAGE_NAMES ...]
    sub = subparsers.add_parser(
        "install",
        help="Update cheeseshop with packages,"
             " and update index.")
    sub.set_defaults(func=upload_to_cheeseshop)
    sub.add_argument('packages', metavar="PACKAGE_NAMES", nargs="*",
                     help=('each package is a package name'
                           ' (Django or Django==1.3.1) or a'
                           ' path to a file (dir/Django-1.3.1.tgz)'))
    sub.add_argument('-r', '--requirement', type=argparse.FileType('r'),
                     metavar="FILENAME",
                     help=('assure packages in requirements file are present'
                           ', uploading from pypi as needed'))
    sub.add_argument('-U', '--upgrade', action='store_true',
                     help='force upload, overwriting existing packages')
    # ... index
    sub = subparsers.add_parser("index",
                                help="Re-construct cheeseshop's index.")
    sub.set_defaults(func=build_cheeseshop_index)
    kwargs = vars(parser.parse_args())
    f = kwargs.pop('func')
    f(**kwargs)
# Lazily-created bucket handle; populated on the first aws_bucket() call.
_cached_bucket = None
def aws_bucket():
    """Return the S3 bucket, connecting on first use and caching it."""
    global _cached_bucket
    if _cached_bucket is not None:
        return _cached_bucket
    connection = boto.connect_s3(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY)
    _cached_bucket = connection.get_bucket(BUCKET_NAME)
    return _cached_bucket
def push_to_s3(key, file):
    """Upload *file* to the bucket under *key* and make it public.

    *file* may be either an open file object (its .name is used) or a
    plain path string.
    """
    path = getattr(file, 'name', file)
    entry = Key(aws_bucket())
    entry.key = key
    entry.set_contents_from_filename(path)
    entry.make_public()
def upload_to_cheeseshop(packages=None, requirement=None, upgrade=False):
    """
    Download the requested packages via pip, upload any archives not
    already present in the cheeseshop, then rebuild the index.

    packages    -- package specs (Django or Django==1.3.1) or paths to
                   already-built archives (dir/Django-1.3.1.tgz)
    requirement -- open requirements file, or None
    upgrade     -- when True, re-upload packages that already exist
    """
    packages = list(packages) if packages else []  # no mutable default
    if requirement is None:  # Just an alternate way to update the index...
        build_cheeseshop_index()
    bucket = aws_bucket()
    staging = tempfile.mkdtemp()
    try:
        # Pre-built archives are copied straight into the staging dir;
        # everything else goes through pip so dependencies come along.
        archives = [p for p in packages if is_compressed(p)]
        specs = [p for p in packages if p not in archives]
        # List-form invocation: no shell, no quoting/injection issues.
        cmd = ['pip', 'install', '-d', staging]
        if requirement:
            cmd += ['-r', requirement.name]
        subprocess.call(cmd + specs)
        for archive in archives:
            shutil.copy(archive, staging)
        # Find out which packages the cheeseshop already holds.
        prefix = "%s/packages/" % CHEESESHOP_DIR
        existing = [key.name[len(prefix):] for key in bucket.list()
                    if key.name.startswith(prefix)]
        for package in sorted(os.listdir(staging)):
            if not is_compressed(package):
                continue  # Junk file
            if not upgrade and package in existing:
                print("Package %s already in %s" % (package, CHEESESHOP_DIR))
                continue  # Already uploaded
            print("Uploading %s to %s" % (package, CHEESESHOP_DIR))
            push_to_s3("%s/packages/%s" % (CHEESESHOP_DIR, package),
                       os.path.join(staging, package))
    finally:
        # Don't leave downloaded archives behind in the temp dir.
        shutil.rmtree(staging, ignore_errors=True)
    build_cheeseshop_index()
def build_cheeseshop_index():
    """Regenerate and upload the cheeseshop's index.html from the
    package archives currently stored in the bucket, and make it public."""
    bucket = aws_bucket()
    prefix = "%s/packages/" % CHEESESHOP_DIR
    # Build index.html: one link per key under the packages/ prefix.
    packages = [key.name[len(prefix):] for key in bucket.list()
                if key.name.startswith(prefix)]
    links = '\n'.join('<a href="packages/%s">%s</a>' % (package, package)
                      for package in packages)
    html = "<html><head></head><body>\n%s\n</body></html>\n"
    k = Key(bucket)
    k.key = "%s/index.html" % CHEESESHOP_DIR
    k.set_metadata('Content-Type', 'text/html')
    k.set_contents_from_string(html % links)
    k.make_public()
    # print(...) with a single argument behaves the same on Python 2 and 3.
    print("Index for %s has been updated." % CHEESESHOP_DIR)
# Only run when executed as a script, not when imported as a module.
if __name__ == '__main__':
    dispatch()