0
0
mirror of https://github.com/mongodb/mongo.git synced 2024-12-01 01:21:03 +01:00

BUILDBOT-58: use s3tool to do s3-related work (and remove old, unused s3 scripts)

This commit is contained in:
Dan Crosta 2012-02-16 21:51:26 -05:00
parent 31991afed8
commit fc7cac2239
6 changed files with 173 additions and 165 deletions

View File

@ -1145,35 +1145,47 @@ def s3push( localName , remoteName=None , remotePrefix=None , fixName=True , pla
findSettingsSetup()
import simples3
import settings
try:
import settings
settings_file = os.path.abspath(settings.__file__)
except ImportError:
print "could not find settings.py, not attempting push"
Exit(2)
s = simples3.S3Bucket( settings.bucket , settings.id , settings.key )
try:
bucket_name = settings.bucket
except:
print "no bucket defined in settings.py, not attempting push"
Exit(2)
if remoteName is None:
if not remoteName:
remoteName = localName
if fixName:
(root,dot,suffix) = _rpartition( localName, "." )
root, suffix = os.path.splitext(localName)
name = remoteName + "-" + getSystemInstallName()
name += remotePrefix
if dot == "." :
name += "." + suffix
# "suffix" contains the dot, if any, or is an empty string
name += suffix
name = name.lower()
else:
name = remoteName
if isDriverBuild():
name = "cxx-driver/" + name
elif platformDir:
name = platform + "/" + name
print( "uploading " + localName + " to http://s3.amazonaws.com/" + s.name + "/" + name )
print( "uploading " + localName + " to http://s3.amazonaws.com/" + bucket_name + "/" + name )
if dontReplacePackage:
for ( key , modify , etag , size ) in s.listdir( prefix=name ):
if utils.run_s3tool(settings_file, bucket_name, 'exists', name):
print( "error: already a file with that name, not uploading" )
Exit(2)
s.put( name , open( localName , "rb" ).read() , acl="public-read" );
if not utils.run_s3tool(settings_file, bucket_name, 'put', localName, name):
print( "error: could not put '%s' to s3 as '%s'" % (localName, name) )
Exit(2)
print( " done uploading!" )
def s3shellpush( env , target , source ):

View File

@ -1,36 +0,0 @@
import os
import sys
import time
sys.path.append( "." )
sys.path.append( ".." )
sys.path.append( "../../" )
sys.path.append( "../../../" )
import simples3
import settings
import subprocess
# check s3 for md5 hashes
def check_dir(bucket, prefix, todel):
    """Delete every key under *prefix* whose name contains *todel*.

    Each matching key is printed before deletion, with a two-second pause
    between deletes (presumably to avoid hammering S3 -- verify intent).
    """
    for entry in bucket.listdir(prefix=prefix):
        key = entry[0]
        if todel not in key:
            continue
        print(key)
        time.sleep(2)
        bucket.delete(key)
def clean(todel):
    """Remove keys containing *todel* from each per-platform S3 directory."""
    bucket = simples3.S3Bucket(settings.bucket, settings.id, settings.key)
    for prefix in ["osx", "linux", "win32", "sunos5", "src"]:
        check_dir(bucket, prefix, todel)


if __name__ == "__main__":
    clean(sys.argv[1])

View File

@ -1,48 +0,0 @@
import os
import sys
sys.path.append( "." )
sys.path.append( ".." )
sys.path.append( "../../" )
sys.path.append( "../../../" )
import simples3
import settings
import subprocess
# check s3 for md5 hashes
def check_dir(bucket, prefix):
    """Ensure every archive under *prefix* has a sibling ``.md5`` key on S3.

    The md5 content written is the archive's etag with quotes stripped
    (presumably the etag is the body MD5 for simple uploads -- verify).
    """
    archives = {}   # archive key -> etag with surrounding quotes removed
    have_md5 = {}   # md5 key -> True
    for (key, modify, etag, size) in bucket.listdir(prefix=prefix):
        if key.endswith((".tgz", ".zip")):
            archives[key] = etag.replace('"', '')
        elif key.endswith(".md5"):
            have_md5[key] = True
        elif key.find("$folder$") > 0:
            # S3 console placeholder entries; > 0 (not >= 0) matches the
            # original code, so a key *starting* with $folder$ falls through.
            pass
        else:
            print("unknown file type: " + key)
    for archive in archives:
        md5_key = archive + ".md5"
        if md5_key in have_md5:
            continue
        print("need to do: " + archive + " " + archives[archive] + " to " + md5_key)
        bucket.put(md5_key, archives[archive], acl="public-read")
def run():
    """Backfill missing .md5 files for each top-level platform directory."""
    bucket = simples3.S3Bucket(settings.bucket, settings.id, settings.key)
    for prefix in ["osx", "linux", "win32", "sunos5"]:
        check_dir(bucket, prefix)


if __name__ == "__main__":
    run()

138
buildscripts/s3tool Executable file
View File

@ -0,0 +1,138 @@
#!/usr/bin/env python
# vim:filetype=python
import sys
import os
import os.path
import simples3
def put(bucket, local_path, remote_path):
    """Upload the file at local_path to remote_path inside bucket.

    The object is stored publicly readable; progress is written to stdout
    and flushed immediately so it appears promptly in build logs.
    """
    remote_path = remote_path.lstrip('/')
    out = sys.stdout
    out.write('put %s => %s:/%s ...' % (local_path, bucket.name, remote_path))
    out.flush()
    contents = open(local_path, 'rb').read()
    bucket.put(remote_path, contents, acl='public-read')
    out.write(' done.\n')
    out.flush()
def ls(bucket, remote_path=''):
    """Print a directory-style listing of remote_path within bucket."""
    # Normalize to an S3 key prefix: no leading slash, trailing slash added.
    remote_path = remote_path.lstrip('/')
    if remote_path and not remote_path.endswith('/'):
        remote_path = '%s/' % remote_path
    seen_dirs = set()
    empty = True
    print('%-23s %-9s %s' % ('Modified', 'Size', 'Name'))
    for key, modified, _etag, nbytes in bucket.listdir(prefix=remote_path):
        # Display keys relative to the listed prefix.
        key = key[len(remote_path):]
        subdir, basename = os.path.split(key)
        if subdir and subdir not in seen_dirs:
            # First key seen under a subdirectory: emit a single '(dir)' row.
            print('%-23s %-9s %s/' % ('', '(dir)', subdir))
            seen_dirs.add(subdir)
        else:
            print('%-23s %-9d %s' % (modified.strftime('%Y-%m-%d %H:%M:%S UTC'),
                                     nbytes, basename))
            empty = False
    if empty:
        print('(directory has no files)')
def exists(bucket, remote_path):
    """Return 0 (shell-style success) when remote_path is in bucket, else 1."""
    return 0 if remote_path in bucket else 1
def rm(bucket, remote_path):
    """Delete remote_path from bucket via the mapping deletion protocol."""
    del bucket[remote_path]
def help():
    """Print usage text describing the available s3tool sub-commands."""
    text = """
Command Help:
put local_path remote_path
Uploads the file at local_path to the remote_path in the
given bucket. Overwrites existing file, if any.
rm remote_path
Removes the file at remote_path if it exists.
ls remote_path
List the contents of the remote_path (assumed to be a
directory)
exists remote_path
Check if a remote path exists (return code 0) or not
(return code 1)
""".strip()
    print(text)
# Dispatch table: sub-command name -> implementation, used by main().
commands = dict(
    put=put,
    rm=rm,
    ls=ls,
    exists=exists,
)
def _read_config(options):
if options.config.endswith('.py'):
path, filename = os.path.split(options.config)
path = os.path.abspath(os.path.join(os.getcwd(), path))
modname = filename[:-3]
orig_sys_path = list(sys.path)
sys.path.append(path)
try:
module = __import__(modname)
options.key_id = getattr(module, 'key_id', getattr(module, 'id', None))
options.secret = getattr(module, 'secret', getattr(module, 'key', None))
finally:
sys.path = orig_sys_path
else:
sys.exit('cannot understand config file "%s"' % options.config)
def main():
    """Entry point: parse options, resolve credentials, dispatch command.

    Returns the invoked command's return value, which the __main__ guard
    feeds to sys.exit() as the process exit code.
    """
    from optparse import OptionParser
    epilog = """Type %prog help for command help."""
    parser = OptionParser('%prog bucket command [argument ...]', epilog=epilog)
    parser.add_option('-k', '--key', help='Authenticate with the given Access Key ID',
        action='store', dest='key_id', default=None, metavar='ACCESS_KEY_ID')
    parser.add_option('-s', '--secret', help='Authenticate with the given Secret',
        action='store', dest='secret', default=None, metavar='SECRET')
    parser.add_option('-c', '--config', help='Load authentication credentials from CONFIG_FILE',
        action='store', dest='config', metavar='CONFIG_FILE')
    options, args = parser.parse_args()

    if len(args) == 1 and args[0] == 'help':
        return help()
    elif len(args) < 2:
        parser.error('must specify a bucket and command')

    # Credentials must come from either --key/--secret or --config, not both.
    if not any((options.key_id, options.secret, options.config)):
        parser.error('must specify --key and --secret or --config')
    if options.config and any((options.key_id, options.secret)):
        parser.error('cannot specify --config and --key or --secret')
    if any((options.key_id, options.secret)) and not all((options.key_id, options.secret)):
        # BUGFIX: this branch fires when only one of --key/--secret is
        # given; the message previously (and wrongly) mentioned --config.
        parser.error('--key and --secret must be specified together')

    bucket_name, command, args = args[0], args[1], args[2:]
    func = commands.get(command)
    if not func:
        parser.error('unknown command "%s"' % command)
    if options.config:
        _read_config(options)
    bucket = simples3.S3Bucket(bucket_name, options.key_id, options.secret)
    return func(bucket, *args)


if __name__ == '__main__':
    sys.exit(main())

View File

@ -1,70 +0,0 @@
import os
import sys
sys.path.append( "." )
sys.path.append( ".." )
sys.path.append( "../../" )
sys.path.append( "../../../" )
import simples3
import settings
import subprocess
# this pushes all source balls as tgz and zip
def run_git( args ):
cmd = "git " + args
cmd = cmd.split( " " )
x = subprocess.Popen( ( "git " + args ).split( " " ) , stdout=subprocess.PIPE).communicate()
return x[0]
def push_tag( bucket , tag , extension , gzip=False ):
localName = "mongodb-src-" + tag + "." + extension
remoteName = "src/" + localName
if gzip:
remoteName += ".gz"
for ( key , modify , etag , size ) in bucket.listdir( prefix=remoteName ):
print( "found old: " + key + " uploaded on: " + str( modify ) )
return
if os.path.exists( localName ):
os.remove( localName )
print( "need to do: " + remoteName )
cmd = "archive --format %s --output %s --prefix mongodb-src-%s/ %s" % ( extension , localName , tag , tag )
run_git( cmd )
print( "\t" + cmd )
if not os.path.exists( localName ) or os.path.getsize(localName) == 0 :
raise( Exception( "creating archive failed: " + cmd ) )
if gzip:
newLocalName = localName + ".gz"
if ( os.path.exists( newLocalName ) ):
os.remove( newLocalName )
subprocess.call( [ "gzip" , localName ] )
localName = newLocalName
if not os.path.exists( localName ) or os.path.getsize(localName) == 0 :
raise( Exception( "gzipping failed" ) )
bucket.put( remoteName , open( localName , "rb" ).read() , acl="public-read" )
print( "\t uploaded to: http://s3.amazonaws.com/%s/%s" % ( bucket.name , remoteName ) )
os.remove( localName )
def push_all():
tags = run_git("tag -l").strip().split( "\n" )
bucket = simples3.S3Bucket( settings.bucket , settings.id , settings.key )
for tag in tags:
push_tag( bucket , tag , "tar" , True )
push_tag( bucket , tag , "zip" )
if __name__ == "__main__":
push_all()

View File

@ -182,3 +182,15 @@ def smoke_command(*args):
(find_python(), smoke_py),
args))
def run_s3tool(settings_file, bucket_name, command, *args):
    """Run buildscripts/s3tool as a subprocess; return True when it exits 0.

    settings_file is handed to s3tool via --config; a compiled .pyc path
    is mapped back to its .py source (presumably because s3tool only
    accepts .py config files -- verify against s3tool itself).
    """
    if settings_file.endswith('.pyc'):
        settings_file = settings_file[:-1]
    tool_path = os.path.join(os.path.dirname(__file__), 's3tool')
    argv = [find_python(), tool_path, '--config=%s' % settings_file,
            bucket_name, command]
    argv.extend(args)
    # Echo the full command line for build-log visibility.
    print(' '.join(argv))
    return subprocess.call(argv) == 0