1"""Tag the sandbox for release, make source and doc tarballs. 2 3Requires Python 2.6 4 5Example of invocation (use to test the script): 6python makerelease.py --platform=msvc6,msvc71,msvc80,msvc90,mingw -ublep 0.6.0 0.7.0-dev 7 8When testing this script: 9python makerelease.py --force --retag --platform=msvc6,msvc71,msvc80,mingw -ublep test-0.6.0 test-0.6.1-dev 10 11Example of invocation when doing a release: 12python makerelease.py 0.5.0 0.6.0-dev 13 14Note: This was for Subversion. Now that we are in GitHub, we do not 15need to build versioned tarballs anymore, so makerelease.py is defunct. 16""" 17from __future__ import print_function 18import os.path 19import subprocess 20import sys 21import doxybuild 22import subprocess 23import xml.etree.ElementTree as ElementTree 24import shutil 25import urllib2 26import tempfile 27import os 28import time 29from devtools import antglob, fixeol, tarball 30import amalgamate 31 32SVN_ROOT = 'https://jsoncpp.svn.sourceforge.net/svnroot/jsoncpp/' 33SVN_TAG_ROOT = SVN_ROOT + 'tags/jsoncpp' 34SCONS_LOCAL_URL = 'http://sourceforge.net/projects/scons/files/scons-local/1.2.0/scons-local-1.2.0.tar.gz/download' 35SOURCEFORGE_PROJECT = 'jsoncpp' 36 37def set_version( version ): 38 with open('version','wb') as f: 39 f.write( version.strip() ) 40 41def rmdir_if_exist( dir_path ): 42 if os.path.isdir( dir_path ): 43 shutil.rmtree( dir_path ) 44 45class SVNError(Exception): 46 pass 47 48def svn_command( command, *args ): 49 cmd = ['svn', '--non-interactive', command] + list(args) 50 print('Running:', ' '.join( cmd )) 51 process = subprocess.Popen( cmd, 52 stdout=subprocess.PIPE, 53 stderr=subprocess.STDOUT ) 54 stdout = process.communicate()[0] 55 if process.returncode: 56 error = SVNError( 'SVN command failed:\n' + stdout ) 57 error.returncode = process.returncode 58 raise error 59 return stdout 60 61def check_no_pending_commit(): 62 """Checks that there is no pending commit in the sandbox.""" 63 stdout = svn_command( 'status', '--xml' ) 64 etree = ElementTree.fromstring( stdout ) 65 msg = [] 66 for entry in etree.getiterator( 'entry' ): 67 path = entry.get('path') 68 status = entry.find('wc-status').get('item') 69 if status != 'unversioned' and path != 'version': 70 msg.append( 'File "%s" has pending change (status="%s")' % (path, status) ) 71 if msg: 72 msg.insert(0, 'Pending change to commit found in sandbox. Commit them first!' ) 73 return '\n'.join( msg ) 74 75def svn_join_url( base_url, suffix ): 76 if not base_url.endswith('/'): 77 base_url += '/' 78 if suffix.startswith('/'): 79 suffix = suffix[1:] 80 return base_url + suffix 81 82def svn_check_if_tag_exist( tag_url ): 83 """Checks if a tag exist. 84 Returns: True if the tag exist, False otherwise. 85 """ 86 try: 87 list_stdout = svn_command( 'list', tag_url ) 88 except SVNError as e: 89 if e.returncode != 1 or not str(e).find('tag_url'): 90 raise e 91 # otherwise ignore error, meaning tag does not exist 92 return False 93 return True 94 95def svn_commit( message ): 96 """Commit the sandbox, providing the specified comment. 97 """ 98 svn_command( 'ci', '-m', message ) 99 100def svn_tag_sandbox( tag_url, message ): 101 """Makes a tag based on the sandbox revisions. 102 """ 103 svn_command( 'copy', '-m', message, '.', tag_url ) 104 105def svn_remove_tag( tag_url, message ): 106 """Removes an existing tag. 107 """ 108 svn_command( 'delete', '-m', message, tag_url ) 109 110def svn_export( tag_url, export_dir ): 111 """Exports the tag_url revision to export_dir. 
    The target directory, including its parents, is created if it does not exist.
    If export_dir already exists, it is deleted before the export proceeds.
    """
    rmdir_if_exist( export_dir )
    svn_command( 'export', tag_url, export_dir )

def fix_sources_eol( dist_dir ):
    """Set file EOL for tarball distribution.
    """
    print('Preparing exported source file EOL for distribution...')
    prune_dirs = antglob.prune_dirs + 'scons-local* ./build* ./libs ./dist'
    win_sources = antglob.glob( dist_dir,
        includes = '**/*.sln **/*.vcproj',
        prune_dirs = prune_dirs )
    unix_sources = antglob.glob( dist_dir,
        includes = '''**/*.h **/*.cpp **/*.inl **/*.txt **/*.dox **/*.py **/*.html **/*.in
        sconscript *.json *.expected AUTHORS LICENSE''',
        excludes = antglob.default_excludes + 'scons.py sconsign.py scons-*',
        prune_dirs = prune_dirs )
    for path in win_sources:
        fixeol.fix_source_eol( path, is_dry_run = False, verbose = True, eol = '\r\n' )
    for path in unix_sources:
        fixeol.fix_source_eol( path, is_dry_run = False, verbose = True, eol = '\n' )

def download( url, target_path ):
    """Download the file referenced by url to target_path.
    """
    f = urllib2.urlopen( url )
    try:
        data = f.read()
    finally:
        f.close()
    fout = open( target_path, 'wb' )
    try:
        fout.write( data )
    finally:
        fout.close()

def check_compile( distcheck_top_dir, platform ):
    cmd = [sys.executable, 'scons.py', 'platform=%s' % platform, 'check']
    print('Running:', ' '.join( cmd ))
    log_path = os.path.join( distcheck_top_dir, 'build-%s.log' % platform )
    flog = open( log_path, 'wb' )
    try:
        process = subprocess.Popen( cmd,
                                    stdout=flog,
                                    stderr=subprocess.STDOUT,
                                    cwd=distcheck_top_dir )
        stdout = process.communicate()[0]
        status = (process.returncode == 0)
    finally:
        flog.close()
    return (status, log_path)

def write_tempfile( content, **kwargs ):
    fd, path = tempfile.mkstemp( **kwargs )
    f = os.fdopen( fd, 'wt' )
    try:
        f.write( content )
    finally:
        f.close()
    return path

class SFTPError(Exception):
    pass

def run_sftp_batch( userhost, sftp, batch, retry=0 ):
    path = write_tempfile( batch, suffix='.sftp', text=True )
    # psftp -agent -C blep,jsoncpp@web.sourceforge.net -batch -b batch.sftp -bc
    cmd = [sftp, '-agent', '-C', '-batch', '-b', path, '-bc', userhost]
    error = None
    for retry_index in range(0, max(1,retry)):
        heading = 'Running:' if retry_index == 0 else 'Retrying:'
        print(heading, ' '.join( cmd ))
        process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT )
        stdout = process.communicate()[0]
        if process.returncode != 0:
            error = SFTPError( 'SFTP batch failed:\n' + stdout )
        else:
            # clear any error recorded by a previous failed attempt
            error = None
            break
    if error:
        raise error
    return stdout

def sourceforge_web_synchro( sourceforge_project, doc_dir,
                             user=None, sftp='sftp' ):
    """Note: does not synchronize sub-directories of doc_dir.
199 """ 200 userhost = '%s,%s@web.sourceforge.net' % (user, sourceforge_project) 201 stdout = run_sftp_batch( userhost, sftp, """ 202cd htdocs 203dir 204exit 205""" ) 206 existing_paths = set() 207 collect = 0 208 for line in stdout.split('\n'): 209 line = line.strip() 210 if not collect and line.endswith('> dir'): 211 collect = True 212 elif collect and line.endswith('> exit'): 213 break 214 elif collect == 1: 215 collect = 2 216 elif collect == 2: 217 path = line.strip().split()[-1:] 218 if path and path[0] not in ('.', '..'): 219 existing_paths.add( path[0] ) 220 upload_paths = set( [os.path.basename(p) for p in antglob.glob( doc_dir )] ) 221 paths_to_remove = existing_paths - upload_paths 222 if paths_to_remove: 223 print('Removing the following file from web:') 224 print('\n'.join( paths_to_remove )) 225 stdout = run_sftp_batch( userhost, sftp, """cd htdocs 226rm %s 227exit""" % ' '.join(paths_to_remove) ) 228 print('Uploading %d files:' % len(upload_paths)) 229 batch_size = 10 230 upload_paths = list(upload_paths) 231 start_time = time.time() 232 for index in range(0,len(upload_paths),batch_size): 233 paths = upload_paths[index:index+batch_size] 234 file_per_sec = (time.time() - start_time) / (index+1) 235 remaining_files = len(upload_paths) - index 236 remaining_sec = file_per_sec * remaining_files 237 print('%d/%d, ETA=%.1fs' % (index+1, len(upload_paths), remaining_sec)) 238 run_sftp_batch( userhost, sftp, """cd htdocs 239lcd %s 240mput %s 241exit""" % (doc_dir, ' '.join(paths) ), retry=3 ) 242 243def sourceforge_release_tarball( sourceforge_project, paths, user=None, sftp='sftp' ): 244 userhost = '%s,%s@frs.sourceforge.net' % (user, sourceforge_project) 245 run_sftp_batch( userhost, sftp, """ 246mput %s 247exit 248""" % (' '.join(paths),) ) 249 250 251def main(): 252 usage = """%prog release_version next_dev_version 253Update 'version' file to release_version and commit. 254Generates the document tarball. 255Tags the sandbox revision with release_version. 256Update 'version' file to next_dev_version and commit. 257 258Performs an svn export of tag release version, and build a source tarball. 259 260Must be started in the project top directory. 261 262Warning: --force should only be used when developping/testing the release script. 263""" 264 from optparse import OptionParser 265 parser = OptionParser(usage=usage) 266 parser.allow_interspersed_args = False 267 parser.add_option('--dot', dest="dot_path", action='store', default=doxybuild.find_program('dot'), 268 help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""") 269 parser.add_option('--doxygen', dest="doxygen_path", action='store', default=doxybuild.find_program('doxygen'), 270 help="""Path to Doxygen tool. [Default: %default]""") 271 parser.add_option('--force', dest="ignore_pending_commit", action='store_true', default=False, 272 help="""Ignore pending commit. [Default: %default]""") 273 parser.add_option('--retag', dest="retag_release", action='store_true', default=False, 274 help="""Overwrite release existing tag if it exist. 
[Default: %default]""") 275 parser.add_option('-p', '--platforms', dest="platforms", action='store', default='', 276 help="""Comma separated list of platform passed to scons for build check.""") 277 parser.add_option('--no-test', dest="no_test", action='store_true', default=False, 278 help="""Skips build check.""") 279 parser.add_option('--no-web', dest="no_web", action='store_true', default=False, 280 help="""Do not update web site.""") 281 parser.add_option('-u', '--upload-user', dest="user", action='store', 282 help="""Sourceforge user for SFTP documentation upload.""") 283 parser.add_option('--sftp', dest='sftp', action='store', default=doxybuild.find_program('psftp', 'sftp'), 284 help="""Path of the SFTP compatible binary used to upload the documentation.""") 285 parser.enable_interspersed_args() 286 options, args = parser.parse_args() 287 288 if len(args) != 2: 289 parser.error( 'release_version missing on command-line.' ) 290 release_version = args[0] 291 next_version = args[1] 292 293 if not options.platforms and not options.no_test: 294 parser.error( 'You must specify either --platform or --no-test option.' ) 295 296 if options.ignore_pending_commit: 297 msg = '' 298 else: 299 msg = check_no_pending_commit() 300 if not msg: 301 print('Setting version to', release_version) 302 set_version( release_version ) 303 svn_commit( 'Release ' + release_version ) 304 tag_url = svn_join_url( SVN_TAG_ROOT, release_version ) 305 if svn_check_if_tag_exist( tag_url ): 306 if options.retag_release: 307 svn_remove_tag( tag_url, 'Overwriting previous tag' ) 308 else: 309 print('Aborting, tag %s already exist. Use --retag to overwrite it!' % tag_url) 310 sys.exit( 1 ) 311 svn_tag_sandbox( tag_url, 'Release ' + release_version ) 312 313 print('Generated doxygen document...') 314## doc_dirname = r'jsoncpp-api-html-0.5.0' 315## doc_tarball_path = r'e:\prg\vc\Lib\jsoncpp-trunk\dist\jsoncpp-api-html-0.5.0.tar.gz' 316 doc_tarball_path, doc_dirname = doxybuild.build_doc( options, make_release=True ) 317 doc_distcheck_dir = 'dist/doccheck' 318 tarball.decompress( doc_tarball_path, doc_distcheck_dir ) 319 doc_distcheck_top_dir = os.path.join( doc_distcheck_dir, doc_dirname ) 320 321 export_dir = 'dist/export' 322 svn_export( tag_url, export_dir ) 323 fix_sources_eol( export_dir ) 324 325 source_dir = 'jsoncpp-src-' + release_version 326 source_tarball_path = 'dist/%s.tar.gz' % source_dir 327 print('Generating source tarball to', source_tarball_path) 328 tarball.make_tarball( source_tarball_path, [export_dir], export_dir, prefix_dir=source_dir ) 329 330 amalgamation_tarball_path = 'dist/%s-amalgamation.tar.gz' % source_dir 331 print('Generating amalgamation source tarball to', amalgamation_tarball_path) 332 amalgamation_dir = 'dist/amalgamation' 333 amalgamate.amalgamate_source( export_dir, '%s/jsoncpp.cpp' % amalgamation_dir, 'json/json.h' ) 334 amalgamation_source_dir = 'jsoncpp-src-amalgamation' + release_version 335 tarball.make_tarball( amalgamation_tarball_path, [amalgamation_dir], 336 amalgamation_dir, prefix_dir=amalgamation_source_dir ) 337 338 # Decompress source tarball, download and install scons-local 339 distcheck_dir = 'dist/distcheck' 340 distcheck_top_dir = distcheck_dir + '/' + source_dir 341 print('Decompressing source tarball to', distcheck_dir) 342 rmdir_if_exist( distcheck_dir ) 343 tarball.decompress( source_tarball_path, distcheck_dir ) 344 scons_local_path = 'dist/scons-local.tar.gz' 345 print('Downloading scons-local to', scons_local_path) 346 download( SCONS_LOCAL_URL, 
        print('Decompressing scons-local to', distcheck_top_dir)
        tarball.decompress( scons_local_path, distcheck_top_dir )

        # Run compilation
        print('Compiling decompressed tarball')
        all_build_status = True
        for platform in options.platforms.split(','):
            print('Testing platform:', platform)
            build_status, log_path = check_compile( distcheck_top_dir, platform )
            print('See build log:', log_path)
            print('=> ok' if build_status else '=> FAILED')
            all_build_status = all_build_status and build_status
        if not all_build_status:
            print('Testing failed on at least one platform, aborting...')
            svn_remove_tag( tag_url, 'Removing tag due to failed testing' )
            sys.exit(1)
        if options.user:
            if not options.no_web:
                print('Uploading documentation using user', options.user)
                sourceforge_web_synchro( SOURCEFORGE_PROJECT, doc_distcheck_top_dir, user=options.user, sftp=options.sftp )
                print('Completed documentation upload')
            print('Uploading source and documentation tarballs for release using user', options.user)
            sourceforge_release_tarball( SOURCEFORGE_PROJECT,
                                         [source_tarball_path, doc_tarball_path],
                                         user=options.user, sftp=options.sftp )
            print('Source and doc release tarballs uploaded')
        else:
            print('No upload user specified. Web site and release tarballs were not uploaded.')
            print('Tarball can be found at:', doc_tarball_path)

        # Set next version number and commit
        set_version( next_version )
        svn_commit( 'Released ' + release_version )
    else:
        sys.stderr.write( msg + '\n' )

if __name__ == '__main__':
    main()