Format scripts

Victor Zverovich 2016-06-15 16:14:31 -07:00
parent e9a0d3141b
commit 41356aa00a
7 changed files with 233 additions and 228 deletions

View File

@@ -10,22 +10,23 @@ platform = os.environ.get('PLATFORM')
path = os.environ['PATH']
cmake_command = ['cmake', '-DFMT_PEDANTIC=ON', '-DCMAKE_BUILD_TYPE=' + config]
if build == 'mingw':
    cmake_command.append('-GMinGW Makefiles')
    build_command = ['mingw32-make', '-j4']
    test_command = ['mingw32-make', 'test']
    # Remove the path to Git bin directory from $PATH because it breaks
    # MinGW config.
    path = path.replace(r'C:\Program Files (x86)\Git\bin', '')
    os.environ['PATH'] = r'C:\MinGW\bin;' + path
else:
    # Add MSBuild 14.0 to PATH as described in
    # http://help.appveyor.com/discussions/problems/2229-v140-not-found-on-vs2105rc.
    os.environ['PATH'] = r'C:\Program Files (x86)\MSBuild\14.0\Bin;' + path
    generator = 'Visual Studio 14 2015'
    if platform == 'x64':
        generator += ' Win64'
    cmake_command.append('-G' + generator)
    build_command = ['cmake', '--build', '.', '--config', config, '--', '/m:4']
    test_command = ['ctest', '-C', config]

check_call(cmake_command)
check_call(build_command)
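The hunk ends before the test step, but test_command is prepared above, so the script presumably finishes by invoking it. A minimal sketch of that follow-up, assuming the CTEST_OUTPUT_ON_FAILURE convention used by the Travis script later in this commit:

    # Hypothetical continuation, not part of this hunk: run the prepared test
    # command and surface CTest output on failure.
    env = os.environ.copy()
    env['CTEST_OUTPUT_ON_FAILURE'] = '1'
    check_call(test_command, env=env)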

View File

@@ -3,44 +3,44 @@
import contextlib, os, tempfile, timer, urllib2, urlparse

class Downloader:
    def __init__(self, dir=None):
        self.dir = dir

    # Downloads a file and removes it when exiting a block.
    # Usage:
    #   d = Downloader()
    #   with d.download(url) as f:
    #     use_file(f)
    def download(self, url, cookie=None):
        suffix = os.path.splitext(urlparse.urlsplit(url)[2])[1]
        fd, filename = tempfile.mkstemp(suffix=suffix, dir=self.dir)
        os.close(fd)
        with timer.print_time('Downloading', url, 'to', filename):
            opener = urllib2.build_opener()
            if cookie:
                opener.addheaders.append(('Cookie', cookie))
            num_tries = 2
            for i in range(num_tries):
                try:
                    f = opener.open(url)
                except urllib2.URLError, e:
                    print('Failed to open url', url)
                    continue
                length = f.headers.get('content-length')
                if not length:
                    print('Failed to get content-length')
                    continue
                length = int(length)
                with open(filename, 'wb') as out:
                    count = 0
                    while count < length:
                        data = f.read(1024 * 1024)
                        count += len(data)
                        out.write(data)
        @contextlib.contextmanager
        def remove(filename):
            try:
                yield filename
            finally:
                os.remove(filename)
        return remove(filename)

View File

@@ -139,7 +139,7 @@ if __name__ == '__main__':
    fmt_dir = os.path.join(workdir, 'fmt')
    branch = args.get('<branch>')
    if branch is None:
        branch = 'master'
    run('git', 'clone', '-b', branch, 'git@github.com:fmtlib/fmt.git', fmt_dir)

    # Convert changelog from RST to GitHub-flavored Markdown and get the version.
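The conversion step described by that comment falls outside this hunk. As a rough illustration only, not the script's actual implementation, such a step could shell out to Pandoc and read the version from the first changelog heading:

    # Hypothetical sketch of the step described by the comment above.
    import re
    from subprocess import check_output

    changelog_rst = os.path.join(fmt_dir, 'ChangeLog.rst')  # assumed filename
    # 'gfm' is Pandoc's GitHub-flavored Markdown writer (older releases call it
    # 'markdown_github').
    changelog_md = check_output(['pandoc', '-f', 'rst', '-t', 'gfm', changelog_rst])
    # Assumption: the first heading of the changelog names the release version.
    version = re.match(r'#\s*(\S+)', changelog_md).group(1)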

View File

@@ -5,31 +5,31 @@ from contextlib import contextmanager
import timeit

class Timer:
    """
    A with statement based timer.
    Usage:
      t = Timer()
      with t:
        do_something()
      time = t.time
    """

    def __enter__(self):
        self.start = timeit.default_timer()

    def __exit__(self, type, value, traceback):
        finish = timeit.default_timer()
        self.time = finish - self.start

@contextmanager
def print_time(*args):
    """
    Measures and prints the time taken to execute nested code.
    args: Additional arguments to print.
    """
    t = Timer()
    print(*args)
    with t:
        yield
    print(*args, end=' ')
    print('finished in {0:.2f} second(s)'.format(t.time))
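A small usage sketch of print_time, mirroring how the download script above wraps its network transfer in it:

    # Example: the label arguments are printed before the block runs and
    # repeated with the elapsed time once it finishes.
    import timer

    with timer.print_time('Building', 'docs'):
        run_build()  # placeholder for the timed work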

View File

@@ -6,34 +6,28 @@ import errno, os, re, shutil, sys, tempfile, urllib
from subprocess import call, check_call, check_output, CalledProcessError, \
                       Popen, PIPE, STDOUT

def rmtree_if_exists(dir):
    try:
        shutil.rmtree(dir)
    except OSError as e:
        if e.errno == errno.ENOENT:
            pass

def makedirs_if_not_exist(dir):
    try:
        os.makedirs(dir)
    except OSError as e:
        if e.errno != errno.EEXIST:
            raise

def install_dependencies():
    branch = os.environ['TRAVIS_BRANCH']
    if branch != 'master':
        print('Branch: ' + branch)
        exit(0)  # Ignore non-master branches
    check_call('curl -s https://deb.nodesource.com/gpgkey/nodesource.gpg.key ' +
               '| sudo apt-key add -', shell=True)
    check_call('echo "deb https://deb.nodesource.com/node_0.10 precise main" ' +
               '| sudo tee /etc/apt/sources.list.d/nodesource.list', shell=True)
    check_call(['sudo', 'apt-get', 'update'])
    check_call(['sudo', 'apt-get', 'install', 'python-virtualenv', 'nodejs'])
    check_call(['npm', 'install', '-g', 'less', 'less-plugin-clean-css'])

@@ -41,40 +35,48 @@ if build == 'Doc':
    urllib.urlretrieve('http://mirrors.kernel.org/ubuntu/pool/main/d/doxygen/' +
                       deb_file, deb_file)
    check_call(['sudo', 'dpkg', '-i', deb_file])

fmt_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))

build = os.environ['BUILD']
if build == 'Doc':
    travis = 'TRAVIS' in os.environ
    if travis:
        install_dependencies()
    sys.path.insert(0, os.path.join(fmt_dir, 'doc'))
    import build
    build.create_build_env()
    html_dir = build.build_docs()
    repo = 'fmtlib.github.io'
    if travis and 'KEY' not in os.environ:
        # Don't update the repo if building on Travis from an account that
        # doesn't have push access.
        print('Skipping update of ' + repo)
        exit(0)
    # Clone the fmtlib.github.io repo.
    rmtree_if_exists(repo)
    git_url = 'https://github.com/' if travis else 'git@github.com:'
    check_call(['git', 'clone', git_url + 'fmtlib/{}.git'.format(repo)])
    # Copy docs to the repo.
    target_dir = os.path.join(repo, 'dev')
    rmtree_if_exists(target_dir)
    shutil.copytree(html_dir, target_dir, ignore=shutil.ignore_patterns('.*'))
    if travis:
        check_call(['git', 'config', '--global', 'user.name', 'amplbot'])
        check_call(['git', 'config', '--global', 'user.email', 'viz@ampl.com'])
    # Push docs to GitHub pages.
    check_call(['git', 'add', '--all'], cwd=repo)
    if call(['git', 'diff-index', '--quiet', 'HEAD'], cwd=repo):
        check_call(['git', 'commit', '-m', 'Update documentation'], cwd=repo)
        cmd = 'git push'
        if travis:
            cmd += ' https://$KEY@github.com/fmtlib/fmtlib.github.io.git master'
        p = Popen(cmd, shell=True, stdout=PIPE, stderr=STDOUT, cwd=repo)
        # Print the output without the key.
        print(p.communicate()[0].replace(os.environ['KEY'], '$KEY'))
        if p.returncode != 0:
            raise CalledProcessError(p.returncode, cmd)
    exit(0)

standard = os.environ['STANDARD']
install_dir = os.path.join(fmt_dir, "_install")

@@ -84,11 +86,13 @@ test_build_dir = os.path.join(fmt_dir, "_build_test")
# Configure library.
makedirs_if_not_exist(build_dir)
common_cmake_flags = [
    '-DCMAKE_INSTALL_PREFIX=' + install_dir, '-DCMAKE_BUILD_TYPE=' + build
]
extra_cmake_flags = []
if standard != '0x':
    extra_cmake_flags = [
        '-DCMAKE_CXX_FLAGS=-std=c++' + standard, '-DFMT_USE_CPP11=OFF'
    ]
check_call(['cmake', '-DFMT_DOC=OFF', '-DFMT_PEDANTIC=ON', fmt_dir] +
           common_cmake_flags + extra_cmake_flags, cwd=build_dir)

@@ -99,9 +103,9 @@ check_call(['make', '-j4'], cwd=build_dir)
env = os.environ.copy()
env['CTEST_OUTPUT_ON_FAILURE'] = '1'
if call(['make', 'test'], env=env, cwd=build_dir):
    with open('Testing/Temporary/LastTest.log', 'r') as f:
        print(f.read())
    sys.exit(-1)

# Install library.
check_call(['make', 'install'], cwd=build_dir)
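test_build_dir is defined earlier in this file, so the install is presumably followed by configuring a small consumer project against the freshly installed library. A sketch of what that follow-up could look like; the test project path is an assumption, not something shown in this diff:

    # Hypothetical continuation: build a consumer project against the
    # installed library.
    makedirs_if_not_exist(test_build_dir)
    check_call(['cmake', '-DCMAKE_PREFIX_PATH=' + install_dir,
                os.path.join(fmt_dir, 'test', 'find-package-test')],  # assumed path
               cwd=test_build_dir)
    check_call(['make', '-j4'], cwd=test_build_dir)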

View File

@@ -8,23 +8,23 @@
import shutil, tempfile
from subprocess import check_output, STDOUT

class Git:
    def __init__(self, dir):
        self.dir = dir

    def __call__(self, *args):
        output = check_output(['git'] + list(args), cwd=self.dir, stderr=STDOUT)
        print(output)
        return output

dir = tempfile.mkdtemp()
try:
    git = Git(dir)
    git('clone', '-b', 'coverity', 'git@github.com:fmtlib/fmt.git', dir)
    output = git('merge', '-X', 'theirs', '--no-commit', 'origin/master')
    if 'Fast-forward' not in output:
        git('reset', 'HEAD', '.travis.yml')
        git('checkout', '--', '.travis.yml')
        git('commit', '-m', 'Update coverity branch')
    git('push')
finally:
    shutil.rmtree(dir)

View File

@@ -5,30 +5,30 @@ from distutils.version import LooseVersion
from subprocess import check_call

class Git:
    def __init__(self, dir):
        self.dir = dir

    def call(self, method, args, **kwargs):
        return check_call(['git', method] + list(args), **kwargs)

    def clone(self, *args):
        return self.call('clone', list(args) + [self.dir])

    def checkout(self, *args):
        return self.call('checkout', args, cwd=self.dir)

    def clean(self, *args):
        return self.call('clean', args, cwd=self.dir)

    def reset(self, *args):
        return self.call('reset', args, cwd=self.dir)

    def pull(self, *args):
        return self.call('pull', args, cwd=self.dir)

    def update(self, *args):
        if not os.path.exists(self.dir):
            self.clone(*args)

# Import the documentation build module.
fmt_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

@@ -47,58 +47,58 @@ doc_repo = Git(os.path.join(build_dir, 'fmtlib.github.io'))
doc_repo.update('git@github.com:fmtlib/fmtlib.github.io')

for version in ['1.0.0', '1.1.0', '2.0.0', '3.0.0']:
    fmt_repo.clean('-f', '-d')
    fmt_repo.reset('--hard')
    fmt_repo.checkout(version)
    target_doc_dir = os.path.join(fmt_repo.dir, 'doc')
    # Remove the old theme.
    for entry in os.listdir(target_doc_dir):
        path = os.path.join(target_doc_dir, entry)
        if os.path.isdir(path):
            shutil.rmtree(path)
    # Copy the new theme.
    for entry in ['_static', '_templates', 'basic-bootstrap', 'bootstrap',
                  'conf.py', 'fmt.less']:
        src = os.path.join(fmt_dir, 'doc', entry)
        dst = os.path.join(target_doc_dir, entry)
        copy = shutil.copytree if os.path.isdir(src) else shutil.copyfile
        copy(src, dst)
    # Rename index to contents.
    contents = os.path.join(target_doc_dir, 'contents.rst')
    if not os.path.exists(contents):
        os.rename(os.path.join(target_doc_dir, 'index.rst'), contents)
    # Fix issues in reference.rst/api.rst.
    for filename in ['reference.rst', 'api.rst']:
        reference = os.path.join(target_doc_dir, filename)
        if not os.path.exists(reference):
            continue
        with open(reference) as f:
            data = f.read()
        data = data.replace('std::ostream &', 'std::ostream&')
        pat = re.compile('doxygenfunction.. (bin|oct|hexu|hex)$', re.MULTILINE)
        data = re.sub(pat, r'doxygenfunction:: \1(int)', data)
        data = data.replace('std::FILE*', 'std::FILE *')
        data = data.replace('unsigned int', 'unsigned')
        with open(reference, 'w') as f:
            f.write(data)
    # Build the docs.
    html_dir = os.path.join(build_dir, 'html')
    if os.path.exists(html_dir):
        shutil.rmtree(html_dir)
    include_dir = fmt_repo.dir
    if LooseVersion(version) >= LooseVersion('3.0.0'):
        include_dir = os.path.join(include_dir, 'fmt')
    build.build_docs(version, doc_dir=target_doc_dir,
                     include_dir=include_dir, work_dir=build_dir)
    shutil.rmtree(os.path.join(html_dir, '.doctrees'))
    # Create symlinks for older versions.
    for link, target in {'index': 'contents', 'api': 'reference'}.items():
        link = os.path.join(html_dir, link) + '.html'
        target += '.html'
        if os.path.exists(os.path.join(html_dir, target)) and \
           not os.path.exists(link):
            os.symlink(target, link)
    # Copy docs to the website.
    version_doc_dir = os.path.join(doc_repo.dir, version)
    shutil.rmtree(version_doc_dir)
    shutil.move(html_dir, version_doc_dir)
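The diff ends after the freshly built pages are moved into the website checkout; publishing them would then be a matter of committing and pushing doc_repo. A sketch of that follow-up using the Git wrapper defined above (whether the real script automates this step is not shown here):

    # Hypothetical follow-up: record and publish the regenerated documentation.
    doc_repo.call('add', ['--all'], cwd=doc_repo.dir)
    doc_repo.call('commit', ['-m', 'Update documentation'], cwd=doc_repo.dir)
    doc_repo.call('push', [], cwd=doc_repo.dir)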