Diffstat (limited to 'scripts')
-rwxr-xr-x | scripts/builder.sh                   |  36
-rwxr-xr-x | scripts/check-dependencies.sh        |  23
-rwxr-xr-x | scripts/git-archive-all.py           | 571
-rw-r--r-- | scripts/installer.nsi                |   2
-rwxr-xr-x | scripts/macosx-build-dependencies.sh |  16
-rwxr-xr-x | scripts/publish-macosx.sh            |  13
-rwxr-xr-x | scripts/release-common.sh            |  32
-rw-r--r-- | scripts/setenv-mingw-xbuild.sh       |   1
-rw-r--r-- | scripts/setenv-unibuild.sh           |   4
-rwxr-xr-x | scripts/uni-build-dependencies.sh    |  23
-rwxr-xr-x | scripts/uni-get-dependencies.sh      |  12
11 files changed, 544 insertions(+), 189 deletions(-)
diff --git a/scripts/builder.sh b/scripts/builder.sh index 6a143e3..b00919f 100755 --- a/scripts/builder.sh +++ b/scripts/builder.sh @@ -11,6 +11,9 @@ # todo - make linux work # # todo - detect failure and stop +# +# todo - generalize to build release binaries as well +# init_variables() { @@ -94,8 +97,11 @@ upload_win_generic() if [ $DRYRUN ]; then echo dry run, not uploading to googlecode echo cmd - python ./scripts/googlecode_upload.py -s '"'$summary'"' $opts + echo dry run, not uploading to files.openscad.org + echo scp -v $filename openscad@files.openscad.org:www/ else python ./scripts/googlecode_upload.py -s "$summary" $opts + scp -v $filename openscad@files.openscad.org:www/ fi } @@ -173,22 +179,23 @@ update_win_www_download_links() cd inc echo `pwd` BASEURL='https://openscad.googlecode.com/files/' + # BASEURL='http://files.openscad.org' DATECODE=`date +"%Y.%m.%d"` rm win_snapshot_links.js - echo "snapinfo['WIN64_SNAPSHOT1_URL'] = '$BASEURL$WIN64_PACKAGEFILE1'" >> win_snapshot_links.js - echo "snapinfo['WIN64_SNAPSHOT2_URL'] = '$BASEURL$WIN64_PACKAGEFILE2'" >> win_snapshot_links.js - echo "snapinfo['WIN64_SNAPSHOT1_NAME'] = 'OpenSCAD $DATECODE'" >> win_snapshot_links.js - echo "snapinfo['WIN64_SNAPSHOT2_NAME'] = 'OpenSCAD $DATECODE'" >> win_snapshot_links.js - echo "snapinfo['WIN64_SNAPSHOT1_SIZE'] = '$WIN64_PACKAGEFILE1_SIZE'" >> win_snapshot_links.js - echo "snapinfo['WIN64_SNAPSHOT2_SIZE'] = '$WIN64_PACKAGEFILE2_SIZE'" >> win_snapshot_links.js - - echo "snapinfo['WIN32_SNAPSHOT1_URL'] = '$BASEURL$WIN32_PACKAGEFILE1'" >> win_snapshot_links.js - echo "snapinfo['WIN32_SNAPSHOT2_URL'] = '$BASEURL$WIN32_PACKAGEFILE2'" >> win_snapshot_links.js - echo "snapinfo['WIN32_SNAPSHOT1_NAME'] = 'OpenSCAD $DATECODE'" >> win_snapshot_links.js - echo "snapinfo['WIN32_SNAPSHOT2_NAME'] = 'OpenSCAD $DATECODE'" >> win_snapshot_links.js - echo "snapinfo['WIN32_SNAPSHOT1_SIZE'] = '$WIN32_PACKAGEFILE1_SIZE'" >> win_snapshot_links.js - echo "snapinfo['WIN32_SNAPSHOT2_SIZE'] = '$WIN32_PACKAGEFILE2_SIZE'" >> win_snapshot_links.js + echo "fileinfo['WIN64_SNAPSHOT1_URL'] = '$BASEURL$WIN64_PACKAGEFILE1'" >> win_snapshot_links.js + echo "fileinfo['WIN64_SNAPSHOT2_URL'] = '$BASEURL$WIN64_PACKAGEFILE2'" >> win_snapshot_links.js + echo "fileinfo['WIN64_SNAPSHOT1_NAME'] = 'OpenSCAD $DATECODE'" >> win_snapshot_links.js + echo "fileinfo['WIN64_SNAPSHOT2_NAME'] = 'OpenSCAD $DATECODE'" >> win_snapshot_links.js + echo "fileinfo['WIN64_SNAPSHOT1_SIZE'] = '$WIN64_PACKAGEFILE1_SIZE'" >> win_snapshot_links.js + echo "fileinfo['WIN64_SNAPSHOT2_SIZE'] = '$WIN64_PACKAGEFILE2_SIZE'" >> win_snapshot_links.js + + echo "fileinfo['WIN32_SNAPSHOT1_URL'] = '$BASEURL$WIN32_PACKAGEFILE1'" >> win_snapshot_links.js + echo "fileinfo['WIN32_SNAPSHOT2_URL'] = '$BASEURL$WIN32_PACKAGEFILE2'" >> win_snapshot_links.js + echo "fileinfo['WIN32_SNAPSHOT1_NAME'] = 'OpenSCAD $DATECODE'" >> win_snapshot_links.js + echo "fileinfo['WIN32_SNAPSHOT2_NAME'] = 'OpenSCAD $DATECODE'" >> win_snapshot_links.js + echo "fileinfo['WIN32_SNAPSHOT1_SIZE'] = '$WIN32_PACKAGEFILE1_SIZE'" >> win_snapshot_links.js + echo "fileinfo['WIN32_SNAPSHOT2_SIZE'] = '$WIN32_PACKAGEFILE2_SIZE'" >> win_snapshot_links.js echo 'modified win_snapshot_links.js' PAGER=cat git diff @@ -200,6 +207,9 @@ update_win_www_download_links() fi } +# FIXME: We might be running this locally and not need an ssh agent. 
+# Before checking $SSH_AUTH_SOCK, try 'ssh -T git@github.com' to verify that we +# can access github over ssh check_ssh_agent() { if [ $DRYRUN ]; then echo 'skipping ssh, dry run'; return; fi diff --git a/scripts/check-dependencies.sh b/scripts/check-dependencies.sh index ef8c904..5fddb13 100755 --- a/scripts/check-dependencies.sh +++ b/scripts/check-dependencies.sh @@ -87,11 +87,16 @@ mpfr_sysver() gmp_sysver() { # on some systems you have VERSION in gmp-$arch.h not gmp.h. use gmp*.h - if [ ! -e $1/include ]; then return; fi - gmppaths=`ls $1/include | grep ^gmp` + if [ -e $1/include/multiarch-x86_64-linux ]; then + subdir=include/multiarch-x86_64-linux + else + subdir=include + fi + if [ ! -e $1/$subdir ]; then return; fi + gmppaths=`ls $1/$subdir | grep ^gmp` if [ ! "$gmppaths" ]; then return; fi for gmpfile in $gmppaths; do - gmppath=$1/include/$gmpfile + gmppath=$1/$subdir/$gmpfile if [ "`grep __GNU_MP_VERSION $gmppath`" ]; then gmpmaj=`grep "define *__GNU_MP_VERSION *[0-9]*" $gmppath | awk '{print $3}'` gmpmin=`grep "define *__GNU_MP_VERSION_MINOR *[0-9]*" $gmppath | awk '{print $3}'` @@ -155,8 +160,15 @@ flex_sysver() bison_sysver() { + # bison (GNU Bison) 2.7.12-4996 if [ ! -x $1/bin/bison ]; then return ; fi - bison_sysver_result=`$1/bin/bison --version | grep bison | sed s/"[^0-9.]"/" "/g` + bison_sver=`$1/bin/bison --version | grep bison` + debug bison_sver1: $bison_sver + bison_sver=`echo $bison_sver | awk -F ")" ' { print $2 } '` + debug bison_sver2: $bison_sver + bison_sver=`echo $bison_sver | awk -F "-" ' { print $1 } '` + debug bison_sver3: $bison_sver + bison_sysver_result=$bison_sver } gcc_sysver() @@ -425,7 +437,7 @@ find_installed_version() debug $depname"_sysver" $syspath eval $depname"_sysver" $syspath fsv_tmp=`eval echo "$"$depname"_sysver_result"` - if [ $fsv_tmp ]; then break; fi + if [ $fsv_tmp ]; then break; fi fi done fi @@ -511,6 +523,7 @@ main() deps="qt4 cgal gmp mpfr boost opencsg glew eigen gcc bison flex make" #deps="$deps curl git" # not technically necessary for build #deps="$deps python cmake imagemagick" # only needed for tests + #deps="cgal" pretty_print title for depname in $deps; do debug "processing $dep" diff --git a/scripts/git-archive-all.py b/scripts/git-archive-all.py index ccfb08a..0088e1a 100755 --- a/scripts/git-archive-all.py +++ b/scripts/git-archive-all.py @@ -1,171 +1,464 @@ #! /usr/bin/env python +# coding=utf-8 + +from __future__ import print_function +from __future__ import unicode_literals + +__version__ = "1.7" import sys -from os import path, chdir -from subprocess import Popen, PIPE -from sys import argv, stdout -from fnmatch import fnmatch +from os import path, extsep +from subprocess import Popen, PIPE, CalledProcessError class GitArchiver(object): """ GitArchiver - + Scan a git repository and export all tracked files, and submodules. Checks for .gitattributes files in each directory and uses 'export-ignore' pattern entries for ignore files in the archive. - - Automatically detects output format extension: zip, tar, bz2, or gz + + Automatically detects output format extension: zip, tar, bz2, or gz. """ - - def __init__(self, prefix='', verbose=False, exclude=True, extra=[]): - self.prefix = prefix - self.verbose = verbose - self.exclude = exclude - self.extra = extra - - self._excludes = [] + def __init__(self, prefix='', verbose=False, exclude=True, force_sub=False, extra=None, main_repo_abspath=None): + """ + @type prefix: string + @param prefix: Prefix used to prepend all paths in the resulting archive. 
+ + @type verbose: bool + @param verbose: Determines verbosity of the output (stdout). + + @type exclude: bool + @param exclude: Determines whether archiver should follow rules specified in .gitattributes files. + Defaults to True. + + @type force_sub: bool + @param force_sub: Determines whether submodules are initialized and updated before archiving. + Defaults to False - def create(self, output_file): + @type extra: list + @param extra: List of extra paths to include in the resulting archive. + + @type main_repo_abspath: string + @param main_repo_abspath: Absolute path to the main repository (or one of subdirectories). + If None, current cwd is used. + If given path is path to a subdirectory (but not a submodule directory!) + it will be replaced with abspath to toplevel directory of the repository. """ - create(str output_file) -> None - - Creates the archive, written to the given output_file - Filetype may be one of: gz, zip, bz2, tar + if extra is None: + extra = [] + + if main_repo_abspath is None: + main_repo_abspath = path.abspath('') + elif not path.isabs(main_repo_abspath): + raise ValueError("You MUST pass absolute path to the main git repository.") + + # Raises an exception if there is no repo under main_repo_abspath. + try: + self.run_shell("[ -d .git ] || git rev-parse --git-dir > /dev/null 2>&1", main_repo_abspath) + except Exception as e: + raise ValueError("Not a git repository (or any of the parent directories).".format(path=main_repo_abspath)) + + # Detect toplevel directory of the repo. + main_repo_abspath = path.abspath(self.read_git_shell('git rev-parse --show-toplevel', main_repo_abspath).rstrip()) + + self.prefix = prefix + self.verbose = verbose + self.exclude = exclude + self.extra = extra + self.force_sub = force_sub + self.main_repo_abspath = main_repo_abspath + + def create(self, output_path, dry_run=False, output_format=None): """ - # - # determine the format - # - _, _, format = output_file.rpartition(".") + Creates the archive, written to the given output_file_path + + Type of the archive is determined either by extension of output_file_path or by the format argument. + Supported formats are: gz, zip, bz2, tar, tgz + + @type output_path: string + @param output_path: Output file path. - if format.lower() == 'zip': + @type dry_run: bool + @param dry_run: Determines whether create should do nothing but print what it would archive. + + @type output_format: string + @param output_format: Determines format of the output archive. + If None, format is determined from extension of output_file_path. 
+ """ + if output_format is None: + file_name, file_ext = path.splitext(output_path) + output_format = file_ext[len(extsep):].lower() + + if output_format == 'zip': from zipfile import ZipFile, ZIP_DEFLATED - output_archive = ZipFile(path.abspath(output_file), 'w') - add = lambda name, arcname: output_archive.write(name, self.prefix + arcname, ZIP_DEFLATED) - - elif format.lower() in ['tar', 'bz2', 'gz']: + + if not dry_run: + archive = ZipFile(path.abspath(output_path), 'w') + add = lambda file_path, file_name: archive.write(file_path, path.join(self.prefix, file_name), ZIP_DEFLATED) + elif output_format in ['tar', 'bz2', 'gz', 'tgz']: import tarfile - t_mode = ('w:%s' % format) if format != 'tar' else 'w' - - output_archive = tarfile.open(path.abspath(output_file), t_mode) - add = lambda name, arcname: output_archive.add(name, self.prefix + arcname) + + if output_format == 'tar': + t_mode = 'w' + elif output_format == 'tgz': + t_mode = 'w:gz' + else: + t_mode = 'w:{f}'.format(f=output_format) + + if not dry_run: + archive = tarfile.open(path.abspath(output_path), t_mode) + add = lambda file_path, file_name: archive.add(file_path, path.join(self.prefix, file_name)) + else: + raise RuntimeError("Unknown format: {f}".format(f=output_format)) + + for file_path in self.extra: + if not dry_run: + if self.verbose: + print("Compressing {f} => {a}...".format(f=file_path, + a=path.join(self.prefix, file_path))) + add(file_path, file_path) + else: + print("{f} => {a}".format(f=file_path, + a=path.join(self.prefix, file_path))) + + for file_path in self.list_files(): + if not dry_run: + if self.verbose: + print("Compressing {f} => {a}...".format(f=path.join(self.main_repo_abspath, file_path), + a=path.join(self.prefix, file_path))) + add(path.join(self.main_repo_abspath, file_path), file_path) + else: + print("{f} => {a}".format(f=path.join(self.main_repo_abspath, file_path), + a=path.join(self.prefix, file_path))) + + if not dry_run: + archive.close() + + def get_path_components(self, repo_abspath, abspath): + """ + Splits given abspath into components until repo_abspath is reached. + + E.g. if repo_abspath is '/Documents/Hobby/ParaView/' and abspath is + '/Documents/Hobby/ParaView/Catalyst/Editions/Base/', function will return: + ['.', 'Catalyst', 'Editions', 'Base'] + + First element is always '.' (concrete symbol depends on OS). + + @type repo_abspath: string + @param repo_abspath: Absolute path to the git repository. + + @type abspath: string + @param abspath: Absolute path to within repo_abspath. + + @rtype: list + @return: List of path components. + """ + components = [] + + while not path.samefile(abspath, repo_abspath): + abspath, tail = path.split(abspath) + + if len(tail): + components.insert(0, tail) + + components.insert(0, path.relpath(repo_abspath, repo_abspath)) + return components + + def get_exclude_patterns(self, repo_abspath, repo_file_paths): + """ + Returns exclude patterns for a given repo. It looks for .gitattributes files in repo_file_paths. + + Resulting dictionary will contain exclude patterns per path (relative to the repo_abspath). + E.g. {('.', 'Catalyst', 'Editions', 'Base'), ['Foo*', '*Bar']} + + @type repo_abspath: string + @param repo_abspath: Absolute path to the git repository. + + @type repo_file_paths: list + @param repo_file_paths: List of paths relative to the repo_abspath that are under git control. + + @rtype: dict + @return: Dictionary representing exclude patterns. + Keys are tuples of strings. Values are lists of strings. 
+ Returns None if self.exclude is not set. + """ + if not self.exclude: + return None + + def read_attributes(attributes_abspath): + patterns = [] + if path.isfile(attributes_abspath): + attributes = open(attributes_abspath, 'r').readlines() + patterns = [] + for line in attributes: + tokens = line.strip().split() + if "export-ignore" in tokens[1:]: + patterns.append(tokens[0]) + return patterns + + exclude_patterns = {(): []} + + # There may be no gitattributes. + try: + global_attributes_abspath = self.read_shell("git config --get core.attributesfile", repo_abspath).rstrip() + exclude_patterns[()] = read_attributes(global_attributes_abspath) + except: + # And valid to not have them. + pass + + for attributes_abspath in [path.join(repo_abspath, f) for f in repo_file_paths if f.endswith(".gitattributes")]: + # Each .gitattributes affects only files within its directory. + key = tuple(self.get_path_components(repo_abspath, path.dirname(attributes_abspath))) + exclude_patterns[key] = read_attributes(attributes_abspath) + + local_attributes_abspath = path.join(repo_abspath, ".git", "info", "attributes") + key = tuple(self.get_path_components(repo_abspath, repo_abspath)) + + if key in exclude_patterns: + exclude_patterns[key].extend(read_attributes(local_attributes_abspath)) else: - raise RuntimeError("Unknown format: '%s'" % format) - - # - # compress - # - - # extra files first (we may change folder later) - for name in self.extra: - if self.verbose: - toPath = '=> %s%s' % (self.prefix, name) if self.prefix else "" - print 'Compressing %s %s ...' % (name, toPath) - add(name, name) - - self._excludes = [] - - for name, arcname in self.listFiles(path.abspath('')): - if self.verbose: - toPath = '=> %s%s' % (self.prefix, arcname) if self.prefix else "" - print 'Compressing %s %s ...' % (arcname, toPath) - add(name, arcname) - - output_archive.close() - - - def listFiles(self, git_repositary_path, baselevel=''): - """ - listFiles(str git_repository_path, str baselevel='') -> iterator - - An iterator method that yields a tuple(filepath, fullpath) + exclude_patterns[key] = read_attributes(local_attributes_abspath) + + return exclude_patterns + + def is_file_excluded(self, repo_abspath, repo_file_path, exclude_patterns): + """ + Checks whether file at a given path is excluded. + + @type repo_abspath: string + @param repo_abspath: Absolute path to the git repository. + + @type repo_file_path: string + @param repo_file_path: Path to a file within repo_abspath. + + @type exclude_patterns: dict + @param exclude_patterns: Exclude patterns with format specified for get_exclude_patterns. + + @rtype: bool + @return: True if file should be excluded. Otherwise False. + """ + if exclude_patterns is None or not len(exclude_patterns): + return False + + from fnmatch import fnmatch + + file_name = path.basename(repo_file_path) + components = self.get_path_components(repo_abspath, path.join(repo_abspath, path.dirname(repo_file_path))) + + is_excluded = False + # We should check all patterns specified in intermediate directories to the given file. + # At the end we should also check for the global patterns (key '()' or empty tuple). 
+ while not is_excluded: + key = tuple(components) + if key in exclude_patterns: + patterns = exclude_patterns[key] + for p in patterns: + if fnmatch(file_name, p) or fnmatch(repo_file_path, p): + if self.verbose: + print("Exclude pattern matched {pattern}: {path}".format(pattern=p, path=repo_file_path)) + is_excluded = True + + if not len(components): + break + + components.pop() + + return is_excluded + + def list_files(self, repo_path=''): + """ + An iterator method that yields a file path relative to main_repo_abspath for each file that should be included in the archive. Skips those that match the exclusion patterns found in any discovered .gitattributes files along the way. - - Recurses into submodules as well. - """ - for filepath in self.runShell('git ls-files --cached --full-name --no-empty-directory'): - fullpath = path.join(baselevel, filepath) - filename = path.basename(filepath) - - if self.exclude and filename == '.gitattributes': - self._excludes = [] - fh = open(filepath, 'r') - for line in fh: - if not line: break - tokens = line.strip().split() - if 'export-ignore' in tokens[1:]: - self._excludes.append(tokens[0]) - fh.close() - - if not filename.startswith('.git') and not path.isdir(filepath): - - # check the patterns first - ignore = False - for pattern in self._excludes: - if fnmatch(fullpath, pattern) or fnmatch(filename, pattern): - if self.verbose: print 'Exclude pattern matched (%s): %s' % (pattern, fullpath) - ignore = True - break - if ignore: - continue - - # baselevel is needed to tell the arhiver where it have to extract file - yield filepath, fullpath - - # get paths for every submodule - for submodule in self.runShell("git submodule --quiet foreach 'pwd'"): - chdir(submodule) - # in order to get output path we need to exclude repository path from the submodule path - submodule = submodule[len(git_repositary_path)+1:] - # recursion allows us to process repositories with more than one level of submodules - for git_file in self.listFiles(git_repositary_path, submodule): - yield git_file - - - + + Recurs into submodules as well. + + @type repo_path: string + @param repo_path: Path to the git submodule repository within the main git repository. + + @rtype: iterator + @return: Iterator to traverse files under git control relative to main_repo_abspath. + """ + repo_abspath = path.join(self.main_repo_abspath, repo_path) + repo_file_paths = self.read_git_shell("git ls-files --cached --full-name --no-empty-directory", repo_abspath).splitlines() + exclude_patterns = self.get_exclude_patterns(repo_abspath, repo_file_paths) + + for repo_file_path in repo_file_paths: + # Git puts path in quotes if file path has unicode characters. + repo_file_path = repo_file_path.strip('"') # file path relative to current repo + file_name = path.basename(repo_file_path) + + # Only list symlinks and files that don't start with git. + if file_name.startswith(".git") or (not path.islink(repo_file_path) and path.isdir(repo_file_path)): + continue + + main_repo_file_path = path.join(repo_path, repo_file_path) # file path relative to the main repo + + if self.is_file_excluded(repo_abspath, repo_file_path, exclude_patterns): + continue + + # Yield both repo_file_path and main_repo_file_path to preserve structure of the repo. + yield main_repo_file_path + + if self.force_sub: + self.run_shell("git submodule init", repo_abspath) + self.run_shell("git submodule update", repo_abspath) + + # List files of every submodule. 
+ for submodule_path in self.read_shell("git submodule --quiet foreach 'pwd'", repo_abspath).splitlines(): + # In order to get output path we need to exclude repository path from submodule_path. + submodule_path = path.relpath(submodule_path, self.main_repo_abspath) + for file_path in self.list_files(submodule_path): + yield file_path + @staticmethod - def runShell(cmd): - return Popen(cmd, shell=True, stdout=PIPE).stdout.read().splitlines() - - - -if __name__ == "__main__": + def run_shell(cmd, cwd=None): + """ + Runs shell command. + + @type cmd: string + @param cmd: Command to be executed. + + @type cwd: string + @param cwd: Working directory. + + @rtype: int + @return: Return code of the command. + + @raise CalledProcessError: Raises exception if return code of the command is non-zero. + """ + p = Popen(cmd, shell=True, cwd=cwd) + p.wait() + + if p.returncode: + raise CalledProcessError(returncode=p.returncode, cmd=cmd) + + return p.returncode + + @staticmethod + def read_shell(cmd, cwd=None, encoding='utf-8'): + """ + Runs shell command and reads output. + + @type cmd: string + @param cmd: Command to be executed. + + @type cwd: string + @param cwd: Working directory. + + @type encoding: string + @param encoding: Encoding used to decode bytes returned by Popen into string. + + @rtype: string + @return: Output of the command. + + @raise CalledProcessError: Raises exception if return code of the command is non-zero. + """ + p = Popen(cmd, shell=True, stdout=PIPE, cwd=cwd) + output, _ = p.communicate() + output = output.decode(encoding) + + if p.returncode: + raise CalledProcessError(returncode=p.returncode, cmd=cmd, output=output) + + return output + + @staticmethod + def read_git_shell(cmd, cwd=None): + """ + Runs git shell command, reads output and decodes it into unicode string + + @type cmd: string + @param cmd: Command to be executed. + + @type cwd: string + @param cwd: Working directory. + + @rtype: string + @return: Output of the command. + + @raise CalledProcessError: Raises exception if return code of the command is non-zero. + """ + p = Popen(cmd, shell=True, stdout=PIPE, cwd=cwd) + output, _ = p.communicate() + output = output.decode('unicode_escape').encode('raw_unicode_escape').decode('utf-8') + + if p.returncode: + raise CalledProcessError(returncode=p.returncode, cmd=cmd, output=output) + + return output + + +if __name__ == '__main__': from optparse import OptionParser - parser = OptionParser(usage="usage: %prog [-v] [--prefix PREFIX] [--no-exclude] OUTPUT_FILE", version="%prog 1.0") - - parser.add_option('--prefix', type='string', dest='prefix', - default='', help="prepend PREFIX to each filename in the archive") - - parser.add_option('-v', '--verbose', action='store_true', dest='verbose', help='enable verbose mode') - - parser.add_option('--no-exclude', action='store_false', dest='exclude', - default=True, help="Dont read .gitattributes files for patterns containing export-ignore attrib") - - parser.add_option('--extra', action='append', dest='extra', default=[], + parser = OptionParser(usage="usage: %prog [-v] [--prefix PREFIX] [--no-exclude] [--force-submodules] [--dry-run] OUTPUT_FILE", + version="%prog {version}".format(version=__version__)) + + parser.add_option('--prefix', + type='string', + dest='prefix', + default='', + help="Prepend PREFIX to each filename in the archive. 
OUTPUT_FILE name is used by default to avoid tarbomb.") + + parser.add_option('-v', '--verbose', + action='store_true', + dest='verbose', + help='Enable verbose mode.') + + parser.add_option('--no-exclude', + action='store_false', + dest='exclude', + default=True, + help="Don't read .gitattributes files for patterns containing export-ignore attrib.") + + parser.add_option('--force-submodules', + action='store_true', + dest='force_sub', + help="Force a git submodule init && git submodule update at each level before iterating submodules.") + + parser.add_option('--extra', + action='append', + dest='extra', + default=[], help="Any additional files to include in the archive.") + parser.add_option('--dry-run', + action='store_true', + dest='dry_run', + help="Don't actually archive anything, just show what would be done.") options, args = parser.parse_args() - + if len(args) != 1: - parser.error('You must specify exactly one output file') - - outFile = args[0] - - if path.isdir(outFile): - parser.error('You cannot use directory as output') - - archiver = GitArchiver(options.prefix, - options.verbose, - options.exclude, - options.extra) - + parser.error("You must specify exactly one output file") + + output_file_path = args[0] + + if path.isdir(output_file_path): + parser.error("You cannot use directory as output") + + # avoid tarbomb + if options.prefix: + options.prefix = path.join(options.prefix, '') + else: + import re + + output_name = path.basename(output_file_path) + output_name = re.sub('(\.zip|\.tar|\.tgz|\.gz|\.bz2|\.tar\.gz|\.tar\.bz2)$', '', output_name) or "Archive" + options.prefix = path.join(output_name, '') + try: - archiver.create(outFile) - except Exception, e: - parser.exit(2, "%s\n" % e) - + archiver = GitArchiver(options.prefix, + options.verbose, + options.exclude, + options.force_sub, + options.extra) + archiver.create(output_file_path, options.dry_run) + except Exception as e: + parser.exit(2, "{exception}\n".format(exception=e)) + sys.exit(0) diff --git a/scripts/installer.nsi b/scripts/installer.nsi index 2cbd6d3..fea6563 100644 --- a/scripts/installer.nsi +++ b/scripts/installer.nsi @@ -17,6 +17,7 @@ CreateShortCut $SMPROGRAMS\OpenSCAD.lnk $INSTDIR\openscad.exe WriteUninstaller $INSTDIR\Uninstall.exe WriteRegStr HKLM "Software\Microsoft\Windows\CurrentVersion\Uninstall\OpenSCAD" "DisplayName" "OpenSCAD (remove only)" WriteRegStr HKLM "Software\Microsoft\Windows\CurrentVersion\Uninstall\OpenSCAD" "UninstallString" "$INSTDIR\Uninstall.exe" +WriteRegStr HKCR ".scad" "PerceivedType" "text" SectionEnd Section "Uninstall" ${unregisterExtension} ".scad" "OpenSCAD_File" @@ -30,5 +31,6 @@ Delete $INSTDIR\libraries\boxes.scad Delete $INSTDIR\libraries\shapes.scad RMDir $INSTDIR\libraries Delete $INSTDIR\openscad.exe +Delete $INSTDIR\openscad.com RMDir $INSTDIR SectionEnd diff --git a/scripts/macosx-build-dependencies.sh b/scripts/macosx-build-dependencies.sh index 088d8b4..03f8598 100755 --- a/scripts/macosx-build-dependencies.sh +++ b/scripts/macosx-build-dependencies.sh @@ -238,8 +238,9 @@ build_cgal() cd $BASEDIR/src rm -rf CGAL-$version if [ ! 
-f CGAL-$version.tar.gz ]; then - # 4.1 - curl -O https://gforge.inria.fr/frs/download.php/31641/CGAL-$version.tar.gz + # 4.2 + curl -O https://gforge.inria.fr/frs/download.php/32359/CGAL-$version.tar.gz + # 4.1 curl -O https://gforge.inria.fr/frs/download.php/31641/CGAL-$version.tar.gz # 4.1-beta1 curl -O https://gforge.inria.fr/frs/download.php/31348/CGAL-$version.tar.gz # 4.0.2 curl -O https://gforge.inria.fr/frs/download.php/31175/CGAL-$version.tar.gz # 4.0 curl -O https://gforge.inria.fr/frs/download.php/30387/CGAL-$version.tar.gz @@ -306,7 +307,9 @@ build_eigen() EIGENDIR="none" if [ $version = "2.0.17" ]; then EIGENDIR=eigen-eigen-b23437e61a07; fi - if [ $version = "3.1.2" ]; then EIGENDIR=eigen-eigen-5097c01bcdc4; fi + if [ $version = "3.1.2" ]; then EIGENDIR=eigen-eigen-5097c01bcdc4; + elif [ $version = "3.1.3" ]; then EIGENDIR=eigen-eigen-2249f9c22fe8; fi + if [ $EIGENDIR = "none" ]; then echo Unknown eigen version. Please edit script. exit 1 @@ -435,12 +438,13 @@ fi echo "Using basedir:" $BASEDIR mkdir -p $SRCDIR $DEPLOYDIR build_qt 4.8.4 -build_eigen 3.1.2 -build_gmp 5.1.1 +# NB! For eigen, also update the path in the function +build_eigen 3.1.3 +build_gmp 5.1.2 build_mpfr 3.1.2 build_boost 1.53.0 # NB! For CGAL, also update the actual download URL in the function -build_cgal 4.1 +build_cgal 4.2 build_glew 1.9.0 build_opencsg 1.3.2 if $OPTION_DEPLOY; then diff --git a/scripts/publish-macosx.sh b/scripts/publish-macosx.sh index a3b0090..3617570 100755 --- a/scripts/publish-macosx.sh +++ b/scripts/publish-macosx.sh @@ -26,9 +26,9 @@ update_www_download_links() if [ -f $webdir/$incfile ]; then cd $webdir - echo "snapinfo['MAC_SNAPSHOT_URL'] = '$BASEURL$packagefile'" > $incfile - echo "snapinfo['MAC_SNAPSHOT_NAME'] = 'OpenSCAD $version'" >> $incfile - echo "snapinfo['MAC_SNAPSHOT_SIZE'] = '$filesize'" >> $incfile + echo "fileinfo['MAC_SNAPSHOT_URL'] = '$BASEURL$packagefile'" > $incfile + echo "fileinfo['MAC_SNAPSHOT_NAME'] = 'OpenSCAD $version'" >> $incfile + echo "fileinfo['MAC_SNAPSHOT_SIZE'] = '$filesize'" >> $incfile echo 'modified mac_snapshot_links.js' git --no-pager diff @@ -67,7 +67,7 @@ if [[ $? != 0 ]]; then exit 1 fi -SIGNATURE=$(openssl dgst -sha1 -binary < OpenSCAD-$VERSION.dmg | openssl dgst -dss1 -sign dsa_priv.pem | openssl enc -base64) +SIGNATURE=$(openssl dgst -sha1 -binary < OpenSCAD-$VERSION.dmg | openssl dgst -dss1 -sign $HOME/.ssh/openscad-appcast.pem | openssl enc -base64) if [[ $VERSION == $VERSIONDATE ]]; then APPCASTFILE=appcast-snapshots.xml @@ -90,5 +90,10 @@ if [[ $? != 0 ]]; then exit 1 fi +scp OpenSCAD-$VERSION.dmg openscad@files.openscad.org:www +if [[ $? != 0 ]]; then + exit 1 +fi + # Update snapshot filename on web page update_www_download_links version=$VERSION packagefile=OpenSCAD-$VERSION.dmg filesize=$FILESIZE diff --git a/scripts/release-common.sh b/scripts/release-common.sh index 7d36907..8a1ed7c 100755 --- a/scripts/release-common.sh +++ b/scripts/release-common.sh @@ -52,13 +52,13 @@ elif [[ $OSTYPE == "linux-gnu" ]]; then fi if [ "`echo $* | grep mingw32`" ]; then - OS=LINXWIN + OS=UNIX_CROSS_WIN ARCH=32 echo Mingw-cross build using ARCH=32 fi if [ "`echo $* | grep mingw64`" ]; then - OS=LINXWIN + OS=UNIX_CROSS_WIN ARCH=64 echo Mingw-cross build using ARCH=64 fi @@ -90,7 +90,7 @@ fi echo "Checking pre-requisites..." case $OS in - LINXWIN) + UNIX_CROSS_WIN) MAKENSIS= if [ "`command -v makensis`" ]; then MAKENSIS=makensis @@ -120,10 +120,9 @@ echo "Building openscad-$VERSION ($VERSIONDATE) $CONFIGURATION..." if [ ! 
$NUMCPU ]; then echo "note: you can 'export NUMCPU=x' for multi-core compiles (x=number)"; - NUMCPU=2 -else - echo "NUMCPU: " $NUMCPU + NUMCPU=1 fi +echo "NUMCPU: " $NUMCPU CONFIG=deploy case $OS in @@ -138,7 +137,7 @@ case $OS in ZIPARGS="a -tzip" TARGET=release ;; - LINXWIN) + UNIX_CROSS_WIN) . ./scripts/setenv-mingw-xbuild.sh $ARCH TARGET=release ZIP="zip" @@ -148,7 +147,7 @@ esac case $OS in - LINXWIN) + UNIX_CROSS_WIN) cd $DEPLOYDIR && qmake VERSION=$VERSION OPENSCAD_COMMIT=$OPENSCAD_COMMIT CONFIG+=$CONFIG CONFIG+=mingw-cross-env CONFIG-=debug ../openscad.pro cd $OPENSCADDIR ;; @@ -158,7 +157,7 @@ case $OS in esac case $OS in - LINXWIN) + UNIX_CROSS_WIN) cd $DEPLOYDIR make clean ## comment out for test-run cd $OPENSCADDIR @@ -176,10 +175,17 @@ case $OS in #if the following files are missing their tried removal stops the build process on msys touch -t 200012121010 parser_yacc.h parser_yacc.cpp lexer_lex.cpp ;; + UNIX_CROSS_WIN) + # kludge to enable paralell make + touch -t 200012121010 $OPENSCADDIR/src/parser_yacc.h + touch -t 200012121010 $OPENSCADDIR/src/parser_yacc.cpp + touch -t 200012121010 $OPENSCADDIR/src/parser_yacc.hpp + touch -t 200012121010 $OPENSCADDIR/src/lexer_lex.cpp + ;; esac case $OS in - LINXWIN) + UNIX_CROSS_WIN) # make main openscad.exe cd $DEPLOYDIR make $TARGET -j$NUMCPU ## comment 4 test @@ -214,12 +220,12 @@ case $OS in EXAMPLESDIR=OpenSCAD.app/Contents/Resources/examples LIBRARYDIR=OpenSCAD.app/Contents/Resources/libraries ;; - LINXWIN) + UNIX_CROSS_WIN) EXAMPLESDIR=$DEPLOYDIR/openscad-$VERSION/examples/ LIBRARYDIR=$DEPLOYDIR/openscad-$VERSION/libraries/ rm -rf $DEPLOYDIR/openscad-$VERSION mkdir $DEPLOYDIR/openscad-$VERSION - ;; + ;; *) EXAMPLESDIR=openscad-$VERSION/examples/ LIBRARYDIR=openscad-$VERSION/libraries/ @@ -267,7 +273,7 @@ case $OS in rm -rf openscad-$VERSION echo "Binary created: openscad-$VERSION.zip" ;; - LINXWIN) + UNIX_CROSS_WIN) BINFILE=$DEPLOYDIR/OpenSCAD-$VERSION-x86-$ARCH.zip INSTFILE=$DEPLOYDIR/OpenSCAD-$VERSION-x86-$ARCH-Installer.exe diff --git a/scripts/setenv-mingw-xbuild.sh b/scripts/setenv-mingw-xbuild.sh index d3a014c..a88b752 100644 --- a/scripts/setenv-mingw-xbuild.sh +++ b/scripts/setenv-mingw-xbuild.sh @@ -6,6 +6,7 @@ # # source ./scripts/setenv-mingw-xbuild.sh # 32 bit build # source ./scripts/setenv-mingw-xbuild.sh 64 # 64 bit build +# source ./scripts/setenv-mingw-xbuild.sh clean # Clean up exported variables # # Prerequisites: # diff --git a/scripts/setenv-unibuild.sh b/scripts/setenv-unibuild.sh index 980fa7b..cb0b0a0 100644 --- a/scripts/setenv-unibuild.sh +++ b/scripts/setenv-unibuild.sh @@ -54,7 +54,9 @@ setenv_netbsd() QMAKESPEC=netbsd-g++ QTDIR=/usr/pkg/qt4 PATH=/usr/pkg/qt4/bin:$PATH - LD_LIBRARY_PATH=/usr/pkg/qt4/lib:/usr/X11R7/lib:$LD_LIBRARY_PATH + LD_LIBRARY_PATH=/usr/pkg/qt4/lib:$LD_LIBRARY_PATH + LD_LIBRARY_PATH=/usr/X11R7/lib:$LD_LIBRARY_PATH + LD_LIBRARY_PATH=/usr/pkg/lib:$LD_LIBRARY_PATH export QMAKESPEC export QTDIR diff --git a/scripts/uni-build-dependencies.sh b/scripts/uni-build-dependencies.sh index 60dbb74..6596c8a 100755 --- a/scripts/uni-build-dependencies.sh +++ b/scripts/uni-build-dependencies.sh @@ -290,11 +290,26 @@ build_cgal() ver3_7="curl --insecure -O https://gforge.inria.fr/frs/download.php/27641/CGAL-3.7.tar.gz" vernull="echo already downloaded..skipping" download_cmd=ver`echo $version | sed s/"\."/"_"/` - if [ -e CGAL-$version.tar.gz ]; then download_cmd=vernull; fi - if [ -e CGAL-$version.tar.bz2 ]; then download_cmd=vernull; fi + + if [ -e CGAL-$version.tar.gz ]; then + 
download_cmd=vernull; + fi + if [ -e CGAL-$version.tar.bz2 ]; then + download_cmd=vernull; + fi + `eval echo "$"$download_cmd` - if [ -e CGAL-$version.tar.gz ]; then tar xf CGAL-$version.tar.gz; fi - if [ -e CGAL-$version.tar.bz2 ]; then tar xf CGAL-$version.tar.bz2; fi + + zipper=gzip + suffix=gz + if [ -e CGAL-$version.tar.bz2 ]; then + zipper=bzip2 + suffix=bz2 + fi + + $zipper -f -d CGAL-$version.tar.$suffix; + tar xf CGAL-$version.tar + cd CGAL-$version # older cmakes have buggy FindBoost that can result in diff --git a/scripts/uni-get-dependencies.sh b/scripts/uni-get-dependencies.sh index e2fdaa7..31337c8 100755 --- a/scripts/uni-get-dependencies.sh +++ b/scripts/uni-get-dependencies.sh @@ -7,8 +7,9 @@ get_fedora_deps() { sudo yum install qt-devel bison flex eigen2-devel python-paramiko \ - boost-devel mpfr-devel gmp-devel glew-devel CGAL-devel gcc pkgconfig \ - git libXmu-devel curl imagemagick + boost-devel mpfr-devel gmp-devel glew-devel CGAL-devel gcc gcc-c++ pkgconfig \ + opencsg-devel git libXmu-devel curl imagemagick ImageMagick make \ + xorg-x11-server-Xvfb } get_qomo_deps() @@ -34,7 +35,8 @@ get_freebsd_deps() get_netbsd_deps() { sudo pkgin install bison boost cmake git bash eigen flex gmake gmp mpfr \ - qt4 glew cgal opencsg modular-xorg python27 py27-paramiko curl imagemagick + qt4 glew cgal opencsg modular-xorg python27 py27-paramiko curl \ + imagemagick ImageMagick } get_opensuse_deps() @@ -48,7 +50,7 @@ get_mageia_deps() sudo urpmi ctags sudo urpmi task-c-devel task-c++-devel libqt4-devel libgmp-devel \ libmpfr-devel libboost-devel eigen3-devel libglew-devel bison flex \ - cmake imagemagick python curl git + cmake imagemagick python curl git x11-server-xvfb } get_debian_deps() @@ -74,6 +76,8 @@ if [ -e /etc/issue ]; then get_debian_deps elif [ "`grep -i debian /etc/issue`" ]; then get_debian_deps + elif [ "`grep -i mint /etc/issue`" ]; then + get_debian_deps elif [ "`grep -i suse /etc/issue`" ]; then get_opensuse_deps elif [ "`grep -i fedora /etc/issue`" ]; then |