svn commit: r556589 - in head/databases/mongodb40: . files

Mikael Urankar <mikael@FreeBSD.org>
Sun Nov 29 16:47:46 UTC 2020


Author: mikael
Date: Sun Nov 29 16:47:45 2020
New Revision: 556589
URL: https://svnweb.freebsd.org/changeset/ports/556589

Log:
  databases/mongodb40: allow build with python 3
  
  PR:		249598
  Submitted by:	Ronald Klop
  Approved by:	dev.ashevchuk (maintainer, previous version)

Added:
  head/databases/mongodb40/files/patch-python3   (contents, props changed)
Modified:
  head/databases/mongodb40/Makefile

Modified: head/databases/mongodb40/Makefile
==============================================================================
--- head/databases/mongodb40/Makefile	Sun Nov 29 16:40:08 2020	(r556588)
+++ head/databases/mongodb40/Makefile	Sun Nov 29 16:47:45 2020	(r556589)
@@ -3,6 +3,7 @@
 PORTNAME=	mongodb
 DISTVERSIONPREFIX=	r
 DISTVERSION=	4.0.19
+PORTREVISION=	1
 CATEGORIES=	databases net
 MASTER_SITES=	https://fastdl.mongodb.org/src/ \
 		http://fastdl.mongodb.org/src/
@@ -21,7 +22,7 @@ LICENSE_PERMS_SSPLv1=	dist-mirror dist-sell pkg-mirror
 ONLY_FOR_ARCHS=	aarch64 amd64
 ONLY_FOR_ARCHS_REASON=	"Only supported on amd64 and aarch64 (i386 deprecated in v3)"
 
-BUILD_DEPENDS=	${PYTHON_PKGNAMEPREFIX}cheetah>=2.4.4:devel/py-cheetah@${PY_FLAVOR} \
+BUILD_DEPENDS=	${PYTHON_PKGNAMEPREFIX}cheetah3>0:devel/py-cheetah3@${PY_FLAVOR} \
 		${PY_TYPING} \
 		${PYTHON_PKGNAMEPREFIX}yaml>=3.11:devel/py-yaml@${PY_FLAVOR} \
 		${LOCALBASE}/bin/ar:devel/binutils
@@ -30,7 +31,7 @@ LIB_DEPENDS=	libboost_system.so:devel/boost-libs \
 		libcurl.so:ftp/curl \
 		libsnappy.so:archivers/snappy
 
-USES=		compiler:c++14-lang cpe python:2.7,build scons:python2 shebangfix
+USES=		compiler:c++14-lang cpe python:3.5+,build scons shebangfix
 USE_RC_SUBR=	mongod
 
 CONFLICTS_BUILD=	mongo-cxx-driver

Added: head/databases/mongodb40/files/patch-python3
==============================================================================
--- /dev/null	00:00:00 1970	(empty, because file is newly added)
+++ head/databases/mongodb40/files/patch-python3	Sun Nov 29 16:47:45 2020	(r556589)
@@ -0,0 +1,1397 @@
+diff -ru /data/ports-build/mongodb-src-r4.0.21/SConstruct ./SConstruct
+--- /data/ports-build/mongodb-src-r4.0.21/SConstruct	2020-11-15 22:50:25.001942000 +0100
++++ ./SConstruct	2020-11-16 20:09:38.793885000 +0100
+@@ -28,8 +28,8 @@
+ import mongo.toolchain as mongo_toolchain
+ import mongo.generators as mongo_generators
+ 
+-EnsurePythonVersion(2, 7)
+-EnsureSConsVersion(2, 5)
++EnsurePythonVersion(3, 5)
++EnsureSConsVersion(3, 0, 4)
+ 
+ from buildscripts import utils
+ from buildscripts import moduleconfig
+@@ -435,7 +435,7 @@
+ }
+ 
+ add_option('win-version-min',
+-    choices=win_version_min_choices.keys(),
++    choices=list(win_version_min_choices.keys()),
+     default=None,
+     help='minimum Windows version to support',
+     type='choice',
+@@ -547,7 +547,7 @@
+ except IOError as e:
+     # If the file error wasn't because the file is missing, error out
+     if e.errno != errno.ENOENT:
+-        print("Error opening version.json: {0}".format(e.strerror))
++        print(("Error opening version.json: {0}".format(e.strerror)))
+         Exit(1)
+ 
+     version_data = {
+@@ -556,14 +556,14 @@
+     }
+ 
+ except ValueError as e:
+-    print("Error decoding version.json: {0}".format(e))
++    print(("Error decoding version.json: {0}".format(e)))
+     Exit(1)
+ 
+ # Setup the command-line variables
+ def variable_shlex_converter(val):
+     # If the argument is something other than a string, propogate
+     # it literally.
+-    if not isinstance(val, basestring):
++    if not isinstance(val, str):
+         return val
+     parse_mode = get_option('variable-parse-mode')
+     if parse_mode == 'auto':
+@@ -627,7 +627,7 @@
+ 
+ variables_files = variable_shlex_converter(get_option('variables-files'))
+ for file in variables_files:
+-    print("Using variable customization file %s" % file)
++    print(("Using variable customization file %s" % file))
+ 
+ env_vars = Variables(
+     files=variables_files,
+@@ -636,7 +636,7 @@
+ 
+ sconsflags = os.environ.get('SCONSFLAGS', None)
+ if sconsflags:
+-    print("Using SCONSFLAGS environment variable arguments: %s" % sconsflags)
++    print(("Using SCONSFLAGS environment variable arguments: %s" % sconsflags))
+ 
+ env_vars.Add('ABIDW',
+     help="Configures the path to the 'abidw' (a libabigail) utility")
+@@ -770,7 +770,7 @@
+ def validate_mongo_version(key, val, env):
+     regex = r'^(\d+)\.(\d+)\.(\d+)-?((?:(rc)(\d+))?.*)?'
+     if not re.match(regex, val):
+-        print("Invalid MONGO_VERSION '{}', or could not derive from version.json or git metadata. Please add a conforming MONGO_VERSION=x.y.z[-extra] as an argument to SCons".format(val))
++        print(("Invalid MONGO_VERSION '{}', or could not derive from version.json or git metadata. Please add a conforming MONGO_VERSION=x.y.z[-extra] as an argument to SCons".format(val)))
+         Exit(1)
+ 
+ env_vars.Add('MONGO_VERSION',
+@@ -901,12 +901,12 @@
+         Exit(1)
+ 
+ sconsDataDir = Dir(buildDir).Dir('scons')
+-SConsignFile(str(sconsDataDir.File('sconsign')))
++SConsignFile(str(sconsDataDir.File('sconsign.py3')))
+ 
+ def printLocalInfo():
+     import sys, SCons
+-    print( "scons version: " + SCons.__version__ )
+-    print( "python version: " + " ".join( [ `i` for i in sys.version_info ] ) )
++    print(( "scons version: " + SCons.__version__ ))
++    print(( "python version: " + " ".join( [ repr(i) for i in sys.version_info ] ) ))
+ 
+ printLocalInfo()
+ 
+@@ -986,12 +986,12 @@
+ env.AddMethod(mongo_platform.env_get_os_name_wrapper, 'GetTargetOSName')
+ 
+ def fatal_error(env, msg, *args):
+-    print(msg.format(*args))
++    print((msg.format(*args)))
+     Exit(1)
+ 
+ def conf_error(env, msg, *args):
+-    print(msg.format(*args))
+-    print("See {0} for details".format(env.File('$CONFIGURELOG').abspath))
++    print((msg.format(*args)))
++    print(("See {0} for details".format(env.File('$CONFIGURELOG').abspath)))
+     Exit(1)
+ 
+ env.AddMethod(fatal_error, 'FatalError')
+@@ -1010,12 +1010,12 @@
+ env.AddMethod(lambda env: env['VERBOSE'], 'Verbose')
+ 
+ if has_option('variables-help'):
+-    print(env_vars.GenerateHelpText(env))
++    print((env_vars.GenerateHelpText(env)))
+     Exit(0)
+ 
+ #unknown_vars = env_vars.UnknownVariables()
+ #if unknown_vars:
+-#    env.FatalError("Unknown variables specified: {0}", ", ".join(unknown_vars.keys()))
++#    env.FatalError("Unknown variables specified: {0}", ", ".join(list(unknown_vars.keys())))
+ 
+ def set_config_header_define(env, varname, varval = 1):
+     env['CONFIG_HEADER_DEFINES'][varname] = varval
+@@ -1100,7 +1100,7 @@
+         context.Result(ret)
+         return ret;
+ 
+-    for k in processor_macros.keys():
++    for k in list(processor_macros.keys()):
+         ret = run_compile_check(k)
+         if ret:
+             context.Result('Detected a %s processor' % k)
+@@ -1222,7 +1222,7 @@
+     env['TARGET_ARCH'] = detected_processor
+ 
+ if env['TARGET_OS'] not in os_macros:
+-    print("No special config for [{0}] which probably means it won't work".format(env['TARGET_OS']))
++    print(("No special config for [{0}] which probably means it won't work".format(env['TARGET_OS'])))
+ elif not detectConf.CheckForOS(env['TARGET_OS']):
+     env.ConfError("TARGET_OS ({0}) is not supported by compiler", env['TARGET_OS'])
+ 
+@@ -2081,7 +2081,7 @@
+             # form -Wno-xxx (but not -Wno-error=xxx), we also add -Wxxx to the flags. GCC does
+             # warn on unknown -Wxxx style flags, so this lets us probe for availablity of
+             # -Wno-xxx.
+-            for kw in test_mutation.keys():
++            for kw in list(test_mutation.keys()):
+                 test_flags = test_mutation[kw]
+                 for test_flag in test_flags:
+                     if test_flag.startswith("-Wno-") and not test_flag.startswith("-Wno-error="):
+@@ -2095,7 +2095,7 @@
+         # to make them real errors.
+         cloned.Append(CCFLAGS=['-Werror'])
+         conf = Configure(cloned, help=False, custom_tests = {
+-                'CheckFlag' : lambda(ctx) : CheckFlagTest(ctx, tool, extension, flag)
++                'CheckFlag' : lambda ctx : CheckFlagTest(ctx, tool, extension, flag)
+         })
+         available = conf.CheckFlag()
+         conf.Finish()
+@@ -2611,7 +2611,7 @@
+         llvm_symbolizer = get_option('llvm-symbolizer')
+         if os.path.isabs(llvm_symbolizer):
+             if not myenv.File(llvm_symbolizer).exists():
+-                print("WARNING: Specified symbolizer '%s' not found" % llvm_symbolizer)
++                print(("WARNING: Specified symbolizer '%s' not found" % llvm_symbolizer))
+                 llvm_symbolizer = None
+         else:
+             llvm_symbolizer = myenv.WhereIs(llvm_symbolizer)
+@@ -2922,7 +2922,7 @@
+                         # TODO: If we could programmatically extract the paths from the info output
+                         # we could give a better message here, but brew info's machine readable output
+                         # doesn't seem to include the whole 'caveats' section.
+-                        message = subprocess.check_output([brew, "info", "openssl"])
++                        message = subprocess.check_output([brew, "info", "openssl"]).decode('utf-8')
+                         advice = textwrap.dedent(
+                             """\
+                             NOTE: HomeBrew installed to {0} appears to have OpenSSL installed.
+@@ -3082,7 +3082,7 @@
+         files = ['ssleay32.dll', 'libeay32.dll']
+         for extra_file in files:
+             if not addOpenSslLibraryToDistArchive(extra_file):
+-                print("WARNING: Cannot find SSL library '%s'" % extra_file)
++                print(("WARNING: Cannot find SSL library '%s'" % extra_file))
+ 
+ 
+ 
+@@ -3423,7 +3423,7 @@
+ 
+         outputIndex = next((idx for idx in [0,1] if conf.CheckAltivecVbpermqOutput(idx)), None)
+         if outputIndex is not None:
+-	    conf.env.SetConfigHeaderDefine("MONGO_CONFIG_ALTIVEC_VEC_VBPERMQ_OUTPUT_INDEX", outputIndex)
++            conf.env.SetConfigHeaderDefine("MONGO_CONFIG_ALTIVEC_VEC_VBPERMQ_OUTPUT_INDEX", outputIndex)
+         else:
+             myenv.ConfError("Running on ppc64le, but can't find a correct vec_vbpermq output index.  Compiler or platform not supported")
+ 
+@@ -3523,9 +3523,12 @@
+     import buildscripts.pylinters
+     buildscripts.pylinters.lint_all(None, {}, [])
+ 
+-    import buildscripts.lint
+-    if not buildscripts.lint.run_lint( [ "src/mongo/" ] ):
+-        raise Exception( "lint errors" )
++    env.Command(
++        target="#run_lint",
++        source=["buildscripts/lint.py", "src/mongo"],
++        action="$PYTHON $SOURCES[0] $SOURCES[1]",
++    )
++
+ 
+ env.Alias( "lint" , [] , [ doLint ] )
+ env.AlwaysBuild( "lint" )
+Only in .: build
+Only in ./buildscripts: __pycache__
+diff -ru /data/ports-build/mongodb-src-r4.0.21/buildscripts/aggregate_tracefiles.py ./buildscripts/aggregate_tracefiles.py
+--- /data/ports-build/mongodb-src-r4.0.21/buildscripts/aggregate_tracefiles.py	2020-10-16 18:02:58.000000000 +0200
++++ ./buildscripts/aggregate_tracefiles.py	2020-11-16 20:09:38.798317000 +0100
+@@ -20,7 +20,7 @@
+ 
+     args += ['-o', output]
+ 
+-    print ' '.join(args)
++    print(' '.join(args))
+ 
+     return subprocess.call(args)
+ 
+diff -ru /data/ports-build/mongodb-src-r4.0.21/buildscripts/aws_ec2.py ./buildscripts/aws_ec2.py
+--- /data/ports-build/mongodb-src-r4.0.21/buildscripts/aws_ec2.py	2020-10-16 18:02:58.000000000 +0200
++++ ./buildscripts/aws_ec2.py	2020-11-16 20:09:38.801388000 +0100
+@@ -1,8 +1,8 @@
+ #!/usr/bin/env python
+ """AWS EC2 instance launcher and controller."""
+ 
+-from __future__ import print_function
+ 
++
+ import base64
+ import collections
+ import datetime
+@@ -88,12 +88,13 @@
+             if reached_state:
+                 print(" Instance {}!".format(instance.state["Name"]), file=sys.stdout)
+             else:
+-                print(" Instance in state '{}', failed to reach state '{}'{}!".format(
+-                    instance.state["Name"], state, client_error), file=sys.stdout)
++                print(
++                    " Instance in state '{}', failed to reach state '{}'{}!".format(
++                        instance.state["Name"], state, client_error), file=sys.stdout)
+             sys.stdout.flush()
+         return 0 if reached_state else 1
+ 
+-    def control_instance(  #pylint: disable=too-many-arguments,too-many-branches
++    def control_instance(  #pylint: disable=too-many-arguments,too-many-branches,too-many-locals
+             self, mode, image_id, wait_time_secs=0, show_progress=False, console_output_file=None,
+             console_screenshot_file=None):
+         """Control an AMI instance. Returns 0 & status information, if successful."""
+@@ -296,14 +297,15 @@
+     status_options.add_option("--yamlFile", dest="yaml_file", default=None,
+                               help="Save the status into the specified YAML file.")
+ 
+-    status_options.add_option("--consoleOutputFile", dest="console_output_file", default=None,
+-                              help="Save the console output into the specified file, if"
+-                              " available.")
++    status_options.add_option(
++        "--consoleOutputFile", dest="console_output_file", default=None,
++        help="Save the console output into the specified file, if"
++        " available.")
+ 
+-    status_options.add_option("--consoleScreenshotFile", dest="console_screenshot_file",
+-                              default=None,
+-                              help="Save the console screenshot (JPG format) into the specified"
+-                              " file, if available.")
++    status_options.add_option(
++        "--consoleScreenshotFile", dest="console_screenshot_file", default=None,
++        help="Save the console screenshot (JPG format) into the specified"
++        " file, if available.")
+ 
+     parser.add_option_group(control_options)
+     parser.add_option_group(create_options)
+@@ -328,7 +330,6 @@
+                 parser.error("Block size must be an integer")
+             block_devices[device_name] = device_size
+ 
+-        # The 'expire-on' key is a UTC time.
+         expire_dt = datetime.datetime.utcnow() + datetime.timedelta(hours=options.tag_expire_hours)
+         tags = [{"Key": "expire-on", "Value": expire_dt.strftime("%Y-%m-%d %H:%M:%S")},
+                 {"Key": "Name",
+Only in ./buildscripts/idl/idl: __pycache__
+diff -ru /data/ports-build/mongodb-src-r4.0.21/buildscripts/idl/idl/compiler.py ./buildscripts/idl/idl/compiler.py
+--- /data/ports-build/mongodb-src-r4.0.21/buildscripts/idl/idl/compiler.py	2020-10-16 18:02:58.000000000 +0200
++++ ./buildscripts/idl/idl/compiler.py	2020-11-16 20:09:38.804248000 +0100
+@@ -31,8 +31,6 @@
+ Orchestrates the 3 passes (parser, binder, and generator) together.
+ """
+ 
+-from __future__ import absolute_import, print_function, unicode_literals
+-
+ import io
+ import logging
+ import os
+@@ -70,14 +68,14 @@
+     """Class for the IDL compiler to resolve imported files."""
+ 
+     def __init__(self, import_directories):
+-        # type: (List[unicode]) -> None
++        # type: (List[str]) -> None
+         """Construct a ImportResolver."""
+         self._import_directories = import_directories
+ 
+         super(CompilerImportResolver, self).__init__()
+ 
+     def resolve(self, base_file, imported_file_name):
+-        # type: (unicode, unicode) -> unicode
++        # type: (str, str) -> str
+         """Return the complete path to an imported file name."""
+ 
+         logging.debug("Resolving imported file '%s' for file '%s'", imported_file_name, base_file)
+@@ -108,7 +106,7 @@
+         raise errors.IDLError(msg)
+ 
+     def open(self, resolved_file_name):
+-        # type: (unicode) -> Any
++        # type: (str) -> Any
+         """Return an io.Stream for the requested file."""
+         return io.open(resolved_file_name, encoding='utf-8')
+ 
+@@ -125,7 +123,7 @@
+ 
+ 
+ def _update_import_includes(args, spec, header_file_name):
+-    # type: (CompilerArgs, syntax.IDLSpec, unicode) -> None
++    # type: (CompilerArgs, syntax.IDLSpec, str) -> None
+     """Update the list of imports with a list of include files for each import with structs."""
+     # This function is fragile:
+     # In order to try to generate headers with an "include what you use" set of headers, the IDL
+diff -ru /data/ports-build/mongodb-src-r4.0.21/buildscripts/idl/idl/syntax.py ./buildscripts/idl/idl/syntax.py
+--- /data/ports-build/mongodb-src-r4.0.21/buildscripts/idl/idl/syntax.py	2020-10-16 18:02:58.000000000 +0200
++++ ./buildscripts/idl/idl/syntax.py	2020-11-16 20:09:38.806128000 +0100
+@@ -33,8 +33,6 @@
+ it follows the rules of the IDL, etc.
+ """
+ 
+-from __future__ import absolute_import, print_function, unicode_literals
+-
+ import itertools
+ from typing import Any, Dict, Iterator, List, Optional, Tuple, Union
+ 
+@@ -70,7 +68,7 @@
+ 
+ 
+ def parse_array_type(name):
+-    # type: (unicode) -> unicode
++    # type: (str) -> str
+     """Parse a type name of the form 'array<type>' and extract type."""
+     if not name.startswith("array<") and not name.endswith(">"):
+         return None
+@@ -95,7 +93,7 @@
+     # type: (Dict[Any, List[Any]]) -> Iterator[Tuple[Any, Any]]
+     """Return an Iterator of (key, value) pairs from a dictionary."""
+     return itertools.chain.from_iterable(
+-        (_zip_scalar(value, key) for (key, value) in dic.viewitems()))
++        (_zip_scalar(value, key) for (key, value) in dic.items()))
+ 
+ 
+ class SymbolTable(object):
+@@ -115,7 +113,7 @@
+         self.types = []  # type: List[Type]
+ 
+     def _is_duplicate(self, ctxt, location, name, duplicate_class_name):
+-        # type: (errors.ParserContext, common.SourceLocation, unicode, unicode) -> bool
++        # type: (errors.ParserContext, common.SourceLocation, str, str) -> bool
+         """Return true if the given item already exist in the symbol table."""
+         for (item, entity_type) in _item_and_type({
+                 "command": self.commands,
+@@ -179,12 +177,12 @@
+             self.add_type(ctxt, idltype)
+ 
+     def resolve_field_type(self, ctxt, location, field_name, type_name):
+-        # type: (errors.ParserContext, common.SourceLocation, unicode, unicode) -> Optional[Union[Command, Enum, Struct, Type]]
++        # type: (errors.ParserContext, common.SourceLocation, str, str) -> Optional[Union[Command, Enum, Struct, Type]]
+         """Find the type or struct a field refers to or log an error."""
+         return self._resolve_field_type(ctxt, location, field_name, type_name)
+ 
+     def _resolve_field_type(self, ctxt, location, field_name, type_name):
+-        # type: (errors.ParserContext, common.SourceLocation, unicode, unicode) -> Optional[Union[Command, Enum, Struct, Type]]
++        # type: (errors.ParserContext, common.SourceLocation, str, str) -> Optional[Union[Command, Enum, Struct, Type]]
+         """Find the type or struct a field refers to or log an error."""
+         # pylint: disable=too-many-return-statements
+ 
+@@ -237,15 +235,15 @@
+     """IDL imports object."""
+ 
+     def __init__(self, file_name, line, column):
+-        # type: (unicode, int, int) -> None
++        # type: (str, int, int) -> None
+         """Construct an Imports section."""
+-        self.imports = []  # type: List[unicode]
++        self.imports = []  # type: List[str]
+ 
+         # These are not part of the IDL syntax but are produced by the parser.
+         # List of imports with structs.
+-        self.resolved_imports = []  # type: List[unicode]
++        self.resolved_imports = []  # type: List[str]
+         # All imports directly or indirectly included
+-        self.dependencies = []  # type: List[unicode]
++        self.dependencies = []  # type: List[str]
+ 
+         super(Import, self).__init__(file_name, line, column)
+ 
+@@ -262,16 +260,16 @@
+     # pylint: disable=too-many-instance-attributes
+ 
+     def __init__(self, file_name, line, column):
+-        # type: (unicode, int, int) -> None
++        # type: (str, int, int) -> None
+         """Construct a Type."""
+-        self.name = None  # type: unicode
+-        self.description = None  # type: unicode
+-        self.cpp_type = None  # type: unicode
+-        self.bson_serialization_type = None  # type: List[unicode]
+-        self.bindata_subtype = None  # type: unicode
+-        self.serializer = None  # type: unicode
+-        self.deserializer = None  # type: unicode
+-        self.default = None  # type: unicode
++        self.name = None  # type: str
++        self.description = None  # type: str
++        self.cpp_type = None  # type: str
++        self.bson_serialization_type = None  # type: List[str]
++        self.bindata_subtype = None  # type: str
++        self.serializer = None  # type: str
++        self.deserializer = None  # type: str
++        self.default = None  # type: str
+ 
+         super(Type, self).__init__(file_name, line, column)
+ 
+@@ -288,15 +286,15 @@
+     # pylint: disable=too-many-instance-attributes
+ 
+     def __init__(self, file_name, line, column):
+-        # type: (unicode, int, int) -> None
++        # type: (str, int, int) -> None
+         """Construct a Field."""
+-        self.name = None  # type: unicode
+-        self.cpp_name = None  # type: unicode
+-        self.description = None  # type: unicode
+-        self.type = None  # type: unicode
++        self.name = None  # type: str
++        self.cpp_name = None  # type: str
++        self.description = None  # type: str
++        self.type = None  # type: str
+         self.ignore = False  # type: bool
+         self.optional = False  # type: bool
+-        self.default = None  # type: unicode
++        self.default = None  # type: str
+         self.supports_doc_sequence = False  # type: bool
+         self.comparison_order = -1  # type: int
+         self.non_const_getter = False  # type: bool
+@@ -316,10 +314,10 @@
+     """
+ 
+     def __init__(self, file_name, line, column):
+-        # type: (unicode, int, int) -> None
++        # type: (str, int, int) -> None
+         """Construct a Type."""
+-        self.name = None  # type: unicode
+-        self.cpp_name = None  # type: unicode
++        self.name = None  # type: str
++        self.cpp_name = None  # type: str
+ 
+         super(ChainedStruct, self).__init__(file_name, line, column)
+ 
+@@ -332,10 +330,10 @@
+     """
+ 
+     def __init__(self, file_name, line, column):
+-        # type: (unicode, int, int) -> None
++        # type: (str, int, int) -> None
+         """Construct a Type."""
+-        self.name = None  # type: unicode
+-        self.cpp_name = None  # type: unicode
++        self.name = None  # type: str
++        self.cpp_name = None  # type: str
+ 
+         super(ChainedType, self).__init__(file_name, line, column)
+ 
+@@ -350,10 +348,10 @@
+     # pylint: disable=too-many-instance-attributes
+ 
+     def __init__(self, file_name, line, column):
+-        # type: (unicode, int, int) -> None
++        # type: (str, int, int) -> None
+         """Construct a Struct."""
+-        self.name = None  # type: unicode
+-        self.description = None  # type: unicode
++        self.name = None  # type: str
++        self.description = None  # type: str
+         self.strict = True  # type: bool
+         self.immutable = False  # type: bool
+         self.inline_chained_structs = True  # type: bool
+@@ -399,10 +397,10 @@
+     """
+ 
+     def __init__(self, file_name, line, column):
+-        # type: (unicode, int, int) -> None
++        # type: (str, int, int) -> None
+         """Construct an Enum."""
+-        self.name = None  # type: unicode
+-        self.value = None  # type: unicode
++        self.name = None  # type: str
++        self.value = None  # type: str
+ 
+         super(EnumValue, self).__init__(file_name, line, column)
+ 
+@@ -415,11 +413,11 @@
+     """
+ 
+     def __init__(self, file_name, line, column):
+-        # type: (unicode, int, int) -> None
++        # type: (str, int, int) -> None
+         """Construct an Enum."""
+-        self.name = None  # type: unicode
+-        self.description = None  # type: unicode
+-        self.type = None  # type: unicode
++        self.name = None  # type: str
++        self.description = None  # type: str
++        self.type = None  # type: str
+         self.values = None  # type: List[EnumValue]
+ 
+         # Internal property that is not represented as syntax. An imported enum is read from an
+diff -ru /data/ports-build/mongodb-src-r4.0.21/buildscripts/idl/idlc.py ./buildscripts/idl/idlc.py
+--- /data/ports-build/mongodb-src-r4.0.21/buildscripts/idl/idlc.py	2020-10-16 18:02:58.000000000 +0200
++++ ./buildscripts/idl/idlc.py	2020-11-16 20:09:38.807705000 +0100
+@@ -29,8 +29,6 @@
+ #
+ """IDL Compiler Driver Main Entry point."""
+ 
+-from __future__ import absolute_import, print_function
+-
+ import argparse
+ import logging
+ import sys
+diff -ru /data/ports-build/mongodb-src-r4.0.21/buildscripts/utils.py ./buildscripts/utils.py
+--- /data/ports-build/mongodb-src-r4.0.21/buildscripts/utils.py	2020-10-16 18:02:58.000000000 +0200
++++ ./buildscripts/utils.py	2020-11-16 20:09:38.809660000 +0100
+@@ -99,7 +99,7 @@
+     with open(os.devnull, "r+") as devnull:
+         proc = subprocess.Popen("git describe --abbrev=7", stdout=subprocess.PIPE, stderr=devnull,
+                                 stdin=devnull, shell=True)
+-        return proc.communicate()[0].strip()
++        return proc.communicate()[0].strip().decode('utf-8')
+ 
+ 
+ def execsys(args):
+Only in ./site_scons: __pycache__
+diff -ru /data/ports-build/mongodb-src-r4.0.21/site_scons/libdeps.py ./site_scons/libdeps.py
+--- /data/ports-build/mongodb-src-r4.0.21/site_scons/libdeps.py	2020-10-16 18:02:58.000000000 +0200
++++ ./site_scons/libdeps.py	2020-11-16 20:09:38.813403000 +0100
+@@ -61,7 +61,7 @@
+ missing_syslibdep = 'MISSING_LIBDEP_'
+ 
+ class dependency(object):
+-    Public, Private, Interface = range(3)
++    Public, Private, Interface = list(range(3))
+ 
+     def __init__(self, value, deptype):
+         self.target_node = value
+@@ -85,7 +85,7 @@
+ class DependencyCycleError(SCons.Errors.UserError):
+     """Exception representing a cycle discovered in library dependencies."""
+ 
+-    def __init__(self, first_node ):
++    def __init__(self, first_node):
+         super(DependencyCycleError, self).__init__()
+         self.cycle_nodes = [first_node]
+ 
+@@ -100,8 +100,8 @@
+         setattr(node.attributes, "libdeps_direct_sorted", direct_sorted)
+     return direct_sorted
+ 
+-def __get_libdeps(node):
+ 
++def __get_libdeps(node):
+     """Given a SCons Node, return its library dependencies, topologically sorted.
+ 
+     Computes the dependencies if they're not already cached.
+@@ -133,7 +133,7 @@
+                 marked.add(n.target_node)
+                 tsorted.append(n.target_node)
+ 
+-            except DependencyCycleError, e:
++            except DependencyCycleError as e:
+                 if len(e.cycle_nodes) == 1 or e.cycle_nodes[0] != e.cycle_nodes[-1]:
+                     e.cycle_nodes.insert(0, n.target_node)
+                 raise
+@@ -150,6 +150,7 @@
+ 
+     return tsorted
+ 
++
+ def __get_syslibdeps(node):
+     """ Given a SCons Node, return its system library dependencies.
+ 
+@@ -161,11 +162,11 @@
+         for lib in __get_libdeps(node):
+             for syslib in node.get_env().Flatten(lib.get_env().get(syslibdeps_env_var, [])):
+                 if syslib:
+-                    if type(syslib) in (str, unicode) and syslib.startswith(missing_syslibdep):
+-                        print("Target '%s' depends on the availability of a "
++                    if type(syslib) is str and syslib.startswith(missing_syslibdep):
++                        print(("Target '%s' depends on the availability of a "
+                               "system provided library for '%s', "
+                               "but no suitable library was found during configuration." %
+-                              (str(node), syslib[len(missing_syslibdep):]))
++                              (str(node), syslib[len(missing_syslibdep):])))
+                         node.get_env().Exit(1)
+                     syslibdeps.append(syslib)
+         setattr(node.attributes, cached_var_name, syslibdeps)
+@@ -181,18 +182,21 @@
+ 
+     if old_scanner:
+         path_function = old_scanner.path_function
++
+         def new_scanner(node, env, path=()):
+             result = old_scanner.function(node, env, path)
+             result.extend(__get_libdeps(node))
+             return result
+     else:
+         path_function = None
++
+         def new_scanner(node, env, path=()):
+             return __get_libdeps(node)
+ 
+     builder.target_scanner = SCons.Scanner.Scanner(function=new_scanner,
+-                                                    path_function=path_function)
++                                                   path_function=path_function)
+ 
++
+ def get_libdeps(source, target, env, for_signature):
+     """Implementation of the special _LIBDEPS environment variable.
+ 
+@@ -202,6 +206,7 @@
+     target = env.Flatten([target])
+     return __get_libdeps(target[0])
+ 
++
+ def get_libdeps_objs(source, target, env, for_signature):
+     objs = []
+     for lib in get_libdeps(source, target, env, for_signature):
+@@ -209,6 +214,7 @@
+         objs.extend(lib.sources)
+     return objs
+ 
++
+ def get_syslibdeps(source, target, env, for_signature):
+     deps = __get_syslibdeps(target[0])
+     lib_link_prefix = env.subst('$LIBLINKPREFIX')
+@@ -220,7 +226,7 @@
+         # they're believed to represent library short names, that should be prefixed with -l
+         # or the compiler-specific equivalent.  I.e., 'm' becomes '-lm', but 'File("m.a") is passed
+         # through whole cloth.
+-        if type(d) in (str, unicode):
++        if type(d) is str:
+             result.append('%s%s%s' % (lib_link_prefix, d, lib_link_suffix))
+         else:
+             result.append(d)
+@@ -382,6 +388,7 @@
+         except KeyError:
+             pass
+ 
++
+ def setup_conftests(conf):
+     def FindSysLibDep(context, name, libs, **kwargs):
+         var = "LIBDEPS_" + name.upper() + "_SYSLIBDEP"
+@@ -394,4 +401,5 @@
+                 return context.Result(result)
+         context.env[var] = __missing_syslib(name)
+         return context.Result(result)
++
+     conf.AddTest('FindSysLibDep', FindSysLibDep)
+diff -ru /data/ports-build/mongodb-src-r4.0.21/site_scons/mongo/__init__.py ./site_scons/mongo/__init__.py
+--- /data/ports-build/mongodb-src-r4.0.21/site_scons/mongo/__init__.py	2020-10-16 18:02:58.000000000 +0200
++++ ./site_scons/mongo/__init__.py	2020-11-16 20:09:38.815614000 +0100
+@@ -5,4 +5,4 @@
+ def print_build_failures():
+     from SCons.Script import GetBuildFailures
+     for bf in GetBuildFailures():
+-        print "%s failed: %s" % (bf.node, bf.errstr)
++        print("%s failed: %s" % (bf.node, bf.errstr))
+Only in ./site_scons/mongo: __pycache__
+diff -ru /data/ports-build/mongodb-src-r4.0.21/site_scons/mongo/generators.py ./site_scons/mongo/generators.py
+--- /data/ports-build/mongodb-src-r4.0.21/site_scons/mongo/generators.py	2020-10-16 18:02:58.000000000 +0200
++++ ./site_scons/mongo/generators.py	2020-11-16 20:09:38.817602000 +0100
+@@ -1,6 +1,6 @@
+ # -*- mode: python; -*-
+ 
+-import md5
++import hashlib
+ 
+ # Default and alternative generator definitions go here.
+ 
+@@ -15,22 +15,69 @@
+ # want to define them.
+ def default_buildinfo_environment_data():
+     return (
+-        ('distmod', '$MONGO_DISTMOD', True, True,),
+-        ('distarch', '$MONGO_DISTARCH', True, True,),
+-        ('cc', '$CC_VERSION', True, False,),
+-        ('ccflags', '$CCFLAGS', True, False,),
+-        ('cxx', '$CXX_VERSION', True, False,),
+-        ('cxxflags', '$CXXFLAGS', True, False,),
+-        ('linkflags', '$LINKFLAGS', True, False,),
+-        ('target_arch', '$TARGET_ARCH', True, True,),
+-        ('target_os', '$TARGET_OS', True, False,),
++        (
++            'distmod',
++            '$MONGO_DISTMOD',
++            True,
++            True,
++        ),
++        (
++            'distarch',
++            '$MONGO_DISTARCH',
++            True,
++            True,
++        ),
++        (
++            'cc',
++            '$CC_VERSION',
++            True,
++            False,
++        ),
++        (
++            'ccflags',
++            '$CCFLAGS',
++            True,
++            False,
++        ),
++        (
++            'cxx',
++            '$CXX_VERSION',
++            True,
++            False,
++        ),
++        (
++            'cxxflags',
++            '$CXXFLAGS',
++            True,
++            False,
++        ),
++        (
++            'linkflags',
++            '$LINKFLAGS',
++            True,
++            False,
++        ),
++        (
++            'target_arch',
++            '$TARGET_ARCH',
++            True,
++            True,
++        ),
++        (
++            'target_os',
++            '$TARGET_OS',
++            True,
++            False,
++        ),
+     )
+ 
++
+ # If you want buildInfo and --version to be relatively empty, set
+ # MONGO_BUILDINFO_ENVIRONMENT_DATA = empty_buildinfo_environment_data()
+ def empty_buildinfo_environment_data():
+     return ()
+ 
++
+ def default_variant_dir_generator(target, source, env, for_signature):
+ 
+     if env.GetOption('cache') != None:
+@@ -44,11 +91,11 @@
+ 
+     # Hash the named options and their values, and take the first 8 characters of the hash as
+     # the variant name
+-    hasher = md5.md5()
++    hasher = hashlib.md5()
+     for option in variant_options:
+-        hasher.update(option)
+-        hasher.update(str(env.GetOption(option)))
+-    variant_dir = hasher.hexdigest()[0:8]
++        hasher.update(option.encode('utf-8'))
++        hasher.update(str(env.GetOption(option)).encode('utf-8'))
++    variant_dir = str(hasher.hexdigest()[0:8])
+ 
+     # If our option hash yields a well known hash, replace it with its name.
+     known_variant_hashes = {
+Only in ./site_scons/site_tools: __pycache__
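[Note: the generators.py hunk above replaces the removed md5 module with hashlib and encodes each value before hashing; on Python 3, hashlib digest objects only accept bytes, which is why the patch adds the .encode('utf-8') calls. A minimal standalone sketch of the same pattern follows; the option names are hypothetical and not taken from the build scripts:

    import hashlib

    # Hypothetical SCons-style option names and values, for illustration only.
    options = ["dbg", "opt", "cache"]
    values = {"dbg": "off", "opt": "on", "cache": None}

    hasher = hashlib.md5()
    for option in options:
        # update() requires bytes on Python 3, hence the explicit encode();
        # str() also normalizes non-string values before encoding.
        hasher.update(option.encode("utf-8"))
        hasher.update(str(values[option]).encode("utf-8"))

    # First 8 hex digits of the digest name the build variant directory.
    variant_dir = hasher.hexdigest()[0:8]
    print(variant_dir)
]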
+diff -ru /data/ports-build/mongodb-src-r4.0.21/site_scons/site_tools/distsrc.py ./site_scons/site_tools/distsrc.py
+--- /data/ports-build/mongodb-src-r4.0.21/site_scons/site_tools/distsrc.py	2020-10-16 18:02:58.000000000 +0200
++++ ./site_scons/site_tools/distsrc.py	2020-11-16 20:09:38.819994000 +0100
+@@ -20,7 +20,7 @@
+ import tarfile
+ import time
+ import zipfile
+-import StringIO
++import io
+ 
+ from distutils.spawn import find_executable
+ 
+@@ -28,7 +28,7 @@
+ 
+ class DistSrcFile:
+     def __init__(self, **kwargs):
+-        [ setattr(self, key, val) for (key, val) in kwargs.items() ]
++        [ setattr(self, key, val) for (key, val) in list(kwargs.items()) ]
+ 
+     def __str__(self):
+         return self.name
+@@ -60,6 +60,7 @@
+     def close(self):
+         self.archive_file.close()
+ 
++
+ class DistSrcTarArchive(DistSrcArchive):
+     def __iter__(self):
+         file_list = self.archive_file.getnames()
+@@ -82,7 +83,7 @@
+ 
+     def append_file_contents(self, filename, file_contents,
+             mtime=time.time(),
+-            mode=0644,
++            mode=0o644,
+             uname="root",
+             gname="root"):
+         file_metadata = tarfile.TarInfo(name=filename)
+@@ -91,7 +92,7 @@
+         file_metadata.uname = uname
+         file_metadata.gname = gname
+         file_metadata.size = len(file_contents)
+-        file_buf = StringIO.StringIO(file_contents)
++        file_buf = io.BytesIO(file_contents.encode('utf-8'))
+         if self.archive_mode == 'r':
+             self.archive_file.close()
+             self.archive_file = tarfile.open(
+@@ -105,6 +106,7 @@
+     def append_file(self, filename, localfile):
+         self.archive_file.add(localfile, arcname=filename)
+ 
++
+ class DistSrcZipArchive(DistSrcArchive):
+     def __iter__(self):
+         file_list = self.archive_file.namelist()
+@@ -119,7 +121,7 @@
+             name=key,
+             size=item_data.file_size,
+             mtime=time.mktime(fixed_time),
+-            mode=0775 if is_dir else 0664,
++            mode=0o775 if is_dir else 0o664,
+             type=tarfile.DIRTYPE if is_dir else tarfile.REGTYPE,
+             uid=0,
+             gid=0,
+@@ -129,7 +131,7 @@
+ 
+     def append_file_contents(self, filename, file_contents,
+             mtime=time.time(),
+-            mode=0644,
++            mode=0o644,
+             uname="root",
+             gname="root"):
+         self.archive_file.writestr(filename, file_contents)
+@@ -139,7 +141,7 @@
+ 
+ def build_error_action(msg):
+     def error_stub(target=None, source=None, env=None):
+-        print msg
++        print(msg)
+         env.Exit(1)
+     return [ error_stub ]
+ 
+@@ -162,7 +164,7 @@
+ 
+     target_ext = str(target[0])[-3:]
+     if not target_ext in [ 'zip', 'tar' ]:
+-        print "Invalid file format for distsrc. Must be tar or zip file"
++        print("Invalid file format for distsrc. Must be tar or zip file")
+         env.Exit(1)
+ 
+     git_cmd = "\"%s\" archive --format %s --output %s --prefix ${MONGO_DIST_SRC_PREFIX} HEAD" % (
+@@ -173,14 +175,14 @@
+         SCons.Action.Action(run_distsrc_callbacks, "Running distsrc callbacks for $TARGET")
+     ]
+ 
++
+ def add_callback(env, fn):
+     __distsrc_callbacks.append(fn)
+ 
++
+ def generate(env, **kwargs):
+     env.AddMethod(add_callback, 'AddDistSrcCallback')
+-    env['BUILDERS']['__DISTSRC'] = SCons.Builder.Builder(
+-        generator=distsrc_action_generator,
+-    )
++    env['BUILDERS']['__DISTSRC'] = SCons.Builder.Builder(generator=distsrc_action_generator, )
+ 
+     def DistSrc(env, target):
+         result = env.__DISTSRC(target=target, source=[])
+@@ -189,6 +191,7 @@
+         return result
+ 
+     env.AddMethod(DistSrc, 'DistSrc')
++
+ 
+ def exists(env):
+     return True
+diff -ru /data/ports-build/mongodb-src-r4.0.21/site_scons/site_tools/idl_tool.py ./site_scons/site_tools/idl_tool.py
+--- /data/ports-build/mongodb-src-r4.0.21/site_scons/site_tools/idl_tool.py	2020-10-16 18:02:58.000000000 +0200
++++ ./site_scons/site_tools/idl_tool.py	2020-11-16 20:09:38.821062000 +0100
+@@ -21,6 +21,7 @@
+ 
+ import SCons
+ 
++
+ def idlc_emitter(target, source, env):
+     """For each input IDL file, the tool produces a .cpp and .h file."""
+     first_source = str(source[0])
+@@ -43,7 +44,7 @@
+ def idl_scanner(node, env, path):
+     # Use the import scanner mode of the IDL compiler to file imported files
+     cmd = [sys.executable, "buildscripts/idl/idlc.py",  '--include','src', str(node), '--write-dependencies']
+-    deps_str = subprocess.check_output(cmd)
++    deps_str = subprocess.check_output(cmd).decode('utf-8')
+ 
+     deps_list = deps_str.splitlines()
+ 
+@@ -57,19 +58,14 @@
+ idl_scanner = SCons.Scanner.Scanner(function=idl_scanner, skeys=['.idl'])
+ 
+ # TODO: create a scanner for imports when imports are implemented
+-IDLCBuilder = SCons.Builder.Builder(
+-    action=IDLCAction,
+-    emitter=idlc_emitter,
+-    srcsuffx=".idl",
+-    suffix=".cpp",
+-    source_scanner = idl_scanner
+-    )
++IDLCBuilder = SCons.Builder.Builder(action=IDLCAction, emitter=idlc_emitter, srcsuffx=".idl",
++                                    suffix=".cpp", source_scanner=idl_scanner)
+ 
+ 
+ def generate(env):
+     bld = IDLCBuilder
+ 
+-    env.Append(SCANNERS = idl_scanner)
++    env.Append(SCANNERS=idl_scanner)
+ 
+     env['BUILDERS']['Idlc'] = bld
+ 
+diff -ru /data/ports-build/mongodb-src-r4.0.21/site_scons/site_tools/jstoh.py ./site_scons/site_tools/jstoh.py
+--- /data/ports-build/mongodb-src-r4.0.21/site_scons/site_tools/jstoh.py	2020-10-16 18:02:58.000000000 +0200
++++ ./site_scons/site_tools/jstoh.py	2020-11-16 20:30:59.809428000 +0100
+@@ -39,7 +39,7 @@
+ 
+     text = '\n'.join(h)
+ 
+-    with open(outFile, 'wb') as out:
++    with open(outFile, 'w') as out:
+         try:
+             out.write(text)
+         finally:
+@@ -48,7 +48,7 @@
+ 

*** DIFF OUTPUT TRUNCATED AT 1000 LINES ***
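[Note: although the archive truncates the diff here, the hunks shown above apply a small set of recurring Python 2 to 3 conversions. The condensed sketch below is purely illustrative of those idioms and is not taken verbatim from the patch:

    import hashlib
    import io
    import subprocess
    import sys

    # print statement -> print() function
    print("scons version: 3.0.4")                  # was: print "scons version: ..."

    # basestring/unicode annotations -> str
    val = "x"
    if not isinstance(val, str):                   # was: isinstance(val, basestring)
        val = str(val)

    # dict views are lazy in Python 3; wrap in list() where a list is required
    choices = list({"win7": 1, "win8": 2}.keys())  # was: d.keys()

    # "except Exc, e" -> "except Exc as e"
    try:
        raise ValueError("boom")
    except ValueError as e:                        # was: except ValueError, e
        pass

    # octal literals need the 0o prefix
    mode = 0o644                                   # was: 0644

    # md5/StringIO modules -> hashlib/io; bytes vs. str is now explicit
    digest = hashlib.md5(b"data").hexdigest()
    buf = io.BytesIO("text".encode("utf-8"))       # was: StringIO.StringIO(...)
    out = subprocess.check_output([sys.executable, "--version"]).decode("utf-8")
]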

