diff --git a/.github/workflows/ubuntu.yml b/.github/workflows/ubuntu.yml
index f77d783cd8..0771c18a89 100644
--- a/.github/workflows/ubuntu.yml
+++ b/.github/workflows/ubuntu.yml
@@ -1,4 +1,4 @@
-name: Build and Test
+name: Build and test
 
 on:
   push:
@@ -11,27 +11,16 @@ on:
 jobs:
   build:
-    name: ${{ matrix.name }}
+    name: Build - ${{ matrix.distribution }}
     runs-on: ubuntu-latest
 
     strategy:
       fail-fast: false
       matrix:
-        name: [ "Build Bionic", "Build Focal", "Build Groovy" ]
-        include:
-
-          - name: "Build Bionic"
-            release: bionic
-
-          - name: "Build Focal"
-            release: focal
-
-          - name: "Build Groovy"
-            release: groovy
-
-    steps:
+        distribution: [bionic, focal]
 
-      - name: Check Out Repo
+    steps:
+      - name: Checkout repository
         uses: actions/checkout@v2
 
       - name: Login to Docker Hub
@@ -39,224 +28,250 @@ jobs:
         with:
           username: ${{ secrets.DOCKER_USERNAME }}
           password: ${{ secrets.DOCKER_TOKEN }}
-
+
       - name: Build and push
         id: docker_build
         uses: docker/build-push-action@v2
         with:
           context: .
-          file: docker/actions/Dockerfile.actions.${{ matrix.release }}
+          file: docker/actions/Dockerfile.actions.${{ matrix.distribution }}
           push: true
-          tags: fluidity/actions:${{ matrix.release }}-${{ github.sha }}
+          tags: fluidity/actions:${{ matrix.distribution }}-${{ github.sha }}
 
-  testing:
-
-    name: ${{ matrix.name }}
+  unit-test:
+    name: Unit Tests - ${{ matrix.distribution }}
     runs-on: ubuntu-latest
     needs: build
-    if: always()
 
     strategy:
       fail-fast: false
       matrix:
-        name: [ "Unit Bionic", "Short Bionic", "Medium Bionic", "Unit Focal", "Short Focal", "Medium Focal", "Unit Groovy", "Short Groovy", "Medium Groovy" ]
-        include:
-
-          - name: "Unit Bionic"
-            release: bionic
-            command: "make unittest"
-            output: "test_results_unittests.xml"
-
-          - name: "Short Bionic"
-            release: bionic
-            command: "make THREADS=2 test"
-            output: "test_results.xml"
+        distribution: [bionic, focal]
+        output: [unit-tests.xml]
 
-          - name: "Medium Bionic"
-            release: bionic
-            command: "make THREADS=2 mediumtest"
-            output: "test_results_medium.xml"
+    steps:
+      - name: Run tests
+        run: |
+          chmod 777 .
+          docker pull fluidity/actions:${{ matrix.distribution }}-${{ github.sha }}
+          docker run -v $PWD:/host fluidity/actions:${{ matrix.distribution }}-${{ github.sha }} \
+            /bin/bash -c "make unittest &&
+              cp -v tests/${{ matrix.output }} /host/${{ matrix.distribution }}-${{ matrix.output }}"
 
-          - name: "Unit Focal"
-            release: focal
-            command: "make unittest"
-            output: "test_results_unittests.xml"
-
-          - name: "Short Focal"
-            release: focal
-            command: "make THREADS=2 test"
-            output: "test_results.xml"
+      - name: Upload test report
+        uses: actions/upload-artifact@v2
+        with:
+          path: ${{ matrix.distribution }}-${{ matrix.output }}
+          name: xml_outputs
 
-          - name: "Medium Focal"
-            release: focal
-            command: "make THREADS=2 mediumtest"
-            output: "test_results_medium.xml"
+      - name: Publish test report
+        uses: mikepenz/action-junit-report@v2
+        if: always()
+        with:
+          report_paths: ${{ matrix.distribution }}-${{ matrix.output }}
+          token: ${{ secrets.GITHUB_TOKEN }}
+          check_name: JUnit Test Report - Unit Tests - ${{ matrix.distribution }}
+          fail_on_failure: true
 
-          - name: "Unit Groovy"
-            release: groovy
-            command: "make unittest"
-            output: "test_results_unittests.xml"
-
-          - name: "Short Groovy"
-            release: groovy
-            command: "make THREADS=2 test"
-            output: "test_results.xml"
+  short-test:
+    name: Short Tests - ${{ matrix.distribution }}
+    runs-on: ubuntu-latest
+    needs: build
 
-          - name: "Medium Groovy"
-            release: groovy
-            command: "make THREADS=2 mediumtest"
-            output: "test_results_medium.xml"
+    strategy:
+      fail-fast: false
+      matrix:
+        distribution: [bionic, focal]
+        output: [short-tests.xml]
 
     steps:
-
-      - name: ${{ matrix.name }} Testing
+      - name: Run tests
         run: |
-          chmod 777 .
-          docker pull fluidity/actions:${{ matrix.release }}-${{ github.sha }}
-          docker run -v $PWD:/host fluidity/actions:${{ matrix.release }}-${{ github.sha }} /bin/bash -c "${{ matrix.command }} && cp -v tests/${{ matrix.output }} /host/${{ matrix.release }}-${{ matrix.output}}"
+          chmod 777 .
+          docker pull fluidity/actions:${{ matrix.distribution }}-${{ github.sha }}
+          docker run -v $PWD:/host fluidity/actions:${{ matrix.distribution }}-${{ github.sha }} \
+            /bin/bash -c "make THREADS=2 test &&
+              cp -v tests/${{ matrix.output }} /host/${{ matrix.distribution }}-${{ matrix.output }}"
 
-      - uses: actions/upload-artifact@v2
+      - name: Upload test report
+        uses: actions/upload-artifact@v2
         with:
-          path: ${{ matrix.release }}-${{ matrix.output }}
-          name: tests_xml_outputs
+          path: ${{ matrix.distribution }}-${{ matrix.output }}
+          name: xml_outputs
 
-      - name: ${{ matrix.name }} JUnit
+      - name: Publish test report
         uses: mikepenz/action-junit-report@v2
+        if: always()
         with:
-          report_paths: ${{ matrix.release }}-${{ matrix.output }}
-          github_token: ${{ secrets.GITHUB_TOKEN }}
-          check_name: Test report ${{ matrix.name }}
+          report_paths: ${{ matrix.distribution }}-${{ matrix.output }}
+          token: ${{ secrets.GITHUB_TOKEN }}
+          check_name: JUnit Test Report - Short Tests - ${{ matrix.distribution }}
           fail_on_failure: true
 
-  longtesting:
-
-    name: longtest-${{ matrix.name }}
+  medium-test:
+    name: Medium Tests - ${{ matrix.distribution }}
     runs-on: ubuntu-latest
     needs: build
-    if: always()
 
    strategy:
       fail-fast: false
       matrix:
-        name: [
-          "mphase_tephra_settling_2d_adaptive",
-          "gyre_parallel",
-          "Stokes_square_convection_1e4_vv_p1p1",
-          "Stokes_square_convection_1e4_p1p1_Ra_Test",
-          "Stokes_square_convection_1e4_vv_gauss_p2p1",
-          "viscous_fs_drunkensailor",
-          "cylinder-3d-drag",
-          "viscosity_2d_p0_adaptive_parallel",
-          "circle-2d-drag",
-          "mphase_tephra_settling_3d",
-          "tidal_diagnostics",
-          "mms_burgers_dg_steady",
-          "mms_ns_p1bp1_steady",
-          "mms_ns_dg_steady_parallel",
-          "mms_burgers_cg_steady",
-          "mms_burgers_cg_structured_steady",
-          "mms_ns_cg_steady_full",
-          "foam_2d_p1dgp2_convergence",
-          "circular_duct_from_rest",
-          "mms_tracer_P1dg_cdg_diff_steady",
-          "mms_tracer_cg_supg_advdiff_steady",
-          "para_eddy",
-          "mms_ns_p1p1stabilised_steady",
-          "lock_exchange_3d",
-          "mms_tracer_p0_adv_steady_conservative",
-          "mms_tracer_cg_advdiff_steady",
-          "gls-MixedLayer",
-          "sphere-3D-drag-Re100",
-          "mms_ns_cg_steady",
-          "mms_tracer_P1dg_br_diff_steady",
-          "mphase_mms_p1dgp2_br",
-          "mphase_mms_p2p1_compressible_ie_heat_transfer",
-          "mphase_mms_p2p1_compressible_ie",
-          "mms_ns_p0p1_steady_periodic",
-          "mms_tracer_cv_advdiff_eg_steady",
-          "mms_ns_p1p1stabilised_supg_steady",
-          "sphere-3D-drag-Re1",
-          "mms_tracer_cv_diff_eg_steady",
-          "mms_tracer_cv_diff_steady_structured",
-          "mms_ns_p1lp1cv_steady_compressible",
-          "mphase_mms_p1dgp2_cdg",
-          "mms_tracer_cv_diff_steady",
-          "mms_tracer_p0_adv_steady",
-          "mms_ns_p0p1cv_steady_periodic",
-          "mphase_mms_p2p1_no_interactions",
-          "mms_ns_dg_steady",
-          "mphase_mms_p1dgp2_fpdrag",
-          "mms_tracer_cv_advdiff_steady",
-          "sphere-3D-drag-Re10",
-          "wetting_and_drying_thacker_dg_parallel",
-          "flow_past_sphere_Re1",
-          "flow_past_sphere_Re10",
-          "flow_past_sphere_Re100",
-          "particle_rayleigh_taylor_mu10",
-          "rotating_channel",
-          "tephra_settling",
-          "top_hat",
-          "water_collapse",
-          "stagnant_conical_island",
-          "lituya_bay_pseudo2dadapt",
-          "pseudo2dadapt_parallel_galerkinproj",
-          "mms_rans_p2p1_keps_linearmomentum_cv",
-          "mms_rans_p2p1_keps_lowRe",
-          "mphase_mms_p2p1_compressible_ie_p1cv_heat_transfer",
-          "mms_ns_p2lp1_steady_compressible",
-          "mphase_mms_p2p1_vfrac",
-          "lock_exchange_3d_dg",
-          "mms_rans_p1dgp2_upw_keps",
-          "mphase_mms_p2p1_compressible_ie_p1cv",
-          "mms_rans_p1dgp2_keps",
-          "open_ocean_deep_convection-parallel",
-          "mms_rans_p2p1_keps_linearmomentum",
-          "Stommel_tracer_advection",
-          "particle_stratified_stable_layer",
-          "spherical_benchmark_free_slip",
-          "spherical_benchmark_free_slip_p2bp1dg",
-          "spherical_benchmark_no_slip",
-          "spherical_benchmark_no_slip_p2bp1dg",
-          "spherical_benchmark_smooth_free_slip",
-          "spherical_benchmark_smooth_no_slip",
-          "backward_facing_step_2d",
-          "explicit-hyperc-shear",
-          "explicit-hyperc-shear-adapt",
-          "explicit-hyperc-superman",
-          "coarse-corner",
-          "gls-StationPapa",
-          "medp1dgp2",
-          "Stokes_mms_p1dg_p2",
-          "Stokes_subduction_zone_vanKeken2008_OneA",
-          "Stokes_subduction_zone_vanKeken2008_TwoB",
-          "lagrangian_detectors_3d_2e5",
-          "lock_exchange_2d_Lagrangian_paths",
-          "square-convection-1e6",
-          "wetting_and_drying_balzano1_dg_parallel"
-          ## Tests still failing and in need of fixing
-          #"lock_exchange",
-          #"lock_exchange_3d_parallel",
-          #"saltfinger2d_adaptive",
-          #"driven_cavity",
-        ]
+        distribution: [bionic, focal]
+        output: [medium-tests.xml]
 
     steps:
+      - name: Run tests
+        run: |
+          chmod 777 .
+          docker pull fluidity/actions:${{ matrix.distribution }}-${{ github.sha }}
+          docker run -v $PWD:/host fluidity/actions:${{ matrix.distribution }}-${{ github.sha }} \
+            /bin/bash -c "make THREADS=2 mediumtest &&
+              cp -v tests/${{ matrix.output }} /host/${{ matrix.distribution }}-${{ matrix.output }}"
+
+      - name: Upload test report
+        uses: actions/upload-artifact@v2
+        with:
+          path: ${{ matrix.distribution }}-${{ matrix.output }}
+          name: xml_outputs
+
+      - name: Publish test report
+        uses: mikepenz/action-junit-report@v2
+        if: always()
+        with:
+          report_paths: ${{ matrix.distribution }}-${{ matrix.output }}
+          token: ${{ secrets.GITHUB_TOKEN }}
+          check_name: JUnit Test Report - Medium Tests - ${{ matrix.distribution }}
+          fail_on_failure: true
 
-      - name: ${{ matrix.name }} Longtesting
+  long-test:
+    name: Long Tests - ${{ matrix.test }}
+    runs-on: ubuntu-latest
+    needs: build
+
+    strategy:
+      fail-fast: false
+      matrix:
+        test: [mphase_tephra_settling_2d_adaptive,
+               gyre_parallel,
+               Stokes_square_convection_1e4_vv_p1p1,
+               Stokes_square_convection_1e4_p1p1_Ra_Test,
+               Stokes_square_convection_1e4_vv_gauss_p2p1,
+               viscous_fs_drunkensailor,
+               cylinder-3d-drag,
+               viscosity_2d_p0_adaptive_parallel,
+               circle-2d-drag,
+               mphase_tephra_settling_3d,
+               tidal_diagnostics,
+               mms_burgers_dg_steady,
+               mms_ns_p1bp1_steady,
+               mms_ns_dg_steady_parallel,
+               mms_burgers_cg_steady,
+               mms_burgers_cg_structured_steady,
+               mms_ns_cg_steady_full,
+               foam_2d_p1dgp2_convergence,
+               circular_duct_from_rest,
+               mms_tracer_P1dg_cdg_diff_steady,
+               mms_tracer_cg_supg_advdiff_steady,
+               para_eddy,
+               mms_ns_p1p1stabilised_steady,
+               lock_exchange_3d,
+               mms_tracer_p0_adv_steady_conservative,
+               mms_tracer_cg_advdiff_steady,
+               gls-MixedLayer,
+               sphere-3D-drag-Re100,
+               mms_ns_cg_steady,
+               mms_tracer_P1dg_br_diff_steady,
+               mphase_mms_p1dgp2_br,
+               mphase_mms_p2p1_compressible_ie_heat_transfer,
+               mphase_mms_p2p1_compressible_ie,
+               mms_ns_p0p1_steady_periodic,
+               mms_tracer_cv_advdiff_eg_steady,
+               mms_ns_p1p1stabilised_supg_steady,
+               sphere-3D-drag-Re1,
+               mms_tracer_cv_diff_eg_steady,
+               mms_tracer_cv_diff_steady_structured,
+               mms_ns_p1lp1cv_steady_compressible,
+               mphase_mms_p1dgp2_cdg,
+               mms_tracer_cv_diff_steady,
+               mms_tracer_p0_adv_steady,
+               mms_ns_p0p1cv_steady_periodic,
+               mphase_mms_p2p1_no_interactions,
+               mms_ns_dg_steady,
+               mphase_mms_p1dgp2_fpdrag,
+               mms_tracer_cv_advdiff_steady,
+               sphere-3D-drag-Re10,
+               wetting_and_drying_thacker_dg_parallel,
+               flow_past_sphere_Re1,
+               flow_past_sphere_Re10,
+               flow_past_sphere_Re100,
+               particle_rayleigh_taylor_mu10,
+               rotating_channel,
+               tephra_settling,
+               top_hat,
+               water_collapse,
+               stagnant_conical_island,
+               lituya_bay_pseudo2dadapt,
+               pseudo2dadapt_parallel_galerkinproj,
+               mms_rans_p2p1_keps_linearmomentum_cv,
+               mms_rans_p2p1_keps_lowRe,
+               mphase_mms_p2p1_compressible_ie_p1cv_heat_transfer,
+               mms_ns_p2lp1_steady_compressible,
+               mphase_mms_p2p1_vfrac,
+               lock_exchange_3d_dg,
+               mms_rans_p1dgp2_upw_keps,
+               mphase_mms_p2p1_compressible_ie_p1cv,
+               mms_rans_p1dgp2_keps,
+               open_ocean_deep_convection-parallel,
+               mms_rans_p2p1_keps_linearmomentum,
+               Stommel_tracer_advection,
+               particle_stratified_stable_layer,
+               spherical_benchmark_free_slip,
+               spherical_benchmark_free_slip_p2bp1dg,
+               spherical_benchmark_no_slip,
+               spherical_benchmark_no_slip_p2bp1dg,
+               spherical_benchmark_smooth_free_slip,
+               spherical_benchmark_smooth_no_slip,
+               backward_facing_step_2d,
+               explicit-hyperc-shear,
+               explicit-hyperc-shear-adapt,
+               explicit-hyperc-superman,
+               coarse-corner,
+               gls-StationPapa,
+               medp1dgp2,
+               Stokes_mms_p1dg_p2,
+               Stokes_subduction_zone_vanKeken2008_OneA,
+               Stokes_subduction_zone_vanKeken2008_TwoB,
+               lagrangian_detectors_3d_2e5,
+               lock_exchange_2d_Lagrangian_paths,
+               square-convection-1e6,
+               wetting_and_drying_balzano1_dg_parallel]
+        # Tests still failing and in need of fixing
+        # lock_exchange
+        # lock_exchange_3d_parallel
+        # saltfinger2d_adaptive
+        # driven_cavity
+
+    steps:
+      - name: Run test
         run: |
           chmod 777 .
           docker pull fluidity/actions:focal-${{ github.sha }}
-          docker run -v $PWD:/host fluidity/actions:focal-${{ github.sha }} /bin/bash -c "git clone https://github.com/fluidityproject/longtests && bin/testharness -x test_results_${{ matrix.name }}.xml -f ${{ matrix.name }}.xml && cp -v test_results_${{ matrix.name }}.xml /host"
+          docker run -v $PWD:/host fluidity/actions:focal-${{ github.sha }} \
+            /bin/bash -c "git clone https://github.com/fluidityproject/longtests &&
+              bin/testharness -x focal-${{ matrix.test }}.xml -f ${{ matrix.test }}.xml &&
+              cp -v focal-${{ matrix.test }}.xml /host"
 
-      - uses: actions/upload-artifact@v2
+      - name: Upload test report
+        uses: actions/upload-artifact@v2
         with:
-          path: test_results_${{ matrix.name }}.xml
-          name: tests_xml_outputs
+          path: focal-${{ matrix.test }}.xml
+          name: xml_outputs
 
-      - name: ${{ matrix.name }} JUnit
+      - name: Publish test report
         uses: mikepenz/action-junit-report@v2
+        if: always()
         with:
-          report_paths: test_results_${{ matrix.name }}.xml
-          github_token: ${{ secrets.GITHUB_TOKEN }}
-          check_name: Longtest report ${{ matrix.name }}
+          report_paths: focal-${{ matrix.test }}.xml
+          token: ${{ secrets.GITHUB_TOKEN }}
+          check_name: JUnit Test Report - ${{ matrix.test }} - focal
           fail_on_failure: true
diff --git a/Makefile.in b/Makefile.in
index 65e67a8b05..1964c3b491 100755
--- a/Makefile.in
+++ b/Makefile.in
@@ -1,5 +1,5 @@
 # Copyright (C) 2006 Imperial College London and others.
-# 
+#
 # Please see the AUTHORS file in the main source directory for a full list
 # of copyright holders.
 #
@@ -9,7 +9,7 @@
 # Imperial College London
 #
 # amcgsoftware@imperial.ac.uk
-# 
+#
 # This library is free software; you can redistribute it and/or
 # modify it under the terms of the GNU Lesser General Public
 # License as published by the Free Software Foundation,
@@ -151,7 +151,7 @@ lib/libvtkfortran.a:
 ifeq (@SPUD_ONLY@,yes)
	echo ' MAKE libspud'; $(MAKE) -C libspud && $(MAKE) -C libspud install-libspud
 else
-	echo ' MAKE libspud'; $(MAKE) -C libspud && $(MAKE) -C libspud install-libspud && cd libspud/python && python3 setup.py build && cd ../.. && cp libspud/python/build/lib*/libspud*.so python/
+	echo ' MAKE libspud'; $(MAKE) -C libspud && $(MAKE) -C libspud install-libspud && cd libspud/python && @PYTHON@ setup.py build && cd ../.. && cp libspud/python/build/lib*/libspud*.so python/
 endif
 
 libfemtools: sub_system
@@ -190,19 +189,18 @@ static: fluidity_library
 
 shared: lib/shared/$(SLIB_FLUIDITY).1
 
-lib/shared/$(SLIB_FLUIDITY).1: fluidity_library 
+lib/shared/$(SLIB_FLUIDITY).1: fluidity_library
	@echo "BUILD shared libfluidity"
	@rm -rf tmp
	@mkdir -p tmp lib/shared
	@cp $(LIB_FLUIDITY) tmp
	@cd tmp; ar x lib$(FLUIDITY).a; rm lib$(FLUIDITY).a; cd ..
	@echo " LD lib$(FLUIDITY).so"
-	@$(EVAL) $(LINKER) -shared -Wl,-soname,$(SLIB_FLUIDITY).1 -o lib/shared/$(SLIB_FLUIDITY).1 tmp/* -L./lib -lvtkfortran 
+	@$(EVAL) $(LINKER) -shared -Wl,-soname,$(SLIB_FLUIDITY).1 -o lib/shared/$(SLIB_FLUIDITY).1 tmp/* -L./lib -lvtkfortran
	@rm -rf tmp
	@cd lib/shared; ln -sf $(SLIB_FLUIDITY).1 $(SLIB_FLUIDITY); cd ../..
 
-fltools: fluidity_library
-	@$(MAKE) fldecomp
+fltools: fldecomp
	@echo "BUILD fluidity tools"
	@cd tools; $(MAKE)
	@echo " MAKE climatology"
@@ -216,14 +215,14 @@ manual:
 
 python_build:
 ifeq (@HAVE_PYTHON@,yes)
	@echo " MAKE python"
-	@cd python; python3 setup.py build > build.log 2>&1
-	@cd python/fluidity; find ../build/lib* -name '*.so' -exec ln -sf {} . \;
+	@cd python; @PYTHON@ -m build
+	@PYTHON@ -m pip install --upgrade python/dist/fluidity*.whl
 endif
 
 python_clean:
	@echo " CLEAN python"
-	@cd python; rm -rf build
-	@cd python/fluidity; find . -type l -name '*.so' -exec rm -f {} \;
+	@cd python; rm -rf dist fluidity.egg-info
	PYTHONPATH=@PYTHON_SITE_PKG@ @PYTHON@ -m pip uninstall --yes fluidity
 
 .PHONY: scripts
@@ -297,9 +296,7 @@ ifeq (@HYPERLIGHT@,yes)
	@$(AR) $(ARFLAGS) lib/libfluidity.a hyperlight/*.o
 endif
 
-
-
-fldecomp: fluidity_library 
+fldecomp: fluidity_library
	@echo "BUILD fldecomp"
	@echo " MKDIR bin"
	@mkdir -p bin
@@ -381,9 +378,9 @@ endif
 
 clean-debian:
	@echo " CLEAN debian"
-	@cd debian; rm -rf files tmp fluidity python-fluidity *.substvars *.debhelper* 
+	@cd debian; rm -rf files tmp fluidity python-fluidity *.substvars *.debhelper*
 
-clean-test: 
+clean-test:
	@echo " CLEAN tests"
	@cd tests; PYTHONPATH=../python ../tools/testharness.py --clean >/dev/null
	@cd tests/data; $(MAKE) clean
@@ -404,7 +401,7 @@ clean-unittest:
	@echo " CLEAN ocean_forcing/tests"
	@cd ocean_forcing/tests; $(MAKE) clean
 
-clean-all-tests: clean-test 
+clean-all-tests: clean-test
	@echo " CLEAN parallel/special/long tests"
	@PYTHONPATH=python tools/testharness.py --parallelism=parallel --clean >/dev/null
	@PYTHONPATH=python tools/testharness.py --length=special --clean >/dev/null
@@ -445,8 +442,7 @@ distclean: clean
	  include/spud_enums.h include/tinystr.h include/tinyxml.h \
	  include/version.h include/vtk.h \
	  preprocessor/check_options.F90 \
-	  preprocessor/register_diagnostics.F90 python/setup.py > \
-	  /dev/null
+	  preprocessor/register_diagnostics.F90 > /dev/null
	@for i in `find ./*/ -name Makefile.in`; do rm -f `echo $$i | sed 's/.in$$//'`; done > /dev/null
	@find ./ \( -name make.log \) -exec rm -f {} \; > /dev/null
	@rm -f Makefile > /dev/null
@@ -454,10 +450,10 @@ distclean: clean
 test: serialtest
 
 serialtest: fltools bin/$(FLUIDITY)
-	@cd tests; ../bin/testharness -x test_results.xml -l short $(EXCLUDE_TAGS) -n $(THREADS)
+	@cd tests; ../bin/testharness -x short-tests.xml -l short $(EXCLUDE_TAGS) -n $(THREADS)
 
 mediumtest: fltools bin/$(FLUIDITY) manual spudtools
-	@cd tests; ../bin/testharness -x test_results_medium.xml -l medium $(EXCLUDE_TAGS) -n $(THREADS)
+	@cd tests; ../bin/testharness -x medium-tests.xml -l medium $(EXCLUDE_TAGS) -n $(THREADS)
 
 .PHONY: spudtools
 
@@ -465,7 +461,7 @@ spudtools:
	@cd libspud ; $(MAKE) install-spudtools
	@echo " INSTALL spudtools"
 
-setuputs: 
+setuputs:
	@echo "SETUP tests"
	@echo " RMDIR bin/tests"
	@rm -rf bin/tests
@@ -494,7 +490,7 @@ endif
 
 unittest: build_unittest
	@echo "RUN bin/tests"
-	@bin/unittestharness -d bin/tests -x tests/test_results_unittests.xml
+	@bin/unittestharness -d bin/tests -x tests/unit-tests.xml
 
 bin/spud-preprocess:
	@echo "Installing spudtools"
@@ -543,14 +539,14 @@ endif
	@cd diagnostics; ../bin/create_makefile --exclude \
	  "Diagnostic_Fields_Interfaces.F90 Diagnostic_Fields_New.F90" $(TESTOPTS)
	@cd diagnostics; $(MAKE) Diagnostic_Fields_Interfaces.o \
-	  Diagnostic_Fields_New.o 
+	  Diagnostic_Fields_New.o
	@echo " Generating main dependencies"
	@cd main; ../bin/create_makefile --exclude test_coupler.F90 $(TESTOPTS)
	@echo " Cleaning up the mess"
	$(MAKE) clean-light
	@echo " Congratulations, make makefiles succeeded!"
 
-install: default fltools
+install:
	@mkdir -p $(DESTDIR)$(bindir) $(DESTDIR)$(docdir)/fluidity
	find bin/ -maxdepth 1 -type f -exec cp '{}' $(DESTDIR)$(bindir) \;
	rm -f $(DESTDIR)$(bindir)/spud-* $(DESTDIR)$(bindir)/diamond $(DESTDIR)$(bindir)/silenteval
@@ -559,14 +555,13 @@ install: default fltools
	cp -R schemas $(DESTDIR)$(datadir)/fluidity/
	mkdir -p $(DESTDIR)$(datadir)/diamond/schemata
	$(SED) 's/$${datadir}/$(subst /,\/,$(datadir))/g' schemas/flml.in > $(DESTDIR)$(datadir)/diamond/schemata/flml
-	cd python ; python3 setup.py install --root=$(shell echo ${DESTDIR} | sed 's/^$$/\//') --prefix="$(prefix)" $$FLUIDITY_PYTHON_INSTALL_ARGS
	cp -R examples $(DESTDIR)$(docdir)/fluidity
	find $(DESTDIR)$(docdir)/fluidity/examples -type f -exec $(SED) -i "s/\.\.\/\.\.\/\.\.\/bin\///" '{}' \;
	find $(DESTDIR)$(docdir)/fluidity/examples -type f -exec $(SED) -i "s/\.\.\/\.\.\/bin\///" '{}' \;
	cd $(DESTDIR)$(docdir)/fluidity ; tar -czvf $(DESTDIR)$(docdir)/fluidity/examples.tar.gz examples/
	rm -rf $(DESTDIR)$(docdir)/fluidity/examples/
 
-install-diamond: 
+install-diamond:
	cd libspud; ./configure --prefix=@prefix@; cd ../..
	cd libspud; $(MAKE) clean; cd ../..
	cd libspud; $(MAKE) install-diamond; cd ../..
diff --git a/configure b/configure
index 4140f2f967..bd7ef6fb9b 100755
--- a/configure
+++ b/configure
@@ -1455,7 +1455,6 @@ Optional Features:
   --disable-FEATURE       do not include FEATURE (same as --enable-FEATURE=no)
   --enable-FEATURE[=ARG]  include FEATURE [ARG=yes]
   --enable-debugging      turns on debugging flags
-  --enable-python         embed python in fluidity for generic functions.
   --enable-cgal           turns on use of the CGAL library
   --enable-openmp         turns on OpenMP support
   --disable-openmp        do not use OpenMP
@@ -5981,469 +5980,229 @@ ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5'
 ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
 ac_compiler_gnu=$ac_cv_c_compiler_gnu
 
+# Python
 
-# Find python to link against.
-# Check whether --enable-python was given.
-if test "${enable_python+set}" = set; then :
-  enableval=$enable_python;
-fi
+# Form Python executable
+if test -z "$PYTHON_VERSION"; then
+  PYTHON_VERSION=3
+fi
+PYTHON=python$PYTHON_VERSION
 
-
-if test "x$enable_python" != "xno"
-then
-  if test -z "$PYTHON_VERSION"; then
-    # if PYTHON_VERSION is set we use python$PYTHON_VERSION as the python interpreter
-    # if not, if there is no `python` in the path, or there is a `python` but its version is <3
-    # then use `python3` (by setting PYTHON_VERSION=3).
-    # If there *is* a `python` of version 3, we use that
-    # Extract the first word of "python", so it can be a program name with args.
-set dummy python; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_have_bare_python+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  if test -n "$have_bare_python"; then
-  ac_cv_prog_have_bare_python="$have_bare_python" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
-  IFS=$as_save_IFS
-  test -z "$as_dir" && as_dir=.
-    for ac_exec_ext in '' $ac_executable_extensions; do
-  if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
-    ac_cv_prog_have_bare_python=""yes""
-    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-    break 2
-  fi
-done
-  done
-IFS=$as_save_IFS
-
-fi
-fi
-have_bare_python=$ac_cv_prog_have_bare_python
-if test -n "$have_bare_python"; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $have_bare_python" >&5
-$as_echo "$have_bare_python" >&6; }
+# Verify Python executable works
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $PYTHON" >&5
+$as_echo_n "checking for $PYTHON... " >&6; }
+ac_python=`$PYTHON -c "print(True)"`
+if test "$ac_python" = "True"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5
+    $as_echo "yes" >&6; }
 else
   { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
+    $as_echo "no" >&6; }
+  as_fn_error $? "Cannot find $PYTHON" "$LINENO" 5
 fi
-
-  if test "x$have_bare_python" = "xyes"; then
-    bare_python_is_two=`python -c "import sys; print(sys.version_info.major < 3)"`
-    if test "x$bare_python_is_two=xTrue"; then
-      PYTHON_VERSION=3
-    fi
-  else
-    PYTHON_VERSION=3
-  fi
-  fi
-  mycppflag=$CPPFLAGS
-
-  #
-  # Allow the use of a (user set) custom python version
-  #
-
-
-  # Extract the first word of "python[$PYTHON_VERSION]", so it can be a program name with args.
-set dummy python$PYTHON_VERSION; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_path_PYTHON+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  case $PYTHON in
-  [\\/]* | ?:[\\/]*)
-  ac_cv_path_PYTHON="$PYTHON" # Let the user override the test with a path.
-  ;;
-  *)
-  as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
-  IFS=$as_save_IFS
-  test -z "$as_dir" && as_dir=.
-    for ac_exec_ext in '' $ac_executable_extensions; do
-  if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
-    ac_cv_path_PYTHON="$as_dir/$ac_word$ac_exec_ext"
-    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-    break 2
-  fi
-done
-  done
-IFS=$as_save_IFS
-
-  ;;
-esac
-fi
-PYTHON=$ac_cv_path_PYTHON
-if test -n "$PYTHON"; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $PYTHON" >&5
-$as_echo "$PYTHON" >&6; }
+# Check Python version is at least 3.6
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for a version of Python >= '3.6'" >&5
+  $as_echo_n "checking for a version of Python >= '3.6'... " >&6; }
+ac_supports_python_ver=`$PYTHON -c "from pkg_resources import parse_version; \
+from sysconfig import get_python_version; \
+print(parse_version(get_python_version()) >= parse_version('3.6'))"`
+if test "$ac_supports_python_ver" = "True"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5
+    $as_echo "yes" >&6; }
 else
   { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
+    $as_echo "no" >&6; }
+  as_fn_error $? "Fluidity requires Python >= '3.6'. If you have it installed,
+but it isn't the default Python interpreter in your system path, please pass
+the PYTHON_VERSION variable to configure. See \`\`configure --help'' for
+reference. " "$LINENO" 5
 fi
-
-  if test -z "$PYTHON"; then
-    as_fn_error $? "Cannot find python$PYTHON_VERSION in your system path" "$LINENO" 5
-    PYTHON_VERSION=""
-  fi
-
-  #
-  # Check for a version of Python >= 2.1.0
-  #
-  { $as_echo "$as_me:${as_lineno-$LINENO}: checking for a version of Python >= '2.1.0'" >&5
-$as_echo_n "checking for a version of Python >= '2.1.0'... " >&6; }
-  ac_supports_python_ver=`$PYTHON -c "import sys; \
-    ver = sys.version.split ()[0]; \
-    print (ver >= '2.1.0')"`
-  if test "$ac_supports_python_ver" != "True"; then
-    if test -z "$PYTHON_NOVERSIONCHECK"; then
-      { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-      { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
-$as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
-as_fn_error $? "
-This version of the AC_PYTHON_DEVEL macro
-doesn't work properly with versions of Python before
-2.1.0. You may need to re-run configure, setting the
-variables PYTHON_CPPFLAGS, PYTHON_LIBS, PYTHON_SITE_PKG,
-PYTHON_EXTRA_LIBS and PYTHON_EXTRA_LDFLAGS by hand.
-Moreover, to disable this check, set PYTHON_NOVERSIONCHECK
-to something else than an empty string.
-
-See \`config.log' for more details" "$LINENO" 5; }
-    else
-      { $as_echo "$as_me:${as_lineno-$LINENO}: result: skip at user request" >&5
-$as_echo "skip at user request" >&6; }
-    fi
-  else
-    { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5
-$as_echo "yes" >&6; }
-  fi
-
-  #
-  # if the macro parameter ``version'' is set, honour it
-  #
-  if test -n ">= '3.6'"; then
-    { $as_echo "$as_me:${as_lineno-$LINENO}: checking for a version of Python >= '3.6'" >&5
-$as_echo_n "checking for a version of Python >= '3.6'... " >&6; }
-    ac_supports_python_ver=`$PYTHON -c "import sys; \
-      ver = sys.version.split ()[0]; \
-      print (ver >= '3.6')"`
-    if test "$ac_supports_python_ver" = "True"; then
-      { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5
-$as_echo "yes" >&6; }
-    else
-      { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-      as_fn_error $? "this package requires Python >= '3.6'.
-If you have it installed, but it isn't the default Python
-interpreter in your system path, please pass the PYTHON_VERSION
-variable to configure. See \`\`configure --help'' for reference.
-" "$LINENO" 5
-      PYTHON_VERSION=""
-    fi
-  fi
-
-  #
-  # Check if you have distutils, else fail
-  #
-  { $as_echo "$as_me:${as_lineno-$LINENO}: checking for the distutils Python package" >&5
-$as_echo_n "checking for the distutils Python package... " >&6; }
-  ac_distutils_result=`$PYTHON -c "import distutils" 2>&1`
-  if test $? -eq 0; then
-    { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5
-$as_echo "yes" >&6; }
-  else
-    { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-    as_fn_error $? "cannot import Python module \"distutils\".
-Please check your Python installation. The error was:
-$ac_distutils_result" "$LINENO" 5
-    PYTHON_VERSION=""
-  fi
-
-  #
-  # Check for Python include path
-  #
-  { $as_echo "$as_me:${as_lineno-$LINENO}: checking for Python include path" >&5
-$as_echo_n "checking for Python include path... " >&6; }
-  if test -z "$PYTHON_CPPFLAGS"; then
-    python_path=`$PYTHON -c "import distutils.sysconfig; \
-      print (distutils.sysconfig.get_python_inc ());"`
-    plat_python_path=`$PYTHON -c "import distutils.sysconfig; \
-      print (distutils.sysconfig.get_python_inc (plat_specific=1));"`
-    if test -n "${python_path}"; then
-      if test "${plat_python_path}" != "${python_path}"; then
-        python_path="-I$python_path -I$plat_python_path"
-      else
-        python_path="-I$python_path"
-      fi
-    fi
-    PYTHON_CPPFLAGS=$python_path
-  fi
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $PYTHON_CPPFLAGS" >&5
-$as_echo "$PYTHON_CPPFLAGS" >&6; }
-
-
-  #
-  # Check for Python library path
-  #
-  { $as_echo "$as_me:${as_lineno-$LINENO}: checking for Python library path" >&5
-$as_echo_n "checking for Python library path... " >&6; }
-  if test -z "$PYTHON_LIBS"; then
-    # (makes two attempts to ensure we've got a version number
-    # from the interpreter)
-    ac_python_version=`cat<>confdefs.h <<_ACEOF
-#define HAVE_PYTHON "$ac_python_version"
-_ACEOF
-
-
-  # First, the library directory:
-  ac_python_libdir=`cat<&5
-$as_echo "$PYTHON_LIBS" >&6; }
-
-
-  #
-  # Check for site packages
-  #
-  { $as_echo "$as_me:${as_lineno-$LINENO}: checking for Python site-packages path" >&5
-$as_echo_n "checking for Python site-packages path... " >&6; }
-  if test -z "$PYTHON_SITE_PKG"; then
-    PYTHON_SITE_PKG=`$PYTHON -c "import distutils.sysconfig; \
-      print (distutils.sysconfig.get_python_lib(0,0));"`
-  fi
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $PYTHON_SITE_PKG" >&5
-$as_echo "$PYTHON_SITE_PKG" >&6; }
-
-
-  #
-  # libraries which must be linked in when embedding
-  #
-  { $as_echo "$as_me:${as_lineno-$LINENO}: checking python extra libraries" >&5
-$as_echo_n "checking python extra libraries... " >&6; }
-  if test -z "$PYTHON_EXTRA_LIBS"; then
-    PYTHON_EXTRA_LIBS=`$PYTHON -c "import distutils.sysconfig; \
-      conf = distutils.sysconfig.get_config_var; \
-      print (conf('LIBS') + ' ' + conf('SYSLIBS'))"`
-  fi
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $PYTHON_EXTRA_LIBS" >&5
-$as_echo "$PYTHON_EXTRA_LIBS" >&6; }
-
-
-  #
-  # linking flags needed when embedding
-  #
-  { $as_echo "$as_me:${as_lineno-$LINENO}: checking python extra linking flags" >&5
-$as_echo_n "checking python extra linking flags... " >&6; }
-  if test -z "$PYTHON_EXTRA_LDFLAGS"; then
-    PYTHON_EXTRA_LDFLAGS=`$PYTHON -c "import distutils.sysconfig; \
-      conf = distutils.sysconfig.get_config_var; \
-      print ('' if conf('PYTHONFRAMEWORK') else conf('LINKFORSHARED'))"`
-  fi
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $PYTHON_EXTRA_LDFLAGS" >&5
-$as_echo "$PYTHON_EXTRA_LDFLAGS" >&6; }
-
-
-  #
-  # final check to see if everything compiles alright
-  #
-  { $as_echo "$as_me:${as_lineno-$LINENO}: checking consistency of all components of python development environment" >&5
-$as_echo_n "checking consistency of all components of python development environment... " >&6; }
-  # save current global flags
-  ac_save_LIBS="$LIBS"
-  ac_save_LDFLAGS="$LDFLAGS"
-  ac_save_CPPFLAGS="$CPPFLAGS"
-  LIBS="$ac_save_LIBS $PYTHON_LIBS $PYTHON_EXTRA_LIBS $PYTHON_EXTRA_LIBS"
-  LDFLAGS="$ac_save_LDFLAGS $PYTHON_EXTRA_LDFLAGS"
-  CPPFLAGS="$ac_save_CPPFLAGS $PYTHON_CPPFLAGS"
-  ac_ext=c
+#define HAVE_PYTHON "$HAVE_PYTHON"
+_ACEOF
+
+# Python include path
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for Python include path" >&5
+  $as_echo_n "checking for Python include path... " >&6; }
+if test -z "$PYTHON_CPPFLAGS"; then
+  python_path=`$PYTHON -c "from sysconfig import get_path; \
+print(get_path('include', scheme='posix_prefix'));"`
+  plat_python_path=`$PYTHON -c "from sysconfig import get_path; \
+print(get_path('platinclude', scheme='posix_prefix'));"`
+  if test "${plat_python_path}" != "${python_path}"; then
+    python_path="-I$python_path -I$plat_python_path"
+  else
+    python_path="-I$python_path"
+  fi
+  PYTHON_CPPFLAGS=$python_path
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $PYTHON_CPPFLAGS" >&5
+  $as_echo "$PYTHON_CPPFLAGS" >&6; }
+
+# Python library path
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for Python library path" >&5
+  $as_echo_n "checking for Python library path... " >&6; }
+if test -z "$PYTHON_LIBS"; then
+  ac_python_libdir=$(cat <&5
+  $as_echo "$PYTHON_LIBS" >&6; }
+
+# Python site packages
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for Python site-packages path" >&5
+  $as_echo_n "checking for Python site-packages path... " >&6; }
+if test -z "$PYTHON_SITE_PKG"; then
+  PYTHON_SITE_PKG=`$PYTHON -c "from sysconfig import get_path; \
+print(get_path('platlib', scheme='posix_user'));"`
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $PYTHON_SITE_PKG" >&5
+  $as_echo "$PYTHON_SITE_PKG" >&6; }
+
+# Python libraries that must be linked in when embedding
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking Python extra libraries" >&5
+  $as_echo_n "checking Python extra libraries... " >&6; }
+if test -z "$PYTHON_EXTRA_LIBS"; then
+  PYTHON_EXTRA_LIBS=`$PYTHON -c "from sysconfig import get_config_var; \
+print(get_config_var('LIBS'))"`
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $PYTHON_EXTRA_LIBS" >&5
+  $as_echo "$PYTHON_EXTRA_LIBS" >&6; }
+
+# Python linking flags needed when embedding
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking Python extra linking flags" >&5
+  $as_echo_n "checking Python extra linking flags... " >&6; }
+if test -z "$PYTHON_EXTRA_LDFLAGS"; then
+  PYTHON_EXTRA_LDFLAGS=`$PYTHON -c "from sysconfig import get_config_var; \
+print(get_config_var('LINKFORSHARED'))"`
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $PYTHON_EXTRA_LDFLAGS" >&5
+  $as_echo "$PYTHON_EXTRA_LDFLAGS" >&6; }
+
+# Python final check to see if everything compiles alright
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking consistency of all components of Python development environment" >&5
+  $as_echo_n "checking consistency of all components of Python development environment... " >&6; }
+# Save current global flags
+ac_save_LIBS="$LIBS"
+ac_save_LDFLAGS="$LDFLAGS"
+ac_save_CPPFLAGS="$CPPFLAGS"
+LIBS="$ac_save_LIBS $PYTHON_LIBS $PYTHON_EXTRA_LIBS $PYTHON_EXTRA_LIBS"
+LDFLAGS="$ac_save_LDFLAGS $PYTHON_EXTRA_LDFLAGS"
+CPPFLAGS="$ac_save_CPPFLAGS $PYTHON_CPPFLAGS"
+ac_ext=c
 ac_cpp='$CPP $CPPFLAGS'
 ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5'
 ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
 ac_compiler_gnu=$ac_cv_c_compiler_gnu
 
-  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+cat confdefs.h - <<_ACEOF >conftest.$ac_ext
 /* end confdefs.h. */
 
-  #include
+#include
 #ifdef F77_DUMMY_MAIN
-
-# ifdef __cplusplus
-  extern "C"
-# endif
-  int F77_DUMMY_MAIN() { return 1; }
-
+  #ifdef __cplusplus
+  extern "C"
+  #endif
+  int F77_DUMMY_MAIN() { return 1; }
 #endif
-int
-main ()
+int main()
 {
-Py_Initialize();
-  ;
+  Py_Initialize();
   return 0;
 }
 _ACEOF
-if ac_fn_c_try_link "$LINENO"; then :
+if ac_fn_c_try_link "$LINENO"; then
   pythonexists=yes
 else
   pythonexists=no
 fi
-rm -f core conftest.err conftest.$ac_objext \
-    conftest$ac_exeext conftest.$ac_ext
-  ac_ext=c
+rm -f core conftest.err conftest.$ac_objext conftest$ac_exeext conftest.$ac_ext
+ac_ext=c
 ac_cpp='$CPP $CPPFLAGS'
 ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5'
 ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
 ac_compiler_gnu=$ac_cv_c_compiler_gnu
 
-  # turn back to default flags
-  CPPFLAGS="$ac_save_CPPFLAGS"
-  LIBS="$ac_save_LIBS"
-  LDFLAGS="$ac_save_LDFLAGS"
+# Turn back to default flags
+CPPFLAGS="$ac_save_CPPFLAGS"
+LIBS="$ac_save_LIBS"
+LDFLAGS="$ac_save_LDFLAGS"
 
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $pythonexists" >&5
-$as_echo "$pythonexists" >&6; }
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $pythonexists" >&5
+  $as_echo "$pythonexists" >&6; }
 
-  if test ! "x$pythonexists" = "xyes"; then
-    { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
-$as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
-as_fn_error $? "
-  Could not link test program to Python. Maybe the main Python library has been
-  installed in some non-standard library path. If so, pass it to configure,
-  via the LIBS environment variable.
-  Example: ./configure LIBS=\"-L/usr/non-standard-path/python/lib\"
-  ============================================================================
-   ERROR!
-   You probably have to install the development version of the Python package
-   for your distribution. The exact name of this package varies among them.
-  ============================================================================
+if test "x$pythonexists" != "xyes"; then
+  { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
+    $as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
+  as_fn_error $? "
+Could not link test program to Python. Maybe the main Python library has been
+installed in some non-standard library path. If so, pass it to configure,
+via the LIBS environment variable.
+Example: ./configure LIBS=\"-L/usr/non-standard-path/python/lib\"
+============================================================================
+   ERROR!
+   You probably have to install the development version of the Python package
+   for your distribution. The exact name of this package varies among them.
+============================================================================
 See \`config.log' for more details" "$LINENO" 5; }
-    PYTHON_VERSION=""
-  fi
-
-  #
-  # all done!
-  #
+fi
 
-  CPPFLAGS=$mycppflag
-  # Save variables...
-  if test "x$pythonexists" = "xyes";
-  then
-    if test "$fcompiler" = "pgf90"; then
-      PYTHON_EXTRA_LDFLAGS=${PYTHON_EXTRA_LDFLAGS/-Xlinker/}
-      PYTHON_EXTRA_LDFLAGS=${PYTHON_EXTRA_LDFLAGS/-export-dynamic/}
-    fi
-    if test "x$enable_debugging" = "xyes"; then
-      export PYTHON_EXTRA_LDFLAGS=$(echo $PYTHON_EXTRA_LDFLAGS | sed -e 's/-O1/-O0/g' -e 's/-O2/-O0/g' -e 's/-O3/-O0/g')
-    fi
-    LIBS="$LIBS $PYTHON_LIBS $PYTHON_EXTRA_LDFLAGS $PYTHON_EXTRA_LIBS"
-    CXXFLAGS="$CXXFLAGS $PYTHON_CPPFLAGS"
-    CPPFLAGS="$CPPFLAGS $PYTHON_CPPFLAGS"
-    CFLAGS="$CFLAGS $PYTHON_CPPFLAGS"
-  fi
+# Python final check all done!
 
-  # Additionally check for numpy
-  NUMPY=$($PYTHON -c 'import numpy; print(numpy.get_include())' 2>/dev/null)
-  if ! test -z "$NUMPY"; then
-    echo "Numpy is " $NUMPY
-    CXXFLAGS="$CXXFLAGS -DHAVE_NUMPY -I$NUMPY"
-    CPPFLAGS="$CPPFLAGS -DHAVE_NUMPY -I$NUMPY"
-    CFLAGS="$CFLAGS -DHAVE_NUMPY -I$NUMPY"
-    FFLAGS="$FFLAGS -DHAVE_NUMPY -I$NUMPY"
-    FCFLAGS="$FCFLAGS -DHAVE_NUMPY -I$NUMPY"
-  else
-    echo "Failed to locate Numpy"
-  fi
+# Save variables
+if test "x$pythonexists" = "xyes"; then
+  if test "$fcompiler" = "pgf90"; then
+    PYTHON_EXTRA_LDFLAGS=${PYTHON_EXTRA_LDFLAGS/-Xlinker/}
+    PYTHON_EXTRA_LDFLAGS=${PYTHON_EXTRA_LDFLAGS/-export-dynamic/}
+  fi
+  if test "x$enable_debugging" = "xyes"; then
+    export PYTHON_EXTRA_LDFLAGS=$(echo $PYTHON_EXTRA_LDFLAGS | sed -e 's/-O1/-O0/g' -e 's/-O2/-O0/g' -e 's/-O3/-O0/g')
+  fi
+  LIBS="$LIBS $PYTHON_LIBS $PYTHON_EXTRA_LDFLAGS $PYTHON_EXTRA_LIBS"
+  CXXFLAGS="$CXXFLAGS $PYTHON_CPPFLAGS"
+  CPPFLAGS="$CPPFLAGS $PYTHON_CPPFLAGS"
+  CFLAGS="$CFLAGS $PYTHON_CPPFLAGS"
+fi
 
-  # check for the assess python package used in some tests
-  # (see https://github.com/stephankramer/assess/)
-  $PYTHON -c "import assess" 2>/dev/null
-  if test $? -eq 0;
-  then
-    HAVE_ASSESS=yes
-    { $as_echo "$as_me:${as_lineno-$LINENO}: Successfully imported assess python module" >&5
-$as_echo "$as_me: Successfully imported assess python module" >&6;}
-  else
-    HAVE_ASSESS=no
-    { $as_echo "$as_me:${as_lineno-$LINENO}: Failed to import (optional) python module assess" >&5
-$as_echo "$as_me: Failed to import (optional) python module assess" >&6;}
-  fi
-fi
+# Additionally check for NumPy
+NUMPY=$($PYTHON -c 'import numpy; print(numpy.get_include())' 2>/dev/null)
+if ! test -z "$NUMPY"; then
+  echo "Numpy is " $NUMPY
+  CXXFLAGS="$CXXFLAGS -DHAVE_NUMPY -I$NUMPY"
+  CPPFLAGS="$CPPFLAGS -DHAVE_NUMPY -I$NUMPY"
+  CFLAGS="$CFLAGS -DHAVE_NUMPY -I$NUMPY"
+  FFLAGS="$FFLAGS -DHAVE_NUMPY -I$NUMPY"
+  FCFLAGS="$FCFLAGS -DHAVE_NUMPY -I$NUMPY"
+else
+  echo "Failed to locate Numpy"
+fi
+
+# Check for the assess Python package used in some tests (https://github.com/stephankramer/assess)
+$PYTHON -c "import assess" 2>/dev/null
+if test $? -eq 0; then
+  HAVE_ASSESS=yes
+  { $as_echo "$as_me:${as_lineno-$LINENO}: Successfully imported assess Python module" >&5
+    $as_echo "$as_me: Successfully imported assess Python module" >&6;}
+else
+  HAVE_ASSESS=no
+  { $as_echo "$as_me:${as_lineno-$LINENO}: Failed to import (optional) Python module assess" >&5
+    $as_echo "$as_me: Failed to import (optional) Python module assess" >&6;}
+fi
 
 
 # Check whether --enable-cgal was given.
if test "${enable_cgal+set}" = set; then : @@ -16820,7 +16579,7 @@ $as_echo "$ac_cv_path_GREP" >&6; } -ac_config_files="$ac_config_files Makefile debug/Makefile bathymetry/Makefile ocean_forcing/Makefile ocean_forcing/tests/Makefile sediments/Makefile population_balance/Makefile hyperlight/Makefile femtools/Makefile femtools/tests/Makefile forward_interfaces/Makefile horizontal_adaptivity/Makefile horizontal_adaptivity/tests/Makefile preprocessor/Makefile error_measures/Makefile error_measures/tests/Makefile parameterisation/Makefile parameterisation/tests/Makefile fldecomp/Makefile assemble/Makefile assemble/tests/Makefile diagnostics/Makefile main/Makefile tools/Makefile tools/version-info python/setup.py climatology/Makefile libmba2d/Makefile libmba3d/Makefile libjudy/Makefile libjudy/src/Makefile libjudy/src/JudyCommon/Makefile libjudy/src/Judy1/Makefile libjudy/src/JudyL/Makefile libjudy/src/JudySL/Makefile libjudy/src/JudyHS/Makefile libwm/Makefile libvtkfortran/Makefile tests/tools.mk" +ac_config_files="$ac_config_files Makefile debug/Makefile bathymetry/Makefile ocean_forcing/Makefile ocean_forcing/tests/Makefile sediments/Makefile population_balance/Makefile hyperlight/Makefile femtools/Makefile femtools/tests/Makefile forward_interfaces/Makefile horizontal_adaptivity/Makefile horizontal_adaptivity/tests/Makefile preprocessor/Makefile error_measures/Makefile error_measures/tests/Makefile parameterisation/Makefile parameterisation/tests/Makefile fldecomp/Makefile assemble/Makefile assemble/tests/Makefile diagnostics/Makefile main/Makefile tools/Makefile tools/version-info climatology/Makefile libmba2d/Makefile libmba3d/Makefile libjudy/Makefile libjudy/src/Makefile libjudy/src/JudyCommon/Makefile libjudy/src/Judy1/Makefile libjudy/src/JudyL/Makefile libjudy/src/JudySL/Makefile libjudy/src/JudyHS/Makefile libwm/Makefile libvtkfortran/Makefile tests/tools.mk" cat >confcache <<\_ACEOF # This file is a shell script that caches the results of configure @@ -17539,7 +17298,6 @@ do "main/Makefile") CONFIG_FILES="$CONFIG_FILES main/Makefile" ;; "tools/Makefile") CONFIG_FILES="$CONFIG_FILES tools/Makefile" ;; "tools/version-info") CONFIG_FILES="$CONFIG_FILES tools/version-info" ;; - "python/setup.py") CONFIG_FILES="$CONFIG_FILES python/setup.py" ;; "climatology/Makefile") CONFIG_FILES="$CONFIG_FILES climatology/Makefile" ;; "libmba2d/Makefile") CONFIG_FILES="$CONFIG_FILES libmba2d/Makefile" ;; "libmba3d/Makefile") CONFIG_FILES="$CONFIG_FILES libmba3d/Makefile" ;; @@ -18138,4 +17896,3 @@ if test -n "$ac_unrecognized_opts" && test "$enable_option_checking" != no; then { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: unrecognized options: $ac_unrecognized_opts" >&5 $as_echo "$as_me: WARNING: unrecognized options: $ac_unrecognized_opts" >&2;} fi - diff --git a/docker/Dockerfile.bionic b/docker/Dockerfile.bionic index 0aab9ce730..83fbfd4e7f 100644 --- a/docker/Dockerfile.bionic +++ b/docker/Dockerfile.bionic @@ -11,29 +11,21 @@ ENV DEBIAN_FRONTEND=noninteractive # Package updates and installs RUN apt-get update && \ - apt-get -y dist-upgrade && \ - apt-get -y install gnupg dirmngr && \ - echo "deb http://ppa.launchpad.net/fluidity-core/ppa/ubuntu bionic main" > /etc/apt/sources.list.d/fluidity-core-ppa-bionic.list && \ - gpg --keyserver keyserver.ubuntu.com --recv 0D45605A33BAC3BE && \ - gpg --export --armor 33BAC3BE | apt-key add - && \ - apt-get update && \ - echo "Europe/London" > /etc/timezone && \ - apt-get -y install fluidity-dev texlive-pstricks texlive texlive-latex-extra texlive-science 
&& \ - rm -rf /var/cache/apt && \ - rm -rf /var/lib/apt/lists - -RUN apt-get update && \ - apt-get -y install python3-pip && \ - rm -rf /var/cache/apt && \ - rm -rf /var/lib/apt/lists - -RUN python3 -m pip install --upgrade junit-xml + apt-get -y dist-upgrade && \ + apt-get -y install gnupg dirmngr && \ + echo "deb http://ppa.launchpad.net/fluidity-core/ppa/ubuntu bionic main" > /etc/apt/sources.list.d/fluidity-core-ppa-bionic.list && \ + gpg --keyserver keyserver.ubuntu.com --recv 0D45605A33BAC3BE && \ + gpg --export --armor 33BAC3BE | apt-key add - && \ + apt-get update && \ + echo "Europe/London" > /etc/timezone && \ + apt-get -y install fluidity-dev texlive-pstricks texlive texlive-latex-extra texlive-science && \ + rm -rf /var/cache/apt && \ + rm -rf /var/lib/apt/lists WORKDIR /usr/local -RUN curl -fsL https://gmsh.info/bin/Linux/gmsh-4.8.3-Linux64.tgz | tar --strip-components=1 -zxf - +RUN curl -fsL https://gmsh.info/bin/Linux/gmsh-4.9.3-Linux64.tgz | tar --strip-components=1 -zxf - ENV PETSC_DIR /usr/lib/petscdir/3.8.3 -ENV LD_LIBRARY_PATH /usr/lib/petscdir/3.8.3/linux-gnu-c-opt/lib ENV LDFLAGS -L/usr/lib/x86_64-linux-gnu/hdf5/openmpi ENV CPPFLAGS -I/usr/include/hdf5/openmpi ENV OMPI_MCA_btl_vader_single_copy_mechanism none diff --git a/docker/Dockerfile.focal b/docker/Dockerfile.focal index 972243e105..f493a822b5 100644 --- a/docker/Dockerfile.focal +++ b/docker/Dockerfile.focal @@ -11,21 +11,19 @@ ENV DEBIAN_FRONTEND=noninteractive # Package updates and installs RUN apt-get update && \ - apt-get -y dist-upgrade && \ - apt-get -y install gnupg dirmngr && \ - echo "deb http://ppa.launchpad.net/fluidity-core/ppa/ubuntu focal main" > /etc/apt/sources.list.d/fluidity-core-ppa-focal.list && \ - gpg --keyserver keyserver.ubuntu.com --recv 0D45605A33BAC3BE && \ - gpg --export --armor 33BAC3BE | apt-key add - && \ - apt-get update && \ - echo "Europe/London" > /etc/timezone && \ - apt-get -y install fluidity-dev texlive-pstricks texlive texlive-latex-extra texlive-science python3-pip && \ - rm -rf /var/cache/apt && \ - rm -rf /var/lib/apt/lists - -RUN python3 -m pip install --upgrade junit-xml + apt-get -y dist-upgrade && \ + apt-get -y install gnupg dirmngr && \ + echo "deb http://ppa.launchpad.net/fluidity-core/ppa/ubuntu focal main" > /etc/apt/sources.list.d/fluidity-core-ppa-focal.list && \ + gpg --keyserver keyserver.ubuntu.com --recv 0D45605A33BAC3BE && \ + gpg --export --armor 33BAC3BE | apt-key add - && \ + apt-get update && \ + echo "Europe/London" > /etc/timezone && \ + apt-get -y install fluidity-dev texlive-pstricks texlive texlive-latex-extra texlive-science && \ + rm -rf /var/cache/apt && \ + rm -rf /var/lib/apt/lists WORKDIR /usr/local -RUN curl -fsL https://gmsh.info/bin/Linux/gmsh-4.8.3-Linux64.tgz | tar --strip-components=1 -zxf - +RUN curl -fsL https://gmsh.info/bin/Linux/gmsh-4.9.3-Linux64.tgz | tar --strip-components=1 -zxf - ENV PETSC_DIR /usr/lib/petscdir/3.12 ENV LDFLAGS -L/usr/lib/x86_64-linux-gnu/hdf5/openmpi diff --git a/docker/Dockerfile.groovy b/docker/Dockerfile.groovy deleted file mode 100644 index 66104eb69f..0000000000 --- a/docker/Dockerfile.groovy +++ /dev/null @@ -1,41 +0,0 @@ -# DockerFile for a Fludity development container - -# Use a Groovy base image -FROM ubuntu:groovy - -# This DockerFile is looked after by -MAINTAINER Tim Greaves - -# Installs shouldn't expect input -ENV DEBIAN_FRONTEND=noninteractive - -# Package updates and installs -RUN apt-get update && \ - apt-get -y dist-upgrade && \ - apt-get -y install gnupg dirmngr && \ - echo "deb 
http://ppa.launchpad.net/fluidity-core/ppa/ubuntu groovy main" > /etc/apt/sources.list.d/fluidity-core-ppa-groovy.list && \ - gpg --keyserver keyserver.ubuntu.com --recv 0D45605A33BAC3BE && \ - gpg --export --armor 33BAC3BE | apt-key add - && \ - apt-get update && \ - echo "Europe/London" > /etc/timezone && \ - apt-get -y install fluidity-dev texlive-pstricks texlive texlive-latex-extra texlive-science && \ - rm -rf /var/cache/apt && \ - rm -rf /var/lib/apt/lists - -ENV PETSC_DIR /usr/lib/petscdir/3.13 -ENV LDFLAGS -L/usr/lib/x86_64-linux-gnu/hdf5/openmpi -ENV CPPFLAGS -I/usr/include/hdf5/openmpi -ENV OMPI_MCA_btl_vader_single_copy_mechanism none -ENV OMPI_MCA_rmaps_base_oversubscribe 1 - -WORKDIR /usr/local -RUN curl -fsL https://gmsh.info/bin/Linux/gmsh-4.8.3-Linux64.tgz | tar --strip-components=1 -zxf - - -# Add a Fluidity user who will be the default user for this container -# Make sure the user has a userid matching the host system -# -- pass this as an argument at build time -ARG userid=1000 -RUN adduser --disabled-password --gecos "" -u $userid fluidity - -USER fluidity -WORKDIR /home/fluidity diff --git a/docker/actions/Dockerfile.actions.bionic b/docker/actions/Dockerfile.actions.bionic index 548023727c..0a9175e906 100644 --- a/docker/actions/Dockerfile.actions.bionic +++ b/docker/actions/Dockerfile.actions.bionic @@ -3,19 +3,18 @@ FROM fluidity/baseimages:bionic USER root RUN apt-get -y update && \ - apt-get -y dist-upgrade && \ - apt-get -y install sudo && \ - rm -rf /var/cache/apt/archives && \ - rm -rf /var/lib/apt/lists - -RUN adduser fluidity sudo -RUN echo '%sudo ALL=(ALL) NOPASSWD:ALL' >> /etc/sudoers + apt-get -y dist-upgrade && \ + apt-get -y install python3-pip python3-venv && \ + rm -rf /var/cache/apt/archives && \ + rm -rf /var/lib/apt/lists COPY . /home/fluidity RUN chown -R fluidity /home/fluidity USER fluidity +RUN python3 -m pip install build junit-xml + RUN ./configure --enable-2d-adaptivity RUN make makefiles RUN test -z "$(git status --porcelain */Makefile.dependencies)" diff --git a/docker/actions/Dockerfile.actions.focal b/docker/actions/Dockerfile.actions.focal index ba99070db6..bd6d366275 100644 --- a/docker/actions/Dockerfile.actions.focal +++ b/docker/actions/Dockerfile.actions.focal @@ -3,20 +3,19 @@ FROM fluidity/baseimages:focal USER root RUN apt-get -y update && \ - apt-get -y dist-upgrade && \ - apt-get -y install sudo && \ - rm -rf /var/cache/apt/archives && \ - rm -rf /var/lib/apt/lists - -RUN adduser fluidity sudo -RUN echo '%sudo ALL=(ALL) NOPASSWD:ALL' >> /etc/sudoers + apt-get -y dist-upgrade && \ + apt-get -y install python3-pip python3-venv && \ + rm -rf /var/cache/apt/archives && \ + rm -rf /var/lib/apt/lists COPY . 
/home/fluidity RUN chown -R fluidity /home/fluidity USER fluidity -ENV FCFLAGS="-I/usr/include" +RUN python3 -m pip install assess build junit-xml + +ENV FCFLAGS -I/usr/include RUN ./configure --enable-2d-adaptivity RUN make makefiles @@ -24,6 +23,3 @@ RUN test -z "$(git status --porcelain */Makefile.dependencies)" RUN make RUN make fltools RUN make manual - -# Python module 'assess' is required for some longtests -RUN python3 -m pip install assess diff --git a/docker/actions/Dockerfile.actions.groovy b/docker/actions/Dockerfile.actions.groovy deleted file mode 100644 index ada1683115..0000000000 --- a/docker/actions/Dockerfile.actions.groovy +++ /dev/null @@ -1,26 +0,0 @@ -FROM fluidity/baseimages:groovy - -USER root - -RUN apt-get -y update && \ - apt-get -y dist-upgrade && \ - apt-get -y install sudo && \ - rm -rf /var/cache/apt/archives && \ - rm -rf /var/lib/apt/lists - -RUN adduser fluidity sudo -RUN echo '%sudo ALL=(ALL) NOPASSWD:ALL' >> /etc/sudoers - -COPY . /home/fluidity -RUN chown -R fluidity /home/fluidity - -USER fluidity - -ENV FCFLAGS="-I/usr/include" - -RUN ./configure --enable-2d-adaptivity -RUN make makefiles -RUN test -z "$(git status --porcelain */Makefile.dependencies)" -RUN make -RUN make fltools -RUN make manual diff --git a/libspud/diamond/setup.py.in b/libspud/diamond/setup.py.in index ff24e3ae79..714f31c177 100644 --- a/libspud/diamond/setup.py.in +++ b/libspud/diamond/setup.py.in @@ -1,20 +1,19 @@ -from distutils.core import setup -from distutils.extension import Extension -import os -import os.path -import glob +from glob import glob +from os import listdir +from os.path import isdir, join +from setuptools import setup +from sys import argv, platform # There are a number of local hacks in this file, to deal with the multiple -# ways in which setup.py is called by various scripts and packaging methods +# ways in which setup.py is called by various scripts and packaging methods # that interact with spud, enabling setuptools to grok their intentions. # In some cases, we will be passed a 'DESTDIR' from an upstream packagaing # system. This will be a local directory to install into, and act as local '/' -# as far as all paths are concerned. Check for this, and fail nicely if not set. +# as far as all paths are concerned. Check for this and fail nicely if not set. prefix = None -import sys -packaging=False +packaging = False # We may also be given prefix, either as a configuration option (which will be # dealt with by substitutions later) or as a command line option. If a command @@ -27,9 +26,9 @@ packaging=False # parsed if present, and supercedes any previous DESTDIR picked up from # environment. -for i, arg in enumerate(sys.argv): - if "--prefix" in arg: - prefix = arg.split('=')[1] +for i, arg in enumerate(argv): + if "--prefix" in arg: + prefix = arg.split('=')[1] # Given the above prefix possibilities, as well as root and DESTDIR, we need to # construct a list of data directories to be installed @@ -43,45 +42,40 @@ for i, arg in enumerate(sys.argv): # on the command line in preference to the configure prefix. 
# First parse the plugin directories -plugin_dirs = [dir for dir in os.listdir('plugins') if os.path.isdir(os.path.join('plugins', dir)) and dir[0] != '.'] +plugin_dirs = [dir for dir in listdir('plugins') + if isdir(join('plugins', dir)) and dir[0] != '.'] plugin_data_files = [] -if sys.platform == 'darwin' and packaging: - for plugin in plugin_dirs: - plugin_data_files.append(("./plugins/" + plugin, - glob.glob('plugins/' + plugin + '/*.py'))) +if platform == 'darwin' and packaging: + for plugin in plugin_dirs: + plugin_data_files.append(("./plugins/" + plugin, + glob('plugins/' + plugin + '/*.py'))) else: - for plugin in plugin_dirs: - if prefix is None: - plugin_data_files.append(("@prefix@/share/diamond/plugins/" + plugin, - glob.glob('plugins/' + plugin + '/*.py'))) - else: - plugin_data_files.append((prefix + "/share/diamond/plugins/" + plugin, - glob.glob('plugins/' + plugin + '/*.py'))) + for plugin in plugin_dirs: + if prefix is None: + plugin_data_files.append( + ("@prefix@/share/diamond/plugins/" + plugin, + glob('plugins/' + plugin + '/*.py'))) + else: + plugin_data_files.append( + (prefix + "/share/diamond/plugins/" + plugin, + glob('plugins/' + plugin + '/*.py'))) # Now parse the GUI directories gui_data_files = [] -if sys.platform == 'darwin' and packaging : - gui_data_files.append(("./gui", - ["gui/gui.ui", "gui/diamond.svg", "gui/diamond.png"])) +if platform == 'darwin' and packaging: + gui_data_files.append( + ("./gui", ["gui/gui.ui", "gui/diamond.svg", "gui/diamond.png"])) else: - if prefix is None: - gui_data_files.append(("@prefix@/share/diamond/gui", - ["gui/gui.ui", "gui/diamond.svg"])) - else: - gui_data_files.append((prefix + "/share/diamond/gui", - ["gui/gui.ui", "gui/diamond.svg"])) + if prefix is None: + gui_data_files.append(("@prefix@/share/diamond/gui", + ["gui/gui.ui", "gui/diamond.svg"])) + else: + gui_data_files.append((prefix + "/share/diamond/gui", + ["gui/gui.ui", "gui/diamond.svg"])) # We now have all the information we need; run setup. 
-setup(
-    name='diamond',
-    version='1.0',
-    description="Fluidity preprocessor",
-    author = "The ICOM team",
-    author_email = "patrick.farrell@imperial.ac.uk",
-    url = "http://amcg.ese.ic.ac.uk",
-    packages = ['diamond'],
-    package_dir = {'diamond': 'diamond'},
-    scripts=["bin/diamond"],
-    data_files = gui_data_files + plugin_data_files
-    )
-
+setup(name='diamond', version='1.0', description="Fluidity preprocessor",
+      author="The ICOM team", author_email="patrick.farrell@imperial.ac.uk",
+      url="http://amcg.ese.ic.ac.uk", packages=['diamond'],
+      package_dir={'diamond': 'diamond'}, scripts=["bin/diamond"],
+      data_files=gui_data_files + plugin_data_files)
diff --git a/libspud/dxdiff/setup.py b/libspud/dxdiff/setup.py
index d60d901332..5178930056 100644
--- a/libspud/dxdiff/setup.py
+++ b/libspud/dxdiff/setup.py
@@ -1,21 +1,6 @@
-from distutils.core import setup
-import os
-import os.path
-import glob
-
-try:
-  destdir = os.environ["DESTDIR"]
-except KeyError:
-  destdir = ""
-
-setup(
-    name='dxdiff',
-    version='1.0',
-    description="An XML aware diff tool.",
-    author = "The ICOM team",
-    author_email = "fraser.waters08@imperial.ac.uk",
-    url = "http://amcg.ese.ic.ac.uk",
-    packages = ['dxdiff'],
-    scripts=["dxdiff/dxdiff"],
-    )
+from setuptools import setup
+
+setup(name='dxdiff', version='1.0', description="An XML aware diff tool",
+      author="The ICOM team", author_email="fraser.waters08@imperial.ac.uk",
+      url="http://amcg.ese.ic.ac.uk", packages=['dxdiff'],
+      scripts=["dxdiff/dxdiff"])
diff --git a/libspud/python/libspud.c b/libspud/python/libspud.c
index 39e02caf80..1d1a6beb9c 100644
--- a/libspud/python/libspud.c
+++ b/libspud/python/libspud.c
@@ -11,7 +11,7 @@
 #define PyString_Type PyUnicode_Type
 #define PyString_AsString PyUnicode_AsUTF8
 #define PyString_Check PyUnicode_Check
-#define PyString_GET_SIZE PyUnicode_GET_SIZE
+#define PyString_GET_SIZE PyUnicode_GET_LENGTH
 #endif
 
 static PyObject *SpudError;
@@ -360,7 +360,7 @@ spud_get_option_aux_scalar_or_string(const char *key, int key_len, int type, int
   int i;
   for (i = 0; i < size+1; i++)
     val[i] = '\0';
-  
+
   outcomeGetOption = spud_get_option(key, key_len, val);
   if (error_checking(outcomeGetOption, "get option aux scalar or string") == NULL){
     return NULL;
@@ -478,7 +478,7 @@ libspud_get_option(PyObject *self, PyObject *args)
   if (error_checking(outcomeGetOptionShape, "get option") == NULL){
     return NULL;
   }
-  
+
   if (rank == -1){ // type error
     char errormessage [MAXLENGTH];
     snprintf(errormessage, MAXLENGTH, "Error: The specified option has a different \
@@ -506,7 +506,7 @@ libspud_get_option(PyObject *self, PyObject *args)
     }
     else if (type == SPUD_INT){ //a tensor of ints
       return spud_get_option_aux_tensor_ints(key, key_len, type, rank, shape);
-    } 
+    }
   }
 
   PyErr_SetString(SpudError,"Error: Get option failed.");
@@ -562,7 +562,7 @@ set_option_aux_list_doubles(PyObject *pylist, const char *key, int key_len, int
 static PyObject*
 set_option_aux_string(PyObject *pystring, const char *key, int key_len, int type, int rank, int *shape)
 {   // this function is for setting option when the second argument is of type string
-  char *val = PyString_AsString(pystring);
+  const char *val = PyString_AsString(pystring);
   int outcomeSetOption = spud_set_option(key, key_len, val, type, rank, shape);
   return error_checking(outcomeSetOption, "set option aux string");
 }
@@ -613,10 +613,10 @@ set_option_aux_tensor_doubles(PyObject *pylist, const char *key, int key_len, in
   int outcomeSetOption;
   int size = shape[0]*shape[1];
-  
+
   double element;
   double val [size];
-  
+
   for (i = 0; i < shape[0]; i++){
     PyObject* pysublist = PyList_GetItem(pylist, i);
     for (j = 0; j < shape[1]; j++){
@@ -687,7 +687,7 @@ libspud_set_option(PyObject *self, PyObject *args)
   int shape[2];
   PyObject* firstArg;
   PyObject* secondArg;
-  
+
   if(PyTuple_GET_SIZE(args)!=2){
     PyErr_SetString(SpudError,"Error: set_option takes exactly 2 arguments.");
     return NULL;
@@ -697,19 +697,19 @@ libspud_set_option(PyObject *self, PyObject *args)
   secondArg = PyTuple_GetItem(args, 1);
   PyArg_Parse(firstArg, "s", &key);
   key_len = strlen(key);
-  
+
   if (!spud_have_option(key, key_len)){ //option does not exist yet
     int outcomeAddOption = spud_add_option(key, key_len);
     error_checking(outcomeAddOption, "set option");
-  } 
-  
+  }
+
   if (PyInt_Check(secondArg)){ //just an int
     type = SPUD_INT;
     rank = 0;
     shape[0] = -1;
     shape[1] = -1;
-  
-  } 
+
+  }
   else if (PyString_Check(secondArg)){// a string
     type = SPUD_STRING;
     rank = 1;
@@ -745,13 +745,13 @@ libspud_set_option(PyObject *self, PyObject *args)
       }
       else if (PyFloat_Check(sublistElement)){//list of lists of doubles
         type = SPUD_DOUBLE;
-      } 
+      }
       rank = 2;
       shape[0] = pylistSize;
       shape[1] = pysublistSize;
     }
   }
-  
+
   if (rank == 0){ // scalar
     set_option_aux_scalar(secondArg, key, key_len, type, rank, shape);
   }
@@ -761,10 +761,10 @@ libspud_set_option(PyObject *self, PyObject *args)
     }
     else if (type == SPUD_INT) { // list of ints
       set_option_aux_list_ints(secondArg, key, key_len, type, rank, shape);
-    } 
+    }
     else if (type == SPUD_DOUBLE){ // list of doubles
       set_option_aux_list_doubles(secondArg, key, key_len, type, rank, shape);
-    } 
+    }
   }
   else if (rank == 2){ // tensor
     if (type == SPUD_DOUBLE) { // tensor of doubles
@@ -907,5 +907,3 @@ initlibspud(void)
 #endif
 }
-
-
diff --git a/libspud/python/setup.py b/libspud/python/setup.py
index da84618c8b..f064169864 100644
--- a/libspud/python/setup.py
+++ b/libspud/python/setup.py
@@ -1,9 +1,9 @@
-from distutils.core import setup, Extension
-import os.path
+from os.path import abspath
+from setuptools import setup, Extension
 
-module1 = Extension('libspud', sources = ['libspud.c'], libraries=["spud"], library_dirs=[os.path.abspath("..")], include_dirs=[os.path.abspath("../include")])
-
-setup (name = 'libspud',
-       version = '1.1.3',
-       description = 'Python bindings for libspud',
-       ext_modules = [module1])
+setup(name='libspud', version='1.1.3',
+      description='Python bindings for libspud',
+      ext_modules=[Extension('libspud', sources=['libspud.c'],
+                             libraries=["spud"],
+                             library_dirs=[abspath("..")],
+                             include_dirs=[abspath("../include")])])
diff --git a/python/pyproject.toml b/python/pyproject.toml
new file mode 100644
index 0000000000..9787c3bdf0
--- /dev/null
+++ b/python/pyproject.toml
@@ -0,0 +1,3 @@
+[build-system]
+requires = ["setuptools", "wheel"]
+build-backend = "setuptools.build_meta"
diff --git a/python/setup.cfg b/python/setup.cfg
new file mode 100644
index 0000000000..56dc4a587d
--- /dev/null
+++ b/python/setup.cfg
@@ -0,0 +1,11 @@
+[metadata]
+name = fluidity
+version = 0.1
+description = Fluidity python files
+author = The ICOM team
+author_email = patrick.farrell06@imperial.ac.uk
+url = http://amcg.ese.ic.ac.uk
+
+[options]
+packages = fluidity, fluidity.diagnostics
+py_modules = fluidity_tools, GFD_basisChange_tools, vtktools
diff --git a/python/setup.py.in b/python/setup.py.in
deleted file mode 100644
index 6e52e837c3..0000000000
--- a/python/setup.py.in
+++ /dev/null
@@ -1,23 +0,0 @@
-from distutils.core import setup
-from distutils.extension import Extension
-import os
-import os.path
-
-try:
-  destdir = os.environ["DESTDIR"]
-except KeyError:
-  destdir = ""
-
-setup(
-    name='fluidity',
-    version='0.1',
-    description="Fluidity python files",
-    author = "The ICOM team",
-    author_email = "patrick.farrell06@imperial.ac.uk",
-    url = "http://amcg.ese.ic.ac.uk",
-    packages = ['fluidity', 'fluidity.diagnostics'],
-    package_dir = {'fluidity': 'fluidity'},
-    py_modules = ['fluidity_tools', 'vtktools']
-    )
-
-
diff --git a/tools/Fladapt_main.cpp b/tools/Fladapt_main.cpp
index c7d726494a..8670f88579 100644
--- a/tools/Fladapt_main.cpp
+++ b/tools/Fladapt_main.cpp
@@ -1,5 +1,5 @@
 /* Copyright (C) 2006 Imperial College London and others.
-  
+
    Please see the AUTHORS file in the main source directory for
    a full list of copyright holders.
@@ -9,7 +9,7 @@
    Imperial College London
    amcgsoftware@imperial.ac.uk
-  
+
    This library is free software; you can redistribute it and/or
    modify it under the terms of the GNU Lesser General Public
    License as published by the Free Software Foundation,
@@ -29,6 +29,7 @@
 #include
 #include
 #include
+#include
 
 #include "confdefs.h"
 
@@ -77,7 +78,7 @@ int main(int argc, char** argv){
   // Modified version of flredecomp argument parsing
   // Get any command line arguments
   // Reset optarg so we can detect changes
-  optarg = NULL;  
+  optarg = NULL;
   char c;
   map args;
   while((c = getopt(argc, argv, "hv")) != -1){
@@ -103,14 +104,14 @@ int main(int argc, char** argv){
     Usage();
     exit(0);
   }
-  
+
   // Verbosity
   int verbosity = 0;
   if(args.count('v') > 0){
     verbosity = 3;
   }
   set_global_debug_level_fc(&verbosity);
-  
+
   // Input and output base names
   string input_basename, output_basename;
   if(argc > optind + 2){
@@ -123,7 +124,7 @@ int main(int argc, char** argv){
     Usage();
     exit(-1);
   }
-  
+
   size_t input_basename_len = input_basename.size();
   size_t output_basename_len = output_basename.size();
@@ -134,11 +135,11 @@ int main(int argc, char** argv){
   // Finalize the Python Interpreter
   python_end_();
 #endif
-  
+
 #ifdef HAVE_PETSC
   PetscFinalize();
 #endif
-  
+
 #ifdef HAVE_MPI
   MPI_Finalize();
 #endif
diff --git a/tools/Streamfunction_2D_main.cpp b/tools/Streamfunction_2D_main.cpp
index c1f256bb5c..1792846cce 100644
--- a/tools/Streamfunction_2D_main.cpp
+++ b/tools/Streamfunction_2D_main.cpp
@@ -1,5 +1,5 @@
 /* Copyright (C) 2006 Imperial College London and others.
-  
+
    Please see the AUTHORS file in the main source directory for
    a full list of copyright holders.
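Note on the packaging changes above: with the generated python/setup.py.in removed, the python/ package is now described declaratively by setup.cfg plus the pyproject.toml build-system stanza, so a PEP 517 build needs no setup script at all. A minimal sketch of reading that metadata back with only the standard library (assumes it runs from the repository root; paths are illustrative):

    import configparser

    # Sketch: inspect the declarative metadata that replaces setup.py.in.
    cfg = configparser.ConfigParser()
    cfg.read("python/setup.cfg")

    assert cfg["metadata"]["name"] == "fluidity"
    print(cfg["metadata"]["version"])               # -> 0.1
    print(cfg["options"]["packages"].split(", "))   # -> ['fluidity', 'fluidity.diagnostics']
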
@@ -9,7 +9,7 @@
    Imperial College London
    amcgsoftware@imperial.ac.uk
-  
+
    This library is free software; you can redistribute it and/or
    modify it under the terms of the GNU Lesser General Public
    License as published by the Free Software Foundation,
@@ -29,6 +29,7 @@
 #include
 #include
 #include
+#include
 
 #include "confdefs.h"
 
@@ -78,7 +79,7 @@ int main(int argc, char** argv){
   // Modified version of flredecomp argument parsing
   // Get any command line arguments
   // Reset optarg so we can detect changes
-  optarg = NULL;  
+  optarg = NULL;
   char c;
   map args;
   while((c = getopt(argc, argv, "hv")) != -1){
@@ -104,14 +105,14 @@ int main(int argc, char** argv){
     Usage();
     exit(0);
   }
-  
+
   // Verbosity
   int verbosity = 0;
   if(args.count('v') > 0){
     verbosity = 3;
   }
   set_global_debug_level_fc(&verbosity);
-  
+
   // Input and output base names
   string input_basename, output_basename;
   if(argc > optind + 2){
@@ -124,7 +125,7 @@ int main(int argc, char** argv){
     Usage();
     exit(-1);
   }
-  
+
   size_t input_basename_len = input_basename.size();
   size_t output_basename_len = output_basename.size();
   streamfunction_2d(input_basename.c_str(), input_basename_len,
@@ -134,11 +135,11 @@ int main(int argc, char** argv){
   // Finalize the Python Interpreter
   python_end_();
 #endif
-  
+
 #ifdef HAVE_PETSC
   PetscFinalize();
 #endif
-  
+
 #ifdef HAVE_MPI
   MPI_Finalize();
 #endif
diff --git a/tools/Vertical_Integration_main.cpp b/tools/Vertical_Integration_main.cpp
index 57f7eeb38c..e82a587c9c 100644
--- a/tools/Vertical_Integration_main.cpp
+++ b/tools/Vertical_Integration_main.cpp
@@ -1,5 +1,5 @@
 /* Copyright (C) 2006 Imperial College London and others.
-  
+
    Please see the AUTHORS file in the main source directory for
    a full list of copyright holders.
@@ -9,7 +9,7 @@
    Imperial College London
    amcgsoftware@imperial.ac.uk
-  
+
    This library is free software; you can redistribute it and/or
    modify it under the terms of the GNU Lesser General Public
    License as published by the Free Software Foundation,
@@ -29,6 +29,7 @@
 #include
 #include
 #include
+#include
 
 #include "confdefs.h"
 
@@ -88,7 +89,7 @@ int main(int argc, char** argv){
   // Modified version of flredecomp argument parsing
   // Get any command line arguments
   // Reset optarg so we can detect changes
-  optarg = NULL;  
+  optarg = NULL;
   char c;
   map args;
   while((c = getopt(argc, argv, "b:dhp:s:t:v")) != -1){
@@ -114,14 +115,14 @@ int main(int argc, char** argv){
     Usage();
     exit(0);
   }
-  
+
   // Verbosity
   int verbosity = 0;
   if(args.count('v') > 0){
     verbosity = 3;
   }
   set_global_debug_level_fc(&verbosity);
-  
+
   // Options
   double bottom, sizing, top = 0.0;
   if(args.count('b') > 0){
@@ -137,7 +138,7 @@ int main(int argc, char** argv){
     cerr << "Sizing required" << endl;
     Usage();
     exit(-1);
-  } 
+  }
   if(args.count('t') > 0){
     top = atof(args['t'].c_str());
   }
@@ -146,7 +147,7 @@ int main(int argc, char** argv){
     result_degree = atoi(args['p'].c_str());
   }
   int result_continuity = args.count('d') > 0 ? -1 : (result_degree == 0 ? -1 : 0);
-  
+
   // Input / output
   string target_basename, integrated_filename, integrated_fieldname, output_basename;
   if(argc > optind + 3){
@@ -161,7 +162,7 @@ int main(int argc, char** argv){
     Usage();
     exit(-1);
   }
-  
+
   size_t target_basename_len = target_basename.size();
   size_t integrated_filename_len = integrated_filename.size();
   size_t output_basename_len = output_basename.size();
@@ -174,11 +175,11 @@ int main(int argc, char** argv){
   // Finalize the Python Interpreter
   python_end_();
 #endif
-  
+
 #ifdef HAVE_PETSC
   PetscFinalize();
 #endif
-  
+
 #ifdef HAVE_MPI
   MPI_Finalize();
 #endif
diff --git a/tools/petsc_readnsolve_main.cpp b/tools/petsc_readnsolve_main.cpp
index 34455d7b7d..f41d635ed6 100644
--- a/tools/petsc_readnsolve_main.cpp
+++ b/tools/petsc_readnsolve_main.cpp
@@ -3,6 +3,7 @@
 #include
 #include
+#include
 #include
 #include
 #include
@@ -34,12 +35,12 @@ void usage(int argc, char **argv){
   // access those, and we can't access the command-line from fortran
   // We read those here and stick them in the PETSc options database to be
   // read from fortran
-  
+
   char flml_extension[]=".flml";
   char *flml_file=NULL;
   PetscErrorCode ierr;
   PetscBool flg;
-  
+
   // if it's already specified as a PETSc option, we do nothing:
   ierr = PetscOptionsHasName(NULL, "prns_","-flml",&flg);
   if (flg) {
@@ -65,7 +66,7 @@ void usage(int argc, char **argv){
   }
   ierr = PetscOptionsInsertString(NULL, my_PETSc_options.c_str() );
 }
-  
+
   // -l option needs to be dealt with in c++ already
   ierr = PetscOptionsHasName(NULL, "","-l",&flg);
   if (flg) {
@@ -112,16 +113,16 @@ int main(int argc, char **argv){
   PetscErrorCode ierr = PetscInitialize(&argc, &argv, NULL, help);
   // PetscInitializeFortran needs to be called when initialising PETSc from C, but calling it from Fortran
   ierr = PetscInitializeFortran();
-  
+
   usage(argc, argv);
-  
+
 #ifdef HAVE_PYTHON
   // Initialize the Python Interpreter
   python_init_();
 #endif
-  
+
   petsc_readnsolve_();
-  
+
 #ifdef HAVE_PYTHON
   // Finalize the Python Interpreter
   python_end_();
@@ -131,10 +132,10 @@ int main(int argc, char **argv){
 #ifdef HAVE_MPI
   MPI_Finalize();
 #endif
-  
+
   return 0;
 #else
-  cerr << "ERROR: Not configured with PETSc, so petsc_readnsolve is not gonna work!" << endl; 
+  cerr << "ERROR: Not configured with PETSc, so petsc_readnsolve is not gonna work!" << endl;
   return 1;
 #endif
diff --git a/tools/test_pressure_solve_main.cpp b/tools/test_pressure_solve_main.cpp
index 2fc77b1e4f..ad13992e1f 100644
--- a/tools/test_pressure_solve_main.cpp
+++ b/tools/test_pressure_solve_main.cpp
@@ -3,6 +3,7 @@
 #include
 #include
+#include
 #include
 #include
 #include
@@ -46,7 +47,7 @@ int main(int argc, char **argv){
   PetscErrorCode ierr = PetscInitialize(&argc, &argv, NULL, help);
   // PetscInitializeFortran needs to be called when initialising PETSc from C, but calling it from Fortran
   ierr = PetscInitializeFortran();
-  
+
   test_pressure_solve_();
   PetscFinalize();
 #ifdef HAVE_PYTHON
@@ -55,7 +56,7 @@ int main(int argc, char **argv){
 #endif
   return 0;
 #else
-  cerr << "ERROR: Not configured with PETSc, so test_pressure_solve is not gonna work!" << endl; 
+  cerr << "ERROR: Not configured with PETSc, so test_pressure_solve is not gonna work!" << endl;
   return 1;
 #endif
diff --git a/tools/testharness.py b/tools/testharness.py
index 89d1ef5359..f57a78abfa 100755
--- a/tools/testharness.py
+++ b/tools/testharness.py
@@ -8,45 +8,49 @@
 from io import StringIO
 
 try:
-  import fluidity.regressiontest as regressiontest
+    import fluidity.regressiontest as regressiontest
 except ImportError:
-  # try again by adding the path "../python" relative to testharness' own location to sys.path
-  head,tail = os.path.split(sys.argv[0])
-  python_path = os.path.abspath(os.path.join(head,'..','python'))
-  sys.path.append(python_path)
-  import fluidity.regressiontest as regressiontest
+    # try again by adding the path "../python" relative to testharness' own location to sys.path
+    head, tail = os.path.split(sys.argv[0])
+    python_path = os.path.abspath(os.path.join(head, '..', 'python'))
+    sys.path.append(python_path)
+    import fluidity.regressiontest as regressiontest
 
 import traceback
 import multiprocessing
 
 try:
-  import Queue
+    import Queue
 except ImportError:
-  import queue as Queue
+    import queue as Queue
 
 import xml.parsers.expat
 import string
 
 try:
-  from junit_xml import TestSuite, TestCase
+    from junit_xml import TestSuite, TestCase
 except ImportError:
-  class TestSuite(object):
-    def __init__(self, name, test_cases):
-      self.test_cases=test_cases
-    def to_file(self,*args):
-      print("cannot generate xml report without junit_xml module.")
-  class TestCase(object):
-    def __init__(self,*args,**kwargs):
+    class TestSuite(object):
+        def __init__(self, name, test_cases):
+            self.test_cases = test_cases
+
+        def to_file(self, *args):
+            print("cannot generate xml report without junit_xml module.")
+
+    class TestCase(object):
+        def __init__(self, *args, **kwargs):
             pass
-    def add_failure_info(self,*args,**kwargs):
+
+        def add_failure_info(self, *args, **kwargs):
             pass
 
 # make sure we use the correct version of regressiontest
-sys.path.insert(0, os.path.join(os.getcwd(), os.path.dirname(sys.argv[0]), os.pardir, "python"))
-import fluidity.regressiontest as regressiontest
+sys.path.insert(0, os.path.join(
+    os.getcwd(), os.path.dirname(sys.argv[0]), os.pardir, "python"))
 
 try:
-  import xml.etree.ElementTree as etree
+    import xml.etree.ElementTree as etree
 except ImportError:
-  import elementtree.ElementTree as etree
+    import elementtree.ElementTree as etree
+
 
 class TestHarness:
     def __init__(self, length="any", parallel="any", exclude_tags=None,
@@ -65,239 +69,245 @@ def __init__(self, length="any", parallel="any", exclude_tags=None,
         self.justtest = justtest
         self.valgrind = valgrind
         self.genpbs = genpbs
-        self.xml_parser=TestSuite('TestHarness',[])
-        self.cwd=os.getcwd()
+        self.xml_parser = TestSuite('TestHarness', [])
+        self.cwd = os.getcwd()
         self.iolock = multiprocessing.Lock()
-        self.xml_outfile=xml_outfile
-        self.exit_fails=exit_fails
+        self.xml_outfile = xml_outfile
+        self.exit_fails = exit_fails
         fluidity_command = self.decide_fluidity_command()
 
         if file == "":
-          print("Test criteria:")
-          print("-" * 80)
-          print("length: ", length)
-          print("parallel: ", parallel)
-          print("tags to include: ", tags)
-          print("tags to exclude: ", exclude_tags)
-          print("-" * 80)
-          print()
+            print("Test criteria:")
+            print("-" * 80)
+            print("length: ", length)
+            print("parallel: ", parallel)
+            print("tags to include: ", tags)
+            print("tags to exclude: ", exclude_tags)
+            print("-" * 80)
+            print()
 
         # step 1. form a list of all the xml files to be considered.
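Note: the step-1 discovery that follows walks examples/, tests/ and longtests/ and keeps any XML file whose root element is testproblem. A minimal standalone sketch of that logic (the harness itself catches xml.parsers.expat.ExpatError; the equivalent etree.ParseError is caught here):

    import glob
    import os
    import xml.etree.ElementTree as etree

    def find_test_problems(rootdir):
        """Collect every *.xml file whose root element is <testproblem>."""
        found = []
        for directory in ("examples", "tests", "longtests"):
            base = os.path.join(rootdir, directory)
            if not os.path.exists(base):
                continue
            # one level of subdirectories, as in the harness
            for xml_file in glob.glob(os.path.join(base, "*", "*.xml")):
                try:
                    if etree.parse(xml_file).getroot().tag == "testproblem":
                        found.append(xml_file)
                except etree.ParseError:
                    print("Warning: %s mal-formed" % xml_file)
        return found
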
         xml_files = []
-        rootdir = os.path.abspath(os.path.join(os.path.dirname(sys.argv[0]), os.pardir))
+        rootdir = os.path.abspath(os.path.join(
+            os.path.dirname(sys.argv[0]), os.pardir))
         dirnames = []
         testpaths = ["examples", "tests", "longtests"]
         for directory in testpaths:
-          if os.path.exists(os.path.join(rootdir, directory)):
-            dirnames.append(directory)
-        testdirs = [ os.path.join( rootdir, x ) for x in dirnames ]
+            if os.path.exists(os.path.join(rootdir, directory)):
+                dirnames.append(directory)
+        testdirs = [os.path.join(rootdir, x) for x in dirnames]
 
         for directory in testdirs:
-          subdirs = [ os.path.join(directory, x) for x in os.listdir(directory)]
-          for subdir in subdirs:
-            g = glob.glob1(subdir, "*.xml")
-            for xml_file in g:
-              try:
-                p = etree.parse(os.path.join(subdir, xml_file))
-                x = p.getroot()
-                if x.tag == "testproblem":
-                  xml_files.append(os.path.join(subdir, xml_file))
-              except xml.parsers.expat.ExpatError:
-                print(("Warning: %s mal-formed" % xml_file))
-                traceback.print_exc()
+            subdirs = [os.path.join(directory, x)
+                       for x in os.listdir(directory)]
+            for subdir in subdirs:
+                g = glob.glob1(subdir, "*.xml")
+                for xml_file in g:
+                    try:
+                        p = etree.parse(os.path.join(subdir, xml_file))
+                        x = p.getroot()
+                        if x.tag == "testproblem":
+                            xml_files.append(os.path.join(subdir, xml_file))
+                    except xml.parsers.expat.ExpatError:
+                        print(("Warning: %s mal-formed" % xml_file))
+                        traceback.print_exc()
 
         # step 2. if the user has specified a particular file, let's use that.
 
         if file != "":
-          files = [file]
+            files = [file]
         elif from_file:
-          try:
-            f = open(from_file, 'r')
-            files = [line[:-1] for line in f.readlines()]
-          except IOError as e:
-            sys.stderr.write("Unable to read tests from file %s: %s" % (from_file, e))
-            sys.exit(1)
-          f.close()
+            try:
+                f = open(from_file, 'r')
+                files = [line[:-1] for line in f.readlines()]
+            except IOError as e:
+                sys.stderr.write(
+                    "Unable to read tests from file %s: %s" % (from_file, e))
+                sys.exit(1)
+            f.close()
         else:
-          files = None
+            files = None
 
         if files:
-          for (subdir, xml_file) in [os.path.split(x) for x in xml_files]:
-            temp_files=files
-            for file in temp_files:
-              if xml_file == file:
-                p = etree.parse(os.path.join(subdir,xml_file))
-                prob_defn = p.findall("problem_definition")[0]
-                prob_nprocs = int(prob_defn.attrib["nprocs"])
-                testprob = regressiontest.TestProblem(filename=os.path.join(subdir, xml_file),
-                      verbose=self.verbose, replace=self.modify_command_line(prob_nprocs), genpbs=genpbs)
-                self.tests.append((subdir, testprob))
-                files.remove(xml_file)
-          if files != []:
-            print("Could not find the following specified test files:")
-            for f in files:
-              print(f)
-            sys.exit(1)
-          return
+            for (subdir, xml_file) in [os.path.split(x) for x in xml_files]:
+                temp_files = files
+                for file in temp_files:
+                    if xml_file == file:
+                        p = etree.parse(os.path.join(subdir, xml_file))
+                        prob_defn = p.findall("problem_definition")[0]
+                        prob_nprocs = int(prob_defn.attrib["nprocs"])
+                        testprob = regressiontest.TestProblem(filename=os.path.join(subdir, xml_file),
+                                                              verbose=self.verbose, replace=self.modify_command_line(prob_nprocs), genpbs=genpbs)
+                        self.tests.append((subdir, testprob))
+                        files.remove(xml_file)
+            if files != []:
+                print("Could not find the following specified test files:")
+                for f in files:
+                    print(f)
+                sys.exit(1)
+            return
 
         # step 3. form a cut-down list of the xml files matching the correct length and the correct parallelism.
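Note: the step-3 selection below keeps a test only if the length and nprocs attributes of its problem_definition element match the requested criteria. In outline (a sketch using the attribute names visible in the diff):

    import xml.etree.ElementTree as etree

    def matches(xml_file, length="any", parallel="any"):
        """Sketch of the length/parallelism filter applied in step 3."""
        prob_defn = etree.parse(xml_file).findall("problem_definition")[0]
        prob_length = prob_defn.attrib["length"]
        prob_nprocs = int(prob_defn.attrib["nprocs"])
        if not (prob_length == length or
                (length == "any" and prob_length not in ["special", "long"])):
            return False
        if parallel == "parallel":
            return prob_nprocs > 1
        if parallel == "serial":
            return prob_nprocs == 1
        return True  # parallel == "any"
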
         working_set = []
         for xml_file in xml_files:
-          p = etree.parse(xml_file)
-          prob_defn = p.findall("problem_definition")[0]
-          prob_length = prob_defn.attrib["length"]
-          prob_nprocs = int(prob_defn.attrib["nprocs"])
-          if prob_length == length or (length == "any" and prob_length not in ["special", "long"]):
-            if self.parallel == "parallel":
-              if prob_nprocs > 1:
-                working_set.append(xml_file)
-            elif self.parallel == "serial":
-              if prob_nprocs == 1:
-                working_set.append(xml_file)
-            elif self.parallel == "any":
-              working_set.append(xml_file)
-        
+            p = etree.parse(xml_file)
+            prob_defn = p.findall("problem_definition")[0]
+            prob_length = prob_defn.attrib["length"]
+            prob_nprocs = int(prob_defn.attrib["nprocs"])
+            if prob_length == length or (length == "any" and prob_length not in ["special", "long"]):
+                if self.parallel == "parallel":
+                    if prob_nprocs > 1:
+                        working_set.append(xml_file)
+                elif self.parallel == "serial":
+                    if prob_nprocs == 1:
+                        working_set.append(xml_file)
+                elif self.parallel == "any":
+                    working_set.append(xml_file)
+
         def get_xml_file_tags(xml_file):
-          p = etree.parse(xml_file)
-          p_tags = p.findall("tags")
-          if len(p_tags) > 0 and not p_tags[0].text is None:
-            xml_tags = p_tags[0].text.split()
-          else:
-            xml_tags = []
-          
-          return xml_tags
-        
+            p = etree.parse(xml_file)
+            p_tags = p.findall("tags")
+            if len(p_tags) > 0 and not p_tags[0].text is None:
+                xml_tags = p_tags[0].text.split()
+            else:
+                xml_tags = []
+
+            return xml_tags
+
         # step 4. if there are any excluded tags, let's exclude tests that have
         # them
         if exclude_tags is not None:
-          to_remove = []
-          for xml_file in working_set:
-            p_tags = get_xml_file_tags(xml_file)
-            include = True
-            for tag in exclude_tags:
-              if tag in p_tags:
-                include = False
-                break
-            if not include:
-              to_remove.append(xml_file)
-          for xml_file in to_remove:
-            working_set.remove(xml_file)
+            to_remove = []
+            for xml_file in working_set:
+                p_tags = get_xml_file_tags(xml_file)
+                include = True
+                for tag in exclude_tags:
+                    if tag in p_tags:
+                        include = False
+                        break
+                if not include:
+                    to_remove.append(xml_file)
+            for xml_file in to_remove:
+                working_set.remove(xml_file)
 
         # step 5. if there are any tags, let's use them
         if tags is not None:
-          tagged_set = []
-          for xml_file in working_set:
-            p_tags = get_xml_file_tags(xml_file)
+            tagged_set = []
+            for xml_file in working_set:
+                p_tags = get_xml_file_tags(xml_file)
 
-            include = True
-            for tag in tags:
-              if tag not in p_tags:
-                include = False
+                include = True
+                for tag in tags:
+                    if tag not in p_tags:
+                        include = False
 
-            if include is True:
-              tagged_set.append(xml_file)
+                if include is True:
+                    tagged_set.append(xml_file)
         else:
-          tagged_set = working_set
+            tagged_set = working_set
 
         for (subdir, xml_file) in [os.path.split(x) for x in tagged_set]:
-          # need to grab nprocs here to pass through to modify_command_line
-          p = etree.parse(os.path.join(subdir,xml_file))
-          prob_defn = p.findall("problem_definition")[0]
-          prob_nprocs = int(prob_defn.attrib["nprocs"])
-          testprob = regressiontest.TestProblem(filename=os.path.join(subdir, xml_file),
-                verbose=self.verbose, replace=self.modify_command_line(prob_nprocs))
-          self.tests.append((subdir, testprob))
+            # need to grab nprocs here to pass through to modify_command_line
+            p = etree.parse(os.path.join(subdir, xml_file))
+            prob_defn = p.findall("problem_definition")[0]
+            prob_nprocs = int(prob_defn.attrib["nprocs"])
+            testprob = regressiontest.TestProblem(filename=os.path.join(subdir, xml_file),
+                                                  verbose=self.verbose, replace=self.modify_command_line(prob_nprocs))
+            self.tests.append((subdir, testprob))
 
         if len(self.tests) == 0:
-          print("Warning: no matching tests.")
+            print("Warning: no matching tests.")
 
     def length_matches(self, filelength):
-        if self.length == filelength: return True
-        if self.length == "medium" and filelength == "short": return True
+        if self.length == filelength:
+            return True
+        if self.length == "medium" and filelength == "short":
+            return True
         return False
 
     def decide_fluidity_command(self):
         bindir = os.environ["PATH"].split(':')[0]
-        
+
        for binaryBase in ["dfluidity", "fluidity"]:
-          binary = binaryBase
-          debugBinary = binaryBase + "-debug"
-          try:
-            fluidity_mtime = os.stat(os.path.join(bindir, binary))[-2]
-            have_fluidity = True
-          except OSError:
-            fluidity_mtime = 1e30
-            have_fluidity = False
-
-          try:
-            debug_mtime = os.stat(os.path.join(bindir, debugBinary))[-2]
-            have_debug = True
-          except OSError:
-            debug_mtime = 1e30
-            have_debug = False
-
-          if have_fluidity is True or have_debug is True:
-            if have_fluidity is False and have_debug is True:
-              flucmd = debugBinary
-
-            elif have_fluidity is True and have_debug is False:
-              flucmd = binary
-
-            elif fluidity_mtime > debug_mtime:
-              flucmd = binary
-            else:
-              flucmd = debugBinary
-
-            # no longer valid since debugging doesn't change the name - any suitable alternative tests?
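Note: decide_fluidity_command, rewritten in the hunk continuing below, prefers whichever of fluidity and fluidity-debug in the first PATH entry has the newer mtime; a missing binary gets a sentinel mtime of 1e30 so it always loses the comparison. Reduced to its core (a sketch, not the harness API):

    import os

    def newest_binary(bindir, base="fluidity"):
        """Sketch: pick base or base-debug, whichever was built most recently."""
        def mtime(name):
            try:
                return os.stat(os.path.join(bindir, name)).st_mtime
            except OSError:
                return None  # binary absent
        candidates = {}
        for name in (base, base + "-debug"):
            t = mtime(name)
            if t is not None:
                candidates[name] = t
        if not candidates:
            return None
        return max(candidates, key=candidates.get)  # newest mtime wins
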
-            # if self.valgrind is True:
-            #   if flucmd != debugBinary:
-            #     print("Error: you really should compile with debugging for use with valgrind!")
-            #     sys.exit(1)
-
-            return flucmd
-
+            binary = binaryBase
+            debugBinary = binaryBase + "-debug"
+            try:
+                fluidity_mtime = os.stat(os.path.join(bindir, binary))[-2]
+                have_fluidity = True
+            except OSError:
+                fluidity_mtime = 1e30
+                have_fluidity = False
+
+            try:
+                debug_mtime = os.stat(os.path.join(bindir, debugBinary))[-2]
+                have_debug = True
+            except OSError:
+                debug_mtime = 1e30
+                have_debug = False
+
+            if have_fluidity is True or have_debug is True:
+                if have_fluidity is False and have_debug is True:
+                    flucmd = debugBinary
+
+                elif have_fluidity is True and have_debug is False:
+                    flucmd = binary
+
+                elif fluidity_mtime > debug_mtime:
+                    flucmd = binary
+                else:
+                    flucmd = debugBinary
+
+                # no longer valid since debugging doesn't change the name - any suitable alternative tests?
+                # if self.valgrind is True:
+                #   if flucmd != debugBinary:
+                #     print("Error: you really should compile with debugging for use with valgrind!")
+                #     sys.exit(1)
+
+                return flucmd
+
         return None
 
     def modify_command_line(self, nprocs):
-        flucmd = self.decide_fluidity_command()
-        print(flucmd)
-        def f(s):
-          if not flucmd in [None, "fluidity"]:
-            s = s.replace('fluidity ', flucmd + ' ')
+        flucmd = self.decide_fluidity_command()
+        print(flucmd)
 
-          if self.valgrind:
-            s = "valgrind --tool=memcheck --leak-check=full -v" + \
-                " --show-reachable=yes --num-callers=8 --error-limit=no " + \
-                "--log-file=test.log " + s
+        def f(s):
+            if not flucmd in [None, "fluidity"]:
+                s = s.replace('fluidity ', flucmd + ' ')
 
-          # when calling genpbs, genpbs should take care of inserting the right -n magic
-          if not self.genpbs:
-            s = s.replace('mpiexec ', 'mpiexec -n %(nprocs)d ' % {'nprocs': nprocs})
+            if self.valgrind:
+                s = "valgrind --tool=memcheck --leak-check=full -v" + \
+                    " --show-reachable=yes --num-callers=8 --error-limit=no " + \
+                    "--log-file=test.log " + s
 
-          return s
+            # when calling genpbs, genpbs should take care of inserting the right -n magic
+            if not self.genpbs:
+                s = s.replace('mpiexec ', 'mpiexec -n %(nprocs)d ' %
+                              {'nprocs': nprocs})
 
-        return f
+            return s
+        return f
 
     def log(self, str):
         if self.verbose == True:
             print(str)
 
     def clean(self):
-        self.log(" ")
-        for t in self.tests:
-          os.chdir(t[0])
-          t[1].clean()
+        self.log(" ")
+        for t in self.tests:
+            os.chdir(t[0])
+            t[1].clean()
 
-        return
+        return
 
     def run(self):
         self.log(" ")
         if not self.justtest:
-            threadlist=[]
+            threadlist = []
             self.test_exception_ids = multiprocessing.Queue()
-            tests_by_nprocs={}
+            tests_by_nprocs = {}
             for test_id in range(len(self.tests)):
                 # sort tests by number of processes requested
                 tests_by_nprocs.setdefault(self.tests[test_id][1].nprocs,
@@ -307,22 +317,23 @@ def run(self):
                     # collect serial tests to pass to worker threads
                     serial_tests.put(test)
             for nprocs in sorted(list(tests_by_nprocs.keys()), reverse=True):
-                for i in range(len(threadlist),
-                               max(0, options.thread_count-nprocs)):
-                    # spin up enough new workers to fully subscribe thread count
-                    threadlist.append(multiprocessing.Process(target=self.threadrun, args=[serial_tests]))
-                    threadlist[-1].start()
-                if nprocs==1:
-                    # remaining tests are serial. Join the workers
-                    self.threadrun(serial_tests)
-                else:
-                    tests = tests_by_nprocs[nprocs]
-                    queue = Queue.Queue()
-                    for test in tests:
-                        queue.put(test)
-
-                    # run the parallel queue on master thread
-                    self.threadrun(queue)
+                for i in range(len(threadlist),
+                               max(0, options.thread_count - nprocs)):
+                    # spin up enough new workers to fully subscribe thread count
+                    threadlist.append(multiprocessing.Process(
+                        target=self.threadrun, args=[serial_tests]))
+                    threadlist[-1].start()
+                if nprocs == 1:
+                    # remaining tests are serial. Join the workers
+                    self.threadrun(serial_tests)
+                else:
+                    tests = tests_by_nprocs[nprocs]
+                    queue = Queue.Queue()
+                    for test in tests:
+                        queue.put(test)
+
+                    # run the parallel queue on master thread
+                    self.threadrun(queue)
             for t in threadlist:
                 '''Wait until all threads finish'''
                 t.join()
@@ -335,80 +346,83 @@ def run(self):
                 except Queue.Empty:
                     break
                 for e, lines in exceptions:
-                    tc=TestCase(e[1].name,
-                                '%s.%s'%(e[1].length,
-                                         e[1].filename[:-4]))
+                    tc = TestCase(e[1].name,
+                                  '%s.%s' % (e[1].length,
+                                             e[1].filename[:-4]))
                     tc.add_failure_info("Failure", lines)
-                    self.xml_parser.test_cases+= [tc]
+                    self.xml_parser.test_cases += [tc]
                     self.tests.remove(e)
                     self.completed_tests += [e[1]]
 
             count = len(self.tests)
             while True:
                 for t in self.tests:
-                  if t is None: continue
-                  test = t[1]
-                  os.chdir(t[0])
-                  if test.is_finished():
-                    if test.length == "long":
-                      test.fl_logs(nLogLines = 20)
-                    else:
-                      test.fl_logs(nLogLines = 0)
-                    try:
-                      self.teststatus += test.test()
-                    except:
-                      self.log("Error: %s raised an exception while testing:" % test.filename)
-                      lines = traceback.format_exception( sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2] )
-                      for line in lines:
-                        self.log(line)
-                      self.teststatus += ['F']
-                      test.pass_status = ['F']
-                    self.completed_tests += [test]
-                    self.xml_parser.test_cases+=test.xml_reports
-                    t = None
-                    count -= 1
-
-                if count == 0: break
-                time.sleep(60)
+                    if t is None:
+                        continue
+                    test = t[1]
+                    os.chdir(t[0])
+                    if test.is_finished():
+                        if test.length == "long":
+                            test.fl_logs(nLogLines=20)
+                        else:
+                            test.fl_logs(nLogLines=0)
+                        try:
+                            self.teststatus += test.test()
+                        except:
+                            self.log(
+                                "Error: %s raised an exception while testing:" % test.filename)
+                            lines = traceback.format_exception(
+                                sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])
+                            for line in lines:
+                                self.log(line)
+                            self.teststatus += ['F']
+                            test.pass_status = ['F']
+                        self.completed_tests += [test]
+                        self.xml_parser.test_cases += test.xml_reports
+                        t = None
+                        count -= 1
+
+                if count == 0:
+                    break
+                time.sleep(60)
         else:
-          for t in self.tests:
-            test = t[1]
-            os.chdir(t[0])
-            if self.length == "long":
-              test.fl_logs(nLogLines = 20)
-            else:
-              test.fl_logs(nLogLines = 0)
-            self.teststatus += test.test()
-            self.completed_tests += [test]
-
-            self.xml_parser.test_cases+=test.xml_reports
+            for t in self.tests:
+                test = t[1]
+                os.chdir(t[0])
+                if self.length == "long":
+                    test.fl_logs(nLogLines=20)
+                else:
+                    test.fl_logs(nLogLines=0)
+                self.teststatus += test.test()
+                self.completed_tests += [test]
+
+                self.xml_parser.test_cases += test.xml_reports
 
         self.passcount = self.teststatus.count('P')
         self.failcount = self.teststatus.count('F')
         self.warncount = self.teststatus.count('W')
-        
+
         if self.failcount + self.warncount > 0:
             print()
             print("Summary of test problems with failures or warnings:")
             for t in self.completed_tests:
-                if t.pass_status.count('F')+t.warn_status.count('W')>0:
-                    print(t.filename+':', ''.join(t.pass_status+t.warn_status))
+                if t.pass_status.count('F') + t.warn_status.count('W') > 0:
+                    print(t.filename + ':', ''.join(t.pass_status + t.warn_status))
             print()
-        
+
         if self.passcount + self.failcount + self.warncount > 0:
             print("Passes:   %d" % self.passcount)
             print("Failures: %d" % self.failcount)
             print("Warnings: %d" % self.warncount)
 
-        if self.xml_outfile!="":
-            fd=open(self.cwd+'/'+self.xml_outfile,'w')
-            self.xml_parser.to_file(fd,[self.xml_parser])
+        if self.xml_outfile != "":
+            fd = open(self.cwd + '/' + self.xml_outfile, 'w')
+            self.xml_parser.to_file(fd, [self.xml_parser])
             fd.close()
 
         if self.exit_fails:
             sys.exit(self.failcount)
 
-
     def threadrun(self, queue):
         '''This is the portion of the loop which actually runs the
         tests. This is split out so that it can be threaded.
@@ -422,7 +436,7 @@ def threadrun(self, queue):
             buf = StringIO()
             sys.stdout = buf
             try:
-                #pull a test number from the queue
+                # pull a test number from the queue
                 test_id = queue.get(timeout=0.1)
                 (dir, test) = self.tests[test_id]
             except Queue.Empty:
@@ -430,20 +444,22 @@ def threadrun(self, queue):
                 sys.stdout = main_stdout
                 buf.seek(0)
                 with self.iolock:
-                    print (buf.read())
+                    print(buf.read())
                 break
             try:
-                runtime=test.run(dir)
-                if self.length=="short" and runtime>30.0:
-                    self.log("Warning: short test ran for %f seconds which"+
-                             " is longer than the permitted 30s run time"%runtime)
+                runtime = test.run(dir)
+                if self.length == "short" and runtime > 30.0:
+                    self.log("Warning: short test ran for %f seconds which" +
+                             " is longer than the permitted 30s run time" % runtime)
                     self.teststatus += ['W']
                     test.pass_status = ['W']
-                    
+
             except:
-                self.log("Error: %s raised an exception while running:" % test.filename)
-                lines = traceback.format_exception( sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2] )
+                self.log("Error: %s raised an exception while running:" %
+                         test.filename)
+                lines = traceback.format_exception(
+                    sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])
                 for line in lines:
                     self.log(line)
                 test.pass_status = ['F']
@@ -455,47 +471,57 @@ def threadrun(self, queue):
                 print(buf.read())
 
     def list(self):
-        for (subdir, test) in self.tests:
-          print(os.path.join(subdir, test.filename))
+        for (subdir, test) in self.tests:
+            print(os.path.join(subdir, test.filename))
 
 
 if __name__ == "__main__":
     import optparse
 
     parser = optparse.OptionParser()
-    parser.add_option("-l", "--length", dest="length", help="length of problem (default=any)", default="any")
+    parser.add_option("-l", "--length", dest="length",
+                      help="length of problem (default=any)", default="any")
     parser.add_option("-p", "--parallelism", dest="parallel",
                       help="parallelism of problem: options are serial, parallel or any (default=any)", default="any")
-    parser.add_option("-e", "--exclude-tags", dest="exclude_tags", help="run only tests that do not have specific tags (takes precidence over -t)", default=[], action="append")
-    parser.add_option("-t", "--tags", dest="tags", help="run tests with specific tags", default=[], action="append")
-    parser.add_option("-f", "--file", dest="file", help="specific test case to run (by filename)", default="")
+    parser.add_option("-e", "--exclude-tags", dest="exclude_tags",
+                      help="run only tests that do not have specific tags (takes precedence over -t)", default=[], action="append")
+    parser.add_option("-t", "--tags", dest="tags",
+                      help="run tests with specific tags", default=[], action="append")
+    parser.add_option("-f", "--file", dest="file",
+                      help="specific test case to run (by filename)", default="")
    parser.add_option("--from-file", dest="from_file", default=None,
                      help="run tests listed in FROM_FILE (one test per line)")
    parser.add_option("-n", "--threads", dest="thread_count", type="int",
                      help="number of tests to run at the same time", default=1)
    parser.add_option("-v", "--valgrind", action="store_true", dest="valgrind")
-    parser.add_option("-c", "--clean", action="store_true", dest="clean", default = False)
-    parser.add_option("--just-test", action="store_true", dest="justtest", default=False)
+    parser.add_option("-c", "--clean", action="store_true",
+                      dest="clean", default=False)
+    parser.add_option("--just-test", action="store_true",
+                      dest="justtest", default=False)
    parser.add_option("--just-list", action="store_true", dest="justlist")
    parser.add_option("--genpbs", action="store_true", dest="genpbs")
-    parser.add_option("-x","--xml-output", dest="xml_outfile", default="", help="filename for xml output")
-    parser.add_option("--exit-failure-count", action="store_true", dest="exit_fails", help="Return failure count on exit")
+    parser.add_option("-x", "--xml-output", dest="xml_outfile",
+                      default="", help="filename for xml output")
+    parser.add_option("--exit-failure-count", action="store_true",
+                      dest="exit_fails", help="Return failure count on exit")
    (options, args) = parser.parse_args()
 
-    if len(args) > 0: parser.error("Too many arguments.")
+    if len(args) > 0:
+        parser.error("Too many arguments.")
 
    if options.parallel not in ['serial', 'parallel', 'any']:
-        parser.error("Specify parallelism as either serial, parallel or any.")
-
-    os.environ["PATH"] = os.path.abspath(os.path.join(os.path.dirname(sys.argv[0]), "..", "bin")) + ":" + os.environ["PATH"]
-    try:
-        os.environ["PYTHONPATH"] = os.path.abspath(os.path.join(os.path.dirname(sys.argv[0]), "..", "python")) + ":" + os.environ["PYTHONPATH"]
-    except KeyError:
-        os.putenv("PYTHONPATH", os.path.abspath(os.path.join(os.path.dirname(sys.argv[0]), "..", "python")))
+        parser.error("Specify parallelism as either serial, parallel or any.")
+
+    os.environ["PATH"] = os.path.abspath(os.path.join(
+        os.path.dirname(sys.argv[0]), "..", "bin")) + ":" + os.environ["PATH"]
+    os.environ["PATH"] = os.path.abspath(os.path.join(os.path.dirname(
+        sys.argv[0]), "..", "libspud", "bin")) + ":" + os.environ["PATH"]
    try:
-        os.environ["LD_LIBRARY_PATH"] = os.path.abspath(os.path.join(os.path.dirname(sys.argv[0]), "..", "lib")) + ":" + os.environ["LD_LIBRARY_PATH"]
+        os.environ["LD_LIBRARY_PATH"] = os.path.abspath(os.path.join(os.path.dirname(
+            sys.argv[0]), "..", "lib")) + ":" + os.environ["LD_LIBRARY_PATH"]
    except KeyError:
-        os.putenv("LD_LIBRARY_PATH", os.path.abspath(os.path.join(os.path.dirname(sys.argv[0]), "..", "lib")))
+        os.putenv("LD_LIBRARY_PATH", os.path.abspath(
+            os.path.join(os.path.dirname(sys.argv[0]), "..", "lib")))
 
    try:
        os.mkdir(os.environ["HOME"] + os.sep + "lock")
@@ -503,14 +529,14 @@ def list(self):
        pass
 
    if len(options.exclude_tags) == 0:
-      exclude_tags = None
+        exclude_tags = None
    else:
-      exclude_tags = options.exclude_tags
+        exclude_tags = options.exclude_tags
 
    if len(options.tags) == 0:
-      tags = None
+        tags = None
    else:
-      tags = options.tags
+        tags = options.tags
 
    testharness = TestHarness(length=options.length, parallel=options.parallel,
                              exclude_tags=exclude_tags, tags=tags,
@@ -523,25 +549,29 @@ def list(self):
                              xml_outfile=options.xml_outfile)
 
    if options.justlist:
-      testharness.list()
+        testharness.list()
    elif options.clean:
-      testharness.clean()
+        testharness.clean()
    else:
-      print("-" * 80)
-      which = os.popen("which %s" % testharness.decide_fluidity_command()).read()
-      if len(which) > 0:
-        print("which %s: %s" % ("fluidity", which), end=' ')
-      versio = os.popen("%s -V" % testharness.decide_fluidity_command()).read()
-      if len(versio) > 0:
-        print(versio)
-      print("-" * 80)
-
-      if options.valgrind is True:
-        print("-" * 80)
-        print("I see you are using valgrind!")
-        print("A couple of points to remember.")
-        print("a) The log file will be produced in the directory containing the tests.")
-        print("b) Valgrind typically takes O(100) times as long. I hope your test is short.")
+        print("-" * 80)
+        which = os.popen("which %s" %
+                         testharness.decide_fluidity_command()).read()
+        if len(which) > 0:
+            print("which %s: %s" % ("fluidity", which), end=' ')
+        versio = os.popen("%s -V" %
+                          testharness.decide_fluidity_command()).read()
+        if len(versio) > 0:
+            print(versio)
        print("-" * 80)
 
-      testharness.run()
+        if options.valgrind is True:
+            print("-" * 80)
+            print("I see you are using valgrind!")
+            print("A couple of points to remember.")
+            print(
+                "a) The log file will be produced in the directory containing the tests.")
+            print(
+                "b) Valgrind typically takes O(100) times as long. I hope your test is short.")
+            print("-" * 80)
+
+        testharness.run()
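Note: the run()/threadrun() pair reindented above amounts to a small process-pool pattern: serial tests are queued, worker processes drain the queue, and the master joins in before waiting on the workers. A stripped-down sketch of that pattern (run_test is a hypothetical stand-in for TestProblem.run):

    import multiprocessing
    from queue import Empty

    def worker(tests, run_test):
        """Drain the shared queue until it is empty."""
        while True:
            try:
                test = tests.get(timeout=0.1)
            except Empty:
                break
            run_test(test)

    def run_all(serial_tests, run_test, thread_count=4):
        tests = multiprocessing.Queue()
        for test in serial_tests:
            tests.put(test)
        workers = [multiprocessing.Process(target=worker, args=(tests, run_test))
                   for _ in range(thread_count - 1)]
        for w in workers:
            w.start()
        worker(tests, run_test)  # the master thread joins in, as threadrun() does
        for w in workers:
            w.join()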