diff --git a/.travis.yml b/.travis.yml new file mode 100644 index 000000000..72ed83955 --- /dev/null +++ b/.travis.yml @@ -0,0 +1,44 @@ +# Modified from +# https://github.com/calliope-project/calliope/blob/master/.travis.yml + +language: python +sudo: false # Use container-based infrastructure + +matrix: + include: + - env: + - PYTHON_VERSION="2.7" + - env: + - PYTHON_VERSION="3.6" + +before_install: + - if [[ "$PYTHON_VERSION" == "2.7" ]]; then + wget https://repo.continuum.io/miniconda/Miniconda2-latest-Linux-x86_64.sh -O miniconda.sh; + else + wget https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh -O miniconda.sh; + fi + - bash miniconda.sh -b -p $HOME/miniconda + - export PATH="$HOME/miniconda/bin:$PATH" + - hash -r + - conda config --set always_yes yes --set changeps1 no + - conda update -q conda + # Useful for debugging any issues with conda + - conda info -a + +install: + - conda create -q -n pypsa python=$PYTHON_VERSION + - conda env update -q -n pypsa --file=requirements.yml + - conda env update -q -n pypsa --file=requirements_dev.yml + - source activate pypsa + # - conda install -q -c conda-forge python-coveralls # don't install on appveyor + - pip install --no-cache-dir . 
+ +# before_script: # configure a headless display to test plot generation +# - "export DISPLAY=:99.0" +# - "sh -e /etc/init.d/xvfb start" +# - sleep 3 # give xvfb some time to start + +script: "make test" + +# after_success: +# - coveralls diff --git a/MANIFEST.in b/MANIFEST.in index 97a50c19a..1ee75eeeb 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,3 +1,5 @@ include pypsa/component_attrs/*.csv include pypsa/standard_types/*.csv include pypsa/components.csv +include README.rst LICENSE.txt +include requirements.yml diff --git a/Makefile b/Makefile new file mode 100644 index 000000000..10928d116 --- /dev/null +++ b/Makefile @@ -0,0 +1,15 @@ +.PHONY : test sdist upload clean dist + +test : + pytest --cov pypsa --cov-report term-missing + +sdist : + python setup.py sdist + +upload : + twine upload dist/* + +clean : + rm dist/* + +dist : sdist upload clean diff --git a/README.rst b/README.rst index 554930cd3..f8e74fa2e 100644 --- a/README.rst +++ b/README.rst @@ -1,3 +1,7 @@ +|badge_travis| |badge_pypi| |badge_license| + +----- + ################################ Python for Power System Analysis ################################ @@ -16,18 +20,18 @@ PyPSA is a `free software `_ toolbox for simulating and optimising modern power systems that include features such as conventional generators with unit commitment, variable wind -and solar generation, storage units, sector coupling and mixed -alternating and direct current networks. PyPSA is designed to scale -well with large networks and long time series. - -As of 2017 PyPSA is under heavy development and therefore it is -recommended to use caution when using it in a production environment. -Some APIs may change - the changes in each PyPSA version are listed in -the `doc/release_notes.rst `_. - - - -PyPSA was initially developed by the `Renewable Energy Group +and solar generation, storage units, coupling to other energy sectors, +and mixed alternating and direct current networks. 
PyPSA is designed +to scale well with large networks and long time series. + +This project is maintained by the `Energy System Modelling +group `_ at the `Institute for +Automation and Applied +Informatics `_ at the +`Karlsruhe Institute of +Technology `_. The group is funded by the +`Helmholtz Association `_ until 2024. +Previous versions were developed by the `Renewable Energy Group `_ at `FIAS `_ to carry out simulations for the `CoNDyNet project `_, financed by the @@ -57,14 +61,15 @@ PyPSA can calculate: * static power flow (using both the full non-linear network equations and the linearised network equations) -* linear optimal power flow (optimisation of power plant and storage - dispatch within network constraints, using the linear network - equations, over several snapshots) +* linear optimal power flow (least-cost optimisation of power plant + and storage dispatch within network constraints, using the linear + network equations, over several snapshots) * security-constrained linear optimal power flow -* total electricity system investment optimisation (using linear - network equations, over several snapshots simultaneously for - optimisation of generation and storage dispatch and investment in - the capacities of generation, storage and transmission) +* total electricity/energy system least-cost investment optimisation + (using linear network equations, over several snapshots + simultaneously for optimisation of generation and storage dispatch + and investment in the capacities of generation, storage, + transmission and other infrastructure) It has models for: @@ -80,31 +85,29 @@ It has models for: * basic components out of which more complicated assets can be built, such as Combined Heat and Power (CHP) units, heat pumps, resistive Power-to-Heat (P2H), Power-to-Gas (P2G), battery electric vehicles - (BEVs), etc.; each of these is demonstrated in the `examples + (BEVs), Fischer-Tropsch, direct air capture (DAC), etc.; each of + these is demonstrated in the 
`examples `_ -Functionality that will definitely be added soon: +Functionality that may be added in the future: * Multi-year investment optimisation -* Simple RMS simulations with the swing equation * Distributed active power slack -* Non-linear power flow solution using `analytic continuation - `_ - in the complex plane following `GridCal - `_ - -Functionality that may be added in the future: - -* Short-circuit current calculations -* Dynamic RMS simulations -* Small signal stability analysis * Interactive web-based GUI with SVG * OPF with the full non-linear network equations -* Dynamic EMT simulations -* Unbalanced load flow * Port to `Julia `_ +Other complementary libraries: + +* `pandapower `_ for more + detailed modelling of distribution grids, short-circuit + calculations, unbalanced load flow and more +* `PowerDynamics.jl + `_ for dynamic + modelling of power grids at time scales where differential equations are relevant + + Example scripts as Jupyter notebooks ==================================== @@ -117,7 +120,12 @@ available as Python scripts in `examples/ `_. Screenshots =========== -The showcase for PyPSA is the `SciGRID example +Results from a PyPSA simulation can be converted into an interactive +online animation using `PyPSA-animation +`_, see the `PyPSA-Eur-30 +example `_. + +Another showcase for PyPSA is the `SciGRID example `_ which demonstrates interactive plots generated with the `plotly `_ library. @@ -147,7 +155,7 @@ What PyPSA uses under the hood =============================== PyPSA is written and tested to be compatible with both Python 2.7 and -Python 3.5. +Python 3.6. It leans heavily on the following Python packages: @@ -183,12 +191,31 @@ Citing PyPSA If you use PyPSA for your research, we would appreciate it if you -would cite the following preprint paper (which has not yet been -through peer review): +would cite the following paper: * T. Brown, J. Hörsch, D. 
Schlachtberger, `PyPSA: Python for Power - System Analysis `_, 2017, - `preprint arXiv:1707.09913 `_ + System Analysis `_, 2018, + `Journal of Open Research Software + `_, 6(1), + `arXiv:1707.09913 `_, + `DOI:10.5334/jors.188 `_ + + +Please use the following BibTeX: :: + + @article{PyPSA, + author = {T. Brown and J. H\"orsch and D. Schlachtberger}, + title = {{PyPSA: Python for Power System Analysis}}, + journal = {Journal of Open Research Software}, + volume = {6}, + issue = {1}, + number = {4}, + year = {2018}, + eprint = {1707.09913}, + url = {https://doi.org/10.5334/jors.188}, + doi = {10.5334/jors.188} + } + If you want to cite a specific PyPSA version, each release of PyPSA is stored on `Zenodo `_ with a release-specific DOI. @@ -201,8 +228,8 @@ This can be found linked from the overall PyPSA Zenodo DOI: Licence ======= -Copyright 2015-2017 Tom Brown (FIAS), Jonas Hörsch (FIAS), David -Schlachtberger (FIAS) +Copyright 2015-2019 Tom Brown (KIT, FIAS), Jonas Hörsch (KIT, FIAS), +David Schlachtberger (FIAS) This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as @@ -213,3 +240,17 @@ This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the `GNU General Public License `_ for more details. + +.. |link-latest-doi| image:: https://zenodo.org/badge/DOI/10.5281/zenodo.786605.svg +.. _link-latest-doi: https://doi.org/10.5281/zenodo.786605 + +.. |badge_pypi| image:: https://img.shields.io/pypi/v/pypsa.svg + :target: https://pypi.python.org/pypi/pypsa + :alt: PyPI version + +.. |badge_license| image:: https://img.shields.io/pypi/l/pypsa.svg + :target: #license + +.. 
|badge_travis| image:: https://img.shields.io/travis/PyPSA/PyPSA/master.svg + :target: https://travis-ci.org/PyPSA/PyPSA + :alt: Build status on Linux diff --git a/doc/comparable_software.rst b/doc/comparable_software.rst index 1429f7dd2..a46a2e319 100644 --- a/doc/comparable_software.rst +++ b/doc/comparable_software.rst @@ -2,7 +2,7 @@ Comparable Software ####################### -The `PyPSA preprint paper `_ +The `PyPSA paper `_ contains a discussion of PyPSA's features compared to other software tools and a functionality comparison in Table III. diff --git a/doc/components.rst b/doc/components.rst index f4f78fe53..d3cd4766d 100644 --- a/doc/components.rst +++ b/doc/components.rst @@ -434,10 +434,48 @@ the ``Link`` with ``efficiency = 1``, ``marginal_cost = 0``, :file: ../pypsa/component_attrs/links.csv +.. _components-links-multiple-outputs: + +Link with multiple outputs or inputs +------------------------------------ + +Links can also be defined with multiple outputs in fixed ratio to the +power in the single input by defining new columns ``bus2``, ``bus3``, +etc. (``bus`` followed by an integer) in ``network.links`` along with +associated columns for the efficiencies ``efficiency2``, +``efficiency3``, etc. The different outputs are then proportional to +the input according to the efficiency; see :ref:`opf-links` for how +these are used in the LOPF and the `example of a CHP with a fixed +power-heat ratio +`_. + +To define the new columns ``bus2``, ``efficiency2``, ``bus3``, +``efficiency3``, etc. in ``network.links`` you need to override the +standard component attributes by passing ``pypsa.Network()`` an +``override_component_attrs`` argument. See the section +:ref:`custom_components` and the `example of a CHP with a fixed +power-heat ratio +`_. + + +If the column ``bus2`` exists, values in the column are not compulsory +for all links; if the link has no 2nd output, simply leave it empty +``network.links.at["my_link","bus2"] = ""``. 
+ +For links with multiple inputs in fixed ratio to a single output, +simply reverse the flow in a link with one input and multiple outputs +by setting ``my_link.p_max_pu = 0`` and ``my_link.p_min_pu = -1``. + +For multiple inputs to multiple outputs, connect a multi-to-single +link to a single-to-multi link with an auxiliary bus in the middle. + + Groups of Components ==================== -In the code components are grouped according to their properties. +In the code components are grouped according to their properties in +sets such as ``network.one_port_components`` and +``network.branch_components``. One-ports share the property that they all connect to a single bus, i.e. generators, loads, storage units, etc.. They share the attributes @@ -453,3 +491,31 @@ nodal power imbalances, i.e. lines and transformers. Controllable branches are branches whose power flow can be controlled by e.g. the LOPF optimisation, i.e. links. + + +.. _custom_components: + +Custom Components +================= + +If you want to define your own components and override the standard +functionality of PyPSA, you can easily override the standard +components by passing pypsa.Network() the arguments +``override_components`` and ``override_component_attrs``. + +For this network, these will replace the standard definitions in +``pypsa.components.components`` and +``pypsa.components.component_attrs``, which correspond to the +repository CSV files ``pypsa/components.csv`` and +``pypsa/component_attrs/*.csv``. + +``components`` is a pandas.DataFrame with the component ``name``, +``list_name`` and ``description``. ``component_attrs`` is a +pypsa.descriptors.Dict of pandas.DataFrame with the attribute +properties for each component. Just follow the formatting for the +standard components. + +There are examples for defining new components in the git repository +in ``examples/new_components/``, including an example of +overriding e.g. 
``network.lopf()`` for functionality for +combined-heat-and-power (CHP) plants. diff --git a/doc/conf.py b/doc/conf.py index 587aac62c..760b2dca3 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -55,17 +55,17 @@ # General information about the project. project = u'PyPSA' -copyright = u'2016-2017 Tom Brown (FIAS), Jonas Hoersch (FIAS), David Schlachtberger (FIAS)' -author = u'Tom Brown (FIAS), Jonas Hoersch (FIAS), David Schlachtberger (FIAS)' +copyright = u'2015-2019 Tom Brown (KIT, FIAS), Jonas Hoersch (KIT, FIAS), David Schlachtberger (FIAS)' +author = u'Tom Brown (KIT, FIAS), Jonas Hoersch (KIT, FIAS), David Schlachtberger (FIAS)' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. -version = u'0.11' +version = u'0.13' # The full version, including alpha/beta/rc tags. -release = u'0.11.0' +release = u'0.13.2' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. diff --git a/doc/design.rst b/doc/design.rst index a8a43ef0e..2c17c9461 100644 --- a/doc/design.rst +++ b/doc/design.rst @@ -14,8 +14,8 @@ Python 3.5. Network object is the overall container ======================================= -The ``pypsa.components.Network`` is an overall container for all -network components; components cannot exist without a network. +The ``pypsa.Network`` is an overall container for all network +components; components cannot exist without a network. It is also the object on which calculations, such as power flow and optimal power flow, are performed. @@ -24,9 +24,8 @@ optimal power flow, are performed. Buses are the fundamental nodes =============================== -The ``pypsa.components.Bus`` is the fundamental node to which all -loads, generators, storage units, lines, transformers and links -attach. 
+The bus is the fundamental node to which all loads, generators, +storage units, lines, transformers and links attach. You can have as many components attached to a bus as you want. diff --git a/doc/img/pypsa-animation.png b/doc/img/pypsa-animation.png new file mode 100644 index 000000000..65ace04ea Binary files /dev/null and b/doc/img/pypsa-animation.png differ diff --git a/doc/import_export.rst b/doc/import_export.rst index 35cd504b2..bc28edc01 100644 --- a/doc/import_export.rst +++ b/doc/import_export.rst @@ -181,18 +181,52 @@ Following the previous example: "Generator", "p_max_pu") +Export to netCDF +================ + +Export network and components to a netCDF file. + +netCDF files take up less space than CSV files and are faster to load. + +netCDF is also preferred over HDF5 because netCDF is structured more +cleanly, is easier to use from other programming languages, can limit +float precision to save space and supports lazy loading. + +Both static and series attributes of components are exported, but only +if they have non-default values. + +``network.export_to_netcdf(file.nc)`` + +If ``file.nc`` does not already exist, it is created. + + +Import from netCDF +================== + +Import network data from netCDF file ``file.nc``: + +``network.import_from_netcdf(file.nc)`` + + Export to HDF5 ============== Export network and components to an HDF store. +NB: netCDF is preferred over HDF5 because netCDF is structured more +cleanly, is easier to use from other programming languages, can limit +float precision to save space and supports lazy loading. + + Both static and series attributes of components are exported, but only if they have non-default values. -If path does not already exist, it is created. +``network.export_to_hdf5(path)`` + + +If ``path`` does not already exist, it is created. 
-``network.export_to_hdf5(filename)`` Import from HDF5 ================ diff --git a/doc/installation.rst b/doc/installation.rst index 886277cfb..d706757e7 100644 --- a/doc/installation.rst +++ b/doc/installation.rst @@ -49,6 +49,10 @@ If you have the Python package installer ``pip`` then just run:: pip install pypsa +If you're feeling adventurous, you can also install the latest master branch from github with:: + + pip install git+https://github.com/PyPSA/PyPSA.git + "Manual" installation with setuptools ===================================== @@ -132,7 +136,7 @@ We recommend always keeping your PyPSA installation up-to-date, since bugs get fixed and new features are added. To upgrade PyPSA with pip, do at the command line:: - pip install -U pandas + pip install -U pypsa Don't forget to read the :doc:`release_notes` regarding API changes that might require you to update your code. diff --git a/doc/introduction.rst b/doc/introduction.rst index eb1423865..233fe4265 100644 --- a/doc/introduction.rst +++ b/doc/introduction.rst @@ -8,17 +8,18 @@ PyPSA is a `free software `_ toolbox for simulating and optimising modern power systems that include features such as conventional generators with unit commitment, variable wind -and solar generation, storage units, sector coupling and mixed -alternating and direct current networks. PyPSA is designed to scale -well with large networks and long time series. - - -As of 2017 PyPSA is under heavy development and therefore it is -recommended to use caution when using it in a production -environment. Some APIs may change - the changes in each PyPSA version -are listed in the :doc:`release_notes`. - -PyPSA was initially developed by the `Renewable Energy Group +and solar generation, storage units, coupling to other energy sectors, +and mixed alternating and direct current networks. PyPSA is designed +to scale well with large networks and long time series. 
+ +This project is maintained by the `Energy System Modelling +group `_ at the `Institute for +Automation and Applied +Informatics `_ at the +`Karlsruhe Institute of +Technology `_. The group is funded by the +`Helmholtz Association `_ until 2024. +Previous versions were developed by the `Renewable Energy Group `_ at `FIAS `_ to carry out simulations for the `CoNDyNet project `_, financed by the @@ -32,14 +33,14 @@ PyPSA can calculate: * static power flow (using both the full non-linear network equations and the linearised network equations) -* linear optimal power flow (optimisation of power plant and storage +* linear optimal power flow (least-cost optimisation of power plant and storage dispatch within network constraints, using the linear network equations, over several snapshots) * security-constrained linear optimal power flow -* total electricity system investment optimisation (using linear +* total electricity/energy system least-cost investment optimisation (using linear network equations, over several snapshots simultaneously for optimisation of generation and storage dispatch and investment in - the capacities of generation, storage and transmission) + the capacities of generation, storage, transmission and other infrastructure) It has models for: @@ -55,31 +56,28 @@ It has models for: * basic components out of which more complicated assets can be built, such as Combined Heat and Power (CHP) units, heat pumps, resistive Power-to-Heat (P2H), Power-to-Gas (P2G), battery electric vehicles - (BEVs), etc.; each of these is demonstrated in the `examples + (BEVs), Fischer-Tropsch, direct air capture (DAC), etc.; each of + these is demonstrated in the `examples `_ -Functionality that will definitely be added soon: +Functionality that may be added in the future: * Multi-year investment optimisation -* Simple RMS simulations with the swing equation * Distributed active power slack -* Non-linear power flow solution using `analytic continuation - `_ - in the complex 
plane following `GridCal - `_ - -Functionality that may be added in the future: - -* Short-circuit current calculations -* Dynamic RMS simulations -* Small signal stability analysis * Interactive web-based GUI with SVG * OPF with the full non-linear network equations -* Dynamic EMT simulations -* Unbalanced load flow * Port to `Julia `_ +Other complementary libraries: + +* `pandapower `_ for more + detailed modelling of distribution grids, short-circuit + calculations, unbalanced load flow and more +* `PowerDynamics.jl + `_ for dynamic + modelling of power grids at time scales where differential equations are relevant + Example scripts as Jupyter notebooks @@ -94,6 +92,17 @@ Screenshots =========== +Results from a PyPSA simulation can be converted into an interactive +online animation using `PyPSA-animation +`_, see the `PyPSA-Eur-30 +example `_. + +Another showcase for PyPSA is the `SciGRID example +`_ which +demonstrates interactive plots generated with the `plotly +`_ library. + + .. image:: img/line-loading.png .. image:: img/lmp.png @@ -153,8 +162,8 @@ and DC. PyPSA uses some of the sparse-matrix constructs from PYPOWER. What PyPSA uses under the hood =============================== -PyPSA is written and tested to be compatible with Python 2.7 and -Python 3.5. +PyPSA is written and tested to be compatible with both Python 2.7 and +Python 3.6. It leans heavily on the following Python packages: @@ -187,15 +196,32 @@ PyPSA has a Google Group `forum / mailing list Citing PyPSA ============ - - If you use PyPSA for your research, we would appreciate it if you -would cite the following preprint paper (which has not yet been -through peer review): +would cite the following paper: * T. Brown, J. Hörsch, D. 
Schlachtberger, `PyPSA: Python for Power - System Analysis `_, 2017, - `preprint arXiv:1707.09913 `_ + System Analysis `_, 2018, + `Journal of Open Research Software + `_, 6(1), + `arXiv:1707.09913 `_, + `DOI:10.5334/jors.188 `_ + +Please use the following BibTeX: :: + + @article{PyPSA, + author = {T. Brown and J. H\"orsch and D. Schlachtberger}, + title = {{PyPSA: Python for Power System Analysis}}, + journal = {Journal of Open Research Software}, + volume = {6}, + issue = {1}, + number = {4}, + year = {2018}, + eprint = {1707.09913}, + url = {https://doi.org/10.5334/jors.188}, + doi = {10.5334/jors.188} + } + + If you want to cite a specific PyPSA version, each release of PyPSA is diff --git a/doc/optimal_power_flow.rst b/doc/optimal_power_flow.rst index 1129112c0..8bc34a43f 100644 --- a/doc/optimal_power_flow.rst +++ b/doc/optimal_power_flow.rst @@ -28,15 +28,20 @@ Overview Execute: -``network.lopf(snapshots, solver_name="glpk", solver_io=None, extra_functionality=None, solver_options={}, keep_files=False, formulation="angles")`` +``network.lopf(snapshots, solver_name="glpk", solver_io=None, +extra_functionality=None, solver_options={}, keep_files=False, +formulation="angles",extra_postprocessing=None)`` where ``snapshots`` is an iterable of snapshots, ``solver_name`` is a string, e.g. "gurobi" or "glpk", ``solver_io`` is a string, ``extra_functionality`` is a function of network and snapshots that is -called before the solver (see below), ``solver_options`` is a -dictionary of flags to pass to the solver, ``keep_files`` means that -the ``.lp`` file is saved and ``formulation`` is a string in -``["angles","cycles","kirchhoff","ptdf"]`` (see :ref:`formulations` for more details). 
+called before the solver (see below), ``extra_postprocessing`` is a +function of network, snapshots and duals that is called after solving +(see below), ``solver_options`` is a dictionary of flags to pass to +the solver, ``keep_files`` means that the ``.lp`` file is saved and +``formulation`` is a string in +``["angles","cycles","kirchhoff","ptdf"]`` (see :ref:`formulations` +for more details). The linear OPF module can optimises the dispatch of generation and storage and the capacities of generation, storage and transmission. @@ -460,7 +465,7 @@ cases. The speedup is higher for larger networks with dispatchable generators at most nodes. - +.. _opf-links: Controllable branch flows: links --------------------------------- @@ -476,8 +481,14 @@ optimisation variable for each component which satisfies |f_{l,t}| \leq F_l If the link flow is positive :math:`f_{l,t} > 0` then it withdraws -:math:`f_{l,t}` from bus0 and feeds in :math:`\eta_l f_{l,t}` to bus1, -where :math:`\eta_l` is the link efficiency. +:math:`f_{l,t}` from ``bus0`` and feeds in :math:`\eta_l f_{l,t}` to +``bus1``, where :math:`\eta_l` is the link efficiency. + +If additional output buses ``busi`` for :math:`i=2,3,\dots` are +defined (i.e. ``bus2``, ``bus3``, etc) and their associated +efficiencies ``efficiencyi``, i.e. :math:`\eta_{i,l}`, then at +``busi`` the feed-in is :math:`\eta_{i,l} f_{l,t}`. See also +:ref:`components-links-multiple-outputs`. .. _nodal-power-balance: @@ -560,6 +571,12 @@ and stores both pass an ``extra_functionality`` argument to the LOPF to add functionality. +The function ``extra_postprocessing`` is called after the model has +solved and the results are extracted. This function must take three +arguments `extra_postprocessing(network,snapshots,duals)`. It allows +the user to extract further information about the solution, such as +additional shadow prices for constraints. 
+ Inputs ------ diff --git a/doc/release_notes.rst b/doc/release_notes.rst index b657b616a..0e8c53676 100644 --- a/doc/release_notes.rst +++ b/doc/release_notes.rst @@ -3,6 +3,166 @@ Release Notes ####################### +PyPSA 0.13.2 (10th January 2019) +================================ + +This minor release contains small new features and fixes. + +* Optimisation now works with Pyomo >= 5.6 (there was a Pyomo update + that affected the opt.py LConstraint object). +* New functional argument can be passed to Network.lopf: + extra_postprocessing(network,snapshots,duals), which is called after + solving and results are extracted. It can be used to get the values + of shadow prices for constraints that are not normally extracted by + PyPSA. +* In the lopf kirchhoff formulation, the cycle constraint is rescaled + by a factor 1e5, which improves the numerical stability of the + interior point algorithm (since the coefficients in the constraint + matrix were very small). +* Updates and fixes to networkclustering, io, plot. + +We thank Soner Candas of TUM for reporting the problem with the most +recent version of Pyomo and providing the fix. + + +PyPSA 0.13.1 (27th March 2018) +============================== + +This release contains bug fixes for the new features introduced in +0.13.0. + +* Export network to netCDF file bug fixed (components that were all + standard except their name were ignored). +* Import/export network to HDF5 file bug fixed and now works with more + than 1000 columns; HDF5 format is no longer deprecated. +* When networks are copied or sliced, overridden components + (introduced in 0.13.0) are also copied. +* Sundry other small fixes. + +We thank Tim Kittel for pointing out the first and second bugs. We +thank Kostas Syranidis for not only pointing out the third issue with +copying overridden components, but also submitting a fix as a pull +request. + +For this release we acknowledge funding to Tom Brown from the +`RE-INVEST project `_. 
+ + + +PyPSA 0.13.0 (25th January 2018) +================================ + +This release contains new features aimed at coupling power networks to +other energy sectors, fixes for library dependencies and some minor +internal API changes. + +* If you want to define your own components and override the standard + functionality of PyPSA, you can now override the standard components + by passing pypsa.Network() the arguments ``override_components`` and + ``override_component_attrs``, see the section on + :ref:`custom_components`. There are examples for defining new + components in the git repository in ``examples/new_components/``, + including an example of overriding ``network.lopf()`` for + functionality for combined-heat-and-power (CHP) plants. +* The ``Link`` component can now be defined with multiple outputs in + fixed ratio to the power in the single input by defining new columns + ``bus2``, ``bus3``, etc. (``bus`` followed by an integer) in + ``network.links`` along with associated columns for the efficiencies + ``efficiency2``, ``efficiency3``, etc. The different outputs are + then proportional to the input according to the efficiency; see + sections :ref:`components-links-multiple-outputs` and + :ref:`opf-links` and the `example of a CHP with a fixed power-heat + ratio + `_. +* Networks can now be exported to and imported from netCDF files with + ``network.export_to_netcdf()`` and + ``network.import_from_netcdf()``. This is faster than using CSV + files and the files take up less space. Import and export with HDF5 + files, introduced in PyPSA 0.12.0, is now deprecated. +* The export and import code has been refactored to be more general + and abstract. This does not affect the API. +* The internally-used sets such as ``pypsa.components.all_components`` + and ``pypsa.components.one_port_components`` have been moved from + ``pypsa.components`` to ``network``, i.e. 
``network.all_components`` + and ``network.one_port_components``, since these sets may change + from network to network. +* For linear power flow, PyPSA now pre-calculates the effective per + unit reactance ``x_pu_eff`` for AC lines to take account of the + transformer tap ratio, rather than doing it on the fly; this makes + some code faster, particularly the kirchhoff formulation of the + LOPF. +* PyPSA is now compatible with networkx 2.0 and 2.1. +* PyPSA now requires Pyomo version greater than 5.3. +* PyPSA now uses the `Travis CI `_ + continuous integration service to test every commit in the `PyPSA + GitHub repository `_. This will + allow us to catch library dependency issues faster. + +We thank Russell Smith of Edison Energy for the pull request for the +effective reactance that sped up the LOPF code and Tom Edwards for +pointing out the Pyomo version dependency issue. + +For this release we also acknowledge funding to Tom Brown from the +`RE-INVEST project `_. + + + + +PyPSA 0.12.0 (30th November 2017) +================================= + +This release contains new features and bug fixes. + +* Support for Pyomo's persistent solver interface, so if you're making + small changes to an optimisation model (e.g. tweaking a parameter), + you don't have to rebuild the model every time. To enable this, + ``network_lopf`` has been internally split into ``build_model``, + ``prepare_solver`` and ``solve`` to allow more fine-grained control of the + solving steps. Currently the new Pyomo PersistentSolver interface + is not in the main Pyomo branch, see + the `pull request `_; you can obtain it with + ``pip install git+https://github.com/Pyomo/pyomo@persistent_interfaces`` +* Lines and transformers (i.e. passive branches) have a new attribute + ``s_max_pu`` to restrict the flow in the OPF, just like ``p_max_pu`` + for generators and links. It works by restricting the absolute value + of the flow per unit of the nominal rating ``abs(flow) <= + s_max_pu*s_nom``. 
For lines this can represent an n-1 contingency + factor or it can be time-varying to represent weather-dependent + dynamic line rating. +* The ``marginal_cost`` attribute of generators, storage units, stores + and links can now be time dependent. +* When initialising the Network object, i.e. ``network = + pypsa.Network()``, the first keyword argument is now ``import_name`` + instead of ``csv_folder_name``. With ``import_name`` PyPSA + recognises whether it is a CSV folder or an HDF5 file based on the + file name ending and deals with it appropriately. Example usage: + ``nw1 = pypsa.Network("my_store.h5")`` and ``nw2 = + pypsa.Network("/my/folder")``. The keyword argument + ``csv_folder_name`` is still there but is deprecated. +* The value ``network.objective`` is now read from the Pyomo results + attribute ``Upper Bound`` instead of ``Lower Bound``. This is + because for MILP problems under certain circumstances CPLEX records + the ``Lower bound`` as the relaxed value. ``Upper bound`` is correctly + recorded as the integer objective value. +* Bug fix due to changes in pandas 0.21.0: A bug affecting various + places in the code, including causing ``network.lopf`` to fail with + GLPK, is fixed. This is because in pandas 0.21.0 the sum of an empty + Series/DataFrame returns NaN, whereas before it returned zero. This + is a subtle bug; we hope we've fixed all instances of it, but get in + touch if you notice NaNs creeping in where they shouldn't be. All + our tests run fine. +* Bug fix due to changes in scipy 1.0.0: For the new version of scipy, + ``csgraph`` has to be imported explicit. +* Bug fix: A bug whereby logging level was not always correctly being + seen by the OPF results printout is fixed. +* Bug fix: The storage unit spillage had a bug in the LOPF, whereby it + was not respecting ``network.snapshot_weightings`` properly. 
+ +We thank René Garcia Rosas, João Gorenstein Dedecca, Marko Kolenc, +Matteo De Felice and Florian Kühnlenz for promptly notifying us about +issues. + + PyPSA 0.11.0 (21st October 2017) ================================ diff --git a/doc/unit_testing.rst b/doc/unit_testing.rst index 0c3ec3e3a..b435e4fc5 100644 --- a/doc/unit_testing.rst +++ b/doc/unit_testing.rst @@ -7,4 +7,6 @@ Unit testing is performed with py.test. Tests can be found in pypsa/test/. -Power flow is tested against Pypower using its built-in cases. +Power flow is tested against Pypower (the Python implementation of MATPOWER) using its built-in cases. + +Unit testing of new GitHub commits is automated with `Travis CI `_. diff --git a/examples/ac-dc-meshed/ac-dc-lopf.py b/examples/ac-dc-meshed/ac-dc-lopf.py index 10cc49521..bf4560e99 100644 --- a/examples/ac-dc-meshed/ac-dc-lopf.py +++ b/examples/ac-dc-meshed/ac-dc-lopf.py @@ -29,9 +29,7 @@ now = network.snapshots[5] -print("\nCheck power balance at each branch:") - -branches = network.branches() +print("\nCheck power balance at each bus:") for bus in network.buses.index: print("\n"*3+bus) @@ -44,7 +42,7 @@ p0 = 0. p1 = 0. - for c in network.iterate_components(pypsa.components.branch_components): + for c in network.iterate_components(network.branch_components): bs = (c.df.bus0 == bus) diff --git a/examples/ac-dc-meshed/ac-dc-lpf.py b/examples/ac-dc-meshed/ac-dc-lpf.py index f20fdc71c..ca1e291a6 100644 --- a/examples/ac-dc-meshed/ac-dc-lpf.py +++ b/examples/ac-dc-meshed/ac-dc-lpf.py @@ -38,9 +38,7 @@ now = network.snapshots[5] -print("\nCheck power balance at each branch:") - -branches = network.branches() +print("\nCheck power balance at each bus:") for bus in network.buses.index: print("\n"*3+bus) @@ -53,7 +51,7 @@ p0 = 0. p1 = 0. 
- for c in network.iterate_components(pypsa.components.branch_components): + for c in network.iterate_components(network.branch_components): bs = (c.df.bus0 == bus) diff --git a/examples/build_model.py b/examples/build_model.py new file mode 100644 index 000000000..da996ab25 --- /dev/null +++ b/examples/build_model.py @@ -0,0 +1,89 @@ +## Demonstrate PyPSA unit commitment with a one-bus two-generator example +# +# +#To enable unit commitment on a generator, set its attribute committable = True. +# +# +#Available as a Jupyter notebook at http://www.pypsa.org/examples/unit-commitment.ipynb. + +import pypsa + +from pypsa.opf import network_lopf_build_model as build_model + +import argparse +import logging +import random +import copy + +random.seed( 55) + +parser = argparse.ArgumentParser() + +parser.add_argument( + '-v', '--verbose', + help="Be verbose", + action="store_const", dest="loglevel", const=logging.INFO, +) +parser.add_argument( + '-d', '--debug', + help="Print lots of debugging statements", + action="store_const", dest="loglevel", const=logging.DEBUG, + default=logging.WARNING, + +) +args = parser.parse_args() +logging.basicConfig( level=args.loglevel) + +### Minimum part load demonstration +# +#In final hour load goes below part-load limit of coal gen (30%), forcing gas to commit. + +for test_i in range(1): + + snapshots = range( 1, 101) + p_set = [ p*20 for p in snapshots] + + nu = pypsa.Network() + + nu.set_snapshots(snapshots) + + + n_gen = 100 + + generator_p_nom = [ i for i in range( n_gen)] + generator_marginal_cost = [ 1000 - i for i in range( n_gen)] + p_min_pu = .3 + + for gen_i, gen in enumerate( generator_p_nom): + nu.add("Bus","bus" + str(gen_i)) + nu.add("Load","load" + str(gen_i),bus= "bus" + str(gen_i), + p_set= generator_p_nom[gen_i] / 2. 
)#[4000,6000,5000,800]) + + nu.add( "Generator", + "gas_" + str(gen_i), + bus = "bus" + str(gen_i), + committable = True, + p_min_pu = p_min_pu, + marginal_cost = generator_marginal_cost[gen_i], + p_nom = generator_p_nom[gen_i],) + if gen_i > 0: + nu.add("Line", + "{} - {} line".format( gen_i - 1, gen_i), + bus0= "bus" + str(gen_i - 1), + bus1= "bus" + str(gen_i), + x=0.1, + r=0.01) + random_gen_to_connect = random.randint( 0, n_gen - 1) + nu.add("Line", + "{} - {} line".format( gen_i, random_gen_to_connect), + bus0= "bus" + str(random_gen_to_connect), + bus1= "bus" + str(gen_i), + x = .2, + r = .08) + + formulations = ["kirchhoff", "angles", "cycles"] #"ptdf" + for formulation in formulations: + nu_copy = copy.deepcopy(nu) + build_model( nu, nu.snapshots, formulation = formulation) +# build_model( nu, nu.snapshots, formulation = "angles") +# nu.lopf( nu.snapshots, formulation = "kirchhoff") diff --git a/examples/new_components/add_components_from_library.py b/examples/new_components/add_components_from_library.py new file mode 100644 index 000000000..43f3d730b --- /dev/null +++ b/examples/new_components/add_components_from_library.py @@ -0,0 +1,79 @@ + +#Use the component_library to add components. 
+ +#NB: This only works with Python 3 because of super() in component_library + + +import component_library + +import pandas as pd + +n = component_library.Network() + +n.set_snapshots(list(range(4))) + +places = pd.Index(["Aarhus","Aalborg","Copenhagen"]) + + +n.madd("Bus", places) + +n.madd("Load", places, bus=places, p_set=10.5) + +n.madd("Bus",places + " heat") + +n.madd("Load", places+ " heat", bus=places+ " heat", p_set=4.2) + +n.loads_t.p_set["Aalborg heat"] = 3.7 + +n.madd("Bus",places + " gas") + +n.madd("Store",places + " gas", + bus=places + " gas", + e_min_pu=-1, + marginal_cost=50, + e_nom_extendable=True) + +#Some HVDC transmission lines +n.madd("Link", + [0,1], + bus0="Aarhus", + bus1=["Aalborg","Copenhagen"], + p_nom_extendable=True, + p_min_pu=-1, + capital_cost=1e2) + +n.add("Generator", + "wind", + p_nom_extendable=True, + capital_cost=1e2, + bus="Aalborg") + +n.madd("CHP", + places + " CHP", + bus_source=places + " gas", + bus_elec=places, + bus_heat=places + " heat", + p_nom_extendable=True, + capital_cost=1.4e4, + eta_elec=0.468, + c_v=0.15, + c_m=0.75, + p_nom_ratio=1.5) + + + +print("\nn.chps:\n") +print(n.chps) + +n.lopf() + + + +print("\nGenerator capacity:\n") +print(n.generators.p_nom_opt) + +print("\nLine capacities:\n") +print(n.links.loc[["0","1"],"p_nom_opt"]) + +print("\nCHP electrical output:\n") +print(n.links.loc[places + " CHP electric","p_nom_opt"]*n.links.loc[places + " CHP electric","efficiency"]) diff --git a/examples/new_components/add_components_simple.py b/examples/new_components/add_components_simple.py new file mode 100644 index 000000000..5869cfd5b --- /dev/null +++ b/examples/new_components/add_components_simple.py @@ -0,0 +1,40 @@ + + +#In this example we add a new component type "dynamically" by passing +#the pypsa.Network the required information. Here we add a simple +#"ShadowPrice" component for storing the shadow prices of global +#constraints. 
+ +import pypsa, pandas as pd, numpy as np + +from pypsa.descriptors import Dict + +#take a copy of the components pandas.DataFrame +override_components = pypsa.components.components.copy() + +#Pass it the list_name, description and component type. +override_components.loc["ShadowPrice"] = ["shadow_prices","Shadow price for a global constraint.",np.nan] + +print("\nNew components table:\n") +print(override_components) + +#create a pandas.DataFrame with the properties of the new component attributes. +#the format should be the same as pypsa/component_attrs/*.csv +override_component_attrs = Dict({k : v.copy() for k,v in pypsa.components.component_attrs.items()}) +override_component_attrs["ShadowPrice"] = pd.DataFrame(columns = ["type","unit","default","description","status"]) +override_component_attrs["ShadowPrice"].loc["name"] = ["string","n/a","n/a","Unique name","Input (required)"] +override_component_attrs["ShadowPrice"].loc["value"] = ["float","n/a",0.,"shadow value","Output"] + +print("\nComponent attributes for ShadowPrice:\n") +print(override_component_attrs["ShadowPrice"]) + +#pass Network the information +n = pypsa.Network(override_components=override_components, + override_component_attrs=override_component_attrs) + +n.add("ShadowPrice","line_volume_constraint",value=4567.1) +n.add("ShadowPrice","co2_constraint",value=326.3) + +print("\nnetwork.shadow_prices:\n") + +print(n.shadow_prices) diff --git a/examples/new_components/component_library.py b/examples/new_components/component_library.py new file mode 100644 index 000000000..33cc9774f --- /dev/null +++ b/examples/new_components/component_library.py @@ -0,0 +1,112 @@ + +#Create a library with its own Network class that has a new CHP +#component and a new LOPF function for the CHP constraints + +#NB: This only works with Python 3 because of super() + +import pypsa, pandas as pd, numpy as np + +from pypsa.descriptors import Dict + +from pyomo.environ import Constraint + + + +override_components = 
pypsa.components.components.copy() +override_components.loc["ShadowPrice"] = ["shadow_prices","Shadow price for a global constraint.",np.nan] +override_components.loc["CHP"] = ["chps","Combined heat and power plant.",np.nan] + + +override_component_attrs = Dict({k : v.copy() for k,v in pypsa.components.component_attrs.items()}) +override_component_attrs["ShadowPrice"] = pd.DataFrame(columns = ["type","unit","default","description","status"]) +override_component_attrs["ShadowPrice"].loc["name"] = ["string","n/a","n/a","Unique name","Input (required)"] +override_component_attrs["ShadowPrice"].loc["value"] = ["float","n/a",0.,"shadow value","Output"] + +override_component_attrs["CHP"] = pd.DataFrame(columns = ["type","unit","default","description","status"]) +override_component_attrs["CHP"].loc["name"] = ["string","n/a","n/a","Unique name","Input (required)"] +override_component_attrs["CHP"].loc["bus_fuel"] = ["string","n/a","n/a","Name of bus where fuel source is.","Input (required)"] +override_component_attrs["CHP"].loc["bus_elec"] = ["string","n/a","n/a","Name of bus where electricity is supplied.","Input (required)"] +override_component_attrs["CHP"].loc["bus_heat"] = ["string","n/a","n/a","Name of bus where heat is supplied.","Input (required)"] +override_component_attrs["CHP"].loc["p_nom_extendable"] = ["boolean","n/a",False,"","Input (optional)"] +override_component_attrs["CHP"].loc["capital_cost"] = ["float","EUR/MW",0.,"Capital cost per rating of electricity output.","Input (optional)"] +override_component_attrs["CHP"].loc["eta_elec"] = ["float","n/a",1.,"Electrical efficiency with no heat output, i.e. 
in condensing mode","Input (optional)"] +override_component_attrs["CHP"].loc["c_v"] = ["float","n/a",1.,"Loss of fuel for each addition of heat","Input (optional)"] +override_component_attrs["CHP"].loc["c_m"] = ["float","n/a",1.,"Backpressure ratio","Input (optional)"] +override_component_attrs["CHP"].loc["p_nom_ratio"] = ["float","n/a",1.,"Ratio of max heat output to max electrical output; max heat of 500 MWth and max electricity of 1000 MWth means p_nom_ratio is 0.5","Input (optional)"] + + + + +class Network(pypsa.Network): + + def __init__(self,*args,**kwargs): + kwargs["override_components"]=override_components + kwargs["override_component_attrs"]=override_component_attrs + super().__init__(*args,**kwargs) + + + def lopf(self,*args,**kwargs): + + #at this point check that all the extra links are in place for the CHPs + if not self.chps.empty: + + self.madd("Link", + self.chps.index + " electric", + bus0=self.chps.bus_source.values, + bus1=self.chps.bus_elec.values, + p_nom_extendable=self.chps.p_nom_extendable.values, + capital_cost=self.chps.capital_cost.values*self.chps.eta_elec.values, + efficiency=self.chps.eta_elec.values) + + self.madd("Link", + self.chps.index + " heat", + bus0=self.chps.bus_source.values, + bus1=self.chps.bus_heat.values, + p_nom_extendable=self.chps.p_nom_extendable.values, + efficiency=self.chps.eta_elec.values/self.chps.c_v.values) + + + if "extra_functionality" in kwargs: + user_extra_func = kwargs.pop('extra_functionality') + else: + user_extra_func = None + + #the following function should add to any extra_functionality in kwargs + def extra_func(network, snapshots): + #at this point add the constraints for the CHPs + if not network.chps.empty: + print("Setting up CHPs:",network.chps.index) + + def chp_nom(model, chp): + return network.chps.at[chp,"eta_elec"]*network.chps.at[chp,'p_nom_ratio']*model.link_p_nom[chp + " electric"] == network.chps.at[chp,"eta_elec"]/network.chps.at[chp,"c_v"]*model.link_p_nom[chp + " heat"] + + + 
network.model.chp_nom = Constraint(list(network.chps.index),rule=chp_nom) + + + def backpressure(model,chp,snapshot): + return network.chps.at[chp,'c_m']*network.chps.at[chp,"eta_elec"]/network.chps.at[chp,"c_v"]*model.link_p[chp + " heat",snapshot] <= network.chps.at[chp,"eta_elec"]*model.link_p[chp + " electric",snapshot] + + network.model.backpressure = Constraint(list(network.chps.index),list(snapshots),rule=backpressure) + + + def top_iso_fuel_line(model,chp,snapshot): + return model.link_p[chp + " heat",snapshot] + model.link_p[chp + " electric",snapshot] <= model.link_p_nom[chp + " electric"] + + network.model.top_iso_fuel_line = Constraint(list(network.chps.index),list(snapshots),rule=top_iso_fuel_line) + + + + + + if user_extra_func is not None: + print("Now doing user defined extra functionality") + user_extra_func(network,snapshots) + + kwargs["extra_functionality"]=extra_func + + super().lopf(*args,**kwargs) + + #Afterwards you can process the outputs, e.g. into network.chps_t.p_out + + #You could also delete the auxiliary links created above diff --git a/examples/opf-storage-hvdc/opf-storage.py b/examples/opf-storage-hvdc/opf-storage.py index c842e4dcc..d045156c0 100644 --- a/examples/opf-storage-hvdc/opf-storage.py +++ b/examples/opf-storage-hvdc/opf-storage.py @@ -105,7 +105,7 @@ p0 = 0. p1 = 0. 
- for c in network.iterate_components(pypsa.components.branch_components): + for c in network.iterate_components(network.branch_components): bs = (c.df.bus0 == bus) diff --git a/examples/sector-coupling/biomass-synthetic-fuels-carbon-management.py b/examples/sector-coupling/biomass-synthetic-fuels-carbon-management.py new file mode 100644 index 000000000..a22eeccc7 --- /dev/null +++ b/examples/sector-coupling/biomass-synthetic-fuels-carbon-management.py @@ -0,0 +1,202 @@ +## Biomass, synthetic fuels and carbon management +# +#In this example we show how to manage different biomass stocks with different potentials and costs, carbon dioxide hydrogenation from biogas, direct air capture (DAC) and carbon capture and usage/sequestration/cycling (CCU/S/C). +# +#Demand for electricity and diesel transport have to be met from various biomass sources, natural gas with possibility for carbon capture, electrolysis for hydrogen production, direct air capture of CO2, and diesel synthesis via Fischer-Tropsch. +# +#The system has to reach a target of net negative emissions over the period. +# +#All numbers/costs/efficiencies are fictitious to allow easy analysis. +# +# +#This Jupyter Notebook is also available to download at: http://www.pypsa.org/examples/biomass-synthetic-fuels-carbon-management.ipynb. +# +#It demonstrates features of the energy system modelling tool PyPSA : https://github.com/PyPSA/PyPSA. + +import pypsa +import numpy as np + +#First tell PyPSA that links can have multiple outputs by +#overriding the component_attrs. This can be done for +#as many buses as you need with format busi for i = 2,3,4,5,.... 
+#See https://pypsa.org/doc/components.html#link-with-multiple-outputs-or-inputs + +override_component_attrs = pypsa.descriptors.Dict({k : v.copy() for k,v in pypsa.components.component_attrs.items()}) +override_component_attrs["Link"].loc["bus2"] = ["string",np.nan,np.nan,"2nd bus","Input (optional)"] +override_component_attrs["Link"].loc["bus3"] = ["string",np.nan,np.nan,"3rd bus","Input (optional)"] +override_component_attrs["Link"].loc["efficiency2"] = ["static or series","per unit",1.,"2nd bus efficiency","Input (optional)"] +override_component_attrs["Link"].loc["efficiency3"] = ["static or series","per unit",1.,"3rd bus efficiency","Input (optional)"] +override_component_attrs["Link"].loc["p2"] = ["series","MW",0.,"2nd bus output","Output"] +override_component_attrs["Link"].loc["p3"] = ["series","MW",0.,"3rd bus output","Output"] + +n = pypsa.Network(override_component_attrs=override_component_attrs) + +n.set_snapshots(range(10)) + +#add a constant electrical load +n.add("Bus","bus") +n.add("Load","load",bus="bus", + p_set=1.) + +#add a constant demand for transport +n.add("Bus","transport") +n.add("Load","transport",bus="transport", + p_set=1.) + + +n.add("Bus","diesel") + + +n.add("Store","diesel",bus="diesel", + e_cyclic=True, + e_nom=1000.) + +n.add("Bus","hydrogen") + +n.add("Store","hydrogen",bus="hydrogen", + e_cyclic=True, + e_nom=1000.) + +#n.add("Load","hydrogen", +# bus="hydrogen", +# p_set=1.) + + +n.add("Link","electrolysis", + p_nom=2., + efficiency=0.8, + bus0="bus", + bus1="hydrogen") + + +#Allow production of diesel from H2 and CO2 using Fischer-Tropsch +n.add("Link","FT", + p_nom=4, + bus0="hydrogen", + bus1="diesel", + bus2="co2 stored", + efficiency=1., + efficiency2=-1) + + +#minus sign because opposite to how fossil fuels used: +#CH4 burning puts CH4 down, atmosphere up +n.add("Carrier","co2", + co2_emissions=-1.) 
+ +#this tracks CO2 in the atmosphere +n.add("Bus","co2 atmosphere", + carrier="co2") + +#NB: can also be negative +n.add("Store","co2 atmosphere", + e_nom=1000, + e_min_pu=-1, + bus="co2 atmosphere") + +#this tracks CO2 stored, e.g. underground +n.add("Bus","co2 stored") + +#NB: can also be negative +n.add("Store","co2 stored", + e_nom = 1000, + e_min_pu=-1, + bus="co2 stored") + +#direct air capture consumes electricity to take CO2 from the air to the underground store +n.add("Link","DAC", + bus0="bus", + bus1="co2 stored", + bus2 = "co2 atmosphere", + efficiency=1, + efficiency2=-1, + p_nom=5.) + + +#meet transport with diesel +n.add("Link","diesel car", + bus0="diesel", + bus1="transport", + bus2="co2 atmosphere", + efficiency=1., + efficiency2=1., + p_nom=2.) + +n.add("Bus","gas") + +n.add("Store","gas", + e_initial=50, + e_nom=50, + marginal_cost=20, + bus="gas") + +n.add("Link","OCGT", + bus0 = "gas", + bus1 = "bus", + bus2 = "co2 atmosphere", + p_nom_extendable=True, + efficiency = 0.5, + efficiency2 = 1) + + +n.add("Link","OCGT+CCS", + bus0 = "gas", + bus1 = "bus", + bus2 = "co2 stored", + bus3 = "co2 atmosphere", + p_nom_extendable=True, + efficiency = 0.4, + efficiency2 = 0.9, + efficiency3 = 0.1) + + + +#Cheap and expensive biomass +biomass_marginal_cost = [20.,50.] +biomass_stored = [40.,15.] 
+ +for i in range(2): + n.add("Bus","biomass"+str(i)) + + n.add("Store","biomass"+str(i), + bus="biomass"+str(i), + e_nom_extendable=True, + marginal_cost=biomass_marginal_cost[i], + e_nom=biomass_stored[i], + e_initial=biomass_stored[i]) + + #simultaneously empties and refills co2 atmosphere + n.add("Link","biomass"+str(i), + bus0 = "biomass"+str(i), + bus1 = "bus", + p_nom_extendable=True, + efficiency = 0.5) + + n.add("Link","biomass+CCS"+str(i), + bus0 = "biomass"+str(i), + bus1 = "bus", + bus2 = "co2 stored", + bus3 = "co2 atmosphere", + p_nom_extendable=True, + efficiency = 0.4, + efficiency2 = 1., + efficiency3 = -1) + + +#can go to -50, but at some point can't generate enough electricity for DAC and demand +target = -50 + +n.add("GlobalConstraint","co2_limit", + sense="<=", + carrier_attribute="co2_emissions", + constant=target) + +n.lopf() + +n.stores_t.e.plot() + +n.links_t.p0[["biomass+CCS0","biomass+CCS1","OCGT+CCS","DAC"]].plot() + +#at all times, the amount of carbon is constant +n.stores_t.e[["co2 stored","co2 atmosphere","gas","diesel"]].sum(axis=1) + diff --git a/examples/sector-coupling/chained-hydro-reservoirs.py b/examples/sector-coupling/chained-hydro-reservoirs.py index f489f0fcb..4f7e43473 100644 --- a/examples/sector-coupling/chained-hydro-reservoirs.py +++ b/examples/sector-coupling/chained-hydro-reservoirs.py @@ -1,19 +1,36 @@ ## Two chained reservoirs with inflow in one supply two electric loads # -#Two disconnected electrical loads are fed from two reservoirs linked by a river; the first reservoir has inflow from a water basin. +#Two disconnected electrical loads are fed from two reservoirs linked by a river; the first reservoir has inflow from rain onto a water basin. # #Note that the two reservoirs are tightly coupled, meaning there is no time delay between the first one emptying and the second one filling, as there would be if there were a long stretch of river between the reservoirs. 
The reservoirs are essentially assumed to be close to each other. A time delay would require a "Link" element between different snapshots, which is not yet supported by PyPSA (but could be enabled by passing network.lopf() an extra_functionality function). import pypsa import pandas as pd +import numpy as np from pyomo.environ import Constraint +#First tell PyPSA that links will have a 2nd bus by +#overriding the component_attrs. This is needed so that +#water can both go through a turbine AND feed the next reservoir -network = pypsa.Network() +override_component_attrs = pypsa.descriptors.Dict({k : v.copy() for k,v in pypsa.components.component_attrs.items()}) +override_component_attrs["Link"].loc["bus2"] = ["string",np.nan,np.nan,"2nd bus","Input (optional)"] +override_component_attrs["Link"].loc["efficiency2"] = ["static or series","per unit",1.,"2nd bus efficiency","Input (optional)"] +override_component_attrs["Link"].loc["p2"] = ["series","MW",0.,"2nd bus output","Output"] + + +network = pypsa.Network(override_component_attrs=override_component_attrs) network.set_snapshots(pd.date_range("2016-01-01 00:00","2016-01-01 03:00",freq="H")) +network.add("Carrier", + "reservoir") + +network.add("Carrier", + "rain") + + network.add("Bus", "0", carrier="AC") @@ -22,20 +39,15 @@ "1", carrier="AC") + network.add("Bus", "0 reservoir", carrier="reservoir") - network.add("Bus", "1 reservoir", carrier="reservoir") -network.add("Carrier", - "reservoir") - -network.add("Carrier", - "rain") network.add("Generator", "rain", @@ -44,22 +56,38 @@ p_nom=1000, p_max_pu=[0.,0.2,0.7,0.4]) + network.add("Load", "0 load", bus="0", p_set=20.) - network.add("Load", "1 load", bus="1", p_set=30.) 
+ +#The efficiency of a river is the relation between the gravitational potential +#energy of 1 m^3 of water in reservoir 0 relative to its turbine versus the +#potential energy of 1 m^3 of water in reservoir 1 relative to its turbine + +network.add("Link", + "spillage", + bus0="0 reservoir", + bus1="1 reservoir", + efficiency=0.5, + p_nom_extendable=True) + + +#water from turbine also goes into next reservoir network.add("Link", "0 turbine", bus0="0 reservoir", bus1="0", + bus2="1 reservoir", efficiency=0.9, + efficiency2=0.5, capital_cost=1000, p_nom_extendable=True) @@ -70,21 +98,7 @@ efficiency=0.9, capital_cost=1000, p_nom_extendable=True) - - - - -#The efficiency of a river is the relation between the gravitational potential -#energy of 1 m^3 of water in reservoir 0 relative to its turbine versus the -#potential energy of 1 m^3 of water in reservoir 1 relative to its turbine - -network.add("Link", - "river", - bus0="0 reservoir", - bus1="1 reservoir", - efficiency=0.5, - p_nom_extendable=True) - + network.add("Store", "0 reservoir", @@ -92,7 +106,6 @@ e_cyclic=True, e_nom_extendable=True) - network.add("Store", "1 reservoir", bus="1 reservoir", @@ -102,11 +115,15 @@ network.lopf(network.snapshots) print("Objective:",network.objective) -print(pd.DataFrame({attr: network.stores_t[attr]["0 reservoir"] for attr in ["p","e"]})) - -print(pd.DataFrame({attr: network.stores_t[attr]["1 reservoir"] for attr in ["p","e"]})) +print(network.generators_t.p) print(network.links_t.p0) -print(network.generators_t.p) +print(network.links_t.p1) + +print(network.links_t.p2) + +print(pd.DataFrame({attr: network.stores_t[attr]["0 reservoir"] for attr in ["p","e"]})) + +print(pd.DataFrame({attr: network.stores_t[attr]["1 reservoir"] for attr in ["p","e"]})) diff --git a/examples/sector-coupling/chp-fixed-heat-power-ratio.py b/examples/sector-coupling/chp-fixed-heat-power-ratio.py new file mode 100644 index 000000000..96912fc69 --- /dev/null +++ 
b/examples/sector-coupling/chp-fixed-heat-power-ratio.py @@ -0,0 +1,79 @@ +## Demonstration of Link with multiple outputs: Combined-Heat-and-Power (CHP) with fixed heat-power ratio +# +#For a CHP with a more complicated heat-power feasible operational area, see https://www.pypsa.org/examples/power-to-gas-boiler-chp.html. +# +#This example demonstrates a Link component with more than one bus output ("bus2" in this case). In general links can have many output buses. +# +#In this example a CHP must be heat-following because there is no other supply of heat to the bus "Frankfurt heat". + +import pypsa, numpy as np + +#First tell PyPSA that links will have a 2nd bus by +#overriding the component_attrs. This can be done for +#as many buses as you need with format busi for i = 2,3,4,5,.... + +override_component_attrs = pypsa.descriptors.Dict({k : v.copy() for k,v in pypsa.components.component_attrs.items()}) +override_component_attrs["Link"].loc["bus2"] = ["string",np.nan,np.nan,"2nd bus","Input (optional)"] +override_component_attrs["Link"].loc["efficiency2"] = ["static or series","per unit",1.,"2nd bus efficiency","Input (optional)"] +override_component_attrs["Link"].loc["p2"] = ["series","MW",0.,"2nd bus output","Output"] + + +network = pypsa.Network(override_component_attrs=override_component_attrs) + +network.add("Bus", + "Frankfurt", + carrier="AC") + +network.add("Load", + "Frankfurt", + bus="Frankfurt", + p_set=5) + +network.add("Bus", + "Frankfurt heat", + carrier="heat") + +network.add("Load", + "Frankfurt heat", + bus="Frankfurt heat", + p_set=3) + +network.add("Bus", + "Frankfurt gas", + carrier="gas") + +network.add("Store", + "Frankfurt gas", + e_initial=1e6, + e_nom=1e6, + bus="Frankfurt gas") + +network.add("Link", + "OCGT", + bus0="Frankfurt gas", + bus1="Frankfurt", + p_nom_extendable=True, + capital_cost=600, + efficiency=0.4) + + +network.add("Link", + "CHP", + bus0="Frankfurt gas", + bus1="Frankfurt", + bus2="Frankfurt heat", + p_nom_extendable=True, 
+ capital_cost=1400, + efficiency=0.3, + efficiency2=0.3) + +network.lopf() + +network.loads_t.p + +network.links_t.p0 + +network.links_t.p1 + +network.links_t.p2 + diff --git a/examples/unit-commitment.py b/examples/unit-commitment.py index 68540e7a4..9f4de05e0 100644 --- a/examples/unit-commitment.py +++ b/examples/unit-commitment.py @@ -239,4 +239,3 @@ nu.generators.initial_status nu.generators.loc["coal"] - diff --git a/pypsa/__init__.py b/pypsa/__init__.py index 3c707fde7..4a1892632 100644 --- a/pypsa/__init__.py +++ b/pypsa/__init__.py @@ -1,7 +1,7 @@ -## Copyright 2015-2017 Tom Brown (FIAS), Jonas Hoersch (FIAS), David -## Schlachtberger (FIAS) +## Copyright 2015-2019 Tom Brown (KIT, FIAS), Jonas Hoersch (KIT, +## FIAS), David Schlachtberger (FIAS) ## This program is free software; you can redistribute it and/or ## modify it under the terms of the GNU General Public License as @@ -25,14 +25,11 @@ from __future__ import absolute_import -from . import components +from . import components, descriptors from . 
import pf, opf, plot, networkclustering, io, contingency, geo from .components import Network, SubNetwork -import logging -logging.basicConfig(level=logging.INFO) - -__version__ = "0.11.0" -__author__ = "Tom Brown (FIAS), Jonas Hoersch (FIAS), David Schlachtberger (FIAS)" -__copyright__ = "Copyright 2015-2017 Tom Brown (FIAS), Jonas Hoersch (FIAS), David Schlachtberger (FIAS), GNU GPL 3" +__version__ = "0.13.2" +__author__ = "Tom Brown (KIT, FIAS), Jonas Hoersch (KIT, FIAS), David Schlachtberger (FIAS)" +__copyright__ = "Copyright 2015-2019 Tom Brown (KIT, FIAS), Jonas Hoersch (KIT, FIAS), David Schlachtberger (FIAS), GNU GPL 3" diff --git a/pypsa/component_attrs/generators.csv b/pypsa/component_attrs/generators.csv index 2d0b27510..aa806f9ab 100644 --- a/pypsa/component_attrs/generators.csv +++ b/pypsa/component_attrs/generators.csv @@ -8,12 +8,12 @@ p_nom_extendable,boolean,n/a,False,Switch to allow capacity p_nom to be extended p_nom_min,float,MW,0.,"If p_nom is extendable in OPF, set its minimum value.",Input (optional) p_nom_max,float,MW,inf,"If p_nom is extendable in OPF, set its maximum value (e.g. limited by technical potential).",Input (optional) p_min_pu,static or series,per unit,0.,"The minimum output for each snapshot per unit of p_nom for the OPF (e.g. for variable renewable generators this can change due to weather conditions and compulsory feed-in; for conventional generators it represents a minimal dispatch). Note that if comittable is False and p_min_pu > 0, this represents a must-run condition.",Input (optional) -p_max_pu,static or series,per unit,1.,"The maximum output for each snapshot per unit of p_nom for the OPF (e.g. for varialbe renewable generators this can change due to weather conditions; for conventional generators it represents a maximum dispatch).",Input (optional) +p_max_pu,static or series,per unit,1.,"The maximum output for each snapshot per unit of p_nom for the OPF (e.g. 
for variable renewable generators this can change due to weather conditions; for conventional generators it represents a maximum dispatch).",Input (optional) p_set,static or series,MW,0.,active power set point (for PF),Input (optional) q_set,static or series,MVar,0.,reactive power set point (for PF),Input (optional) sign,float,n/a,1.,power sign,Input (optional) carrier,string,n/a,n/a,"Prime mover energy carrier (e.g. coal, gas, wind, solar); required for global constraints on primary energy in OPF",Input (optional) -marginal_cost,float,currency/MWh,0.,"Marginal cost of production of 1 MWh.",Input (optional) +marginal_cost,static or series,currency/MWh,0.,"Marginal cost of production of 1 MWh.",Input (optional) capital_cost,float,currency/MW,0.,"Capital cost of extending p_nom by 1 MW.",Input (optional) efficiency,float,per unit,1.,"Ratio between primary energy and electrical energy, e.g. takes value 0.4 MWh_elec/MWh_thermal for gas. This is required for global constraints on primary energy in OPF.",Input (optional) committable,boolean,n/a,False,Use unit commitment (only possible if p_nom is not extendable).,Input (optional) diff --git a/pypsa/component_attrs/lines.csv b/pypsa/component_attrs/lines.csv index 67996ec73..44fbc3274 100644 --- a/pypsa/component_attrs/lines.csv +++ b/pypsa/component_attrs/lines.csv @@ -11,7 +11,8 @@ s_nom,float,MVA,0.,Limit of apparent power which can pass through branch.,Input s_nom_extendable,boolean,n/a,False,Switch to allow capacity s_nom to be extended in OPF.,Input (optional) s_nom_min,float,MVA,0.,"If s_nom is extendable in OPF, set its minimum value.",Input (optional) s_nom_max,float,MVA,inf,"If s_nom is extendable in OPF, set its maximum value (e.g. 
limited by potential).",Input (optional) -s_nom_total, float,MVA,inf,"If s_nom is reduced by a branch-capacity factor, this shows its total installed capacity.",Input (optional) +s_max_pu,static or series,per unit,1.,"The maximum allowed absolute flow per unit of s_nom for the OPF (e.g. can be set <1 to approximate n-1 factor, or can be time-varying to represent weather-dependent dynamic line rating for overhead lines).",Input (optional) +s_nom_total,float,MVA,inf,If s_nom is reduced by a branch-capacity factor, this shows its total installed capacity. capital_cost,float,currency/MVA,0.,"Capital cost of extending s_nom by 1 MVA.",Input (optional) length,float,km,0.,"Length of line used when ""type"" is set, also useful for calculating the capital cost.",Input (optional) terrain_factor,float,per unit,1.,"Terrain factor for increasing capital cost.",Input (optional) @@ -27,6 +28,8 @@ x_pu,float,per unit,0.,Per unit series reactance calculated by PyPSA from x and r_pu,float,per unit,0.,Per unit series resistance calculated by PyPSA from r and bus.v_nom,Output g_pu,float,per unit,0.,Per unit shunt conductivity calculated by PyPSA from g and bus.v_nom,Output b_pu,float,per unit,0.,Per unit shunt susceptance calculated by PyPSA from b and bus.v_nom,Output +x_pu_eff,float,per unit,0.,"Effective per unit series reactance for linear power flow, calculated by PyPSA from x, tap_ratio for transformers and bus.v_nom.",Output +r_pu_eff,float,per unit,0.,"Effective per unit series resistance for linear power flow, calculated by PyPSA from x, tap_ratio for transformers and bus.v_nom.",Output s_nom_opt,float,MVA,0.,Optimised capacity for apparent power.,Output mu_lower,series,currency/MVA,0.,Shadow price of lower s_nom limit -F \leq f. Always non-negative.,Output mu_upper,series,currency/MVA,0.,Shadow price of upper s_nom limit f \leq F. 
Always non-negative.,Output diff --git a/pypsa/component_attrs/links.csv b/pypsa/component_attrs/links.csv index 4611041cd..a8cd131a8 100644 --- a/pypsa/component_attrs/links.csv +++ b/pypsa/component_attrs/links.csv @@ -12,7 +12,7 @@ p_set,static or series,MW,0.,"The dispatch set point for p0 of the link in PF.", p_min_pu,static or series,per unit of p_nom,0.,"Minimal dispatch (can also be negative) per unit of p_nom for the link in OPF.",Input (optional) p_max_pu,static or series,per unit of p_nom,1.,"Maximal dispatch (can also be negative) per unit of p_nom for the link in OPF.",Input (optional) capital_cost,float,currency/MW,0.,"Capital cost of extending p_nom by 1 MW.",Input (optional) -marginal_cost,float,currency/MWh,0.,"Marginal cost of transfering 1 MWh (before efficiency losses) from bus0 to bus1. NB: marginal cost only makes sense in OPF if p_max_pu >= 0.",Input (optional) +marginal_cost,static or series,currency/MWh,0.,"Marginal cost of transfering 1 MWh (before efficiency losses) from bus0 to bus1. NB: marginal cost only makes sense in OPF if p_max_pu >= 0.",Input (optional) length,float,km,0.,"Length of line, useful for calculating the capital cost.",Input (optional) terrain_factor,float,per unit,1.,"Terrain factor for increasing capital cost.",Input (optional) p0,series,MW,0.,Active power at bus0 (positive if branch is withdrawing power from bus0).,Output diff --git a/pypsa/component_attrs/storage_units.csv b/pypsa/component_attrs/storage_units.csv index c306895f5..64962f0ff 100644 --- a/pypsa/component_attrs/storage_units.csv +++ b/pypsa/component_attrs/storage_units.csv @@ -13,7 +13,7 @@ p_set,static or series,MW,0.,active power set point (for PF),Input (optional) q_set,static or series,MVar,0.,reactive power set point (for PF),Input (optional) sign,float,n/a,1.,power sign,Input (optional) carrier,string,n/a,n/a,"Prime mover energy carrier (e.g. 
coal, gas, wind, solar); required for global constraints on primary energy in OPF",Input (optional) -marginal_cost,float,currency/MWh,0.,"Marginal cost of production of 1 MWh.",Input (optional) +marginal_cost,static or series,currency/MWh,0.,"Marginal cost of production of 1 MWh.",Input (optional) capital_cost,float,currency/MW,0.,"Capital cost of extending p_nom by 1 MW.",Input (optional) state_of_charge_initial,float,MWh,0.,State of charge before the snapshots in the OPF.,Input (optional) state_of_charge_set,static or series,MWh,NaN,State of charge set points for snapshots in the OPF.,Input (optional) @@ -27,4 +27,4 @@ p,series,MW,0.,active power at bus (positive if net generation),Output q,series,MVar,0.,reactive power (positive if net generation),Output state_of_charge,series,MWh,NaN,State of charge as calculated by the OPF.,Output spill,series,MW,0.,Spillage for each snapshot.,Output -p_nom_opt,float,MW,0.,Optimised nominal power.,Output \ No newline at end of file +p_nom_opt,float,MW,0.,Optimised nominal power.,Output diff --git a/pypsa/component_attrs/stores.csv b/pypsa/component_attrs/stores.csv index b67cee434..5d5e032a8 100644 --- a/pypsa/component_attrs/stores.csv +++ b/pypsa/component_attrs/stores.csv @@ -13,7 +13,7 @@ e_cyclic,boolean,n/a,False,"Switch: if True, then e_initial is ignored and the i p_set,static or series,MW,0.,active power set point (for PF),Input (optional) q_set,static or series,MVar,0.,reactive power set point (for PF),Input (optional) sign,float,n/a,1.,power sign,Input (optional) -marginal_cost,float,currency/MWh,0.,"Marginal cost of production of 1 MWh.",Input (optional) +marginal_cost,static or series,currency/MWh,0.,"Marginal cost of production of 1 MWh.",Input (optional) capital_cost,float,currency/MWh,0.,"Capital cost of extending e_nom by 1 MWh.",Input (optional) standing_loss,float,per unit,0.,Losses per hour to energy.,Input (optional) p,series,MW,0.,active power at bus (positive if net generation),Output diff --git 
a/pypsa/component_attrs/transformers.csv b/pypsa/component_attrs/transformers.csv index 591da135d..f5698268a 100644 --- a/pypsa/component_attrs/transformers.csv +++ b/pypsa/component_attrs/transformers.csv @@ -12,6 +12,7 @@ s_nom,float,MVA,0.,Limit of apparent power which can pass through branch.,Input s_nom_extendable,boolean,n/a,False,Switch to allow capacity s_nom to be extended in OPF.,Input (optional) s_nom_min,float,MVA,0.,"If s_nom is extendable in OPF, set its minimum value.",Input (optional) s_nom_max,float,MVA,inf,"If s_nom is extendable in OPF, set its maximum value (e.g. limited by potential).",Input (optional) +s_max_pu,static or series,per unit,1.,"The maximum allowed absolute flow per unit of s_nom for the OPF.",Input (optional) capital_cost,float,currency/MVA,0.,"Capital cost of extending s_nom by 1 MVA.",Input (optional) num_parallel,float,n/a,1,"When ""type"" is set, this is the number of parallel transformers (can also be fractional). If ""type"" is empty """" this value is ignored.",Input (optional) tap_ratio,float,per unit,1.,"Ratio of per unit voltages at each bus for tap changed. 
Ignored if type defined.",Input (optional) @@ -29,6 +30,8 @@ x_pu,float,per unit,0.,Per unit series reactance calculated by PyPSA from x and r_pu,float,per unit,0.,Per unit series resistance calculated by PyPSA from r and bus.v_nom,Output g_pu,float,per unit,0.,Per unit shunt conductivity calculated by PyPSA from g and bus.v_nom,Output b_pu,float,per unit,0.,Per unit shunt susceptance calculated by PyPSA from b and bus.v_nom,Output +x_pu_eff,float,per unit,0.,"Effective per unit series reactance for linear power flow, calculated by PyPSA from x, tap_ratio for transformers and bus.v_nom.",Output +r_pu_eff,float,per unit,0.,"Effective per unit series resistance for linear power flow, calculated by PyPSA from x, tap_ratio for transformers and bus.v_nom.",Output s_nom_opt,float,MVA,0.,Optimised capacity for apparent power.,Output mu_lower,series,currency/MVA,0.,Shadow price of lower s_nom limit -F \leq f. Always non-negative.,Output mu_upper,series,currency/MVA,0.,Shadow price of upper s_nom limit f \leq F. Always non-negative.,Output \ No newline at end of file diff --git a/pypsa/components.csv b/pypsa/components.csv index 0516c8f30..17b53b8c9 100644 --- a/pypsa/components.csv +++ b/pypsa/components.csv @@ -1,16 +1,16 @@ -component,list_name,description +component,list_name,description,type Network,networks,"Container for all components and functions which act upon the whole network." -SubNetwork,sub_networks +SubNetwork,sub_networks,"Subsets of buses and passive branches (i.e. lines and transformers) that are connected (i.e. synchronous areas)." Bus,buses,"Electrically fundamental node where x-port objects attach." Carrier,carriers,"Energy carrier, such as AC, DC, heat, wind, PV or coal. Buses have direct carriers and Generators indicate their primary energy carriers. The Carrier can track properties relevant for global constraints, such as CO2 emissions." 
GlobalConstraint,global_constraints,"Constraints for OPF that affect many components, such as CO2 emission constraints." -Line,lines,"Lines include distribution and transmission lines, overhead lines and cables." -LineType,line_types,"Standard line types with per length values for impedances." -Transformer,transformers,"2-winding transformer." -TransformerType,transformer_types,"Standard 2-winding transformer types." -Link,links,"Link between two buses with controllable active power - can be used for a transport power flow model OR as a simplified version of point-to-point DC connection OR as a lossy energy converter. NB: for a lossless bi-directional HVDC or transport link, set p_min_pu = -1 and efficiency = 1. NB: It is assumed that the links neither produce nor consume reactive power." -Load,loads,"PQ power consumer." -Generator,generators,"Power generator." -StorageUnit,storage_units,"Storage unit with fixed nominal-energy-to-nominal-power ratio." -Store,stores,"Generic store, whose capacity may be optimised." -ShuntImpedance,shunt_impedances,"Shunt y = g + jb." +Line,lines,"Lines include distribution and transmission lines, overhead lines and cables.",passive_branch +LineType,line_types,"Standard line types with per length values for impedances.",standard_type +Transformer,transformers,"2-winding transformer.",passive_branch +TransformerType,transformer_types,"Standard 2-winding transformer types.",standard_type +Link,links,"Link between two buses with controllable active power - can be used for a transport power flow model OR as a simplified version of point-to-point DC connection OR as a lossy energy converter. NB: for a lossless bi-directional HVDC or transport link, set p_min_pu = -1 and efficiency = 1. 
NB: It is assumed that the links neither produce nor consume reactive power.",controllable_branch +Load,loads,"PQ power consumer.",controllable_one_port +Generator,generators,"Power generator.",controllable_one_port +StorageUnit,storage_units,"Storage unit with fixed nominal-energy-to-nominal-power ratio.",controllable_one_port +Store,stores,"Generic store, whose capacity may be optimised.",controllable_one_port +ShuntImpedance,shunt_impedances,"Shunt y = g + jb.",passive_one_port diff --git a/pypsa/components.py b/pypsa/components.py index e6169e75f..8a19d46fc 100644 --- a/pypsa/components.py +++ b/pypsa/components.py @@ -1,6 +1,6 @@ -## Copyright 2015-2017 Tom Brown (FIAS), Jonas Hoersch (FIAS) +## Copyright 2015-2018 Tom Brown (FIAS), Jonas Hoersch (FIAS) ## This program is free software; you can redistribute it and/or ## modify it under the terms of the GNU General Public License as @@ -52,6 +52,7 @@ from .io import (export_to_csv_folder, import_from_csv_folder, export_to_hdf5, import_from_hdf5, + export_to_netcdf, import_from_netcdf, import_from_pypower_ppc, import_components_from_dataframe, import_series_from_dataframe, import_from_pandapower_net) @@ -88,6 +89,19 @@ inf = float("inf") +components = pd.read_csv(os.path.join(dir_name, + "components.csv"), + index_col=0) + +component_attrs = Dict() + +for component in components.index: + file_name = os.path.join(dir_name, + component_attrs_dir_name, + components.at[component,"list_name"] + ".csv") + component_attrs[component] = pd.read_csv(file_name, index_col=0, na_values="n/a") + +del component class Basic(object): """Common to every object.""" @@ -125,12 +139,22 @@ class Network(Basic): Parameters ---------- - csv_folder_name : string - Name of folder from which to import CSVs of network data. + import_name : string + Name of HDF5 .h5 store or folder from which to import CSVs of network data. name : string, default "" Network name. 
ignore_standard_types : boolean, default False If True, do not read in PyPSA standard types into standard types DataFrames. + csv_folder_name : string + Name of folder from which to import CSVs of network data. Overrides import_name. + override_components : pandas.DataFrame + If you want to override the standard PyPSA components in pypsa.components.components, + pass it a DataFrame with index of component name and columns of list_name and + description, following the format of pypsa.components.components. + See git repository examples/new_components/. + override_component_attrs : pypsa.descriptors.Dict of pandas.DataFrame + If you want to override pypsa.component_attrs, follow its format. + See git repository examples/new_components/. kwargs Any remaining attributes to set @@ -140,7 +164,8 @@ class Network(Basic): Examples -------- - >>> nw = pypsa.Network(csv_folder_name=/my/folder) + >>> nw1 = pypsa.Network("my_store.h5") + >>> nw2 = pypsa.Network("/my/folder") """ @@ -157,6 +182,10 @@ class Network(Basic): export_to_hdf5 = export_to_hdf5 + import_from_netcdf = import_from_netcdf + + export_to_netcdf = export_to_netcdf + import_from_pypower_ppc = import_from_pypower_ppc import_from_pandapower_net = import_from_pandapower_net @@ -189,11 +218,20 @@ class Network(Basic): adjacency_matrix = adjacency_matrix - def __init__(self, csv_folder_name=None, name="", ignore_standard_types=False, **kwargs): + def __init__(self, import_name=None, name="", ignore_standard_types=False, + override_components=None, override_component_attrs=None, + **kwargs): + + if 'csv_folder_name' in kwargs: + logger.warning("The argument csv_folder_name for initiating Network() is deprecated, please use import_name instead.") + import_name = kwargs.pop('csv_folder_name') + + # Initialise root logger and set its level, if this has not been done before + logging.basicConfig(level=logging.INFO) - from .__init__ import __version__ as pypsa_version + from . 
import __version__ as pypsa_version - Basic.__init__(self,name) + Basic.__init__(self, name) #this will be saved on export self.pypsa_version = pypsa_version @@ -204,18 +242,31 @@ def __init__(self, csv_folder_name=None, name="", ignore_standard_types=False, * #corresponds to number of hours represented by each snapshot self.snapshot_weightings = pd.Series(index=self.snapshots,data=1.) - components = pd.read_csv(os.path.join(dir_name, - "components.csv"), - index_col=0) + if override_components is None: + self.components = components + else: + self.components = override_components - self.components = Dict(components.T.to_dict()) + if override_component_attrs is None: + self.component_attrs = component_attrs + else: + self.component_attrs = override_component_attrs - for component in self.components.keys(): - file_name = os.path.join(dir_name, - component_attrs_dir_name, - self.components[component]["list_name"] + ".csv") + for c_type in set(self.components.type.unique()) - {np.nan}: + setattr(self, c_type + "_components", + set(self.components.index[self.components.type == c_type])) + + self.one_port_components = self.passive_one_port_components|self.controllable_one_port_components - attrs = pd.read_csv(file_name, index_col=0, na_values="n/a") + self.branch_components = self.passive_branch_components|self.controllable_branch_components + + self.all_components = set(self.components.index) - {"Network"} + + self.components = Dict(self.components.T.to_dict()) + + for component in self.components: + #make copies to prevent unexpected sharing of variables + attrs = self.component_attrs[component].copy() attrs['static'] = (attrs['type'] != 'series') attrs['varying'] = attrs['type'].isin({'series', 'static or series'}) @@ -239,10 +290,13 @@ def __init__(self, csv_folder_name=None, name="", ignore_standard_types=False, * if not ignore_standard_types: self.read_in_default_standard_types() - - if csv_folder_name is not None: - self.import_from_csv_folder(csv_folder_name) - 
+ if import_name is not None: + if import_name[-3:] == ".h5": + self.import_from_hdf5(import_name) + elif import_name[-3:] == ".nc": + self.import_from_netcdf(import_name) + else: + self.import_from_csv_folder(import_name) for key, value in iteritems(kwargs): setattr(self, key, value) @@ -251,7 +305,7 @@ def __init__(self, csv_folder_name=None, name="", ignore_standard_types=False, * def _build_dataframes(self): """Function called when network is created to build component pandas.DataFrames.""" - for component in all_components: + for component in self.all_components: attrs = self.components[component]["attrs"] @@ -275,7 +329,7 @@ def _build_dataframes(self): def read_in_default_standard_types(self): - for std_type in standard_types: + for std_type in self.standard_type_components: list_name = self.components[std_type]["list_name"] @@ -344,7 +398,7 @@ def set_snapshots(self,snapshots): if isinstance(snapshots, pd.DatetimeIndex) and _pd_version < '0.18.0': snapshots = pd.Index(snapshots.values) - for component in all_components: + for component in self.all_components: pnl = self.pnl(component) attrs = self.components[component]["attrs"] @@ -376,19 +430,14 @@ def add(self, class_name, name, **kwargs): """ - if class_name not in self.components: - logger.error("Component class {} not found".format(class_name)) - return None + assert class_name in self.components, "Component class {} not found".format(class_name) cls_df = self.df(class_name) cls_pnl = self.pnl(class_name) name = str(name) - if name in cls_df.index: - logger.error("Failed to add {} component {} because there is already an object with this name in {}".format(class_name, name, self.components[class_name]["list_name"])) - return - + assert name not in cls_df.index, "Failed to add {} component {} because there is already an object with this name in {}".format(class_name, name, self.components[class_name]["list_name"]) attrs = self.components[class_name]["attrs"] @@ -396,7 +445,7 @@ def add(self, class_name, 
name, **kwargs): #This guarantees that the correct attribute type is maintained obj_df = pd.DataFrame(data=[static_attrs.default],index=[name],columns=static_attrs.index) - new_df = cls_df.append(obj_df) + new_df = cls_df.append(obj_df, sort=False) setattr(self, self.components[class_name]["list_name"], new_df) @@ -551,6 +600,20 @@ def mremove(self, class_name, names): df.drop(df.columns.intersection(names), axis=1, inplace=True) + def _retrieve_overridden_components(self): + + components_index = list(self.components.keys()) + + cols = ["list_name","description","type"] + + override_components = pd.DataFrame([[self.components[i][c] for c in cols] for i in components_index], + columns=cols, + index=components_index) + + override_component_attrs = Dict({i : self.component_attrs[i].copy() for i in components_index}) + + return override_components, override_component_attrs + def copy(self, with_time=True, ignore_standard_types=False): """ @@ -574,12 +637,16 @@ def copy(self, with_time=True, ignore_standard_types=False): """ - network = self.__class__(ignore_standard_types=ignore_standard_types) + override_components, override_component_attrs = self._retrieve_overridden_components() - for component in self.iterate_components(["Bus", "Carrier"] + sorted(all_components - {"Bus","Carrier"})): + network = self.__class__(ignore_standard_types=ignore_standard_types, + override_components=override_components, + override_component_attrs=override_component_attrs) + + for component in self.iterate_components(["Bus", "Carrier"] + sorted(self.all_components - {"Bus","Carrier"})): df = component.df #drop the standard types to avoid them being read in twice - if not ignore_standard_types and component.name in standard_types: + if not ignore_standard_types and component.name in self.standard_type_components: df = component.df.drop(network.components[component.name]["standard_types"].index) import_components_from_dataframe(network, df, component.name) @@ -630,31 +697,32 @@ def 
__getitem__(self, key): else: time_i = slice(None) - n = self.__class__() + override_components, override_component_attrs = self._retrieve_overridden_components() + n = self.__class__(override_components=override_components, override_component_attrs=override_component_attrs) n.import_components_from_dataframe( pd.DataFrame(self.buses.ix[key]).assign(sub_network=""), "Bus" ) buses_i = n.buses.index - rest_components = all_components - standard_types - one_port_components - branch_components + rest_components = self.all_components - self.standard_type_components - self.one_port_components - self.branch_components for c in rest_components - {"Bus", "SubNetwork"}: n.import_components_from_dataframe(pd.DataFrame(self.df(c)), c) - for c in standard_types: + for c in self.standard_type_components: df = self.df(c).drop(self.components[c]["standard_types"].index) n.import_components_from_dataframe(pd.DataFrame(df), c) - for c in one_port_components: + for c in self.one_port_components: df = self.df(c).loc[lambda df: df.bus.isin(buses_i)] n.import_components_from_dataframe(pd.DataFrame(df), c) - for c in branch_components: + for c in self.branch_components: df = self.df(c).loc[lambda df: df.bus0.isin(buses_i) & df.bus1.isin(buses_i)] n.import_components_from_dataframe(pd.DataFrame(df), c) n.set_snapshots(self.snapshots[time_i]) - for c in all_components: + for c in self.all_components: i = n.df(c).index try: npnl = n.pnl(c) @@ -677,23 +745,23 @@ def __getitem__(self, key): #beware, this turns bools like s_nom_extendable into objects because of #presence of links without s_nom_extendable def branches(self): - return pd.concat((self.df(c) for c in branch_components), - keys=branch_components) + return pd.concat((self.df(c) for c in self.branch_components), + keys=self.branch_components, sort=False) def passive_branches(self): - return pd.concat((self.df(c) for c in passive_branch_components), - keys=passive_branch_components) + return pd.concat((self.df(c) for c in 
self.passive_branch_components), + keys=self.passive_branch_components, sort=False) def controllable_branches(self): - return pd.concat((self.df(c) for c in controllable_branch_components), - keys=controllable_branch_components) + return pd.concat((self.df(c) for c in self.controllable_branch_components), + keys=self.controllable_branch_components, sort=False) def determine_network_topology(self): """ Build sub_networks from topology. """ - adjacency_matrix = self.adjacency_matrix(passive_branch_components) + adjacency_matrix = self.adjacency_matrix(self.passive_branch_components) n_components, labels = csgraph.connected_components(adjacency_matrix, directed=False) # remove all old sub_networks @@ -722,12 +790,12 @@ def determine_network_topology(self): self.buses.loc[:, "sub_network"] = labels.astype(str) - for c in self.iterate_components(passive_branch_components): + for c in self.iterate_components(self.passive_branch_components): c.df["sub_network"] = c.df.bus0.map(self.buses["sub_network"]) def iterate_components(self, components=None, skip_empty=True): if components is None: - components = all_components + components = self.all_components return (Component(name=c, list_name=self.components[c]["list_name"], @@ -751,13 +819,13 @@ def consistency_check(self): """ - for c in self.iterate_components(one_port_components): + for c in self.iterate_components(self.one_port_components): missing = c.df.index[~c.df.bus.isin(self.buses.index)] if len(missing) > 0: logger.warning("The following %s have buses which are not defined:\n%s", c.list_name, missing) - for c in self.iterate_components(branch_components): + for c in self.iterate_components(self.branch_components): for attr in ["bus0","bus1"]: missing = c.df.index[~c.df[attr].isin(self.buses.index)] if len(missing) > 0: @@ -765,7 +833,7 @@ def consistency_check(self): c.list_name, attr, missing) - for c in self.iterate_components(passive_branch_components): + for c in 
self.iterate_components(self.passive_branch_components): for attr in ["x","r"]: bad = c.df.index[c.df[attr] == 0.] if len(bad) > 0: @@ -785,7 +853,7 @@ def consistency_check(self): c.list_name, bad) - for c in self.iterate_components(all_components): + for c in self.iterate_components(self.all_components): for attr in c.attrs.index[c.attrs.varying & c.attrs.static]: attr_df = c.pnl[attr] @@ -806,7 +874,7 @@ def consistency_check(self): static_attrs = ['p_nom', 's_nom', 'e_nom'] varying_attrs = ['p_max_pu', 'e_max_pu'] - for c in self.iterate_components(all_components - {'TransformerType'}): + for c in self.iterate_components(self.all_components - {'TransformerType'}): varying_attr = c.attrs.index[c.attrs.varying].intersection(varying_attrs) static_attr = c.attrs.index[c.attrs.static].intersection(static_attrs) @@ -919,7 +987,7 @@ def transformers_i(self): def branches_i(self): types = [] names = [] - for c in self.iterate_components(passive_branch_components): + for c in self.iterate_components(self.network.passive_branch_components): types += len(c.ind) * [c.name] names += list(c.ind) return pd.MultiIndex.from_arrays([types, names], names=('type', 'name')) @@ -969,17 +1037,3 @@ def iterate_components(self, components=None, skip_empty=True): c = Component(*c[:-1], ind=getattr(self, c.list_name + '_i')()) if not (skip_empty and len(c.ind) == 0): yield c - - -standard_types = {"LineType", "TransformerType"} - -passive_one_port_components = {"ShuntImpedance"} -controllable_one_port_components = {"Load", "Generator", "StorageUnit", "Store"} -one_port_components = passive_one_port_components|controllable_one_port_components - -passive_branch_components = {"Line", "Transformer"} -controllable_branch_components = {"Link"} -branch_components = passive_branch_components|controllable_branch_components - -#i.e. 
everything except "Network" -all_components = branch_components|one_port_components|standard_types|{"Bus", "SubNetwork", "Carrier", "GlobalConstraint"} diff --git a/pypsa/contingency.py b/pypsa/contingency.py index d2198a984..ba688a11a 100644 --- a/pypsa/contingency.py +++ b/pypsa/contingency.py @@ -103,8 +103,6 @@ def network_lpf_contingency(network, snapshots=None, branch_outages=None): """ - from .components import passive_branch_components - if snapshots is None: snapshots = network.snapshots @@ -126,7 +124,7 @@ def network_lpf_contingency(network, snapshots=None, branch_outages=None): p0_base = pd.Series(index=passive_branches.index) - for c in passive_branch_components: + for c in network.passive_branch_components: pnl = network.pnl(c) p0_base[c] = pnl.p0.loc[snapshot] diff --git a/pypsa/descriptors.py b/pypsa/descriptors.py index cb9ee9247..fe7dc9cc4 100644 --- a/pypsa/descriptors.py +++ b/pypsa/descriptors.py @@ -1,6 +1,6 @@ -## Copyright 2015-2017 Tom Brown (FIAS), Jonas Hoersch (FIAS) +## Copyright 2015-2018 Tom Brown (FIAS), Jonas Hoersch (FIAS) ## This program is free software; you can redistribute it and/or ## modify it under the terms of the GNU General Public License as @@ -37,14 +37,13 @@ from weakref import WeakKeyDictionary from collections import OrderedDict +from itertools import repeat import networkx as nx import pandas as pd import numpy as np import re -import inspect - import logging logger = logging.getLogger(__name__) @@ -90,6 +89,13 @@ def __init__(self, data=None, **attr): else: raise ImportError("NetworkX version {} is too old. 
At least 1.10 is needed.".format(nx.__version__)) +if _nx_version >= '2.0': + def degree(G): + return G.degree() +else: + def degree(G): + return G.degree_iter() + class Dict(dict): """ Dict is a subclass of dict, which allows you to get AND SET @@ -154,6 +160,8 @@ def get_switchable_as_dense(network, component, attr, snapshots=None, inds=None) network : pypsa.Network component : string Component object name, e.g. 'Generator' or 'Link' + attr : string + Attribute name snapshots : pandas.Index Restrict to these snapshots rather than network.snapshots. inds : pandas.Index @@ -173,6 +181,7 @@ def get_switchable_as_dense(network, component, attr, snapshots=None, inds=None) pnl = network.pnl(component) index = df.index + varying_i = pnl[attr].columns fixed_i = df.index.difference(varying_i) @@ -186,7 +195,63 @@ def get_switchable_as_dense(network, component, attr, snapshots=None, inds=None) pd.DataFrame(np.repeat([df.loc[fixed_i, attr].values], len(snapshots), axis=0), index=snapshots, columns=fixed_i), pnl[attr].loc[snapshots, varying_i] - ], axis=1).reindex(columns=index)) + ], axis=1, sort=False).reindex(columns=index)) + +def get_switchable_as_iter(network, component, attr, snapshots, inds=None): + """ + Return an iterator over snapshots for a time-varying component + attribute with values for all non-time-varying components filled + in with the default values for the attribute. + + Parameters + ---------- + network : pypsa.Network + component : string + Component object name, e.g. 'Generator' or 'Link' + attr : string + Attribute name + snapshots : pandas.Index + Restrict to these snapshots rather than network.snapshots. 
+ inds : pandas.Index + Restrict to these items rather than all of network.{generators,..}.index + + Returns + ------- + pandas.DataFrame + + Examples + -------- + >>> get_switchable_as_iter(network, 'Generator', 'p_max_pu', snapshots) + +""" + + df = network.df(component) + pnl = network.pnl(component) + + index = df.index + varying_i = pnl[attr].columns + fixed_i = df.index.difference(varying_i) + + if inds is not None: + inds = pd.Index(inds) + index = inds.intersection(index) + varying_i = inds.intersection(varying_i) + fixed_i = inds.intersection(fixed_i) + + # Short-circuit only fixed + if len(varying_i) == 0: + return repeat(df.loc[fixed_i, attr], len(snapshots)) + + def is_same_indices(i1, i2): return len(i1) == len(i2) and (i1 == i2).all() + if is_same_indices(fixed_i.append(varying_i), index): + def reindex_maybe(s): return s + else: + def reindex_maybe(s): return s.reindex(index) + + return ( + reindex_maybe(df.loc[fixed_i, attr].append(pnl[attr].loc[sn, varying_i])) + for sn in snapshots + ) def allocate_series_dataframes(network, series): @@ -218,3 +283,23 @@ def allocate_series_dataframes(network, series): for attr in attributes: pnl[attr] = pnl[attr].reindex(columns=df.index, fill_value=network.components[component]["attrs"].at[attr,"default"]) + +def free_output_series_dataframes(network, components=None): + if components is None: + components = network.all_components + + for component in components: + attrs = network.components[component]['attrs'] + pnl = network.pnl(component) + + for attr in attrs.index[attrs['varying'] & (attrs['status'] == 'Output')]: + pnl[attr] = pd.DataFrame(index=network.snapshots, columns=[]) + +def zsum(s, *args, **kwargs): + """ + pandas 0.21.0 changes sum() behavior so that the result of applying sum + over an empty DataFrame is NaN. + + Meant to be set as pd.Series.zsum = zsum. 
+ """ + return 0 if s.empty else s.sum(*args, **kwargs) diff --git a/pypsa/graph.py b/pypsa/graph.py index d56d134c0..938c1a1fb 100644 --- a/pypsa/graph.py +++ b/pypsa/graph.py @@ -23,20 +23,45 @@ from .descriptors import OrderedGraph -def graph(network, branch_components=None, weight=None): - """Build networkx graph.""" +def graph(network, branch_components=None, weight=None, inf_weight=False): + """ + Build NetworkX graph. + + Arguments + --------- + network : Network|SubNetwork + + branch_components : [str] + Components to use as branches. The default are + passive_branch_components in the case of a SubNetwork and + branch_components in the case of a Network. + + weight : str + Branch attribute to use as weight + + inf_weight : bool|float + How to treat infinite weights (default: False). True keeps the infinite + weight. False skips edges with infinite weight. If a float is given it + is used instead. + + Returns + ------- + graph : OrderedGraph + NetworkX graph + """ + from . import components if isinstance(network, components.Network): if branch_components is None: - branch_components = components.branch_components + branch_components = network.branch_components buses_i = network.buses.index elif isinstance(network, components.SubNetwork): if branch_components is None: - branch_components = components.passive_branch_components + branch_components = network.network.passive_branch_components buses_i = network.buses_i() else: - raise TypeError("build_graph must be called with a Network or a SubNetwork") + raise TypeError("graph must be called with a Network or a SubNetwork") graph = OrderedGraph() @@ -51,8 +76,12 @@ def gen_edges(): if weight is None: data = {} else: - data = dict(weight=getattr(branch, weight)) - if np.isinf(data['weight']): continue + data = dict(weight=getattr(branch, weight, 0)) + if np.isinf(data['weight']) and inf_weight is not True: + if inf_weight is False: + continue + else: + data['weight'] = inf_weight yield (branch.bus0, branch.bus1, 
(c.name, branch.Index), data) @@ -81,16 +110,17 @@ def adjacency_matrix(network, branch_components=None, busorder=None, weights=Non adjacency_matrix : sp.sparse.coo_matrix Directed adjacency matrix """ + from . import components if isinstance(network, components.Network): if branch_components is None: - branch_components = components.branch_components + branch_components = network.branch_components if busorder is None: busorder = network.buses.index elif isinstance(network, components.SubNetwork): if branch_components is None: - branch_components = components.passive_branch_components + branch_components = network.network.passive_branch_components if busorder is None: busorder = network.buses_i() else: @@ -106,7 +136,7 @@ def adjacency_matrix(network, branch_components=None, busorder=None, weights=Non sel = slice(None) no_branches = len(c.df) else: - sel = t.ind + sel = c.ind no_branches = len(c.ind) bus0_inds.append(busorder.get_indexer(c.df.loc[sel, "bus0"])) bus1_inds.append(busorder.get_indexer(c.df.loc[sel, "bus1"])) @@ -146,12 +176,12 @@ def incidence_matrix(network, branch_components=None, busorder=None): if isinstance(network, components.Network): if branch_components is None: - branch_components = components.branch_components + branch_components = network.branch_components if busorder is None: busorder = network.buses.index elif isinstance(network, components.SubNetwork): if branch_components is None: - branch_components = components.passive_branch_components + branch_components = network.network.passive_branch_components if busorder is None: busorder = network.buses_i() else: diff --git a/pypsa/io.py b/pypsa/io.py index f8ae69955..93486caf0 100644 --- a/pypsa/io.py +++ b/pypsa/io.py @@ -16,159 +16,429 @@ """Functions for importing and exporting data. 
""" - # make the code as Python 3 compatible as possible from __future__ import division, absolute_import -from six import iteritems +from six import iteritems, iterkeys, string_types from six.moves import filter, range - __author__ = "Tom Brown (FIAS), Jonas Hoersch (FIAS)" __copyright__ = "Copyright 2015-2017 Tom Brown (FIAS), Jonas Hoersch (FIAS), GNU GPL 3" import logging logger = logging.getLogger(__name__) -from textwrap import dedent import os +from textwrap import dedent +from glob import glob import pandas as pd import pypsa import numpy as np - - -def export_to_csv_folder(network, csv_folder_name, encoding=None, export_standard_types=False): +try: + import xarray as xr + has_xarray = True +except ImportError: + has_xarray = False + +class ImpExper(object): + ds = None + + def __enter__(self): + if self.ds is not None: + self.ds = self.ds.__enter__() + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + if exc_type is None: + self.finish() + + if self.ds is not None: + self.ds.__exit__(exc_type, exc_val, exc_tb) + + def finish(self): + pass + +class Exporter(ImpExper): + def remove_static(self, list_name): + pass + + def remove_series(self, list_name, attr): + pass + +class Importer(ImpExper): + pass + +class ImporterCSV(Importer): + def __init__(self, csv_folder_name, encoding): + self.csv_folder_name = csv_folder_name + self.encoding = encoding + + assert os.path.isdir(csv_folder_name), "Directory {} does not exist.".format(csv_folder_name) + + def get_attributes(self): + fn = os.path.join(self.csv_folder_name, "network.csv") + if not os.path.isfile(fn): return None + return dict(pd.read_csv(fn, encoding=self.encoding).iloc[0]) + + def get_snapshots(self): + fn = os.path.join(self.csv_folder_name, "snapshots.csv") + return pd.read_csv(fn, index_col=0, encoding=self.encoding, parse_dates=True) + + def get_static(self, list_name): + fn = os.path.join(self.csv_folder_name, list_name + ".csv") + return (pd.read_csv(fn, index_col=0, 
encoding=self.encoding) + if os.path.isfile(fn) else None) + + def get_series(self, list_name): + for fn in os.listdir(self.csv_folder_name): + if fn.startswith(list_name+"-") and fn.endswith(".csv"): + attr = fn[len(list_name)+1:-4] + df = pd.read_csv(os.path.join(self.csv_folder_name, fn), + index_col=0, encoding=self.encoding, parse_dates=True) + yield attr, df + +class ExporterCSV(Exporter): + def __init__(self, csv_folder_name, encoding): + self.csv_folder_name = csv_folder_name + self.encoding = encoding + + #make sure directory exists + if not os.path.isdir(csv_folder_name): + logger.warning("Directory {} does not exist, creating it" + .format(csv_folder_name)) + os.mkdir(csv_folder_name) + + def save_attributes(self, attrs): + name = attrs.pop('name') + df = pd.DataFrame(attrs, index=pd.Index([name], name='name')) + fn = os.path.join(self.csv_folder_name, "network.csv") + df.to_csv(fn, encoding=self.encoding) + + def save_snapshots(self, snapshots): + fn = os.path.join(self.csv_folder_name, "snapshots.csv") + snapshots.to_csv(fn, encoding=self.encoding) + + def save_static(self, list_name, df): + fn = os.path.join(self.csv_folder_name, list_name + ".csv") + df.to_csv(fn, encoding=self.encoding) + + def save_series(self, list_name, attr, df): + fn = os.path.join(self.csv_folder_name, list_name + "-" + attr + ".csv") + df.to_csv(fn, encoding=self.encoding) + + def remove_static(self, list_name): + fns = glob(os.path.join(self.csv_folder_name, list_name) + "*.csv") + if fns: + for fn in fns: os.unlink(fn) + logger.warning("Stale csv file(s) {} removed".format(', '.join(fns))) + + def remove_series(self, list_name, attr): + fn = os.path.join(self.csv_folder_name, list_name + "-" + attr + ".csv") + if os.path.exists(fn): + os.unlink(fn) + +class ImporterHDF5(Importer): + def __init__(self, path): + self.ds = pd.HDFStore(path, mode='r') + self.index = {} + + def get_attributes(self): + return dict(self.ds["/network"].reset_index().iloc[0]) + + def 
get_snapshots(self): + return self.ds["/snapshots"] if "/snapshots" in self.ds else None + + def get_static(self, list_name): + if "/" + list_name not in self.ds: + return None + + if self.pypsa_version is None or self.pypsa_version < [0, 13, 1]: + df = self.ds["/" + list_name] + else: + df = self.ds["/" + list_name].set_index('name') + + self.index[list_name] = df.index + return df + + def get_series(self, list_name): + for tab in self.ds: + if tab.startswith('/' + list_name + '_t/'): + attr = tab[len('/' + list_name + '_t/'):] + df = self.ds[tab] + if self.pypsa_version is not None and self.pypsa_version > [0, 13, 0]: + df.columns = self.index[list_name][df.columns] + yield attr, df + +class ExporterHDF5(Exporter): + def __init__(self, path, **kwargs): + self.ds = pd.HDFStore(path, mode='w', **kwargs) + self.index = {} + + def save_attributes(self, attrs): + name = attrs.pop('name') + self.ds.put('/network', + pd.DataFrame(attrs, index=pd.Index([name], name='name')), + format='table', index=False) + + def save_snapshots(self, snapshots): + self.ds.put('/snapshots', snapshots, format='table', index=False) + + def save_static(self, list_name, df): + df.index.name = 'name' + self.index[list_name] = df.index + df = df.reset_index() + self.ds.put('/' + list_name, df, format='table', index=False) + + def save_series(self, list_name, attr, df): + df.columns = self.index[list_name].get_indexer(df.columns) + self.ds.put('/' + list_name + '_t/' + attr, df, format='table', index=False) + +if has_xarray: + class ImporterNetCDF(Importer): + def __init__(self, path): + self.path = path + if isinstance(path, string_types): + self.ds = xr.open_dataset(path) + else: + self.ds = path + + def __enter__(self): + if isinstance(self.path, string_types): + super(ImporterNetCDF, self).__init__() + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + if isinstance(self.path, string_types): + super(ImporterNetCDF, self).__exit__(exc_type, exc_val, exc_tb) + + def 
get_attributes(self): + return {attr[len('network_'):]: val + for attr, val in iteritems(self.ds.attrs) + if attr.startswith('network_')} + + def get_snapshots(self): + return self.get_static('snapshots', 'snapshots') + + def get_static(self, list_name, index_name=None): + t = list_name + '_' + i = len(t) + if index_name is None: + index_name = list_name + '_i' + if index_name not in self.ds.coords: + return None + index = self.ds.coords[index_name].to_index().rename('name') + df = pd.DataFrame(index=index) + for attr in iterkeys(self.ds.data_vars): + if attr.startswith(t) and attr[i:i+2] != 't_': + df[attr[i:]] = self.ds[attr].to_pandas() + return df + + def get_series(self, list_name): + t = list_name + '_t_' + for attr in iterkeys(self.ds.data_vars): + if attr.startswith(t): + df = self.ds[attr].to_pandas() + df.index.name = 'name' + df.columns.name = 'name' + yield attr[len(t):], df + + class ExporterNetCDF(Exporter): + def __init__(self, path, least_significant_digit=None): + self.path = path + self.least_significant_digit = least_significant_digit + self.ds = xr.Dataset() + + def save_attributes(self, attrs): + self.ds.attrs.update(('network_' + attr, val) + for attr, val in iteritems(attrs)) + + def save_snapshots(self, snapshots): + snapshots.index.name = 'snapshots' + for attr in snapshots.columns: + self.ds['snapshots_' + attr] = snapshots[attr] + + def save_static(self, list_name, df): + df.index.name = list_name + '_i' + self.ds[list_name + '_i'] = df.index + for attr in df.columns: + self.ds[list_name + '_' + attr] = df[attr] + + def save_series(self, list_name, attr, df): + df.index.name = 'snapshots' + df.columns.name = list_name + '_t_' + attr + '_i' + self.ds[list_name + '_t_' + attr] = df + if self.least_significant_digit is not None: + print(self.least_significant_digit) + self.ds.encoding.update({ + 'zlib': True, + 'least_significant_digit': self.least_significant_digit + }) + + def finish(self): + if self.path is not None: + 
self.ds.to_netcdf(self.path) + +def _export_to_exporter(network, exporter, basename, export_standard_types=False): """ - Export network and components to a folder of CSVs. + Export to exporter. Both static and series attributes of components are exported, but only if they have non-default values. - If csv_folder_name does not already exist, it is created. - Parameters ---------- - csv_folder_name : string - Name of folder to which to export. - encoding : str, default None - Encoding to use for UTF when reading (ex. 'utf-8'). `List of Python - standard encodings - `_ + exporter : Exporter + Initialized exporter instance + basename : str + Basename, used for logging export_standard_types : boolean, default False If True, then standard types are exported too (upon reimporting you should then set "ignore_standard_types" when initialising the netowrk). - - Examples - -------- - >>> export_to_csv(network,csv_folder_name) - OR - >>> network.export_to_csv(csv_folder_name) """ - #exportable component types #what about None???? - nan is float? 
- allowed_types = [float,int,str,bool] + list(np.typeDict.values()) - - #make sure directory exists - if not os.path.isdir(csv_folder_name): - logger.warning("Directory {} does not exist, creating it".format(csv_folder_name)) - os.mkdir(csv_folder_name) - + allowed_types = (float,int,bool) + string_types + tuple(np.typeDict.values()) #first export network properties - - columns = [attr for attr in dir(network) if type(getattr(network,attr)) in allowed_types and attr != "name" and attr[:2] != "__"] - index = [network.name] - df = pd.DataFrame(index=index,columns=columns,data = [[getattr(network,col) for col in columns]]) - df.index.name = "name" - - df.to_csv(os.path.join(csv_folder_name,"network.csv"),encoding=encoding) + attrs = dict((attr, getattr(network, attr)) + for attr in dir(network) + if (not attr.startswith("__") and + isinstance(getattr(network,attr), allowed_types))) + exporter.save_attributes(attrs) #now export snapshots - - df = pd.DataFrame(index=network.snapshots) - df["weightings"] = network.snapshot_weightings - df.index.name = "name" - - df.to_csv(os.path.join(csv_folder_name,"snapshots.csv"),encoding=encoding) - - #now export all other components + snapshots = pd.DataFrame(dict(weightings=network.snapshot_weightings), + index=pd.Index(network.snapshots, name="name")) + exporter.save_snapshots(snapshots) exported_components = [] - - for component in pypsa.components.all_components - {"SubNetwork"}: + for component in network.all_components - {"SubNetwork"}: list_name = network.components[component]["list_name"] attrs = network.components[component]["attrs"] + df = network.df(component) pnl = network.pnl(component) - - if not export_standard_types and component in pypsa.components.standard_types: + if not export_standard_types and component in network.standard_type_components: df = df.drop(network.components[component]["standard_types"].index) - - #first do static attributes - filename = os.path.join(csv_folder_name,list_name+".csv") + # first do 
static attributes df.index.name = "name" if df.empty: - if os.path.exists(filename): - os.unlink(filename) - - fns = [os.path.basename(filename)] - for attr in attrs.index[attrs.varying]: - fn = os.path.join(csv_folder_name,list_name+'-'+attr+'.csv') - if os.path.exists(fn): - os.unlink(fn) - fns.append(os.path.basename(fn)) - - logger.warning("Stale csv file(s) {} removed".format(', '.join(fns))) - + exporter.remove_static(list_name) continue col_export = [] for col in df.columns: - #do not export derived attributes - if col in ["sub_network","r_pu","x_pu","g_pu","b_pu"]: + # do not export derived attributes + if col in ["sub_network", "r_pu", "x_pu", "g_pu", "b_pu"]: continue - if col in attrs.index and pd.isnull(attrs.at[col,"default"]) and pd.isnull(df[col]).all(): + if col in attrs.index and pd.isnull(attrs.at[col, "default"]) and pd.isnull(df[col]).all(): continue if (col in attrs.index and df[col].dtype == attrs.at[col, 'dtype'] - and (df[col] == attrs.at[col,"default"]).all()): + and (df[col] == attrs.at[col, "default"]).all()): continue col_export.append(col) - df[col_export].to_csv(filename,encoding=encoding) - + exporter.save_static(list_name, df[col_export]) #now do varying attributes for attr in pnl: if attr not in attrs.index: col_export = pnl[attr].columns else: - default = attrs.at[attr,"default"] + default = attrs.at[attr, "default"] if pd.isnull(default): col_export = pnl[attr].columns[(~pd.isnull(pnl[attr])).any()] else: col_export = pnl[attr].columns[(pnl[attr] != default).any()] - filename = os.path.join(csv_folder_name,list_name+"-" + attr + ".csv") if len(col_export) > 0: - pnl[attr].loc[:,col_export].to_csv(filename,encoding=encoding) + df = pnl[attr][col_export] + exporter.save_series(list_name, attr, df) else: - if os.path.exists(filename): - os.unlink(filename) - logger.warning("Stale csv file {} removed" - .format(os.path.basename(filename))) + exporter.remove_series(list_name, attr) exported_components.append(list_name) - 
logger.info("Exported network {} has {}".format(os.path.basename(csv_folder_name), ", ".join(exported_components))) + logger.info("Exported network {} has {}".format(basename, ", ".join(exported_components))) + +def import_from_csv_folder(network, csv_folder_name, encoding=None, skip_time=False): + """ + Import network data from CSVs in a folder. + + The CSVs must follow the standard form, see pypsa/examples. + + Parameters + ---------- + csv_folder_name : string + Name of folder + encoding : str, default None + Encoding to use for UTF when reading (ex. 'utf-8'). `List of Python + standard encodings + `_ + skip_time : bool, default False + Skip reading in time dependent attributes + """ + + basename = os.path.basename(csv_folder_name) + with ImporterCSV(csv_folder_name, encoding=encoding) as importer: + _import_from_importer(network, importer, basename=basename, skip_time=skip_time) + +def export_to_csv_folder(network, csv_folder_name, encoding=None, export_standard_types=False): + """ + Export network and components to a folder of CSVs. + + Both static and series attributes of components are exported, but only + if they have non-default values. + + If csv_folder_name does not already exist, it is created. + + Parameters + ---------- + csv_folder_name : string + Name of folder to which to export. + encoding : str, default None + Encoding to use for UTF when reading (ex. 'utf-8'). `List of Python + standard encodings + `_ + export_standard_types : boolean, default False + If True, then standard types are exported too (upon reimporting you + should then set "ignore_standard_types" when initialising the netowrk). 
+ + Examples + -------- + >>> export_to_csv(network,csv_folder_name) + OR + >>> network.export_to_csv(csv_folder_name) + """ + + basename = os.path.basename(csv_folder_name) + with ExporterCSV(csv_folder_name=csv_folder_name, encoding=encoding) as exporter: + _export_to_exporter(network, exporter, basename=basename, + export_standard_types=export_standard_types) + +def import_from_hdf5(network, path, skip_time=False): + """ + Import network data from HDF5 store at `path`. + + Parameters + ---------- + path : string + Name of HDF5 store + skip_time : bool, default False + Skip reading in time dependent attributes + """ + + basename = os.path.basename(path) + with ImporterHDF5(path) as importer: + _import_from_importer(network, importer, basename=basename, skip_time=skip_time) def export_to_hdf5(network, path, export_standard_types=False, **kwargs): """ @@ -196,154 +466,143 @@ def export_to_hdf5(network, path, export_standard_types=False, **kwargs): kwargs.setdefault('complevel', 4) - with pd.HDFStore(path, mode='w', **kwargs) as store: - #first export network properties + basename = os.path.basename(path) + with ExporterHDF5(path, **kwargs) as exporter: + _export_to_exporter(network, exporter, basename=basename, + export_standard_types=export_standard_types) - #exportable component types - #what about None???? - nan is float? - allowed_types = [float,int,str,bool] + list(np.typeDict.values()) +def import_from_netcdf(network, path, skip_time=False): + """ + Import network data from netCDF file or xarray Dataset at `path`. 
- columns = [attr for attr in dir(network) - if (attr != "name" and attr[:2] != "__" and - type(getattr(network,attr)) in allowed_types)] - index = pd.Index([network.name], name="name") - store.put('/network', - pd.DataFrame(index=index, columns=columns, - data=[[getattr(network, col) for col in columns]]), - format='table', index=False) + Parameters + ---------- + path : string|xr.Dataset + Path to netCDF dataset or instance of xarray Dataset + skip_time : bool, default False + Skip reading in time dependent attributes + """ - #now export snapshots + assert has_xarray, "xarray must be installed for netCDF support." - store.put('/snapshots', - pd.DataFrame(dict(weightings=network.snapshot_weightings), - index=pd.Index(network.snapshots, name="name")), - format='table', index=False) + basename = os.path.basename(path) if isinstance(path, string_types) else None + with ImporterNetCDF(path=path) as importer: + _import_from_importer(network, importer, basename=basename, + skip_time=skip_time) - #now export all other components +def export_to_netcdf(network, path=None, export_standard_types=False, + least_significant_digit=None): + """Export network and components to a netCDF file. - exported_components = [] - for component in pypsa.components.all_components - {"SubNetwork"}: + Both static and series attributes of components are exported, but only + if they have non-default values. - list_name = network.components[component]["list_name"] - attrs = network.components[component]["attrs"] + If path does not already exist, it is created. - df = network.df(component) - pnl = network.pnl(component) + If no path is passed, no file is exported, but the xarray.Dataset + is still returned. - if not export_standard_types and component in pypsa.components.standard_types: - df = df.drop(network.components[component]["standard_types"].index) + Be aware that this cannot export boolean attributes on the Network + class, e.g. network.my_bool = False is not supported by netCDF. 
- #first do static attributes - df.index.name = "name" - if df.empty: - continue + Parameters + ---------- + path : string|None + Name of netCDF file to which to export (if it exists, it is overwritten); + if None is passed, no file is exported. + least_significant_digit + This is passed to the netCDF exporter, but currently makes no difference + to file size or float accuracy. We're working on improving this... - col_export = [] - for col in df.columns: - #do not export derived attributes - if col in ["sub_network", "r_pu", "x_pu", "g_pu", "b_pu"]: - continue - if col in attrs.index and pd.isnull(attrs.at[col, "default"]) and pd.isnull(df[col]).all(): - continue - if (col in attrs.index - and df[col].dtype == attrs.at[col, 'dtype'] - and (df[col] == attrs.at[col, "default"]).all()): - continue - - col_export.append(col) - - store.put('/' + list_name, df[col_export], format='table', index=False) - - #now do varying attributes - for attr in pnl: - if attr not in attrs.index: - col_export = pnl[attr].columns - else: - default = attrs.at[attr, "default"] + Returns + ------- + ds : xarray.Dataset - if pd.isnull(default): - col_export = pnl[attr].columns[(~pd.isnull(pnl[attr])).any()] - else: - col_export = pnl[attr].columns[(pnl[attr] != default).any()] + Examples + -------- + >>> export_to_netcdf(network, "my_file.nc") + OR + >>> network.export_to_netcdf("my_file.nc") - df = pnl[attr][col_export] - if not df.empty: - store.put('/' + list_name + '_t/' + attr, df, format='table', index=False) + """ - exported_components.append(list_name) + assert has_xarray, "xarray must be installed for netCDF support." 
- logger.info("Exported network {} has {}".format(os.path.basename(path), ", ".join(exported_components))) + basename = os.path.basename(path) if path is not None else None + with ExporterNetCDF(path, least_significant_digit) as exporter: + _export_to_exporter(network, exporter, basename=basename, + export_standard_types=export_standard_types) + return exporter.ds -def import_from_hdf5(network, path, skip_time=False): +def _import_from_importer(network, importer, basename, skip_time=False): """ - Import network data from HDF5 store at `path`. + Import network data from importer. Parameters ---------- - path : string - Name of HDF5 store + skip_time : bool + Skip importing time """ - with pd.HDFStore(path, mode='r') as store: - df = store['/network'] - logger.debug("/network") - logger.debug(df) - network.name = df.index[0] + attrs = importer.get_attributes() + + current_pypsa_version = [int(s) for s in network.pypsa_version.split(".")] + pypsa_version = None + + if attrs is not None: + network.name = attrs.pop('name') - ##https://docs.python.org/3/tutorial/datastructures.html#comparing-sequences-and-other-types - current_pypsa_version = [int(s) for s in network.pypsa_version.split(".")] try: - pypsa_version = [int(s) for s in df.at[network.name, 'pypsa_version'].split(".")] - df = df.drop('pypsa_version', axis=1) + pypsa_version = [int(s) for s in attrs.pop("pypsa_version").split(".")] except KeyError: pypsa_version = None - if pypsa_version is None or pypsa_version < current_pypsa_version: - logger.warning(dedent(""" + for attr, val in iteritems(attrs): + setattr(network, attr, val) + + ##https://docs.python.org/3/tutorial/datastructures.html#comparing-sequences-and-other-types + if pypsa_version is None or pypsa_version < current_pypsa_version: + logger.warning(dedent(""" Importing PyPSA from older version of PyPSA than current version {}. Please read the release notes at https://pypsa.org/doc/release_notes.html carefully to prepare your network for import. 
- """).format(network.pypsa_version)) - - for col in df.columns: - setattr(network, col, df[col][network.name]) + """).format(network.pypsa_version)) - #if there is snapshots.csv, read in snapshot data + importer.pypsa_version = pypsa_version + importer.current_pypsa_version = current_pypsa_version - if '/snapshots' in store: - df = store['/snapshots'] - - network.set_snapshots(df.index) - if "weightings" in df.columns: - network.snapshot_weightings = df["weightings"].reindex(network.snapshots) + # if there is snapshots.csv, read in snapshot data + df = importer.get_snapshots() + if df is not None: + network.set_snapshots(df.index) + if "weightings" in df.columns: + network.snapshot_weightings = df["weightings"].reindex(network.snapshots) - imported_components = [] + imported_components = [] - #now read in other components; make sure buses and carriers come first - for component in ["Bus", "Carrier"] + sorted(pypsa.components.all_components - {"Bus", "Carrier", "SubNetwork"}): - list_name = network.components[component]["list_name"] + # now read in other components; make sure buses and carriers come first + for component in ["Bus", "Carrier"] + sorted(network.all_components - {"Bus", "Carrier", "SubNetwork"}): + list_name = network.components[component]["list_name"] - if '/' + list_name not in store: - if component == "Bus": - logger.error("Error, no buses found") - return - else: - continue + df = importer.get_static(list_name) + if df is None: + if component == "Bus": + logger.error("Error, no buses found") + return + else: + continue - df = store['/' + list_name] - import_components_from_dataframe(network, df, component) + import_components_from_dataframe(network, df, component) - if not skip_time: - for attr in store: - if attr.startswith('/' + list_name + '_t/'): - attr_name = attr[len('/' + list_name + '_t/'):] - import_series_from_dataframe(network, store[attr], component, attr_name) + if not skip_time: + for attr, df in importer.get_series(list_name): + 
import_series_from_dataframe(network, df, component, attr) - logger.debug(getattr(network,list_name)) + logger.debug(getattr(network,list_name)) - imported_components.append(list_name) + imported_components.append(list_name) - logger.info("Imported network {} has {}".format(os.path.basename(path), ", ".join(imported_components))) + logger.info("Imported network{} has {}".format(" " + basename, ", ".join(imported_components))) def import_components_from_dataframe(network, dataframe, cls_name): """ @@ -404,7 +663,10 @@ def import_components_from_dataframe(network, dataframe, cls_name): cls_name, missing) non_static_attrs_in_df = non_static_attrs.index.intersection(dataframe.columns) - new_df = pd.concat((network.df(cls_name), dataframe.drop(non_static_attrs_in_df, axis=1))) + old_df = network.df(cls_name) + new_df = dataframe.drop(non_static_attrs_in_df, axis=1) + if not old_df.empty: + new_df = pd.concat((old_df, new_df), sort=False) if not new_df.index.is_unique: logger.error("Error, new components for {} are not unique".format(cls_name)) @@ -450,14 +712,14 @@ def import_series_from_dataframe(network, dataframe, cls_name, attr): if len(diff) > 0: logger.warning("Components {} for attribute {} of {} are not in main components dataframe {}".format(diff,attr,cls_name,list_name)) - diff = network.snapshots.difference(dataframe.index) - if len(diff): - logger.warning("Snapshots {} are missing from {} of {}".format(diff,attr,cls_name)) - - attr_series = network.components[cls_name]["attrs"].loc[attr] columns = dataframe.columns + diff = network.snapshots.difference(dataframe.index) + if len(diff): + logger.warning("Snapshots {} are missing from {} of {}. 
Filling with default value '{}'".format(diff,attr,cls_name,attr_series["default"])) + dataframe = dataframe.reindex(network.snapshots, fill_value=attr_series["default"]) + if not attr_series.static: pnl[attr] = pnl[attr].reindex(columns=df.index|columns, fill_value=attr_series.default) else: @@ -467,95 +729,6 @@ def import_series_from_dataframe(network, dataframe, cls_name, attr): -def import_from_csv_folder(network, csv_folder_name, encoding=None, skip_time=False): - """ - Import network data from CSVs in a folder. - - The CSVs must follow the standard form, see pypsa/examples. - - Parameters - ---------- - csv_folder_name : string - Name of folder - encoding : str, default None - Encoding to use for UTF when reading (ex. 'utf-8'). `List of Python - standard encodings - `_ - skip_time : bool, default False - Skip reading in time dependent attributes - """ - - if not os.path.isdir(csv_folder_name): - logger.error("Directory {} does not exist.".format(csv_folder_name)) - return - - #if there is network.csv, read in network data - - file_name = os.path.join(csv_folder_name,"network.csv") - - if os.path.isfile(file_name): - df = pd.read_csv(file_name,index_col=0,encoding=encoding) - logger.debug("networks.csv:") - logger.debug(df) - network.name = df.index[0] - - ##https://docs.python.org/3/tutorial/datastructures.html#comparing-sequences-and-other-types - current_pypsa_version = [int(s) for s in network.pypsa_version.split(".")] - pypsa_version = None - for col in df.columns: - if col == "pypsa_version": - pypsa_version = [int(s) for s in df.at[network.name,"pypsa_version"].split(".")] - else: - setattr(network,col,df[col][network.name]) - - if pypsa_version is None or pypsa_version < current_pypsa_version: - logger.warning("Importing PyPSA from older version of PyPSA than current version {}.\n\ - Please read the release notes at https://pypsa.org/doc/release_notes.html\n\ - carefully to prepare your network for import.".format(network.pypsa_version)) - - #if there 
is snapshots.csv, read in snapshot data - - file_name = os.path.join(csv_folder_name,"snapshots.csv") - - if os.path.isfile(file_name): - df = pd.read_csv(file_name, index_col=0, encoding=encoding, parse_dates=True) - network.set_snapshots(df.index) - if "weightings" in df.columns: - network.snapshot_weightings = df["weightings"].reindex(network.snapshots) - - imported_components = [] - - #now read in other components; make sure buses and carriers come first - for component in ["Bus", "Carrier"] + sorted(pypsa.components.all_components - {"Bus","Carrier","SubNetwork"}): - - list_name = network.components[component]["list_name"] - - file_name = os.path.join(csv_folder_name,list_name+".csv") - - if not os.path.isfile(file_name): - if component == "Bus": - logger.error("Error, no buses found") - return - else: - continue - - df = pd.read_csv(file_name,index_col=0,encoding=encoding) - - import_components_from_dataframe(network,df,component) - - if not skip_time: - file_attrs = [n for n in os.listdir(csv_folder_name) if n.startswith(list_name+"-") and n.endswith(".csv")] - - for file_name in file_attrs: - df = pd.read_csv(os.path.join(csv_folder_name,file_name), index_col=0, encoding=encoding, parse_dates=True) - import_series_from_dataframe(network,df,component,file_name[len(list_name)+1:-4]) - - logger.debug(getattr(network,list_name)) - - imported_components.append(list_name) - - logger.info("Imported network {} has {}".format(os.path.basename(csv_folder_name), ", ".join(imported_components))) - def import_from_pypower_ppc(network, ppc, overwrite_zero_s_nom=None): """ Import network from PYPOWER PPC dictionary format version 2. 
@@ -756,13 +929,13 @@ def import_from_pandapower_net(network, net): "q_set" : -(net.sgen.scaling*net.sgen.q_kvar).values/1e3, "bus" : net.bus.name.loc[net.sgen.bus].values, "control" : "PQ"}, - index=net.sgen.name))) + index=net.sgen.name)), sort=False) d["Generator"] = pd.concat((d["Generator"],pd.DataFrame({"control" : "Slack", "p_set" : 0., "q_set" : 0., "bus" : net.bus.name.loc[net.ext_grid.bus].values}, - index=net.ext_grid.name.fillna("External Grid")))) + index=net.ext_grid.name.fillna("External Grid"))), sort=False) d["Bus"].loc[net.bus.name.loc[net.ext_grid.bus].values,"v_mag_pu_set"] = net.ext_grid.vm_pu.values diff --git a/pypsa/networkclustering.py b/pypsa/networkclustering.py index b69bc7afe..2e58c35c3 100644 --- a/pypsa/networkclustering.py +++ b/pypsa/networkclustering.py @@ -40,22 +40,24 @@ from . import components, io +def _normed(s): + tot = s.sum() + if tot == 0: + return 1. + else: + return s/tot + def _flatten_multiindex(m, join=' '): if m.nlevels <= 1: return m levels = map(m.get_level_values, range(m.nlevels)) return reduce(lambda x, y: x+join+y, levels, next(levels)) -def _consense(x): - v = x.iat[0] - assert ((x == v).all() or x.isnull().all()) - return v - def _make_consense(component, attr): def consense(x): v = x.iat[0] assert ((x == v).all() or x.isnull().all()), ( "In {} cluster {} the values of attribute {} do not agree:\n{}" - .format(component, attr, x.name, x) + .format(component, x.name, attr, x) ) return v return consense @@ -65,40 +67,52 @@ def _haversine(coords): a = np.sin((lat[1]-lat[0])/2.)**2 + np.cos(lat[0]) * np.cos(lat[1]) * np.sin((lon[0] - lon[1])/2.)**2 return 6371.000 * 2 * np.arctan2( np.sqrt(a), np.sqrt(1-a) ) -def aggregategenerators(network, busmap, with_time=True): +def aggregategenerators(network, busmap, with_time=True, carriers=None, custom_strategies=dict()): + if carriers is None: + carriers = network.generators.carrier.unique() + + gens_agg_b = network.generators.carrier.isin(carriers) attrs = 
network.components["Generator"]["attrs"] - generators = network.generators.assign(bus=lambda df: df.bus.map(busmap)) + generators = (network.generators.loc[gens_agg_b] + .assign(bus=lambda df: df.bus.map(busmap))) columns = (set(attrs.index[attrs.static & attrs.status.str.startswith('Input')]) | {'weight'}) & set(generators.columns) grouper = [generators.bus, generators.carrier] weighting = generators.weight.groupby(grouper, axis=0).transform(lambda x: (x/x.sum()).fillna(1.)) - generators['p_nom_max'] /= weighting - + generators['capital_cost'] *= weighting strategies = {'p_nom_min':np.min,'p_nom_max': np.min, 'weight': np.sum, 'p_nom': np.sum, 'p_nom_opt': np.sum, 'marginal_cost': np.mean, 'capital_cost': np.mean} - strategies.update(zip(columns.difference(strategies), repeat(_consense))) - + strategies.update(custom_strategies) + if strategies['p_nom_max'] is np.min: + generators['p_nom_max'] /= weighting + + strategies.update((attr, _make_consense('Generator', attr)) + for attr in columns.difference(strategies)) new_df = generators.groupby(grouper, axis=0).agg(strategies) new_df.index = _flatten_multiindex(new_df.index).rename("name") + new_df = pd.concat([new_df, + network.generators.loc[~gens_agg_b] + .assign(bus=lambda df: df.bus.map(busmap))], axis=0, sort=False) + new_pnl = dict() if with_time: for attr, df in iteritems(network.generators_t): - if not df.empty: + pnl_gens_agg_b = df.columns.to_series().map(gens_agg_b) + df_agg = df.loc[:, pnl_gens_agg_b] + if not df_agg.empty: if attr == 'p_max_pu': - df = df.multiply(weighting.loc[df.columns], axis=1) - pnl_df = df.groupby(grouper, axis=1).sum() + df_agg = df_agg.multiply(weighting.loc[df_agg.columns], axis=1) + pnl_df = df_agg.groupby(grouper, axis=1).sum() pnl_df.columns = _flatten_multiindex(pnl_df.columns).rename("name") - new_pnl[attr] = pnl_df + new_pnl[attr] = pd.concat([df.loc[:, ~pnl_gens_agg_b], pnl_df], axis=1, sort=False) return new_df, new_pnl -def aggregateoneport(network, busmap, component, 
with_time=True): - +def aggregateoneport(network, busmap, component, with_time=True, custom_strategies=dict()): attrs = network.components[component]["attrs"] old_df = getattr(network, network.components[component]["list_name"]).assign(bus=lambda df: df.bus.map(busmap)) - - columns = set(attrs.index[attrs.static]) & set(old_df.columns) + columns = set(attrs.index[attrs.static & attrs.status.str.startswith('Input')]) & set(old_df.columns) if ('carrier' in columns and 'max_hours' in columns): grouper = [old_df.bus, old_df.carrier, old_df.max_hours] @@ -107,22 +121,22 @@ def aggregateoneport(network, busmap, component, with_time=True): else: grouper = old_df.bus - strategies = {attr: (np.sum - if attr in {'p', 'q', 'p_set', 'q_set', 'state_of_charge', - 'p_nom', 'p_nom_max', 'p_nom_min', 'p_nom_opt'} - - else np.mean - if attr in {'marginal_cost', 'capital_cost', 'efficiency', - 'efficiency_dispatch', 'standing_loss', 'efficiency_store'} - - else np.min - if attr in {'p_min_pu'} - - else _consense) - + def aggregate_max_hours(max_hours): + if (max_hours == max_hours.iloc[0]).all(): + return max_hours.iloc[0] + else: + return (max_hours * _normed(old_df.p_nom.reindex(max_hours.index))).sum() + default_strategies = dict(p=np.sum, q=np.sum, p_set=np.sum, q_set=np.sum, + p_nom=np.sum, p_nom_max=np.sum, p_nom_min=np.sum) + #max_hours=aggregate_max_hours) + strategies = {attr: default_strategies.get(attr, _make_consense(component, attr)) for attr in columns} - + strategies.update(custom_strategies) + if 'efficiency_dispatch' in columns: + strategies.update({'efficiency_dispatch':np.mean, 'standing_loss':np.mean, 'efficiency_store':np.mean}) + if 'capital_cost' in columns: + strategies.update({'capital_cost':np.mean}) new_df = old_df.groupby(grouper).agg(strategies) if 'max_hours' in columns: @@ -136,14 +150,12 @@ def aggregateoneport(network, busmap, component, with_time=True): new_df.set_index(['bus', 'carrier', 'max_hours']) new_df.index = 
_flatten_multiindex(new_df.index).rename("name") - new_pnl = dict() if with_time: old_pnl = network.pnl(component) for attr, df in iteritems(old_pnl): if not df.empty: pnl_df = df.groupby(grouper, axis=1).sum() - if 'max_hours' in columns: pnl_df.columns.set_levels(pnl_df.columns.levels[2].astype(object), level=2, inplace=True) pnl_df.columns.set_levels(pnl_df.columns.levels[2].astype(str), level=2, inplace=True) @@ -175,7 +187,7 @@ def aggregatelines(network, buses, interlines, line_length_factor=1.0): positive_order = interlines.bus0_s < interlines.bus1_s interlines_p = interlines[positive_order] interlines_n = interlines[~ positive_order].rename(columns={"bus0_s":"bus1_s", "bus1_s":"bus0_s"}) - interlines_c = pd.concat((interlines_p,interlines_n)) + interlines_c = pd.concat((interlines_p,interlines_n), sort=False) attrs = network.components["Line"]["attrs"] columns = set(attrs.index[attrs.static & attrs.status.str.startswith('Input')]).difference(('name', 'bus0', 'bus1')) @@ -185,8 +197,7 @@ def aggregatelines(network, buses, interlines, line_length_factor=1.0): for attr in (columns | {'sub_network'} - {'r', 'x', 'g', 'b', 'terrain_factor', 's_nom', 's_nom_min', 's_nom_max', 's_nom_extendable', - 'capital_cost', 'length', 'v_ang_min', - 'v_ang_max'}) + 'length', 'v_ang_min', 'v_ang_max'}) } def aggregatelinegroup(l): @@ -215,7 +226,8 @@ def aggregatelinegroup(l): s_nom_min=l['s_nom_min'].sum(), s_nom_max=l['s_nom_max'].sum(), s_nom_extendable=l['s_nom_extendable'].any(), - s_nom_total = l['s_nom_total'].sum(), + s_nom_total=l['s_nom_total'].sum(), + num_parallel=l['num_parallel'].sum(), capital_cost=costs, length=length_s, sub_network=consense['sub_network'](l['sub_network']), @@ -230,7 +242,7 @@ def aggregatelinegroup(l): linemap_p = interlines_p.join(lines['name'], on=['bus0_s', 'bus1_s'])['name'] linemap_n = interlines_n.join(lines['name'], on=['bus0_s', 'bus1_s'])['name'] - linemap = pd.concat((linemap_p,linemap_n)) + linemap = 
pd.concat((linemap_p,linemap_n), sort=False) return lines, linemap_p, linemap_n, linemap @@ -257,7 +269,9 @@ def get_buses_linemap_and_lines(network, busmap, line_length_factor=1.0, bus_str def get_clustering_from_busmap(network, busmap, with_time=True, line_length_factor=1.0, aggregate_generators_weighted=False, aggregate_one_ports={}, - bus_strategies=dict()): + aggregate_generators_carriers=None, + bus_strategies=dict(), one_port_strategies=dict(), + generator_strategies=dict()): buses, linemap, linemap_p, linemap_n, lines = get_buses_linemap_and_lines(network, busmap, line_length_factor, bus_strategies) @@ -270,11 +284,13 @@ def get_clustering_from_busmap(network, busmap, with_time=True, line_length_fact network_c.set_snapshots(network.snapshots) network_c.snapshot_weightings = network.snapshot_weightings.copy() - one_port_components = components.one_port_components.copy() + one_port_components = network.one_port_components.copy() if aggregate_generators_weighted: one_port_components.remove("Generator") - generators, generators_pnl = aggregategenerators(network, busmap, with_time=with_time) + generators, generators_pnl = aggregategenerators(network, busmap, with_time=with_time, + carriers=aggregate_generators_carriers, + custom_strategies=generator_strategies) io.import_components_from_dataframe(network_c, generators, "Generator") if with_time: for attr, df in iteritems(generators_pnl): @@ -283,7 +299,8 @@ def get_clustering_from_busmap(network, busmap, with_time=True, line_length_fact for one_port in aggregate_one_ports: one_port_components.remove(one_port) - new_df, new_pnl = aggregateoneport(network, busmap, component=one_port, with_time=with_time) + new_df, new_pnl = aggregateoneport(network, busmap, component=one_port, with_time=with_time, + custom_strategies=one_port_strategies.get(one_port, {})) io.import_components_from_dataframe(network_c, new_df, one_port) for attr, df in iteritems(new_pnl): io.import_series_from_dataframe(network_c, df, one_port, 
attr) @@ -351,11 +368,12 @@ def length_clustering(network, length): from sklearn.cluster import spectral_clustering as sk_spectral_clustering def busmap_by_spectral_clustering(network, n_clusters, **kwds): - lines = network.lines.loc[:,['bus0', 'bus1']].assign(weight=1./network.lines.x).set_index(['bus0','bus1']) - G = OrderedGraph() + lines = network.lines.loc[:,['bus0', 'bus1']].assign(weight=network.lines.num_parallel).set_index(['bus0','bus1']) + lines.weight+=0.1 + G = nx.Graph() G.add_nodes_from(network.buses.index) G.add_edges_from((u,v,dict(weight=w)) for (u,v),w in lines.itertuples()) - return pd.Series(sk_spectral_clustering(nx.adjacency_matrix(G), n_clusters, **kwds) + 1, + return pd.Series(list(map(str,sk_spectral_clustering(nx.adjacency_matrix(G), n_clusters, **kwds) + 1)), index=network.buses.index) def spectral_clustering(network, n_clusters=8, **kwds): @@ -372,19 +390,20 @@ def spectral_clustering(network, n_clusters=8, **kwds): # available using pip as python-louvain import community - def busmap_by_louvain(network, level=-1): - lines = network.lines.loc[:,['bus0', 'bus1']].assign(weight=1./network.lines.x).set_index(['bus0','bus1']) + def busmap_by_louvain(network): + lines = network.lines.loc[:,['bus0', 'bus1']].assign(weight=network.lines.num_parallel).set_index(['bus0','bus1']) + lines.weight+=0.1 G = nx.Graph() G.add_nodes_from(network.buses.index) G.add_edges_from((u,v,dict(weight=w)) for (u,v),w in lines.itertuples()) - dendrogram = community.generate_dendrogram(G) - if level < 0: - level += len(dendrogram) - return pd.Series(community.partition_at_level(dendrogram, level=level), - index=network.buses.index) - - def louvain_clustering(network, level=-1, **kwds): - busmap = busmap_by_louvain(network, level=level) + b=community.best_partition(G) + list_cluster=[] + for i in b: + list_cluster.append(str(b[i])) + return pd.Series(list_cluster,index=network.buses.index) + + def louvain_clustering(network, **kwds): + busmap = 
busmap_by_louvain(network) return get_clustering_from_busmap(network, busmap) except ImportError: @@ -411,8 +430,11 @@ def busmap_by_kmeans(network, bus_weightings, n_clusters, buses_i=None, load_clu Series of integer weights for buses, indexed by bus names. n_clusters : int Final number of clusters desired. + buses_i : None|pandas.Index + If not None (default), subset of buses to cluster. kwargs - Any remaining arguments to be passed to KMeans (e.g. n_init, n_jobs) + Any remaining arguments to be passed to KMeans (e.g. n_init, + n_jobs). Returns ------- @@ -435,11 +457,12 @@ def busmap_by_kmeans(network, bus_weightings, n_clusters, buses_i=None, load_clu busmap_array = np.loadtxt(load_cluster) kmeans = KMeans(init=busmap_array, n_clusters=n_clusters, n_init=n_init, max_iter=max_iter, tol=tol, n_jobs=n_jobs, ** kwargs) kmeans.fit(points) + else: - kmeans = KMeans(init='k-means++', n_clusters=n_clusters, n_init=n_init, max_iter=max_iter, tol=tol, n_jobs=n_jobs, ** kwargs) + kmeans = KMeans(init='k-means++', n_clusters=n_clusters, ** kwargs) kmeans.fit(points) np.savetxt("cluster_coord_k_%i_result" % (n_clusters), kmeans.cluster_centers_) - print("Inertia of k-means = ", kmeans.inertia_) + busmap = pd.Series(data=kmeans.predict(network.buses.loc[buses_i, ["x","y"]]), index=buses_i).astype(str) @@ -514,7 +537,7 @@ def rectangular_grid_clustering(network, divisions): ################ # Reduce stubs/dead-ends, i.e. nodes with valency 1, iteratively to remove tree-like structures -def busmap_by_stubs(network): +def busmap_by_stubs(network, matching_attrs=None): """Create a busmap by reducing stubs and stubby trees (i.e. sequentially reducing dead-ends). 
@@ -522,6 +545,9 @@ def busmap_by_stubs(network): ---------- network : pypsa.Network + matching_attrs : None|[str] + bus attributes clusters have to agree on + Returns ------- busmap : pandas.Series @@ -530,28 +556,26 @@ def busmap_by_stubs(network): """ - busmap = pd.Series(network.buses.index,network.buses.index) + busmap = pd.Series(network.buses.index, network.buses.index) - network = network.copy(with_time=False) + G = network.graph() - count = 0 + def attrs_match(u, v): + return (matching_attrs is None or + (network.buses.loc[u, matching_attrs] == + network.buses.loc[v, matching_attrs]).all()) while True: - old_count = count - logger.info("{} buses".format(len(network.buses))) - graph = network.graph() - for u in graph.node: - neighbours = list(graph.adj[u].keys()) + stubs = [] + for u in G.node: + neighbours = list(G.adj[u].keys()) if len(neighbours) == 1: - neighbour = neighbours[0] - count +=1 - lines = list(graph.adj[u][neighbour].keys()) - for line in lines: - network.remove(*line) - network.remove("Bus",u) - busmap[busmap==u] = neighbour - logger.info("{} deleted".format(count)) - if old_count == count: + v, = neighbours + if attrs_match(u, v): + busmap[busmap == u] = v + stubs.append(u) + G.remove_nodes_from(stubs) + if len(stubs) == 0: break return busmap diff --git a/pypsa/opf.py b/pypsa/opf.py index 05ac47c71..d7fb2a7eb 100644 --- a/pypsa/opf.py +++ b/pypsa/opf.py @@ -1,6 +1,6 @@ -## Copyright 2015-2017 Tom Brown (FIAS), Jonas Hoersch (FIAS), David +## Copyright 2015-2018 Tom Brown (FIAS), Jonas Hoersch (FIAS), David ## Schlachtberger (FIAS) ## This program is free software; you can redistribute it and/or @@ -22,14 +22,14 @@ # make the code as Python 3 compatible as possible from __future__ import division, absolute_import -from six import iteritems, string_types +from six import iteritems, itervalues, string_types __author__ = "Tom Brown (FIAS), Jonas Hoersch (FIAS), David Schlachtberger (FIAS)" __copyright__ = "Copyright 2015-2017 Tom Brown 
(FIAS), Jonas Hoersch (FIAS), David Schlachtberger (FIAS), GNU GPL 3" -import pandas as pd import numpy as np +import pandas as pd from scipy.sparse.linalg import spsolve from pyomo.environ import (ConcreteModel, Var, Objective, NonNegativeReals, Constraint, Reals, @@ -60,7 +60,10 @@ class PersistentSolver(): pass patch_optsolver_free_model_before_solving, patch_optsolver_record_memusage_before_solving, empty_network, free_pyomo_initializers) -from .descriptors import get_switchable_as_dense, allocate_series_dataframes +from .descriptors import (get_switchable_as_dense, get_switchable_as_iter, + allocate_series_dataframes, zsum) + +pd.Series.zsum = zsum @@ -269,44 +272,62 @@ def gen_p_nom_bounds(model, gen_name): ## Deal with ramp limits without unit commitment ## - sns = snapshots[1:] - - ru_gens = network.generators.index[~network.generators.ramp_limit_up.isnull()] + ru_gens = network.generators.index[network.generators.ramp_limit_up.notnull()] ru = {} for gen in ru_gens: - for i,sn in enumerate(sns): + for sn, sn_prev in zip(snapshots[1:], snapshots[:-1]): if network.generators.at[gen, "p_nom_extendable"]: - lhs = LExpression([(1, network.model.generator_p[gen,sn]), (-1, network.model.generator_p[gen,snapshots[i]]), (-network.generators.at[gen, "ramp_limit_up"], network.model.generator_p_nom[gen])]) + lhs = LExpression([(1, network.model.generator_p[gen,sn]), + (-1, network.model.generator_p[gen,sn_prev]), + (-network.generators.at[gen, "ramp_limit_up"], + network.model.generator_p_nom[gen])]) elif not network.generators.at[gen, "committable"]: - lhs = LExpression([(1, network.model.generator_p[gen,sn]), (-1, network.model.generator_p[gen,snapshots[i]])], -network.generators.at[gen, "ramp_limit_up"]*network.generators.at[gen, "p_nom"]) + lhs = LExpression([(1, network.model.generator_p[gen,sn]), + (-1, network.model.generator_p[gen,sn_prev])], + -network.generators.at[gen, "ramp_limit_up"]*network.generators.at[gen, "p_nom"]) else: - lhs = LExpression([(1, 
network.model.generator_p[gen,sn]), (-1, network.model.generator_p[gen,snapshots[i]]), ((network.generators.at[gen, "ramp_limit_start_up"] - network.generators.at[gen, "ramp_limit_up"])*network.generators.at[gen, "p_nom"], network.model.generator_status[gen,snapshots[i]]), (-network.generators.at[gen, "ramp_limit_start_up"]*network.generators.at[gen, "p_nom"], network.model.generator_status[gen,sn])]) + lhs = LExpression([(1, network.model.generator_p[gen,sn]), + (-1, network.model.generator_p[gen,sn_prev]), + ((network.generators.at[gen, "ramp_limit_start_up"] - network.generators.at[gen, "ramp_limit_up"])*network.generators.at[gen, "p_nom"], + network.model.generator_status[gen,sn_prev]), + (-network.generators.at[gen, "ramp_limit_start_up"]*network.generators.at[gen, "p_nom"], + network.model.generator_status[gen,sn])]) ru[gen,sn] = LConstraint(lhs,"<=") - l_constraint(network.model, "ramp_up", ru, list(ru_gens), sns) + l_constraint(network.model, "ramp_up", ru, list(ru_gens), snapshots[1:]) - rd_gens = network.generators.index[~network.generators.ramp_limit_down.isnull()] + rd_gens = network.generators.index[network.generators.ramp_limit_down.notnull()] rd = {} for gen in rd_gens: - for i,sn in enumerate(sns): + for sn, sn_prev in zip(snapshots[1:], snapshots[:-1]): if network.generators.at[gen, "p_nom_extendable"]: - lhs = LExpression([(1, network.model.generator_p[gen,sn]), (-1, network.model.generator_p[gen,snapshots[i]]), (network.generators.at[gen, "ramp_limit_down"], network.model.generator_p_nom[gen])]) + lhs = LExpression([(1, network.model.generator_p[gen,sn]), + (-1, network.model.generator_p[gen,sn_prev]), + (network.generators.at[gen, "ramp_limit_down"], + network.model.generator_p_nom[gen])]) elif not network.generators.at[gen, "committable"]: - lhs = LExpression([(1, network.model.generator_p[gen,sn]), (-1, network.model.generator_p[gen,snapshots[i]])], network.generators.loc[gen, "ramp_limit_down"]*network.generators.at[gen, "p_nom"]) + lhs = 
LExpression([(1, network.model.generator_p[gen,sn]), + (-1, network.model.generator_p[gen,sn_prev])], + network.generators.loc[gen, "ramp_limit_down"]*network.generators.at[gen, "p_nom"]) else: - lhs = LExpression([(1, network.model.generator_p[gen,sn]), (-1, network.model.generator_p[gen,snapshots[i]]), ((network.generators.at[gen, "ramp_limit_down"] - network.generators.at[gen, "ramp_limit_shut_down"])*network.generators.at[gen, "p_nom"], network.model.generator_status[gen,sn]), (network.generators.at[gen, "ramp_limit_shut_down"]*network.generators.at[gen, "p_nom"], network.model.generator_status[gen,snapshots[i]])]) + lhs = LExpression([(1, network.model.generator_p[gen,sn]), + (-1, network.model.generator_p[gen,sn_prev]), + ((network.generators.at[gen, "ramp_limit_down"] - network.generators.at[gen, "ramp_limit_shut_down"])*network.generators.at[gen, "p_nom"], + network.model.generator_status[gen,sn]), + (network.generators.at[gen, "ramp_limit_shut_down"]*network.generators.at[gen, "p_nom"], + network.model.generator_status[gen,sn_prev])]) rd[gen,sn] = LConstraint(lhs,">=") - l_constraint(network.model, "ramp_down", rd, list(rd_gens), sns) + l_constraint(network.model, "ramp_down", rd, list(rd_gens), snapshots[1:]) @@ -428,7 +449,7 @@ def su_p_lower(model,su_name,snapshot): soc[su,sn] = [[],"==",0.] 
- elapsed_hours = network.snapshot_weightings[sn] + elapsed_hours = network.snapshot_weightings[sn] if i == 0 and not sus.at[su,"cyclic_state_of_charge"]: previous_state_of_charge = sus.at[su,"state_of_charge_initial"] @@ -591,8 +612,8 @@ def define_link_flows(network,snapshots): fixed_links_i = network.links.index[~ network.links.p_nom_extendable] - p_max_pu = get_switchable_as_dense(network, 'Link', 'p_max_pu') - p_min_pu = get_switchable_as_dense(network, 'Link', 'p_min_pu') + p_max_pu = get_switchable_as_dense(network, 'Link', 'p_max_pu', snapshots) + p_min_pu = get_switchable_as_dense(network, 'Link', 'p_min_pu', snapshots) fixed_lower = p_min_pu.loc[:,fixed_links_i].multiply(network.links.loc[fixed_links_i, 'p_nom']) fixed_upper = p_max_pu.loc[:,fixed_links_i].multiply(network.links.loc[fixed_links_i, 'p_nom']) @@ -661,8 +682,8 @@ def define_passive_branch_flows_with_angles(network,snapshots): bt = branch[0] bn = branch[1] sub = passive_branches.at[branch,"sub_network"] - attribute = "r_pu" if network.sub_networks.at[sub,"carrier"] == "DC" else "x_pu" - y = 1/(passive_branches.at[branch,attribute]*(passive_branches.at[branch,"tap_ratio"] if bt == "Transformer" else 1.)) + attribute = "r_pu_eff" if network.sub_networks.at[sub,"carrier"] == "DC" else "x_pu_eff" + y = 1/ passive_branches.at[ branch, attribute] for sn in snapshots: lhs = LExpression([(y,network.model.voltage_angles[bus0,sn]), (-y,network.model.voltage_angles[bus1,sn]), @@ -708,6 +729,40 @@ def define_passive_branch_flows_with_PTDF(network,snapshots,ptdf_tolerance=0.): list(passive_branches.index), snapshots) +def define_sub_network_cycle_constraints( subnetwork, snapshots, passive_branch_p, attribute): + """ Constructs cycle_constraints for a particular subnetwork + """ + + sub_network_cycle_constraints = {} + sub_network_cycle_index = [] + + matrix = subnetwork.C.tocsc() + branches = subnetwork.branches() + + for col_j in range( matrix.shape[1] ): + cycle_is = 
matrix.getcol(col_j).nonzero()[0] + + if len(cycle_is) == 0: continue + + sub_network_cycle_index.append((subnetwork.name, col_j)) + + + branch_idx_attributes = [] + + for cycle_i in cycle_is: + branch_idx = branches.index[cycle_i] + attribute_value = 1e5 * branches.at[ branch_idx, attribute] * subnetwork.C[ cycle_i, col_j] + branch_idx_attributes.append( (branch_idx, attribute_value)) + + for snapshot in snapshots: + expression_list = [ (attribute_value, + passive_branch_p[branch_idx[0], branch_idx[1], snapshot]) for (branch_idx, attribute_value) in branch_idx_attributes] + + lhs = LExpression(expression_list) + sub_network_cycle_constraints[subnetwork.name,col_j,snapshot] = LConstraint(lhs,"==",LExpression()) + + return( sub_network_cycle_index, sub_network_cycle_constraints) + def define_passive_branch_flows_with_cycles(network,snapshots): for sub_network in network.sub_networks.obj: @@ -728,22 +783,16 @@ def define_passive_branch_flows_with_cycles(network,snapshots): cycle_index = [] cycle_constraints = {} - for sn in network.sub_networks.obj: - - branches = sn.branches() - attribute = "r_pu" if network.sub_networks.at[sn.name,"carrier"] == "DC" else "x_pu" + for subnetwork in network.sub_networks.obj: + branches = subnetwork.branches() + attribute = "r_pu_eff" if network.sub_networks.at[subnetwork.name,"carrier"] == "DC" else "x_pu_eff" - for j in range(sn.C.shape[1]): + sub_network_cycle_index, sub_network_cycle_constraints = define_sub_network_cycle_constraints( subnetwork, + snapshots, + network.model.passive_branch_p, attribute) - cycle_is = sn.C[:,j].nonzero()[0] - cycle_index.append((sn.name, j)) - - for snapshot in snapshots: - lhs = LExpression([(branches.at[branches.index[i],attribute]* - (branches.at[branches.index[i],"tap_ratio"] if branches.index[i][0] == "Transformer" else 1.)*sn.C[i,j], - network.model.passive_branch_p[branches.index[i][0],branches.index[i][1],snapshot]) - for i in cycle_is]) - cycle_constraints[sn.name,j,snapshot] = 
LConstraint(lhs,"==",LExpression()) + cycle_index.extend( sub_network_cycle_index) + cycle_constraints.update( sub_network_cycle_constraints) l_constraint(network.model, "cycle_constraints", cycle_constraints, cycle_index, snapshots) @@ -753,22 +802,22 @@ def define_passive_branch_flows_with_cycles(network,snapshots): flows = {} - for sn in network.sub_networks.obj: - branches = sn.branches() - buses = sn.buses() + for subnetwork in network.sub_networks.obj: + branches = subnetwork.branches() + buses = subnetwork.buses() for i,branch in enumerate(branches.index): bt = branch[0] bn = branch[1] - cycle_is = sn.C[i,:].nonzero()[1] - tree_is = sn.T[i,:].nonzero()[1] + cycle_is = subnetwork.C[i,:].nonzero()[1] + tree_is = subnetwork.T[i,:].nonzero()[1] if len(cycle_is) + len(tree_is) == 0: logger.error("The cycle formulation does not support infinite impedances, yet.") for snapshot in snapshots: - expr = LExpression([(sn.C[i,j], network.model.cycles[sn.name,j,snapshot]) + expr = LExpression([(subnetwork.C[i,j], network.model.cycles[subnetwork.name,j,snapshot]) for j in cycle_is]) - lhs = expr + sum(sn.T[i,j]*network._p_balance[buses.index[j],snapshot] + lhs = expr + sum(subnetwork.T[i,j]*network._p_balance[buses.index[j],snapshot] for j in tree_is) rhs = LExpression([(1,network.model.passive_branch_p[bt,bn,snapshot])]) @@ -779,7 +828,9 @@ def define_passive_branch_flows_with_cycles(network,snapshots): list(passive_branches.index), snapshots) + def define_passive_branch_flows_with_kirchhoff(network,snapshots,skip_vars=False): + """ define passive branch flows with the kirchoff method """ for sub_network in network.sub_networks.obj: find_tree(sub_network) @@ -798,24 +849,17 @@ def define_passive_branch_flows_with_kirchhoff(network,snapshots,skip_vars=False cycle_index = [] cycle_constraints = {} - for sn in network.sub_networks.obj: - - branches = sn.branches() - attribute = "r_pu" if network.sub_networks.at[sn.name,"carrier"] == "DC" else "x_pu" + for subnetwork in 
network.sub_networks.obj: - for j in range(sn.C.shape[1]): + branches = subnetwork.branches() + attribute = "r_pu_eff" if network.sub_networks.at[subnetwork.name,"carrier"] == "DC" else "x_pu_eff" - cycle_is = sn.C[:,j].nonzero()[0] - if len(cycle_is) == 0: continue + sub_network_cycle_index, sub_network_cycle_constraints = define_sub_network_cycle_constraints( subnetwork, + snapshots, + network.model.passive_branch_p, attribute) - cycle_index.append((sn.name, j)) - - for snapshot in snapshots: - lhs = LExpression([(branches.at[branches.index[i],attribute]* - (branches.at[branches.index[i],"tap_ratio"] if branches.index[i][0] == "Transformer" else 1.)*sn.C[i,j], - network.model.passive_branch_p[branches.index[i][0], branches.index[i][1], snapshot]) - for i in cycle_is]) - cycle_constraints[sn.name,j,snapshot] = LConstraint(lhs,"==",LExpression()) + cycle_index.extend( sub_network_cycle_index) + cycle_constraints.update( sub_network_cycle_constraints) l_constraint(network.model, "cycle_constraints", cycle_constraints, cycle_index, snapshots) @@ -826,13 +870,17 @@ def define_passive_branch_constraints(network,snapshots): extendable_branches = passive_branches[passive_branches.s_nom_extendable] fixed_branches = passive_branches[~ passive_branches.s_nom_extendable] + + s_max_pu = pd.concat({c : get_switchable_as_dense(network, c, 's_max_pu', snapshots) + for c in network.passive_branch_components}, axis=1, sort=False) + flow_upper = {(b[0],b[1],sn) : [[(1,network.model.passive_branch_p[b[0],b[1],sn])], - "<=", fixed_branches.at[b,"s_nom"]] + "<=", s_max_pu.at[sn,b]*fixed_branches.at[b,"s_nom"]] for b in fixed_branches.index for sn in snapshots} flow_upper.update({(b[0],b[1],sn) : [[(1,network.model.passive_branch_p[b[0],b[1],sn]), - (-1,network.model.passive_branch_s_nom[b[0],b[1]])],"<=",0] + (-s_max_pu.at[sn,b],network.model.passive_branch_s_nom[b[0],b[1]])],"<=",0] for b in extendable_branches.index for sn in snapshots}) @@ -840,12 +888,12 @@ def 
define_passive_branch_constraints(network,snapshots): list(passive_branches.index), snapshots) flow_lower = {(b[0],b[1],sn) : [[(1,network.model.passive_branch_p[b[0],b[1],sn])], - ">=", -fixed_branches.at[b,"s_nom"]] + ">=", -s_max_pu.at[sn,b]*fixed_branches.at[b,"s_nom"]] for b in fixed_branches.index for sn in snapshots} flow_lower.update({(b[0],b[1],sn): [[(1,network.model.passive_branch_p[b[0],b[1],sn]), - (1,network.model.passive_branch_s_nom[b[0],b[1]])],">=",0] + (s_max_pu.at[sn,b],network.model.passive_branch_s_nom[b[0],b[1]])],">=",0] for b in extendable_branches.index for sn in snapshots}) @@ -864,7 +912,7 @@ def define_nodal_balances(network,snapshots): for bus in network.buses.index for sn in snapshots} - efficiency = get_switchable_as_dense(network, 'Link', 'efficiency') + efficiency = get_switchable_as_dense(network, 'Link', 'efficiency', snapshots) for cb in network.links.index: bus0 = network.links.at[cb,"bus0"] @@ -874,6 +922,14 @@ def define_nodal_balances(network,snapshots): network._p_balance[bus0,sn].variables.append((-1,network.model.link_p[cb,sn])) network._p_balance[bus1,sn].variables.append((efficiency.at[sn,cb],network.model.link_p[cb,sn])) + #Add any other buses to which the links are attached + for i in [int(col[3:]) for col in network.links.columns if col[:3] == "bus" and col not in ["bus0","bus1"]]: + efficiency = get_switchable_as_dense(network, 'Link', 'efficiency{}'.format(i), snapshots) + for cb in network.links.index[network.links["bus{}".format(i)] != ""]: + bus = network.links.at[cb, "bus{}".format(i)] + for sn in snapshots: + network._p_balance[bus,sn].variables.append((efficiency.at[sn,cb],network.model.link_p[cb,sn])) + for gen in network.generators.index: bus = network.generators.at[gen,"bus"] @@ -881,7 +937,7 @@ def define_nodal_balances(network,snapshots): for sn in snapshots: network._p_balance[bus,sn].variables.append((sign,network.model.generator_p[gen,sn])) - load_p_set = get_switchable_as_dense(network, 'Load', 
'p_set') + load_p_set = get_switchable_as_dense(network, 'Load', 'p_set', snapshots) for load in network.loads.index: bus = network.loads.at[load,"bus"] sign = network.loads.at[load,"sign"] @@ -1011,25 +1067,32 @@ def define_linear_objective(network,snapshots): sdc_gens_i = network.generators.index[~network.generators.p_nom_extendable & network.generators.committable & (network.generators.shut_down_cost > 0)] + marginal_cost_it = zip(get_switchable_as_iter(network, 'Generator', 'marginal_cost', snapshots), + get_switchable_as_iter(network, 'StorageUnit', 'marginal_cost', snapshots), + get_switchable_as_iter(network, 'Store', 'marginal_cost', snapshots), + get_switchable_as_iter(network, 'Link', 'marginal_cost', snapshots)) + objective = LExpression() - for sn in snapshots: + for sn, marginal_cost in zip(snapshots, marginal_cost_it): + gen_mc, su_mc, st_mc, link_mc = marginal_cost + weight = network.snapshot_weightings[sn] for gen in network.generators.index: - coefficient = network.generators.at[gen, "marginal_cost"] * weight + coefficient = gen_mc.at[gen] * weight objective.variables.extend([(coefficient, model.generator_p[gen, sn])]) for su in network.storage_units.index: - coefficient = network.storage_units.at[su, "marginal_cost"] * weight + coefficient = su_mc.at[su] * weight objective.variables.extend([(coefficient, model.storage_p_dispatch[su,sn])]) for store in network.stores.index: - coefficient = network.stores.at[store, "marginal_cost"] * weight + coefficient = st_mc.at[store] * weight objective.variables.extend([(coefficient, model.store_p[store,sn])]) for link in network.links.index: - coefficient = network.links.at[link, "marginal_cost"] * weight + coefficient = link_mc.at[link] * weight objective.variables.extend([(coefficient, model.link_p[link,sn])]) @@ -1037,23 +1100,23 @@ def define_linear_objective(network,snapshots): objective.variables.extend([(extendable_generators.at[gen,"capital_cost"], model.generator_p_nom[gen]) for gen in 
extendable_generators.index]) - objective.constant -= (extendable_generators.capital_cost * extendable_generators.p_nom).sum() + objective.constant -= (extendable_generators.capital_cost * extendable_generators.p_nom).zsum() objective.variables.extend([(ext_sus.at[su,"capital_cost"], model.storage_p_nom[su]) for su in ext_sus.index]) - objective.constant -= (ext_sus.capital_cost*ext_sus.p_nom).sum() + objective.constant -= (ext_sus.capital_cost*ext_sus.p_nom).zsum() objective.variables.extend([(ext_stores.at[store,"capital_cost"], model.store_e_nom[store]) for store in ext_stores.index]) - objective.constant -= (ext_stores.capital_cost*ext_stores.e_nom).sum() + objective.constant -= (ext_stores.capital_cost*ext_stores.e_nom).zsum() objective.variables.extend([(extendable_passive_branches.at[b,"capital_cost"], model.passive_branch_s_nom[b]) for b in extendable_passive_branches.index]) - objective.constant -= (extendable_passive_branches.capital_cost * extendable_passive_branches.s_nom).sum() + objective.constant -= (extendable_passive_branches.capital_cost * extendable_passive_branches.s_nom).zsum() objective.variables.extend([(extendable_links.at[b,"capital_cost"], model.link_p_nom[b]) for b in extendable_links.index]) - objective.constant -= (extendable_links.capital_cost * extendable_links.p_nom).sum() + objective.constant -= (extendable_links.capital_cost * extendable_links.p_nom).zsum() ## Unit commitment costs @@ -1065,10 +1128,8 @@ def define_linear_objective(network,snapshots): l_objective(model,objective) -def extract_optimisation_results(network, snapshots, formulation="angles"): - - from .components import \ - passive_branch_components, branch_components, controllable_one_port_components +def extract_optimisation_results(network, snapshots, formulation="angles", free_pyomo=True, + extra_postprocessing=None): if isinstance(snapshots, pd.DatetimeIndex) and _pd_version < '0.18.0': # Work around pandas bug #12050 
(https://github.com/pydata/pandas/issues/12050) @@ -1081,40 +1142,60 @@ def extract_optimisation_results(network, snapshots, formulation="angles"): 'Bus': ['p', 'v_ang', 'v_mag_pu', 'marginal_price'], 'Line': ['p0', 'p1', 'mu_lower', 'mu_upper'], 'Transformer': ['p0', 'p1', 'mu_lower', 'mu_upper'], - 'Link': ['p0', 'p1', 'mu_lower', 'mu_upper']}) + 'Link': ["p"+col[3:] for col in network.links.columns if col[:3] == "bus"] + +['mu_lower', 'mu_upper']}) #get value of objective function - network.objective = network.results["Problem"][0]["Lower bound"] + network.objective = network.results["Problem"][0]["Upper bound"] model = network.model duals = pd.Series(list(model.dual.values()), index=pd.Index(list(model.dual.keys()))) - def as_series(indexedvar): - return pd.Series(indexedvar.get_values()) + if free_pyomo: + model.dual.clear() + + def clear_indexedvar(indexedvar): + for v in itervalues(indexedvar._data): + v.clear() + + def get_values(indexedvar, free=free_pyomo): + s = pd.Series(indexedvar.get_values()) + if free: + clear_indexedvar(indexedvar) + return s def set_from_series(df, series): df.loc[snapshots] = series.unstack(0).reindex(columns=df.columns) + def get_shadows(constraint, multiind=True): + if len(constraint) == 0: return pd.Series() + + index = list(constraint.keys()) + if multiind: + index = pd.MultiIndex.from_tuples(index) + cdata = pd.Series(list(constraint.values()), index=index) + return cdata.map(duals) + if len(network.generators): - set_from_series(network.generators_t.p, as_series(model.generator_p)) + set_from_series(network.generators_t.p, get_values(model.generator_p)) if len(network.storage_units): set_from_series(network.storage_units_t.p, - as_series(model.storage_p_dispatch) - - as_series(model.storage_p_store)) + get_values(model.storage_p_dispatch) + - get_values(model.storage_p_store)) set_from_series(network.storage_units_t.state_of_charge, - as_series(model.state_of_charge)) + get_values(model.state_of_charge)) if 
(network.storage_units_t.inflow.max() > 0).any(): set_from_series(network.storage_units_t.spill, - as_series(model.storage_p_spill)) + get_values(model.storage_p_spill)) network.storage_units_t.spill.fillna(0, inplace=True) #p_spill doesn't exist if inflow=0 if len(network.stores): - set_from_series(network.stores_t.p, as_series(model.store_p)) - set_from_series(network.stores_t.e, as_series(model.store_e)) + set_from_series(network.stores_t.p, get_values(model.store_p)) + set_from_series(network.stores_t.e, get_values(model.store_e)) if len(network.loads): load_p_set = get_switchable_as_dense(network, 'Load', 'p_set', snapshots) @@ -1125,25 +1206,27 @@ def set_from_series(df, series): pd.concat({c.name: c.pnl.p.loc[snapshots].multiply(c.df.sign, axis=1) .groupby(c.df.bus, axis=1).sum() - for c in network.iterate_components(controllable_one_port_components)}) \ + for c in network.iterate_components(network.controllable_one_port_components)}, + sort=False) \ .sum(level=1) \ - .reindex_axis(network.buses_t.p.columns, axis=1, fill_value=0.) + .reindex(columns=network.buses_t.p.columns, fill_value=0.) 
# passive branches - passive_branches = as_series(model.passive_branch_p) - for c in network.iterate_components(passive_branch_components): + passive_branches = get_values(model.passive_branch_p) + flow_lower = get_shadows(model.flow_lower) + flow_upper = get_shadows(model.flow_upper) + for c in network.iterate_components(network.passive_branch_components): set_from_series(c.pnl.p0, passive_branches.loc[c.name]) c.pnl.p1.loc[snapshots] = - c.pnl.p0.loc[snapshots] - set_from_series(c.pnl.mu_lower, pd.Series(list(model.flow_lower.values()), - index=pd.MultiIndex.from_tuples(list(model.flow_lower.keys()))).map(duals)[c.name]) - set_from_series(c.pnl.mu_upper, -pd.Series(list(model.flow_upper.values()), - index=pd.MultiIndex.from_tuples(list(model.flow_upper.keys()))).map(duals)[c.name]) + set_from_series(c.pnl.mu_lower, flow_lower[c.name]) + set_from_series(c.pnl.mu_upper, -flow_upper[c.name]) + del flow_lower, flow_upper # active branches if len(network.links): - set_from_series(network.links_t.p0, as_series(model.link_p)) + set_from_series(network.links_t.p0, get_values(model.link_p)) efficiency = get_switchable_as_dense(network, 'Link', 'efficiency', snapshots) @@ -1157,12 +1240,20 @@ def set_from_series(df, series): .groupby(network.links.bus1, axis=1).sum() .reindex(columns=network.buses_t.p.columns, fill_value=0.)) - set_from_series(network.links_t.mu_lower, pd.Series(list(model.link_p_lower.values()), - index=pd.MultiIndex.from_tuples(list(model.link_p_lower.keys()))).map(duals)) - set_from_series(network.links_t.mu_upper, -pd.Series(list(model.link_p_upper.values()), - index=pd.MultiIndex.from_tuples(list(model.link_p_upper.keys()))).map(duals)) + #Add any other buses to which the links are attached + for i in [int(col[3:]) for col in network.links.columns if col[:3] == "bus" and col not in ["bus0","bus1"]]: + efficiency = get_switchable_as_dense(network, 'Link', 'efficiency{}'.format(i), snapshots) + p_name = "p{}".format(i) + links = 
network.links.index[network.links["bus{}".format(i)] != ""] + network.links_t[p_name].loc[snapshots, links] = - network.links_t.p0.loc[snapshots, links]*efficiency.loc[snapshots, links] + network.buses_t.p.loc[snapshots] -= (network.links_t[p_name].loc[snapshots, links] + .groupby(network.links["bus{}".format(i)], axis=1).sum() + .reindex(columns=network.buses_t.p.columns, fill_value=0.)) + set_from_series(network.links_t.mu_lower, get_shadows(model.link_p_lower)) + set_from_series(network.links_t.mu_upper, - get_shadows(model.link_p_upper)) + if len(network.buses): if formulation in {'angles', 'kirchhoff'}: set_from_series(network.buses_t.marginal_price, @@ -1175,7 +1266,7 @@ def set_from_series(df, series): if formulation == "angles": set_from_series(network.buses_t.v_ang, - as_series(model.voltage_angles)) + get_values(model.voltage_angles)) elif formulation in ["ptdf","cycles","kirchhoff"]: for sn in network.sub_networks.obj: network.buses_t.v_ang.loc[snapshots,sn.slack_bus] = 0. @@ -1192,21 +1283,21 @@ def set_from_series(df, series): network.generators.p_nom_opt = network.generators.p_nom network.generators.loc[network.generators.p_nom_extendable, 'p_nom_opt'] = \ - as_series(network.model.generator_p_nom) + get_values(network.model.generator_p_nom) network.storage_units.p_nom_opt = network.storage_units.p_nom network.storage_units.loc[network.storage_units.p_nom_extendable, 'p_nom_opt'] = \ - as_series(network.model.storage_p_nom) + get_values(network.model.storage_p_nom) network.stores.e_nom_opt = network.stores.e_nom network.stores.loc[network.stores.e_nom_extendable, 'e_nom_opt'] = \ - as_series(network.model.store_e_nom) + get_values(network.model.store_e_nom) - s_nom_extendable_passive_branches = as_series(model.passive_branch_s_nom) - for c in network.iterate_components(passive_branch_components): + s_nom_extendable_passive_branches = get_values(model.passive_branch_s_nom) + for c in network.iterate_components(network.passive_branch_components): 
c.df['s_nom_opt'] = c.df.s_nom if c.df.s_nom_extendable.any(): c.df.loc[c.df.s_nom_extendable, 's_nom_opt'] = s_nom_extendable_passive_branches.loc[c.name] @@ -1214,11 +1305,10 @@ def set_from_series(df, series): network.links.p_nom_opt = network.links.p_nom network.links.loc[network.links.p_nom_extendable, "p_nom_opt"] = \ - as_series(network.model.link_p_nom) + get_values(network.model.link_p_nom) try: - network.global_constraints.loc[:,"mu"] = -pd.Series(list(model.global_constraints.values()), - index=list(model.global_constraints.keys())).map(duals) + network.global_constraints.loc[:,"mu"] = - get_shadows(model.global_constraints, multiind=False) except (AttributeError, KeyError) as e: logger.warning("Could not read out global constraint shadow prices") @@ -1230,7 +1320,11 @@ def set_from_series(df, series): if len(fixed_committable_gens_i) > 0: network.generators_t.status.loc[snapshots,fixed_committable_gens_i] = \ - as_series(model.generator_status).unstack(0) + get_values(model.generator_status).unstack(0) + + if extra_postprocessing is not None: + extra_postprocessing(network, snapshots, duals) + def network_lopf_build_model(network, snapshots=None, skip_pre=False, formulation="angles", ptdf_tolerance=0.): @@ -1330,7 +1424,9 @@ def network_lopf_prepare_solver(network, solver_name="glpk", solver_io=None): return network.opt -def network_lopf_solve(network, snapshots=None, formulation="angles", solver_options={}, keep_files=False, free_memory={}): + +def network_lopf_solve(network, snapshots=None, formulation="angles", solver_options={},solver_logfile=None, keep_files=False, + free_memory={'pyomo'},extra_postprocessing=None): """ Solve linear optimal power flow for a group of snapshots and extract results. @@ -1346,12 +1442,20 @@ def network_lopf_solve(network, snapshots=None, formulation="angles", solver_opt solver_options : dictionary A dictionary with additional options that get passed to the solver. (e.g. 
{'threads':2} tells gurobi to use only 2 cpus) + solver_logfile : None|string + If not None, sets the logfile option of the solver. keep_files : bool, default False Keep the files that pyomo constructs from OPF problem construction, e.g. .lp file - useful for debugging - free_memory : set, default {} - Any subset of {'pypsa'}. Stash time series data and/or pyomo model away - while the solver runs. + free_memory : set, default {'pyomo'} + Any subset of {'pypsa', 'pyomo'}. Allows to stash `pypsa` time-series + data away while the solver runs (as a pickle to disk) and/or free + `pyomo` data after the solution has been extracted. + extra_postprocessing : callable function + This function must take three arguments + `extra_postprocessing(network,snapshots,duals)` and is called after + the model has solved and the results are extracted. It allows the user to + extract further information about the solution, such as additional shadow prices. Returns ------- @@ -1372,11 +1476,11 @@ def network_lopf_solve(network, snapshots=None, formulation="angles", solver_opt if 'pypsa' in free_memory: with empty_network(network): - network.results = network.opt.solve(*args, suffixes=["dual"], keepfiles=keep_files, options=solver_options) + network.results = network.opt.solve(*args, suffixes=["dual"], keepfiles=keep_files, logfile=solver_logfile, options=solver_options) else: - network.results = network.opt.solve(*args, suffixes=["dual"], keepfiles=keep_files, options=solver_options) + network.results = network.opt.solve(*args, suffixes=["dual"], keepfiles=keep_files, logfile=solver_logfile, options=solver_options) - if logger.level > 0: + if logger.isEnabledFor(logging.INFO): network.results.write() status = network.results["Solver"][0]["Status"].key @@ -1384,20 +1488,24 @@ def network_lopf_solve(network, snapshots=None, formulation="angles", solver_opt if status == "ok" and termination_condition == "optimal": logger.info("Optimization successful") - 
extract_optimisation_results(network,snapshots,formulation) + extract_optimisation_results(network, snapshots, formulation, + free_pyomo='pyomo' in free_memory, + extra_postprocessing=extra_postprocessing) elif status == "warning" and termination_condition == "other": - logger.warn("WARNING! Optimization might be sub-optimal. Writing output anyway") - extract_optimisation_results(network,snapshots,formulation) + logger.warning("WARNING! Optimization might be sub-optimal. Writing output anyway") + extract_optimisation_results(network, snapshots, formulation, + free_pyomo='pyomo' in free_memory, + extra_postprocessing=extra_postprocessing) else: logger.error("Optimisation failed with status %s and terminal condition %s" - % (status,termination_condition)) + % (status, termination_condition)) return status, termination_condition def network_lopf(network, snapshots=None, solver_name="glpk", solver_io=None, - skip_pre=False, extra_functionality=None, solver_options={}, + skip_pre=False, extra_functionality=None, solver_logfile=None, solver_options={}, keep_files=False, formulation="angles", ptdf_tolerance=0., - free_memory={}): + free_memory={},extra_postprocessing=None): """ Linear optimal power flow for a group of snapshots. @@ -1421,6 +1529,8 @@ def network_lopf(network, snapshots=None, solver_name="glpk", solver_io=None, the model building is complete, but before it is sent to the solver. It allows the user to add/change constraints and add/change the objective function. + solver_logfile : None|string + If not None, sets the logfile option of the solver. solver_options : dictionary A dictionary with additional options that get passed to the solver. (e.g. 
{'threads':2} tells gurobi to use only 2 cpus) @@ -1432,8 +1542,15 @@ def network_lopf(network, snapshots=None, solver_name="glpk", solver_io=None, one of ["angles","cycles","kirchhoff","ptdf"] ptdf_tolerance : float Value below which PTDF entries are ignored - free_memory : set, default {} - Any subset of {'pypsa'}. Stash time series data away. + free_memory : set, default {'pyomo'} + Any subset of {'pypsa', 'pyomo'}. Allows to stash `pypsa` time-series + data away while the solver runs (as a pickle to disk) and/or free + `pyomo` data after the solution has been extracted. + extra_postprocessing : callable function + This function must take three arguments + `extra_postprocessing(network,snapshots,duals)` and is called after + the model has solved and the results are extracted. It allows the user to + extract further information about the solution, such as additional shadow prices. Returns ------- @@ -1442,12 +1559,16 @@ def network_lopf(network, snapshots=None, solver_name="glpk", solver_io=None, snapshots = _as_snapshots(network, snapshots) - network_lopf_build_model(network, snapshots, skip_pre=skip_pre, formulation=formulation, ptdf_tolerance=ptdf_tolerance) + network_lopf_build_model(network, snapshots, skip_pre=skip_pre, + formulation=formulation, ptdf_tolerance=ptdf_tolerance) if extra_functionality is not None: extra_functionality(network,snapshots) - network_lopf_prepare_solver(network, solver_name=solver_name, solver_io=solver_io) - - return network_lopf_solve(network, snapshots, formulation=formulation, solver_options=solver_options, keep_files=keep_files, free_memory=free_memory) + network_lopf_prepare_solver(network, solver_name=solver_name, + solver_io=solver_io) + return network_lopf_solve(network, snapshots, formulation=formulation, + solver_logfile=solver_logfile, solver_options=solver_options, + keep_files=keep_files, free_memory=free_memory, + extra_postprocessing=extra_postprocessing) diff --git a/pypsa/opt.py b/pypsa/opt.py index 
0fe9998b3..d866aef86 100644 --- a/pypsa/opt.py +++ b/pypsa/opt.py @@ -145,6 +145,25 @@ def __init__(self,lhs=None,sense="==",rhs=None): def __repr__(self): return "{} {} {}".format(self.lhs, self.sense, self.rhs) +try: + from pyomo.core.base import expr_coopr3 + + def _build_sum_expression(variables, constant=0.): + expr = expr_coopr3._SumExpression() + expr._args = [item[1] for item in variables] + expr._coef = [item[0] for item in variables] + expr._const = constant + return expr +except ImportError: + from pyomo.core.expr import expr_pyomo5 + + def _build_sum_expression(variables, constant=0.): + expr = expr_pyomo5.LinearExpression() + expr.linear_vars = [item[1] for item in variables] + expr.linear_coefs = [item[0] for item in variables] + expr.constant = constant + return expr + def l_constraint(model,name,constraints,*args): """A replacement for pyomo's Constraint that quickly builds linear @@ -201,10 +220,8 @@ def l_constraint(model,name,constraints,*args): constant = c[2] v._data[i] = pyomo.core.base.constraint._GeneralConstraintData(None,v) - v._data[i]._body = pyomo.core.base.expr_coopr3._SumExpression() - v._data[i]._body._args = [item[1] for item in variables] - v._data[i]._body._coef = [item[0] for item in variables] - v._data[i]._body._const = 0. + v._data[i]._body = _build_sum_expression(variables) + if sense == "==": v._data[i]._equality = True v._data[i]._lower = pyomo.core.base.numvalue.NumericConstant(constant) @@ -254,11 +271,7 @@ def l_objective(model,objective=None): #initialise with a dummy model.objective = Objective(expr = 0.) 
- - model.objective._expr = pyomo.core.base.expr_coopr3._SumExpression() - model.objective._expr._args = [item[1] for item in objective.variables] - model.objective._expr._coef = [item[0] for item in objective.variables] - model.objective._expr._const = objective.constant + model.objective._expr = _build_sum_expression(objective.variables, constant=objective.constant) def free_pyomo_initializers(obj): obj.construct() @@ -334,10 +347,9 @@ def empty_model(model): @contextmanager def empty_network(network): logger.debug("Storing pypsa timeseries to disk") - from .components import all_components panels = {} - for c in all_components: + for c in network.all_components: attr = network.components[c]["list_name"] + "_t" panels[attr] = getattr(network, attr) setattr(network, attr, None) diff --git a/pypsa/pf.py b/pypsa/pf.py index 94b2410a0..565ffddcc 100644 --- a/pypsa/pf.py +++ b/pypsa/pf.py @@ -1,4 +1,4 @@ -## Copyright 2015-2017 Tom Brown (FIAS), Jonas Hoersch (FIAS) +## Copyright 2015-2018 Tom Brown (FIAS), Jonas Hoersch (FIAS) ## This program is free software; you can redistribute it and/or ## modify it under the terms of the GNU General Public License as @@ -43,7 +43,9 @@ from itertools import chain import time -from .descriptors import get_switchable_as_dense, allocate_series_dataframes, Dict +from .descriptors import get_switchable_as_dense, allocate_series_dataframes, Dict, zsum, degree + +pd.Series.zsum = zsum def _as_snapshots(network, snapshots): if snapshots is None: @@ -65,7 +67,8 @@ def _allocate_pf_outputs(network, linear=False): 'Bus': ['p', 'v_ang', 'v_mag_pu'], 'Line': ['p0', 'p1'], 'Transformer': ['p0', 'p1'], - 'Link': ['p0', 'p1']} + 'Link': ["p"+col[3:] for col in network.links.columns if col[:3] == "bus"]} + if not linear: for component, attrs in to_allocate.items(): @@ -98,7 +101,12 @@ def _network_prepare_and_run_pf(network, snapshots, skip_pre, linear=False, **kw if not network.links.empty: p_set = get_switchable_as_dense(network, 'Link', 
'p_set', snapshots) network.links_t.p0.loc[snapshots] = p_set.loc[snapshots] - network.links_t.p1.loc[snapshots] = -p_set.loc[snapshots].multiply(network.links.efficiency) + for i in [int(col[3:]) for col in network.links.columns if col[:3] == "bus" and col != "bus0"]: + eff_name = "efficiency" if i == 1 else "efficiency{}".format(i) + p_name = "p{}".format(i) + efficiency = get_switchable_as_dense(network, 'Link', eff_name, snapshots) + links = network.links.index[network.links["bus{}".format(i)] != ""] + network.links_t['p{}'.format(i)].loc[snapshots, links] = -network.links_t.p0.loc[snapshots, links]*efficiency.loc[snapshots, links] itdf = pd.DataFrame(index=snapshots, columns=network.sub_networks.index, dtype=int) difdf = pd.DataFrame(index=snapshots, columns=network.sub_networks.index) @@ -170,7 +178,7 @@ def newton_raphson_sparse(f, guess, dfdx, x_tol=1e-10, lim_iter=100): logger.debug("Error at iteration %d: %f", n_iter, diff) if diff > x_tol: - logger.warn("Warning, we didn't reach the required tolerance within %d iterations, error is at %f. See the section \"Troubleshooting\" in the documentation for tips to fix this. ", n_iter, diff) + logger.warning("Warning, we didn't reach the required tolerance within %d iterations, error is at %f. See the section \"Troubleshooting\" in the documentation for tips to fix this. 
", n_iter, diff) elif not np.isnan(diff): converged = True @@ -206,8 +214,6 @@ def sub_network_pf(sub_network, snapshots=None, skip_pre=False, x_tol=1e-6, use_ # _sub_network_prepare_pf(sub_network, snapshots, skip_pre, calculate_Y) network = sub_network.network - from .components import passive_branch_components, controllable_branch_components, controllable_one_port_components - if not skip_pre: calculate_dependent_values(network) find_bus_controls(sub_network) @@ -223,7 +229,7 @@ def sub_network_pf(sub_network, snapshots=None, skip_pre=False, x_tol=1e-6, use_ for n in ("q", "p"): # allow all one ports to dispatch as set - for c in sub_network.iterate_components(controllable_one_port_components): + for c in sub_network.iterate_components(network.controllable_one_port_components): c_n_set = get_switchable_as_dense(network, c.name, n + '_set', snapshots, c.ind) c.pnl[n].loc[snapshots, c.ind] = c_n_set @@ -232,14 +238,14 @@ def sub_network_pf(sub_network, snapshots=None, skip_pre=False, x_tol=1e-6, use_ sum([((c.pnl[n].loc[snapshots, c.ind] * c.df.loc[c.ind, 'sign']) .groupby(c.df.loc[c.ind, 'bus'], axis=1).sum() .reindex(columns=buses_o, fill_value=0.)) - for c in sub_network.iterate_components(controllable_one_port_components)]) + for c in sub_network.iterate_components(network.controllable_one_port_components)]) if n == "p": network.buses_t[n].loc[snapshots, buses_o] += sum( [(- c.pnl[n+str(i)].loc[snapshots].groupby(c.df["bus"+str(i)], axis=1).sum() .reindex(columns=buses_o, fill_value=0)) - for c in network.iterate_components(controllable_branch_components) - for i in [0,1]]) + for c in network.iterate_components(network.controllable_branch_components) + for i in [int(col[3:]) for col in c.df.columns if col[:3] == "bus"]]) def f(guess): network.buses_t.v_ang.loc[now,sub_network.pvpqs] = guess[:len(sub_network.pvpqs)] @@ -336,7 +342,7 @@ def dfdx(guess): #add voltages to branches buses_indexer = buses_o.get_indexer branch_bus0 = []; branch_bus1 = [] - for c in 
sub_network.iterate_components(passive_branch_components): + for c in sub_network.iterate_components(network.passive_branch_components): branch_bus0 += list(c.df.loc[c.ind, 'bus0']) branch_bus1 += list(c.df.loc[c.ind, 'bus1']) v0 = V[:,buses_indexer(branch_bus0)] @@ -350,7 +356,7 @@ def dfdx(guess): s0 = pd.DataFrame(v0*np.conj(i0), columns=branches_i, index=snapshots) s1 = pd.DataFrame(v1*np.conj(i1), columns=branches_i, index=snapshots) - for c in sub_network.iterate_components(passive_branch_components): + for c in sub_network.iterate_components(network.passive_branch_components): s0t = s0.loc[:,c.name] s1t = s1.loc[:,c.name] c.pnl.p0.loc[snapshots,s0t.columns] = s0t.values.real @@ -412,28 +418,25 @@ def apply_line_types(network): """ lines_with_types_b = network.lines.type != "" - - if lines_with_types_b.sum() == 0: + if lines_with_types_b.zsum() == 0: return - for attr in ["r","x"]: - attr_per_length = network.line_types[attr + "_per_length"] - network.lines.loc[lines_with_types_b,attr] = ( - network.lines.loc[lines_with_types_b, "type"].map(attr_per_length) - * network.lines.loc[lines_with_types_b, "length"] - / network.lines.loc[lines_with_types_b, "num_parallel"] - ) + missing_types = (pd.Index(network.lines.loc[lines_with_types_b, 'type'].unique()) + .difference(network.line_types.index)) + assert missing_types.empty, ("The type(s) {} do(es) not exist in network.line_types" + .format(", ".join(missing_types))) - factor = 2*np.pi*1e-9*network.lines.loc[lines_with_types_b, "type"].map(network.line_types.f_nom) - c_per_length = network.line_types["c_per_length"] - network.lines.loc[lines_with_types_b, "b"] = ( - factor - * network.lines.loc[lines_with_types_b, "type"].map(c_per_length) - * network.lines.loc[lines_with_types_b, "length"] - * network.lines.loc[lines_with_types_b, "num_parallel"] - ) + # Get a copy of the lines data + l = (network.lines.loc[lines_with_types_b, ["type", "length", "num_parallel"]] + .join(network.line_types, on='type')) + for attr 
in ["r","x"]: + l[attr] = l[attr + "_per_length"] * l["length"] / l["num_parallel"] + l["b"] = 2*np.pi*1e-9*l["f_nom"] * l["c_per_length"] * l["length"] * l["num_parallel"] + # now set calculated values on live lines + for attr in ["r", "x", "b"]: + network.lines.loc[lines_with_types_b, attr] = l[attr] @@ -443,50 +446,42 @@ def apply_transformer_types(network): """ - trafos = network.transformers - trafos_with_types_b = trafos.type != "" - - if trafos_with_types_b.sum() == 0: + trafos_with_types_b = network.transformers.type != "" + if trafos_with_types_b.zsum() == 0: return - trafos.loc[trafos_with_types_b, "r"] = trafos.loc[trafos_with_types_b, "type"].map(network.transformer_types["vscr"])/100. + missing_types = (pd.Index(network.transformers.loc[trafos_with_types_b, 'type'].unique()) + .difference(network.transformer_types.index)) + assert missing_types.empty, ("The type(s) {} do(es) not exist in network.transformer_types" + .format(", ".join(missing_types))) - z = trafos.loc[trafos_with_types_b, "type"].map(network.transformer_types["vsc"])/100. + # Get a copy of the transformers data + # (joining pulls in "phase_shift", "s_nom", "tap_side" from TransformerType) + t = (network.transformers.loc[trafos_with_types_b, ["type", "tap_position", "num_parallel"]] + .join(network.transformer_types, on='type')) - trafos.loc[trafos_with_types_b, "x"] = np.sqrt(z**2 - trafos.loc[trafos_with_types_b, "r"]**2) - - for attr in ["phase_shift","s_nom"]: - trafos.loc[trafos_with_types_b, attr] = trafos.loc[trafos_with_types_b, "type"].map(network.transformer_types[attr]) + t["r"] = t["vscr"] /100. + t["x"] = np.sqrt((t["vsc"]/100.)**2 - t["r"]**2) #NB: b and g are per unit of s_nom - trafos.loc[trafos_with_types_b, "g"] = trafos.loc[trafos_with_types_b, "type"].map(network.transformer_types["pfe"])/(1000. * trafos.loc[trafos_with_types_b, "s_nom"]) - - i0 = trafos.loc[trafos_with_types_b, "type"].map(network.transformer_types["i0"])/100. 
- - b_minus_squared = i0**2 - trafos.loc[trafos_with_types_b, "g"]**2 + t["g"] = t["pfe"]/(1000. * t["s_nom"]) #for some bizarre reason, some of the standard types in pandapower have i0^2 < g^2 - - b_minus_squared[b_minus_squared < 0.] = 0. - - trafos.loc[trafos_with_types_b, "b"] = - np.sqrt(b_minus_squared) - + t["b"] = - np.sqrt(((t["i0"]/100.)**2 - t["g"]**2).clip(lower=0)) for attr in ["r","x"]: - trafos.loc[trafos_with_types_b, attr] = trafos.loc[trafos_with_types_b, attr]/trafos.loc[trafos_with_types_b, "num_parallel"] + t[attr] /= t["num_parallel"] for attr in ["b","g"]: - trafos.loc[trafos_with_types_b, attr] = trafos.loc[trafos_with_types_b, attr]*trafos.loc[trafos_with_types_b, "num_parallel"] - + t[attr] *= t["num_parallel"] #deal with tap positions - trafos.loc[trafos_with_types_b, "tap_ratio"] = 1 + ( - trafos.loc[trafos_with_types_b, "tap_position"] - - trafos.loc[trafos_with_types_b, "type"].map(network.transformer_types["tap_neutral"])) * ( - trafos.loc[trafos_with_types_b, "type"].map(network.transformer_types["tap_step"])/100.) + t["tap_ratio"] = 1. + (t["tap_position"] - t["tap_neutral"]) * (t["tap_step"]/100.) - trafos.loc[trafos_with_types_b, "tap_side"] = trafos.loc[trafos_with_types_b, "type"].map(network.transformer_types["tap_side"]) + # now set calculated values on live transformers + for attr in ["r", "x", "g", "b", "phase_shift", "s_nom", "tap_side", "tap_ratio"]: + network.transformers.loc[trafos_with_types_b, attr] = t[attr] #TODO: status, rate_A @@ -505,7 +500,7 @@ def apply_transformer_t_model(network): ts_b = (network.transformers.model == "t") & (y_shunt != 0.) 
- if ts_b.sum() == 0: + if ts_b.zsum() == 0: return za,zb,zc = wye_to_delta(z_series.loc[ts_b]/2,z_series.loc[ts_b]/2,1/y_shunt.loc[ts_b]) @@ -528,12 +523,17 @@ def calculate_dependent_values(network): network.lines["r_pu"] = network.lines.r/(network.lines.v_nom**2) network.lines["b_pu"] = network.lines.b*network.lines.v_nom**2 network.lines["g_pu"] = network.lines.g*network.lines.v_nom**2 + network.lines["x_pu_eff"] = network.lines["x_pu"] + network.lines["r_pu_eff"] = network.lines["r_pu"] + #convert transformer impedances from base power s_nom to base = 1 MVA network.transformers["x_pu"] = network.transformers.x/network.transformers.s_nom network.transformers["r_pu"] = network.transformers.r/network.transformers.s_nom network.transformers["b_pu"] = network.transformers.b*network.transformers.s_nom network.transformers["g_pu"] = network.transformers.g*network.transformers.s_nom + network.transformers["x_pu_eff"] = network.transformers["x_pu"]* network.transformers["tap_ratio"] + network.transformers["r_pu_eff"] = network.transformers["r_pu"]* network.transformers["tap_ratio"] apply_transformer_t_model(network) @@ -548,7 +548,7 @@ def find_slack_bus(sub_network): gens = sub_network.generators() if len(gens) == 0: - logger.warn("No generators in sub-network {}, better hope power is already balanced".format(sub_network.name)) + logger.warning("No generators in sub-network {}, better hope power is already balanced".format(sub_network.name)) sub_network.slack_generator = None sub_network.slack_bus = sub_network.buses_i()[0] @@ -592,9 +592,11 @@ def find_bus_controls(sub_network): network.buses.loc[buses_i, "control"] = "PQ" #find all buses with one or more gens with PV - pvs = gens[gens.control == 'PV'].index.to_series().groupby(gens.bus).first() - network.buses.loc[pvs.index, "control"] = "PV" - network.buses.loc[pvs.index, "generator"] = pvs + pvs = gens[gens.control == 'PV'].index.to_series() + if len(pvs) > 0: + pvs = pvs.groupby(gens.bus).first() + 
network.buses.loc[pvs.index, "control"] = "PV" + network.buses.loc[pvs.index, "generator"] = pvs network.buses.loc[sub_network.slack_bus, "control"] = "Slack" network.buses.loc[sub_network.slack_bus, "generator"] = sub_network.slack_generator @@ -612,8 +614,6 @@ def find_bus_controls(sub_network): def calculate_B_H(sub_network,skip_pre=False): """Calculate B and H matrices for AC or DC sub-networks.""" - from .components import passive_branch_components - network = sub_network.network if not skip_pre: @@ -621,20 +621,19 @@ def calculate_B_H(sub_network,skip_pre=False): find_bus_controls(sub_network) if network.sub_networks.at[sub_network.name,"carrier"] == "DC": - attribute="r_pu" + attribute="r_pu_eff" else: - attribute="x_pu" + attribute="x_pu_eff" #following leans heavily on pypower.makeBdc #susceptances - b = 1./np.concatenate([(c.df.loc[c.ind, attribute]*c.df.loc[c.ind, "tap_ratio"]).values if c.name == "Transformer" - else c.df.loc[c.ind, attribute].values - for c in sub_network.iterate_components(passive_branch_components)]) + b = 1./np.concatenate([(c.df.loc[c.ind, attribute]).values \ + for c in sub_network.iterate_components(network.passive_branch_components)]) if np.isnan(b).any(): - logger.warn("Warning! Some series impedances are zero - this will cause a singularity in LPF!") + logger.warning("Warning! Some series impedances are zero - this will cause a singularity in LPF!") b_diag = csr_matrix((b, (r_[:len(b)], r_[:len(b)]))) #incidence matrix @@ -648,7 +647,7 @@ def calculate_B_H(sub_network,skip_pre=False): sub_network.p_branch_shift = -b*np.concatenate([(c.df.loc[c.ind, "phase_shift"]).values*np.pi/180. 
if c.name == "Transformer" else np.zeros((len(c.ind),)) - for c in sub_network.iterate_components(passive_branch_components)]) + for c in sub_network.iterate_components(network.passive_branch_components)]) sub_network.p_bus_shift = sub_network.K * sub_network.p_branch_shift @@ -700,7 +699,7 @@ def calculate_Y(sub_network,skip_pre=False): calculate_dependent_values(sub_network.network) if sub_network.network.sub_networks.at[sub_network.name,"carrier"] != "AC": - logger.warn("Non-AC networks not supported for Y!") + logger.warning("Non-AC networks not supported for Y!") return branches = sub_network.branches() @@ -790,7 +789,7 @@ def aggregate_multi_graph(sub_network): attr_mean = ["capital_cost","length","terrain_factor"] for attr in attr_inv: - aggregated[attr] = 1/(1/lines[attr]).sum() + aggregated[attr] = 1./(1./lines[attr]).sum() for attr in attr_sum: aggregated[attr] = lines[attr].sum() @@ -828,11 +827,11 @@ def find_tree(sub_network, weight='x_pu'): branches_i = branches_bus0.index buses_i = sub_network.buses_i() - graph = sub_network.graph(weight=weight) + graph = sub_network.graph(weight=weight, inf_weight=1.) 
sub_network.tree = nx.minimum_spanning_tree(graph) #find bus with highest degree to use as slack - tree_slack_bus, slack_degree = max(sub_network.tree.degree_iter(), key=itemgetter(1)) + tree_slack_bus, slack_degree = max(degree(sub_network.tree), key=itemgetter(1)) logger.info("Tree slack bus is %s with degree %d.", tree_slack_bus, slack_degree) #determine which buses are supplied in tree through branch from slack @@ -863,7 +862,7 @@ def find_cycles(sub_network, weight='x_pu'): branches_i = branches_bus0.index #reduce to a non-multi-graph for cycles with > 2 edges - mgraph = sub_network.graph(weight=weight) + mgraph = sub_network.graph(weight=weight, inf_weight=False) graph = nx.OrderedGraph(mgraph) cycles = nx.cycle_basis(graph) @@ -919,10 +918,6 @@ def sub_network_lpf(sub_network, snapshots=None, skip_pre=False): logger.info("Performing linear load-flow on %s sub-network %s for snapshot(s) %s", sub_network.network.sub_networks.at[sub_network.name,"carrier"], sub_network, snapshots) - from .components import \ - one_port_components, controllable_one_port_components, \ - passive_branch_components, controllable_branch_components - network = sub_network.network @@ -942,7 +937,7 @@ def sub_network_lpf(sub_network, snapshots=None, skip_pre=False): network.shunt_impedances.g_pu.loc[shunt_impedances_i].values # allow all one ports to dispatch as set - for c in sub_network.iterate_components(controllable_one_port_components): + for c in sub_network.iterate_components(network.controllable_one_port_components): c_p_set = get_switchable_as_dense(network, c.name, 'p_set', snapshots, c.ind) c.pnl.p.loc[snapshots, c.ind] = c_p_set @@ -951,12 +946,12 @@ def sub_network_lpf(sub_network, snapshots=None, skip_pre=False): sum([((c.pnl.p.loc[snapshots, c.ind] * c.df.loc[c.ind, 'sign']) .groupby(c.df.loc[c.ind, 'bus'], axis=1).sum() .reindex(columns=buses_o, fill_value=0.)) - for c in sub_network.iterate_components(one_port_components)] + for c in 
sub_network.iterate_components(network.one_port_components)] + [(- c.pnl["p"+str(i)].loc[snapshots].groupby(c.df["bus"+str(i)], axis=1).sum() .reindex(columns=buses_o, fill_value=0)) - for c in network.iterate_components(controllable_branch_components) - for i in [0,1]]) + for c in network.iterate_components(network.controllable_branch_components) + for i in [int(col[3:]) for col in c.df.columns if col[:3] == "bus"]]) if not skip_pre and len(branches_i) > 0: calculate_B_H(sub_network, skip_pre=True) @@ -968,7 +963,7 @@ def sub_network_lpf(sub_network, snapshots=None, skip_pre=False): flows = pd.DataFrame(v_diff * sub_network.H.T, columns=branches_i, index=snapshots) + sub_network.p_branch_shift - for c in sub_network.iterate_components(passive_branch_components): + for c in sub_network.iterate_components(network.passive_branch_components): f = flows.loc[:, c.name] c.pnl.p0.loc[snapshots, f.columns] = f c.pnl.p1.loc[snapshots, f.columns] = -f @@ -981,7 +976,7 @@ def sub_network_lpf(sub_network, snapshots=None, skip_pre=False): network.buses_t.v_mag_pu.loc[snapshots, buses_o] = 1. # set slack bus power to pick up remained - slack_adjustment = (- network.buses_t.p.loc[snapshots, buses_o[1:]].sum(axis=1) + slack_adjustment = (- network.buses_t.p.loc[snapshots, buses_o[1:]].sum(axis=1).fillna(0.) 
- network.buses_t.p.loc[snapshots, buses_o[0]]) network.buses_t.p.loc[snapshots, buses_o[0]] += slack_adjustment diff --git a/pypsa/plot.py b/pypsa/plot.py index 0ed073959..17d1ff908 100644 --- a/pypsa/plot.py +++ b/pypsa/plot.py @@ -23,7 +23,7 @@ from __future__ import division from __future__ import absolute_import import six -from six import iteritems +from six import iteritems, string_types import pandas as pd import numpy as np @@ -41,20 +41,20 @@ import matplotlib.pyplot as plt from matplotlib.patches import Wedge from matplotlib.collections import LineCollection, PatchCollection -except: +except ImportError: plt_present = False basemap_present = True try: from mpl_toolkits.basemap import Basemap -except: +except ImportError: basemap_present = False pltly_present = True try: import plotly.offline as pltly -except: +except ImportError: pltly_present = False @@ -135,11 +135,13 @@ def compute_bbox_with_margins(margin, x, y): y = y + np.random.uniform(low=-jitter, high=jitter, size=len(y)) if basemap and basemap_present: + resolution = 'l' if isinstance(basemap, bool) else basemap + if boundaries is None: (x1, y1), (x2, y2) = compute_bbox_with_margins(margin, x, y) else: x1, x2, y1, y2 = boundaries - bmap = Basemap(resolution='l', epsg=network.srid, + bmap = Basemap(resolution=resolution, epsg=network.srid, llcrnrlat=y1, urcrnrlat=y2, llcrnrlon=x1, urcrnrlon=x2, ax=ax) bmap.drawcountries() @@ -163,7 +165,10 @@ def compute_bbox_with_margins(margin, x, y): for b_i in bus_sizes.index.levels[0]: s = bus_sizes.loc[b_i] radius = s.sum()**0.5 - ratios = s/s.sum() + if radius == 0.0: + ratios = s + else: + ratios = s/s.sum() start = 0.25 for i, ratio in ratios.iteritems(): @@ -175,11 +180,8 @@ def compute_bbox_with_margins(margin, x, y): ax.add_collection(bus_collection) else: c = pd.Series(bus_colors, index=network.buses.index) - if c.dtype == np.dtype('O'): - c.fillna("b", inplace=True) - c = list(c.values) s = pd.Series(bus_sizes, index=network.buses.index, 
dtype="float").fillna(10) - bus_collection = ax.scatter(x, y, c=c, s=s, cmap=bus_cmap) + bus_collection = ax.scatter(x, y, c=c, s=s, cmap=bus_cmap, edgecolor='face') def as_branch_series(ser): if isinstance(ser, dict) and set(ser).issubset(branch_components): @@ -269,7 +271,7 @@ def as_branch_series(ser): def iplot(network, fig=None, bus_colors='blue', bus_colorscale=None, bus_colorbar=None, bus_sizes=10, bus_text=None, line_colors='green', line_widths=2, line_text=None, title="", - branch_components=['Line', 'Link'], iplot=True): + branch_components=['Line', 'Link'], iplot=True, jitter=None): """ Plot the network buses and lines interactively using plotly. @@ -305,6 +307,9 @@ def iplot(network, fig=None, bus_colors='blue', Branch components to be plotted, defaults to Line and Link. iplot : bool, default True Automatically do an interactive plot of the figure. + jitter : None|float + Amount of random noise to add to bus positions to distinguish + overlapping buses Returns ------- @@ -323,8 +328,14 @@ def iplot(network, fig=None, bus_colors='blue', if bus_text is None: bus_text = 'Bus ' + network.buses.index - bus_trace = dict(x=network.buses.x, - y=network.buses.y, + x = network.buses.x + y = network.buses.y + + if jitter is not None: + x = x + np.random.uniform(low=-jitter, high=jitter, size=len(x)) + y = y + np.random.uniform(low=-jitter, high=jitter, size=len(y)) + + bus_trace = dict(x=x, y=y, text=bus_text, type="scatter", mode="markers", @@ -368,8 +379,8 @@ def as_branch_series(ser): for c in network.iterate_components(branch_components): l_defaults = defaults_for_branches[c.name] l_widths = line_widths.get(c.name, l_defaults['width']) - l_nums = None l_colors = line_colors.get(c.name, l_defaults['color']) + l_nums = None if line_text is None: l_text = c.name + ' ' + c.df.index @@ -383,22 +394,23 @@ def as_branch_series(ser): else: l_colors.fillna(l_defaults['color'], inplace=True) - x0 = c.df.bus0.map(network.buses.x) - x1 = c.df.bus1.map(network.buses.x) + 
x0 = c.df.bus0.map(x) + x1 = c.df.bus1.map(x) - y0 = c.df.bus0.map(network.buses.y) - y1 = c.df.bus1.map(network.buses.y) + y0 = c.df.bus0.map(y) + y1 = c.df.bus1.map(y) for line in c.df.index: + color = l_colors if isinstance(l_colors, string_types) else l_colors[line] + width = l_widths if isinstance(l_widths, (int, float)) else l_widths[line] + shapes.append(dict(type='line', - x0=x0[line], - y0=y0[line], - x1=x1[line], - y1=y1[line], - opacity=0.7, - line=dict(color=l_colors[line], - width=l_widths[line]) - )) + x0=x0[line], + y0=y0[line], + x1=x1[line], + y1=y1[line], + opacity=0.7, + line=dict(color=color, width=width))) shape_traces.append(dict(x=0.5*(x0+x1), y=0.5*(y0+y1), @@ -406,8 +418,7 @@ def as_branch_series(ser): type="scatter", mode="markers", hoverinfo="text", - marker=dict(opacity=0.)) - ) + marker=dict(opacity=0.))) fig['data'].extend([bus_trace]+shape_traces) diff --git a/requirements.yml b/requirements.yml new file mode 100644 index 000000000..b76a7b961 --- /dev/null +++ b/requirements.yml @@ -0,0 +1,15 @@ +name: pypsa +channels: + - conda-forge +dependencies: + - python + - six + - numpy + - pyomo + - scipy + - pandas>=0.19.0 + - matplotlib + - networkx>=1.10 + - pyomo + - coincbc + - glpk diff --git a/requirements_dev.yml b/requirements_dev.yml new file mode 100644 index 000000000..ab86172f2 --- /dev/null +++ b/requirements_dev.yml @@ -0,0 +1,12 @@ +name: pypsa + +channels: + - conda-forge + +dependencies: + - pytest + - pytest-cov + - twine + - pip: + - pypower + - pandapower diff --git a/setup.py b/setup.py index 43ede9e8d..b4cd5ad19 100644 --- a/setup.py +++ b/setup.py @@ -11,7 +11,7 @@ setup( name='pypsa', - version='0.11.0fork', + version='0.13.2fork', author='Tom Brown (FIAS), Jonas Hoersch (FIAS), David Schlachtberger (FIAS)', author_email='brown@fias.uni-frankfurt.de', description='Python for Power Systems Analysis', @@ -20,7 +20,7 @@ license='GPLv3', packages=find_packages(exclude=['doc', 'test']), include_package_data=True, - 
install_requires=['numpy','pyomo','scipy','pandas>=0.19.0','networkx>=1.10'], + install_requires=['numpy','pyomo>=5.3','scipy','pandas>=0.19.0','networkx>=1.10'], classifiers=[ 'Development Status :: 3 - Alpha', 'Environment :: Console', diff --git a/test/test_ac_dc_lopf.py b/test/test_ac_dc_lopf.py index e63991d06..21fc9013b 100644 --- a/test/test_ac_dc_lopf.py +++ b/test/test_ac_dc_lopf.py @@ -22,14 +22,13 @@ def test_lopf(): + csv_folder_name = os.path.join(os.path.dirname(__file__), "../examples/ac-dc-meshed/ac-dc-data") - csv_folder_name = "../examples/ac-dc-meshed/ac-dc-data" - - network = pypsa.Network(csv_folder_name=csv_folder_name) + network = pypsa.Network(csv_folder_name) results_folder_name = os.path.join(csv_folder_name,"results-lopf") - network_r = pypsa.Network(csv_folder_name=results_folder_name) + network_r = pypsa.Network(results_folder_name) #test results were generated with GLPK; solution should be unique, @@ -39,7 +38,7 @@ def test_lopf(): snapshots = network.snapshots for formulation, free_memory in product(["angles", "cycles", "kirchhoff", "ptdf"], - [{}, {"pypsa"}, {"pypsa", "pyomo-hack"}]): + [{}, {"pypsa"}]): network.lopf(snapshots=snapshots,solver_name=solver_name,formulation=formulation, free_memory=free_memory) print(network.generators_t.p.loc[:,network.generators.index]) print(network_r.generators_t.p.loc[:,network.generators.index]) diff --git a/test/test_ac_dc_lpf.py b/test/test_ac_dc_lpf.py index df7aaa62c..d3774876f 100644 --- a/test/test_ac_dc_lpf.py +++ b/test/test_ac_dc_lpf.py @@ -21,15 +21,13 @@ def test_lpf(): + csv_folder_name = os.path.join(os.path.dirname(__file__), "../examples/ac-dc-meshed/ac-dc-data") + network = pypsa.Network(csv_folder_name) - csv_folder_name = "../examples/ac-dc-meshed/ac-dc-data" + results_folder_name = os.path.join(csv_folder_name, "results-lpf") - network = pypsa.Network(csv_folder_name=csv_folder_name) - - results_folder_name = os.path.join(csv_folder_name,"results-lpf") - - network_r = 
pypsa.Network(csv_folder_name=results_folder_name) + network_r = pypsa.Network(results_folder_name) for snapshot in network.snapshots[:2]: network.lpf(snapshot) diff --git a/test/test_lpf_against_pypower.py b/test/test_lpf_against_pypower.py index 0546c1312..e313c2a86 100644 --- a/test/test_lpf_against_pypower.py +++ b/test/test_lpf_against_pypower.py @@ -8,13 +8,20 @@ from pypower.api import ppoption, runpf, case30 as case +from pypower.ppver import ppver +from distutils.version import StrictVersion +pypower_version = StrictVersion(ppver()['Version']) + import pandas as pd import numpy as np +import pytest +@pytest.mark.skipif(pypower_version <= '5.0.0', + reason="PyPOWER 5.0.0 is broken with recent numpy and unmaintained since Aug 2017.") def test_pypower_case(): #ppopt is a dictionary with the details of the optimization routine to run diff --git a/test/test_opf_storage.py b/test/test_opf_storage.py index 265bd2266..62a21df35 100644 --- a/test/test_opf_storage.py +++ b/test/test_opf_storage.py @@ -22,9 +22,9 @@ def test_opf(): - csv_folder_name = "../examples/opf-storage-hvdc/opf-storage-data" + csv_folder_name = os.path.join(os.path.dirname(__file__), "../examples/opf-storage-hvdc/opf-storage-data") - network = pypsa.Network(csv_folder_name=csv_folder_name) + network = pypsa.Network(csv_folder_name) #test results were generated with GLPK and other solvers may differ solver_name = "glpk" diff --git a/test/test_pf_against_pypower.py b/test/test_pf_against_pypower.py index a92c9ca00..6413fe949 100644 --- a/test/test_pf_against_pypower.py +++ b/test/test_pf_against_pypower.py @@ -5,14 +5,20 @@ from pypower.api import ppoption, runpf, case118 as case +from pypower.ppver import ppver +from distutils.version import StrictVersion +pypower_version = StrictVersion(ppver()['Version']) import pandas as pd import numpy as np +import pytest +@pytest.mark.skipif(pypower_version <= '5.0.0', + reason="PyPOWER 5.0.0 is broken with recent numpy and unmaintained since Aug 2017.") 
def test_pypower_case(): #ppopt is a dictionary with the details of the optimization routine to run @@ -55,7 +61,7 @@ def test_pypower_case(): network.pf() #compare branch flows - for c in network.iterate_components(pypsa.components.passive_branch_components): + for c in network.iterate_components(network.passive_branch_components): for si in ["p0","p1","q0","q1"]: si_pypsa = getattr(c.pnl,si).loc["now"].values si_pypower = results_df['branch'][si][c.df.original_index].values diff --git a/test/test_sclopf_scigrid.py b/test/test_sclopf_scigrid.py index e68b5fd97..ae700ed1b 100644 --- a/test/test_sclopf_scigrid.py +++ b/test/test_sclopf_scigrid.py @@ -1,17 +1,14 @@ from __future__ import print_function, division from __future__ import absolute_import -import pypsa - +import os import numpy as np - +import pypsa def test_sclopf(): + csv_folder_name = os.path.join(os.path.dirname(__file__), "../examples/scigrid-de/scigrid-with-load-gen-trafos/") - - csv_folder_name = "../examples/scigrid-de/scigrid-with-load-gen-trafos/" - - network = pypsa.Network(csv_folder_name=csv_folder_name) + network = pypsa.Network(csv_folder_name) #test results were generated with GLPK and other solvers may differ solver_name = "cbc" diff --git a/test/test_unit_commitment.py b/test/test_unit_commitment.py index d99d85725..cb10760a6 100644 --- a/test/test_unit_commitment.py +++ b/test/test_unit_commitment.py @@ -134,3 +134,8 @@ def test_minimum_down_time(): expected_dispatch = np.array([[3000,0,0,8000],[0,800,3000,0]],dtype=float).T np.testing.assert_array_almost_equal(nu.generators_t.p.values,expected_dispatch) + +if __name__ == "__main__": + test_minimum_down_time() + test_minimum_up_time() + test_part_load() diff --git a/website/animations/index.org b/website/animations/index.org new file mode 100644 index 000000000..6b2e86a15 --- /dev/null +++ b/website/animations/index.org @@ -0,0 +1,8 @@ +#+TITLE: Python for Power System Analysis: Animations +#+OPTIONS: toc:nil no default TOC + +PyPSA 
datasets can be animated with [[https://github.com/PyPSA/PyPSA-animation][PyPSA-animation]]. + +Here is a live example: + +- [[./pypsa-eur-30/][PyPSA-Eur-30]]: Europe with 95% renewable electricity diff --git a/website/animations/pypsa-eur-30 b/website/animations/pypsa-eur-30 new file mode 120000 index 000000000..bb23ff027 --- /dev/null +++ b/website/animations/pypsa-eur-30 @@ -0,0 +1 @@ +/home/tom/energy/playground/pypsa-animation/ \ No newline at end of file diff --git a/website/examples/index.org b/website/examples/index.org index 3cfb1036d..d969a534b 100644 --- a/website/examples/index.org +++ b/website/examples/index.org @@ -58,20 +58,26 @@ repository]]. buses are connected with transmission lines and there are electrical generators at two of the nodes. - [[./power-to-gas-boiler-chp.html][*Power-to-Gas with Gas Boiler and Combined-Heat-and-Power unit*]] + - [[./chp-fixed-heat-power-ratio.html][*Demonstration of Link with multiple outputs: Combined-Heat-and-Power (CHP) with fixed heat-power ratio*]] - [[./power-to-heat-water-tank.html][*Power-to-Heat with Water Tank*]] - [[./battery-electric-vehicle-charging.html][*Transport: Charging Battery Electric Vehicle with Solar Panel*]] - [[./chained-hydro-reservoirs.html][*Chained Hydroelectric Reservoirs*]] + - [[./biomass-synthetic-fuels-carbon-management.html][*Biomass, synthetic fuels and carbon management*]] - [[./replace-generator-storage-units-with-store.html][*Replacing Generators and Storage Units with Fundamental Stores and Links*]] - This notebook demonstrates how generators and storage units can be replaced by more fundamental components (Stores and Links), and how their parameters map to each other. -- [[./logging-demo.html][Logging]] - How to control the level of logging that PyPSA reports +- [[./logging-demo.html][*Logging*]] - How to control the level of logging that PyPSA reports back, e.g. error/warning/info/debug messages. 
+- [[https://github.com/PyPSA/pypsa-eur][*PyPSA-Eur*]]: An Open Optimisation Model of the European Transmission System +- [[https://github.com/PyPSA/pypsa-za][*PyPSA-ZA*]]: PyPSA Model of the South African Energy System +- [[https://zenodo.org/record/1146665][*Sector-Coupled European Dataset*]] - Full dataset and code from [[https://arxiv.org/abs/1801.05290][Synergies of sector coupling and transmission extension in a cost-optimised, highly renewable European energy system]], Energy, Volume 160, Pages 720-739, 2018, [[https://arxiv.org/abs/1801.05290][postprint arXiv:1801.05290]], DOI: [[https://doi.org/10.1016/j.energy.2018.06.222][10.1016/j.energy.2018.06.222]]. - [[https://zenodo.org/record/804337][*European One-Node-Per-Country Dataset*]] - Full dataset and code from [[https://doi.org/10.1016/j.energy.2017.06.004][The Benefits of Cooperation in a Highly Renewable European Electricity Network]], Energy, Volume 134, 1 September 2017, Pages 469-481, [[https://arxiv.org/abs/1704.05492][preprint]]. +- [[https://github.com/PyPSA/WHOBS][*WHOBS*]]: Optimal Wind+Hydrogen+Other+Battery+Solar (WHOBS) electricity systems for European countries - [[https://github.com/openego][*open-Ego project*]] - The project open-eGo aims to develop a transparent, inter-grid-level operating grid planning tool to investigate economic viable grid expansion scenarios considering alternative flexibility options such as storages or redispatch. It uses PyPSA. If you have a nice example of using PyPSA, send your Jupyter notebook to -Tom Brown (brown at fias.uni-frankfurt.de). +Tom Brown (firstname.lastname@kit.edu). 
diff --git a/website/generate.py b/website/generate.py index 9cb820fdf..0c836436b 100644 --- a/website/generate.py +++ b/website/generate.py @@ -9,6 +9,7 @@ ["examples/index.html","examples"], ["doc/index.html","documentation"], ["publications/index.html","publications"], + ["animations/index.html","animations"], ["forum/index.html","forum"], ] diff --git a/website/index.org b/website/index.org index ed9802a98..f58a0ce18 100644 --- a/website/index.org +++ b/website/index.org @@ -8,23 +8,19 @@ PyPSA stands for "Python for Power System Analysis". It is pronounced PyPSA is a [[http://www.gnu.org/philosophy/free-sw.en.html][free software]] toolbox for simulating and optimising modern power systems that include features such as conventional generators with unit commitment, variable wind and solar generation, storage -units, sector coupling and mixed alternating and direct current +units, coupling to other energy sectors, and mixed alternating and direct current networks. PyPSA is designed to scale well with large networks and long time series. -As of 2017 PyPSA is under heavy development and therefore it is -recommended to use caution when using it in a production environment. -Some APIs may change - the changes in each PyPSA version are listed in -the [[./doc/release_notes.html][release notes]]. -PyPSA was initially developed by the -[[https://fias.uni-frankfurt.de/physics/schramm/renewable-energy-system-and-network-analysis/][Renewable -Energy Group]] at [[https://fias.uni-frankfurt.de/][FIAS]] to carry out -simulations for the [[http://condynet.de/][CoNDyNet project]], financed -by the [[https://www.bmbf.de/en/index.html][German Federal Ministry for -Education and Research (BMBF)]] as part of the -[[http://forschung-stromnetze.info/projekte/grundlagen-und-konzepte-fuer-effiziente-dezentrale-stromnetze/][Stromnetze -Research Initiative]]. 
+This project is maintained by the [[https://www.iai.kit.edu/english/2338.php][Energy System Modelling group]] at the +[[https://www.iai.kit.edu/english/index.php][Institute for Automation and Applied Informatics]] at the [[http://www.kit.edu/english/index.php][Karlsruhe +Institute of Technology]]. The group is funded by the [[https://www.helmholtz.de/en/][Helmholtz +Association]] until 2024. Previous versions were developed by the +[[https://fias.uni-frankfurt.de/physics/schramm/renewable-energy-system-and-network-analysis/][Renewable Energy Group]] at [[https://fias.uni-frankfurt.de/][FIAS]] to carry out simulations for the +[[http://condynet.de/][CoNDyNet project]], financed by the [[https://www.bmbf.de/en/index.html][German Federal Ministry for +Education and Research (BMBF)]] as part of the [[http://forschung-stromnetze.info/projekte/grundlagen-und-konzepte-fuer-effiziente-dezentrale-stromnetze/][Stromnetze Research +Initiative]]. * Download @@ -49,14 +45,14 @@ PyPSA can calculate: - static power flow (using both the full non-linear network equations and the linearised network equations) -- linear optimal power flow (optimisation of power plant and storage +- linear optimal power flow (least-cost optimisation of power plant and storage dispatch within network constraints, using the linear network equations, over several snapshots) - security-constrained linear optimal power flow -- total electricity system investment optimisation (using linear +- total electricity/energy system least-cost investment optimisation (using linear network equations, over several snapshots simultaneously for optimisation of generation and storage dispatch and investment in the - capacities of generation, storage and transmission) + capacities of generation, storage, transmission and other infrastructure) It has models for: @@ -74,29 +70,25 @@ It has models for: - basic components out of which more complicated assets can be built, such as Combined Heat and Power (CHP) units, heat 
pumps, resistive Power-to-Heat (P2H), Power-to-Gas (P2G), battery electric vehicles - (BEVs), etc.; each of these is demonstrated in the - [[./examples/index.html][examples]] + (BEVs), Fischer-Tropsch, direct air capture (DAC), etc.; each of + these is demonstrated in the [[./examples/index.html][examples]] -Functionality that will definitely be added soon: - -- Multi-year investment optimisation -- Simple RMS simulations with the swing equation -- Distributed active power slack -- Non-linear power flow solution using - [[https://en.wikipedia.org/wiki/Holomorphic_embedding_load_flow_method][analytic - continuation]] in the complex plane following - [[https://github.com/SanPen/GridCal][GridCal]] Functionality that may be added in the future: -- Short-circuit current calculations -- Dynamic RMS simulations -- Small signal stability analysis -- Interactive web-based GUI with SVG -- OPF with the full non-linear network equations -- Dynamic EMT simulations -- Unbalanced load flow -- Port to [[http://julialang.org/][Julia]] +- Multi-year investment optimisation +- Distributed active power slack +- Interactive web-based GUI with SVG +- OPF with the full non-linear network equations +- Port to [[http://julialang.org/][Julia]] + +Other complementary libraries: + +- [[https://github.com/e2nIEE/pandapower][pandapower]] for more detailed modelling of distribution grids, + short-circuit calculations, unbalanced load flow and more +- [[https://github.com/JuliaEnergy/PowerDynamics.jl][PowerDynamics.jl]] for dynamic modelling of power grids at time scales + where differential equations are relevant + * Example scripts as Jupyter notebooks @@ -106,7 +98,13 @@ There are [[./examples/index.html][extensive examples]] available as Jupyter not * Screenshots -The showcase for PyPSA is the [[https://pypsa.org/examples/scigrid-lopf-then-pf-plotly.html][SciGRID example]] which demonstrates + + +Results from a PyPSA simulation can be converted into an interactive +online animation using 
[[https://github.com/PyPSA/PyPSA-animation][PyPSA-animation]], see the [[https://www.pypsa.org/animations/pypsa-eur-30/][PyPSA-Eur-30 example]]. + + +Another showcase for PyPSA is the [[https://pypsa.org/examples/scigrid-lopf-then-pf-plotly.html][SciGRID example]] which demonstrates interactive plots generated with the [[https://plot.ly/python/][plotly]] library. @@ -145,7 +143,7 @@ interactive plots generated with the [[https://plot.ly/python/][plotly]] library * What PyPSA uses under the hood PyPSA is written and tested to be compatible with both Python 2.7 and -Python 3.5. +Python 3.6. It leans heavily on the following Python packages: @@ -176,13 +174,31 @@ PyPSA has a Google Group [[https://groups.google.com/group/pypsa][forum * Citing PyPSA -If you use PyPSA for your research, we would appreciate it if you -would cite the following preprint paper (which has not yet been -through peer review): - -- T. Brown, J. H\ouml{}rsch, D. Schlachtberger, [[https://arxiv.org/abs/1707.09913][PyPSA: Python for Power System Analysis]], 2017, [[https://arxiv.org/abs/1707.09913][preprint arXiv:1707.09913]] - +If you use PyPSA for your research, we would appreciate it if you +would cite the following paper: + +- T. Brown, J. H\ouml{}rsch, D. Schlachtberger, [[https://arxiv.org/abs/1707.09913][PyPSA: Python for + Power System Analysis]], 2018, [[https://openresearchsoftware.metajnl.com/][Journal of Open Research Software]], 6(1), + [[https://arxiv.org/abs/1707.09913][arXiv:1707.09913]], [[https://doi.org/10.5334/jors.188][DOI: 10.5334/jors.188]] + + +Please use the following BibTeX: + +#+BEGIN_SRC + @article{PyPSA, + author = {T. Brown and J. H\"orsch and D. 
Schlachtberger}, + title = {{PyPSA: Python for Power System Analysis}}, + journal = {Journal of Open Research Software}, + volume = {6}, + issue = {1}, + number = {4}, + year = {2018}, + eprint = {1707.09913}, + url = {https://doi.org/10.5334/jors.188}, + doi = {10.5334/jors.188} + } +#+END_SRC If you want to cite a specific PyPSA version, each release of PyPSA is stored on [[https://zenodo.org/][Zenodo]] with a release-specific DOI. This can be found diff --git a/website/publications/index.org b/website/publications/index.org index 696344777..d14560cb7 100644 --- a/website/publications/index.org +++ b/website/publications/index.org @@ -1,12 +1,33 @@ #+TITLE: Python for Power System Analysis: Publications #+OPTIONS: toc:nil no default TOC -There is a preprint paper describing PyPSA (which has not yet been through peer review): -- T. Brown, J. H\ouml{}rsch, D. Schlachtberger, [[https://arxiv.org/abs/1707.09913][PyPSA: Python for Power System Analysis]], 2017, [[https://arxiv.org/abs/1707.09913][preprint arXiv:1707.09913]] + If you use PyPSA for your research, we would appreciate it if you -would cite this preprint paper. +would cite the following paper: + +- T. Brown, J. H\ouml{}rsch, D. Schlachtberger, [[https://arxiv.org/abs/1707.09913][PyPSA: Python for + Power System Analysis]], 2018, [[https://openresearchsoftware.metajnl.com/][Journal of Open Research Software]], 6(1), + [[https://arxiv.org/abs/1707.09913][postprint arXiv:1707.09913]], [[https://doi.org/10.5334/jors.188][DOI: 10.5334/jors.188]] + +Please use the following BibTeX: + +#+BEGIN_SRC + @article{PyPSA, + author = {T. Brown and J. H\"orsch and D. 
Schlachtberger}, + title = {{PyPSA: Python for Power System Analysis}}, + journal = {Journal of Open Research Software}, + volume = {6}, + issue = {1}, + number = {4}, + year = {2018}, + eprint = {1707.09913}, + url = {https://doi.org/10.5334/jors.188}, + doi = {10.5334/jors.188} + } +#+END_SRC + If you want to cite a specific PyPSA version, each release of PyPSA is stored on [[https://zenodo.org/][Zenodo]] with a release-specific DOI. This can be found @@ -16,12 +37,20 @@ linked from the overall PyPSA Zenodo DOI: The following research papers have used PyPSA: -- Markus Groissb\ouml{}ck, Alexandre Gusmao, [[https://arxiv.org/abs/1709.03761][Impact of high renewable penetration scenarios on system reliability: two case studies in the Kingdom of Saudi Arabia]], 2017, [[https://arxiv.org/abs/1709.03761][preprint]] -- J. H\ouml{}rsch, T. Brown, [[https://doi.org/10.1109/EEM.2017.7982024][The role of spatial scale in joint optimisations of generation and transmission for European highly renewable scenarios]], 2017, accepted to [[http://eem2017.com/][14th International Conference on the European Energy Market - EEM 2017]], [[https://arxiv.org/abs/1705.07617][preprint]] -- D. Schlachtberger, T. Brown, S. Schramm, M. Greiner, [[https://doi.org/10.1016/j.energy.2017.06.004][The Benefits of Cooperation in a Highly Renewable European Electricity Network]], Energy, Volume 134, 1 September 2017, Pages 469-481, [[https://arxiv.org/abs/1704.05492][preprint]], [[https://doi.org/10.5281/zenodo.804337][all input data, code, output data on Zenodo]] -- J. H\ouml{}rsch, H. Ronellenfitsch, D. Witthaut, T. Brown, [[https://arxiv.org/abs/1704.01881][Linear Optimal Power Flow Using Cycle Flows]], 2017, [[https://arxiv.org/abs/1704.01881][preprint]] -- Joao Gorenstein Dedecca, Rudi A. Hakvoort, Paulien M. 
Herder, [[https://doi.org/10.1016/j.energy.2017.02.111][Transmission expansion simulation for the European Northern Seas offshore grid]], Energy, Volume 125, 15 April 2017, Pages 805-824 +- T. Brown, D. Schlachtberger, A. Kies, S. Schramm, M. Greiner, [[https://arxiv.org/abs/1801.05290][Synergies of sector coupling and transmission extension in a cost-optimised, highly renewable European energy system]], *Energy*, Volume 160, Pages 720-739, 2018, [[https://arxiv.org/abs/1801.05290][postprint arXiv:1801.05290]], [[https://doi.org/10.1016/j.energy.2018.06.222][DOI: 10.1016/j.energy.2018.06.222]], [[https://zenodo.org/record/1146665][all input data, code and output data on Zenodo]] +- Joao Gorenstein Dedecca, Sara Lumbreras, Andres Ramos, Rudi A. Hakvoort Paulien M. Herder, [[https://doi.org/10.1016/j.eneco.2018.04.037][Expansion planning of the North Sea offshore grid: Simulation of integrated governance constraints]], *Energy Economics*, Volume 72, May 2018, Pages 376-392, [[https://doi.org/10.1016/j.eneco.2018.04.037][DOI: 10.1016/j.eneco.2018.04.037]] +- Tommaso Nesti, Alessandro Zocca, Bert Zwart, [[https://journals.aps.org/prl/abstract/10.1103/PhysRevLett.120.258301][Emergent Failures and Cascades in Power Grids: A Statistical Physics Perspective]], *Physical Review Letters*, Volume 120, 2018, [[https://doi.org/10.1103/PhysRevLett.120.258301][DOI: 10.1103/PhysRevLett.120.258301]] +- Fabian Hofmann, Mirko Sch\auml{}fer, Tom Brown, Jonas H\ouml{}rsch, Stefan Schramm, Martin Greiner, [[https://arxiv.org/abs/1807.07771][Principal flow patterns across renewable electricity networks]], 2018, [[https://arxiv.org/abs/1807.07771][preprint arXiv:1807.07771]] +- Bo Tranberg, Mirko Sch\auml{}fer, Tom Brown, Jonas H\ouml{}rsch, Martin Greiner, [[https://arxiv.org/abs/1806.02549][Flow-based analysis of storage usage in a low-carbon European electricity scenario]], 2018, [[https://arxiv.org/abs/1806.02549][preprint arXiv:1806.02549]] +- Jonas H\ouml{}rsch, Fabian Hofmann, 
David Schlachtberger, Tom Brown, [[https://arxiv.org/abs/1806.01613][PyPSA-Eur: An Open Optimisation Model of the European Transmission System]], 2018, [[https://arxiv.org/abs/1806.01613][preprint arXiv:1806.01613]] +- Markus Schlott, Alexander Kies, Tom Brown, Stefan Schramm, Martin Greiner, [[https://arxiv.org/abs/1805.11673][The Impact of Climate Change on a Cost-Optimal Highly Renewable European Electricity Network]], 2018, [[https://arxiv.org/abs/1805.11673][preprint arXiv:1805.11673]] +- D. Schlachtberger, T. Brown, M. Sch\auml{}fer, S. Schramm, M. Greiner, [[https://arxiv.org/abs/1803.09711][Cost optimal scenarios of a future highly renewable European electricity system: Exploring the influence of weather data, cost parameters and policy constraints]], 2018, [[https://arxiv.org/abs/1803.09711][preprint arXiv:1803.09711]] +- J. H\ouml{}rsch, Joanne Calitz, [[https://arxiv.org/abs/1710.11199][PyPSA-ZA: Investment and operation co-optimization of integrating wind and solar in South Africa at high spatial and temporal detail]], presented at the [[http://windac-africa.com/][WindAc Africa Conference]], 2017, [[https://arxiv.org/abs/1710.11199][preprint arXiv:1710.11199]], [[https://github.com/FRESNA/pypsa-za][code]] +- Markus Groissb\ouml{}ck, Alexandre Gusmao, [[https://arxiv.org/abs/1709.03761][Impact of high renewable penetration scenarios on system reliability: two case studies in the Kingdom of Saudi Arabia]], 2017, [[https://arxiv.org/abs/1709.03761][preprint arXiv:1709.03761]] +- J. H\ouml{}rsch, T. Brown, [[https://doi.org/10.1109/EEM.2017.7982024][The role of spatial scale in joint optimisations of generation and transmission for European highly renewable scenarios]], 2017, [[http://eem2017.com/][14th International Conference on the European Energy Market - EEM 2017]], [[https://arxiv.org/abs/1705.07617][preprint arXiv:1705.07617]], [[https://doi.org/10.1109/EEM.2017.7982024][DOI: 10.1109/EEM.2017.7982024]] +- D. Schlachtberger, T. Brown, S. Schramm, M. 
Greiner, [[https://doi.org/10.1016/j.energy.2017.06.004][The Benefits of Cooperation in a Highly Renewable European Electricity Network]], *Energy*, Volume 134, 1 September 2017, Pages 469-481, [[https://arxiv.org/abs/1704.05492][postprint arXiv:1704.05492]], [[https://doi.org/10.1016/j.energy.2017.06.004][DOI: 10.1016/j.energy.2017.06.004]], [[https://doi.org/10.5281/zenodo.804337][all input data, code, output data on Zenodo]] +- J. H\ouml{}rsch, H. Ronellenfitsch, D. Witthaut, T. Brown, [[https://arxiv.org/abs/1704.01881][Linear Optimal Power Flow Using Cycle Flows]], 2017, [[https://www.journals.elsevier.com/electric-power-systems-research][*Electric Power Systems Research*]], Volume 158, pages 126-135, [[https://arxiv.org/abs/1704.01881][postprint arXiv:1704.01881]], [[https://doi.org/10.1016/j.epsr.2017.12.034][DOI: 10.1016/j.epsr.2017.12.034]] +- Joao Gorenstein Dedecca, Rudi A. Hakvoort, Paulien M. Herder, [[https://doi.org/10.1016/j.energy.2017.02.111][Transmission expansion simulation for the European Northern Seas offshore grid]], *Energy*, Volume 125, 15 April 2017, Pages 805-824, [[https://doi.org/10.1016/j.energy.2017.02.111][DOI: 10.1016/j.energy.2017.02.111]] If you have written a paper or report using PyPSA, please send us the -link so we can add it here by emailing Tom Brown (brown at -fias.uni-frankfurt.de). +link so we can add it here by emailing Tom Brown (firstname.lastname@kit.edu). 
diff --git a/website/theme.css b/website/theme.css index bc05db49f..0fb5d108d 100644 --- a/website/theme.css +++ b/website/theme.css @@ -1,12 +1,17 @@ +@import url(https://fonts.googleapis.com/css?family=Noto+Serif); + +body { + font-family: "Noto Serif", serif; /* wikitribune */ + font-size: 1.1rem; + line-height: 1.8rem; +} + #outer_box { width:760px; margin-left: auto; margin-right: auto; -font-size:120%; -text-align: justify; -line-height: 130%; } @@ -27,26 +32,23 @@ margin: 0 0 20px 40px; h1 { font-size:150%; -line-height:1.4em; } h2 { font-size:120%; -line-height:1.4em; } h3 { font-size:110%; -line-height:1.4em; } #submenu { padding: 0; margin: 0 0 0 0; height: 2em; /* Setting a height makes it act like a block */ - font-size:100%; + font-size:130%; } #submenu li {