Skip to content
Open
Show file tree
Hide file tree
Changes from 18 commits
Commits
Show all changes
42 commits
Select commit Hold shift + click to select a range
5178daa
Improve DC Woodbury security analysis performances
geofjamg Dec 31, 2024
4d951e9
Wip
geofjamg Dec 31, 2024
34bb1f5
Wip
geofjamg Dec 31, 2024
e1406d3
Wip
geofjamg Dec 31, 2024
7e94f45
Wip
geofjamg Dec 31, 2024
9bfd926
Wip
geofjamg Dec 31, 2024
747756c
Wip
geofjamg Jan 2, 2025
a280b22
Fix
geofjamg Jan 2, 2025
48808a6
Wip
geofjamg Jan 2, 2025
b47022a
Wip
geofjamg Jan 2, 2025
0774511
Wip
geofjamg Jan 2, 2025
31ed43d
Wip
geofjamg Jan 2, 2025
a30d09c
Wip
geofjamg Jan 2, 2025
a8fa649
Proposal target vector management.
annetill Jan 2, 2025
5759829
Wip
geofjamg Jan 2, 2025
1c0525b
Wip
geofjamg Jan 2, 2025
eb6c3d8
Fix.
annetill Jan 3, 2025
c333e69
Merge remote-tracking branch 'origin/improve_dc_woodbury_perfs' into …
annetill Jan 3, 2025
4ba6add
Merge branch 'refs/heads/main' into improve_dc_woodbury_perfs
p-arvy Jan 15, 2025
5a49a59
Merge branch 'refs/heads/main' into improve_dc_woodbury_perfs
p-arvy Jan 17, 2025
4a3f794
Add proposition to add pst actions in NetworkDcVectorState
p-arvy Jan 21, 2025
3b5217d
Add proposition to remove branch result when branch is disabled
p-arvy Jan 21, 2025
fcceedb
Merge branch 'refs/heads/main' into improve_dc_woodbury_perfs
p-arvy Jan 24, 2025
bb82615
wip
p-arvy Jan 27, 2025
3f9c50b
wip
p-arvy Jan 27, 2025
b847854
wip
p-arvy Jan 27, 2025
26d7b30
Merge branch 'refs/heads/main' into improve_dc_woodbury_perfs
p-arvy Jan 30, 2025
3d5cda3
wip
p-arvy Jan 31, 2025
7b8e5c0
Merge branch 'refs/heads/main' into improve_dc_woodbury_perfs
p-arvy Jan 31, 2025
016358d
Avoid creating branch result if disabled
p-arvy Jan 31, 2025
185187a
Merge branch 'refs/heads/main' into improve_dc_woodbury_perfs
p-arvy Feb 7, 2025
2ceafa6
Clean
p-arvy Feb 7, 2025
3cf3717
Use NetworkDcVectorState to calculate pre contingency results
p-arvy Feb 7, 2025
5e5da40
Merge branch 'refs/heads/main' into improve_dc_woodbury_perfs
p-arvy Feb 7, 2025
77f1eba
wip
p-arvy Feb 11, 2025
0d1cdea
Only evaluate violations if most restrictive limit is violated
p-arvy Feb 11, 2025
4f68f8e
clean
p-arvy Feb 11, 2025
4307ce6
Merge branch 'refs/heads/main' into improve_dc_woodbury_perfs
p-arvy Feb 11, 2025
daca8e9
Clean
p-arvy Feb 11, 2025
e740167
Add hvdc without power in ConnectivityAnalysisResult
p-arvy Feb 11, 2025
1944147
Refactor WoodburyDcSecurityAnalysis
p-arvy Feb 11, 2025
975e20a
small clean
p-arvy Feb 11, 2025
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -119,6 +119,10 @@ protected double ph2() {

protected abstract double eval(double ph1, double ph2, double a1);

/**
 * Evaluates this branch flow equation term against an explicit state vector,
 * instead of the state vector currently attached to the equation system.
 * Delegates to {@link #eval(double, double, double)} with the angles and phase shift
 * read from {@code sv}.
 *
 * @param sv the state vector to read ph1, ph2 and a1 from
 * @return the evaluated flow value
 */
public double eval(StateVector sv) {
return eval(ph1(sv), ph2(sv), a1(sv));
}

@Override
public double eval() {
return eval(ph1(), ph2(), a1());
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,10 +15,7 @@
import com.powsybl.openloadflow.dc.equations.DcVariableType;
import com.powsybl.openloadflow.equations.EquationSystem;
import com.powsybl.openloadflow.graph.GraphConnectivity;
import com.powsybl.openloadflow.network.ElementType;
import com.powsybl.openloadflow.network.LfBranch;
import com.powsybl.openloadflow.network.LfBus;
import com.powsybl.openloadflow.network.LfNetwork;
import com.powsybl.openloadflow.network.*;
import com.powsybl.openloadflow.network.impl.PropagatedContingency;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
Expand All @@ -38,31 +35,38 @@ public final class ConnectivityBreakAnalysis {

public static final class ConnectivityAnalysisResult {

private PropagatedContingency contingency;
private final PropagatedContingency propagatedContingency;

private final LfNetwork network;

private final Set<String> elementsToReconnect;

private final Set<LfBus> disabledBuses;

private final Set<LfBus> slackConnectedComponent;
private final Set<LfBus> slackConnectedComponentBuses; // buses of connected component where the slack is

private final Set<LfBranch> partialDisabledBranches; // branches disabled because of connectivity loss.

private ConnectivityAnalysisResult(Set<String> elementsToReconnect,
GraphConnectivity<LfBus, LfBranch> connectivity,
LfNetwork lfNetwork) {
this.elementsToReconnect = elementsToReconnect;
slackConnectedComponent = connectivity.getConnectedComponent(lfNetwork.getSlackBus());
disabledBuses = connectivity.getVerticesRemovedFromMainComponent();
partialDisabledBranches = connectivity.getEdgesRemovedFromMainComponent();
private final int createdSynchronousComponents;

/**
 * Builds a result for a contingency that does not break connectivity:
 * no elements to reconnect, no disabled buses/branches, no created synchronous component.
 */
public ConnectivityAnalysisResult(PropagatedContingency propagatedContingency, LfNetwork network) {
this(propagatedContingency, network, Collections.emptySet(), Collections.emptySet(), Collections.emptySet(), Collections.emptySet(), 0);
}

public PropagatedContingency getPropagatedContingency() {
return contingency;
/**
 * Builds a result for a contingency whose connectivity impact has been precomputed.
 *
 * @param propagatedContingency          the contingency this result belongs to (non-null)
 * @param network                        the load-flow network (non-null)
 * @param elementsToReconnect            element ids to reconnect to restore main-component connectivity
 * @param disabledBuses                  buses removed from the main connected component
 * @param slackConnectedComponentBuses   buses of the connected component containing the slack bus
 * @param partialDisabledBranches        branches disabled because of connectivity loss
 * @param createdSynchronousComponents   number of synchronous components created by the contingency
 */
public ConnectivityAnalysisResult(PropagatedContingency propagatedContingency, LfNetwork network, Set<String> elementsToReconnect,
Set<LfBus> disabledBuses, Set<LfBus> slackConnectedComponentBuses,
Set<LfBranch> partialDisabledBranches, int createdSynchronousComponents) {
this.propagatedContingency = Objects.requireNonNull(propagatedContingency);
this.network = Objects.requireNonNull(network);
// NOTE(review): the collection parameters are stored as-is (no null check, no defensive copy) —
// callers are expected to pass immutable or connectivity-owned sets.
this.elementsToReconnect = elementsToReconnect;
this.disabledBuses = disabledBuses;
this.slackConnectedComponentBuses = slackConnectedComponentBuses;
this.partialDisabledBranches = partialDisabledBranches;
this.createdSynchronousComponents = createdSynchronousComponents;
}

public void setPropagatedContingency(PropagatedContingency contingency) {
this.contingency = contingency;
/** Returns the contingency this connectivity analysis result was computed for. */
public PropagatedContingency getPropagatedContingency() {
return propagatedContingency;
}

public Set<String> getElementsToReconnect() {
Expand All @@ -73,13 +77,22 @@ public Set<LfBus> getDisabledBuses() {
return disabledBuses;
}

public Set<LfBus> getSlackConnectedComponent() {
return slackConnectedComponent;
/** Returns the buses of the connected component containing the slack bus. */
public Set<LfBus> getSlackConnectedComponentBuses() {
return slackConnectedComponentBuses;
}

/** Returns the branches disabled because of the connectivity loss. */
public Set<LfBranch> getPartialDisabledBranches() {
return partialDisabledBranches;
}

/**
 * Converts the propagated contingency into an {@link LfContingency}, reusing the connectivity
 * impact already computed by this analysis instead of recomputing it.
 *
 * @return the contingency applied to the network, or empty if it has no impact
 */
public Optional<LfContingency> toLfContingency() {
    // The connectivity-loss supplier ignores its arguments: the impact was precomputed by the
    // connectivity break analysis. Lambda parameters renamed so they do not shadow the enclosing
    // 'network' field.
    return propagatedContingency.toLfContingency(network, false,
            (lfNetwork, contingencyId, branchesToOpen, relocateSlackBus) ->
                    new PropagatedContingency.ContingencyConnectivityLossImpact(true,
                            createdSynchronousComponents,
                            disabledBuses,
                            Collections.emptySet())); // FIXME
}
}

public record ConnectivityBreakAnalysisResults(List<PropagatedContingency> nonBreakingConnectivityContingencies,
Expand Down Expand Up @@ -160,8 +173,9 @@ private static List<ConnectivityAnalysisResult> computeConnectivityData(LfNetwor
} else {
// only compute for factors that have to be computed for this contingency lost
Set<String> elementsToReconnect = computeElementsToReconnect(connectivity, breakingConnectivityElements);
ConnectivityAnalysisResult connectivityAnalysisResult = new ConnectivityAnalysisResult(elementsToReconnect, connectivity, lfNetwork);
connectivityAnalysisResult.setPropagatedContingency(contingency);
int createdSynchronousComponents = connectivity.getNbConnectedComponents() - 1;
ConnectivityAnalysisResult connectivityAnalysisResult = new ConnectivityAnalysisResult(contingency, lfNetwork, elementsToReconnect, connectivity.getVerticesRemovedFromMainComponent(),
connectivity.getConnectedComponent(lfNetwork.getSlackBus()), connectivity.getEdgesRemovedFromMainComponent(), createdSynchronousComponents);
connectivityAnalysisResults.add(connectivityAnalysisResult);
}
} finally {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -13,10 +13,7 @@
import com.powsybl.math.matrix.LUDecomposition;
import com.powsybl.openloadflow.dc.DcLoadFlowContext;
import com.powsybl.openloadflow.dc.DcLoadFlowParameters;
import com.powsybl.openloadflow.dc.equations.AbstractClosedBranchDcFlowEquationTerm;
import com.powsybl.openloadflow.dc.equations.ClosedBranchSide1DcFlowEquationTerm;
import com.powsybl.openloadflow.dc.equations.DcEquationSystemCreationParameters;
import com.powsybl.openloadflow.dc.equations.DcEquationType;
import com.powsybl.openloadflow.dc.equations.*;
import com.powsybl.openloadflow.equations.Equation;
import com.powsybl.openloadflow.network.*;

Expand Down Expand Up @@ -65,14 +62,15 @@ public WoodburyEngine(DcEquationSystemCreationParameters creationParameters, Lis
* Note that it does not update the state vector and the network at the end (because we don't need it to just evaluate a few equations).
*/
public static double[] runDcLoadFlowWithModifiedTargetVector(DcLoadFlowContext loadFlowContext, DisabledNetwork disabledNetwork, ReportNode reportNode) {
return runDcLoadFlowWithModifiedTargetVector(loadFlowContext, disabledNetwork, reportNode, Collections.emptyList());
return runDcLoadFlowWithModifiedTargetVector(loadFlowContext, disabledNetwork, reportNode, null, Collections.emptyList());
}

/**
* A simplified version of DcLoadFlowEngine that supports on the fly bus and branch disabling, and pst actions.
* Note that it does not update the state vector and the network at the end (because we don't need it to just evaluate a few equations).
*/
public static double[] runDcLoadFlowWithModifiedTargetVector(DcLoadFlowContext loadFlowContext, DisabledNetwork disabledNetwork, ReportNode reportNode, List<LfAction> pstActions) {
public static double[] runDcLoadFlowWithModifiedTargetVector(DcLoadFlowContext loadFlowContext, DisabledNetwork disabledNetwork, ReportNode reportNode,
LfContingency contingency, List<LfAction> pstActions) {
Collection<LfBus> remainingBuses;
if (disabledNetwork.getBuses().isEmpty()) {
remainingBuses = loadFlowContext.getNetwork().getBuses();
Expand All @@ -83,6 +81,7 @@ public static double[] runDcLoadFlowWithModifiedTargetVector(DcLoadFlowContext l

DcLoadFlowParameters parameters = loadFlowContext.getParameters();
if (parameters.isDistributedSlack()) {
// FIXME: distribution keys have changed...
Copy link
Copy Markdown
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

@p-arvy here is why we stop: as slack distribution keys have changed, the target vector has to be updated. Doing it by hand is really complex and in a way a code duplication, so it could be nice if you test a DC network save/restore with the smallest possible number of fields (only buses, some branches, all branches, which attributes inside).

Copy link
Copy Markdown
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Okay thank you for the feedback @annetill. I will take a look.

Copy link
Copy Markdown
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Hello @annetill, @geofjamg, my understanding is that we want to return to applying the LfContingency, but this time saving/modifying/restoring the least possible number of fields.

In doing so, I have the impression that it is sufficient to apply the generators/loads/hvdcs changes due to the LfContingency, and save/restore the BusDcState of the LfNetwork, as the impact of changes on other elements (lost buses/PSTs) is already accounted for in the RHS override by directly modifying the target vector array (in method WoodburyEngine.runDcLoadFlowWithModifiedTargetVector). What do you think ?

I have pushed a commit in this regard (note that the code is not yet clean). Please feel free to comment.

Copy link
Copy Markdown
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I also rolled back to have the case distinction when the contingency causes generator/load losses or not (to avoid having to save/apply/restore the modifications). This reduces significantly execution time in case of branch contingencies.

distributeSlack(loadFlowContext.getNetwork(), remainingBuses, parameters.getBalanceType(), parameters.getNetworkParameters().isUseActiveLimits());
}

Expand All @@ -109,6 +108,35 @@ public static double[] runDcLoadFlowWithModifiedTargetVector(DcLoadFlowContext l
.forEach(column -> targetVectorArray[column] = 0);
}

if (contingency != null) {
// apply lost generators
for (LfGenerator generator : contingency.getLostGenerators()) {
LfBus lfBus = generator.getBus();
double lostTargetP = generator.getTargetP();
var eq = loadFlowContext.getEquationSystem().getEquation(lfBus.getNum(), DcEquationType.BUS_TARGET_P);
if (eq.isPresent()) {
int column = eq.get().getColumn();
if (column != -1) { // inactive, could be slack bus
targetVectorArray[column] -= lostTargetP;
}
}
}
// apply lost loads
for (var e : contingency.getLostLoads().entrySet()) {
LfLoad load = e.getKey();
LfBus lfBus = load.getBus();
double lostTargetP = e.getValue().getPowerShift().getActive();
var eq = loadFlowContext.getEquationSystem().getEquation(lfBus.getNum(), DcEquationType.BUS_TARGET_P);
if (eq.isPresent()) {
int column = eq.get().getColumn();
if (column != -1) { // inactive, could be slack bus
targetVectorArray[column] += lostTargetP;
}
}
}
// TODO: apply hvdc without power
}

if (!pstActions.isEmpty()) {
// set transformer phase shift to new shifting value
pstActions.stream()
Expand Down
2 changes: 2 additions & 0 deletions src/main/java/com/powsybl/openloadflow/network/LfBus.java
Original file line number Diff line number Diff line change
Expand Up @@ -90,6 +90,8 @@ enum QLimitType {

double getTargetQ();

void invalidateLoadTargetP();

double getLoadTargetP();

double getNonFictitiousLoadTargetP();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -30,9 +30,13 @@ public abstract class AbstractLfBranch extends AbstractElement implements LfBran

protected final LfBus bus2;

private final Map<LimitType, List<LfLimit>> limits1 = new EnumMap<>(LimitType.class);
private List<LfLimit> currentLimits1;
private List<LfLimit> activePowerLimits1;
private List<LfLimit> apparentPowerLimits1;

private final Map<LimitType, List<LfLimit>> limits2 = new EnumMap<>(LimitType.class);
private List<LfLimit> currentLimits2;
private List<LfLimit> activePowerLimits2;
private List<LfLimit> apparentPowerLimits2;

protected final PiModel piModel;

Expand Down Expand Up @@ -87,7 +91,7 @@ public Optional<ThreeSides> getOriginalSide() {
* The resulting list will contain the permanent limit
*/
protected static List<LfLimit> createSortedLimitsList(LoadingLimits loadingLimits, LfBus bus, double[] limitReductions) {
LinkedList<LfLimit> sortedLimits = new LinkedList<>();
List<LfLimit> sortedLimits = new ArrayList<>(3);
if (loadingLimits != null) {
double toPerUnit = getScaleForLimitType(loadingLimits.getLimitType(), bus);

Expand All @@ -99,21 +103,21 @@ protected static List<LfLimit> createSortedLimitsList(LoadingLimits loadingLimit
// https://javadoc.io/doc/com.powsybl/powsybl-core/latest/com/powsybl/iidm/network/CurrentLimits.html
double reduction = limitReductions.length == 0 ? 1d : limitReductions[i + 1]; // Temporary limit's reductions are stored starting from index 1 in `limitReductions`
double originalValuePerUnit = temporaryLimit.getValue() * toPerUnit;
sortedLimits.addFirst(LfLimit.createTemporaryLimit(temporaryLimit.getName(), temporaryLimit.getAcceptableDuration(),
sortedLimits.add(0, LfLimit.createTemporaryLimit(temporaryLimit.getName(), temporaryLimit.getAcceptableDuration(),
originalValuePerUnit, reduction));
}
i++;
}
double reduction = limitReductions.length == 0 ? 1d : limitReductions[0];
sortedLimits.addLast(LfLimit.createPermanentLimit(loadingLimits.getPermanentLimit() * toPerUnit, reduction));
sortedLimits.add(LfLimit.createPermanentLimit(loadingLimits.getPermanentLimit() * toPerUnit, reduction));
}
if (sortedLimits.size() > 1) {
// we only make that fix if there is more than a permanent limit attached to the branch.
for (int i = sortedLimits.size() - 1; i > 0; i--) {
// From the permanent limit to the most serious temporary limit.
sortedLimits.get(i).setAcceptableDuration(sortedLimits.get(i - 1).getAcceptableDuration());
}
sortedLimits.getFirst().setAcceptableDuration(0);
sortedLimits.get(0).setAcceptableDuration(0);
}
return sortedLimits;
}
Expand All @@ -133,22 +137,76 @@ public LfBus getBus2() {
return bus2;
}

/**
 * Returns the cached side-1 limits for the given limit type, or {@code null} when they have
 * not been computed yet. Only ACTIVE_POWER, APPARENT_POWER and CURRENT are supported.
 */
private List<LfLimit> getLimits1(LimitType type) {
    return switch (type) {
        case ACTIVE_POWER -> activePowerLimits1;
        case APPARENT_POWER -> apparentPowerLimits1;
        case CURRENT -> currentLimits1;
        default -> throw new UnsupportedOperationException(String.format("Getting limits of type %s is not supported.", type));
    };
}

/**
 * Stores the computed side-1 limits for the given limit type.
 * Only ACTIVE_POWER, APPARENT_POWER and CURRENT are supported.
 */
private void setLimits1(LimitType type, List<LfLimit> limits) {
    switch (type) {
        case ACTIVE_POWER -> activePowerLimits1 = limits;
        case APPARENT_POWER -> apparentPowerLimits1 = limits;
        case CURRENT -> currentLimits1 = limits;
        // Fixed copy-paste defect: this is a setter, the message previously said "Getting".
        default -> throw new UnsupportedOperationException(String.format("Setting limits of type %s is not supported.", type));
    }
}

/**
 * Returns the cached side-2 limits for the given limit type, or {@code null} when they have
 * not been computed yet. Only ACTIVE_POWER, APPARENT_POWER and CURRENT are supported.
 */
private List<LfLimit> getLimits2(LimitType type) {
    return switch (type) {
        case ACTIVE_POWER -> activePowerLimits2;
        case APPARENT_POWER -> apparentPowerLimits2;
        case CURRENT -> currentLimits2;
        default -> throw new UnsupportedOperationException(String.format("Getting limits of type %s is not supported.", type));
    };
}

/**
 * Stores the computed side-2 limits for the given limit type.
 * Only ACTIVE_POWER, APPARENT_POWER and CURRENT are supported.
 */
private void setLimits2(LimitType type, List<LfLimit> limits) {
    switch (type) {
        case ACTIVE_POWER -> activePowerLimits2 = limits;
        case APPARENT_POWER -> apparentPowerLimits2 = limits;
        case CURRENT -> currentLimits2 = limits;
        // Fixed copy-paste defect: this is a setter, the message previously said "Getting".
        default -> throw new UnsupportedOperationException(String.format("Setting limits of type %s is not supported.", type));
    }
}

public <T extends LoadingLimits> List<LfLimit> getLimits1(LimitType type, Supplier<Optional<T>> loadingLimitsSupplier, LimitReductionManager limitReductionManager) {
// It is possible to apply the reductions here since the only supported ContingencyContext for LimitReduction is ALL.
return limits1.computeIfAbsent(type, v -> {
var limits = getLimits1(type);
if (limits == null) {
// It is possible to apply the reductions here since the only supported ContingencyContext for LimitReduction is ALL.
var loadingLimits = loadingLimitsSupplier.get().orElse(null);
return createSortedLimitsList(loadingLimits, bus1,
limits = createSortedLimitsList(loadingLimits, bus1,
getLimitReductions(TwoSides.ONE, limitReductionManager, loadingLimits));
});
setLimits1(type, limits);
}
return limits;
}

public <T extends LoadingLimits> List<LfLimit> getLimits2(LimitType type, Supplier<Optional<T>> loadingLimitsSupplier, LimitReductionManager limitReductionManager) {
// It is possible to apply the reductions here since the only supported ContingencyContext for LimitReduction is ALL.
return limits2.computeIfAbsent(type, v -> {
var limits = getLimits2(type);
if (limits == null) {
// It is possible to apply the reductions here since the only supported ContingencyContext for LimitReduction is ALL.
var loadingLimits = loadingLimitsSupplier.get().orElse(null);
return createSortedLimitsList(loadingLimits, bus2,
limits = createSortedLimitsList(loadingLimits, bus2,
getLimitReductions(TwoSides.TWO, limitReductionManager, loadingLimits));
});
setLimits2(type, limits);
}
return limits;
}

@Override
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -65,6 +65,8 @@ public abstract class AbstractLfBus extends AbstractElement implements LfBus {

protected final List<LfLoad> loads = new ArrayList<>();

protected Double loadTargetP;

protected final List<LfBranch> branches = new ArrayList<>();

protected final List<LfHvdc> hvdcs = new ArrayList<>();
Expand Down Expand Up @@ -384,7 +386,10 @@ public void invalidateGenerationTargetP() {
@Override
public double getGenerationTargetP() {
if (generationTargetP == null) {
generationTargetP = generators.stream().mapToDouble(LfGenerator::getTargetP).sum();
generationTargetP = 0.0;
for (LfGenerator generator : generators) {
generationTargetP += generator.getTargetP();
}
}
return generationTargetP;
}
Expand All @@ -405,11 +410,20 @@ public void setGenerationTargetQ(double generationTargetQ) {
}
}

/**
 * Invalidates the cached total load target P so it is recomputed from the bus loads
 * on the next call to {@link #getLoadTargetP()}.
 */
@Override
public void invalidateLoadTargetP() {
loadTargetP = null;
}

@Override
public double getLoadTargetP() {
return loads.stream()
.mapToDouble(load -> load.getTargetP() * load.getLoadModel().flatMap(lm -> lm.getExpTermP(0).map(LfLoadModel.ExpTerm::c)).orElse(1d))
.sum();
if (loadTargetP == null) {
loadTargetP = 0.0;
for (LfLoad load : loads) {
loadTargetP += load.getTargetP() * load.getLoadModel().flatMap(lm -> lm.getExpTermP(0).map(LfLoadModel.ExpTerm::c)).orElse(1d);
}
}
return loadTargetP;
}

@Override
Expand Down
Loading