Network flow: the current arc optimization in Dinic's algorithm

In Dinic's algorithm, each DFS walks a node's adjacency list from the very beginning, so an edge may be scanned over and over even after it can no longer carry any flow. Can we somehow "delete" such an edge so it is never scanned again?

The answer is yes, and the trick that does it is the current arc optimization.

In fact, I first ran into this optimization a long time ago, back when I was studying Euler tours.

Each augmentation can be seen as "draining" the path it uses: once drained, that path cannot supply any more flow. If every DFS keeps rescanning these drained edges, a lot of time is wasted. So, for each node, we record which edge we have "squeezed" up to, and the next DFS starts augmenting directly from that edge, which saves a lot of time. That record is the current arc, and this is the current arc optimization.

To implement it, copy the head array into a cur array each time before running the DFS (this happens inside the BFS that rebuilds the level graph), and let the DFS advance cur[u] as the edges of u get used up.
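The two key fragments look like this (same identifiers as in the full program below; this is just an excerpt, not a standalone snippet):

// in bfs(): after rebuilding the level graph, reset every node's current arc
for(int i=1;i<=n;i++){
	dep[i]=0;
	cur[i]=head[i];
}

// in dinic(): scan from cur[u] instead of head[u]
for(int i=cur[u];i&&rest;i=star[i].nxt){
	cur[u]=i;   // edges before i are already drained; the next visit to u resumes here
	// ...push flow through edge i as usual...
}

The complete program follows.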

#include <bits/stdc++.h>
#define int long long   // every int becomes 64-bit; this is also why main below is declared "signed"
using namespace std;
struct littlestar{   // forward-star (linked adjacency list) edge
	int to;          // endpoint of the edge
	int nxt;         // next edge leaving the same node
	int w;           // remaining capacity
}star[10010];
int head[10010],cur[10010],cnt=1;   // cur[] holds each node's current arc; cnt starts at 1 so paired edges sit at indices 2/3, 4/5, ... and i^1 is the reverse of edge i
void add(int u,int v,int w)   // add a directed edge u -> v with capacity w
{
	star[++cnt].to=v;
	star[cnt].nxt=head[u];
	star[cnt].w=w;
	head[u]=cnt;
}
int s,t;
queue<int> q;
int n;
int dep[10010];
bool bfs()   // build the level graph; returns true if t is still reachable
{
	for(int i=1;i<=n;i++){
		dep[i]=0;
		cur[i]=head[i];   // reset every node's current arc to the first edge in its list
	}
	while(q.size()) q.pop();
	q.push(s);
	dep[s]=1;
	while(q.size()){
		int u=q.front();
		q.pop();
		for(int i=head[u];i;i=star[i].nxt){
			int v=star[i].to;
			if(!dep[v]&&star[i].w){   // unvisited and the edge still has spare capacity
				dep[v]=dep[u]+1;
				q.push(v);
				if(v==t) return 1;
			}
		}
	}
	return 0;
}
int m;
int dinic(int u,int flow)   // try to push at most `flow` units from u towards t in the level graph
{
	if(u==t) return flow;
	int rest=flow,tmp;
	for(int i=cur[u];i&&rest;i=star[i].nxt){   // start from the current arc, not from head[u]
		cur[u]=i;   // remember how far we got: edges before i are already drained in this phase
		int v=star[i].to;
		if(dep[v]==dep[u]+1&&star[i].w){
			tmp=dinic(v,min(star[i].w,rest));
			if(!tmp) dep[v]=0;   // v can push nothing more this phase, drop it from the level graph
			star[i].w-=tmp;
			star[i^1].w+=tmp;   // return the pushed flow to the reverse edge
			rest-=tmp;
		}
	}
	return flow-rest;
}
signed main()
{
	cin>>n>>m>>s>>t;
	for(int i=1;i<=m;i++){
		int u,v,w;
		scanf("%lld%lld%lld",&u,&v,&w);   // %lld, since every int is actually long long
		add(u,v,w);   // forward edge with capacity w
		add(v,u,0);   // reverse edge with capacity 0
	}
	int maxflow=0;
	while(bfs()){   // one phase per level graph
		int flow;
		while(flow=dinic(s,LLONG_MAX)) maxflow+=flow;
	}
	cout<<maxflow;
}
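As a quick sanity check (a small test case I made up by hand, using the input format the program reads: n m s t on the first line, then m lines of u v w), the network

4 5 1 4
1 2 10
1 3 10
2 3 2
2 4 8
3 4 10

should make the program print 18: the cut {1,2,3} / {4} has capacity 8 + 10 = 18, and sending 8 units along 1-2-4 plus 10 units along 1-3-4 achieves it.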

 


Originally posted at www.cnblogs.com/kamimxr/p/11607636.html