Pages

Friday, August 7, 2009

TC SRM358 D1 1000 (SharksDinner)

The problem statement can be accessed via:
http://www.topcoder.com/stat?c=problem_statement&pm=7834&rd=10768

The problem states that there are sharks, each of which can eat another shark provided that all of its statistics (size, speed and intelligence) are at least as good as the other's. We are asked to find the minimum number of sharks that will remain at the end; in other words, this can be viewed as finding the maximum number of sharks that get eaten. This problem easily lends itself to a matching formulation: were it not for the fact that a shark can eat up to 2 other sharks (instead of just one), it would simply be a maximum cardinality bipartite matching problem. However, since the constraint is rather low (a shark eats at most 2 others) we could also duplicate each shark into multiple nodes and still solve it as a bipartite matching problem.

However, we'll solve it using the maximum flow algorithm (which can also be used to solve bipartite matching) by making a slight modification. For each shark we give it an "in" node and an "out" node, and the capacity between these two nodes is exactly 2 (corresponding to the fact that a shark can eat at most 2 others). Using this method allows us to scale the maximum number a shark can eat without increasing the number of nodes (beyond the in/out node expansion). One small detail we need to be careful of is sharks which can eat each other. To handle this, a simple modification to the matching edges (i.e. "shark A can eat shark B") is made: if shark A and shark B have the same statistics, then shark A can eat shark B iff A has a lower index. This resolves the problem of sharks eating each other cyclically.

Now it's just a matter of constructing the network and running the maximum flow algorithm over it. The final answer is simply the number of sharks in the data set minus the ones that got eaten from our matching.


// TopCoder SRM 358 D1 1000 (SharksDinner).
// minSurvivors: given each shark's size, speed and intelligence (parallel
// vectors), returns the minimum possible number of surviving sharks.
class SharksDinner {
public:
int minSurvivors(vector <int>, vector <int>, vector <int>);
};

// Plain record holding one shark's three statistics.
class Shark {
public:
    int size;   // body size
    int speed;  // swimming speed
    int intel;  // intelligence
    // Construct from (size, speed, intelligence).
    Shark(int s, int v, int q) : size(s), speed(v), intel(q) { }
};

// Strict weak ordering on sharks: lexicographic by (size, speed, intel).
bool operator<(const Shark& lhs, const Shark& rhs) {
    if (lhs.size  < rhs.size)  return true;
    if (rhs.size  < lhs.size)  return false;
    if (lhs.speed < rhs.speed) return true;
    if (rhs.speed < lhs.speed) return false;
    return lhs.intel < rhs.intel;
}

// Forward declaration: DFS augmenting-path routine used by maxflow() below.
int augment(int cur, int flow, int ret);

// Adjacency-list entry: a target vertex plus the capacity of the edge to it.
class node {
public:
    int num;  // neighbour vertex id
    int cap;  // capacity of the edge leading to that neighbour
    node() { }  // default: members left uninitialized (matches original)
    node(int vertex, int capacity) : num(vertex), cap(capacity) { }
};

// Strict weak ordering on adjacency entries: by vertex id, then capacity.
bool operator<(const node& lhs, const node& rhs) {
    if (lhs.num < rhs.num) return true;
    if (rhs.num < lhs.num) return false;
    return lhs.cap < rhs.cap;
}

// Global flow-network state. Node count bound: 200 sharks * 3 nodes + 2 = 202.
#define INF 9999999
int sz; // the size of the graph (vertices)
// NOTE(review): cap[][] appears unused — capacities live in adjlist entries;
// confirm before removing.
int cap[202][202]; // capacity matrix
int used[202][202]; // network flow array
bool visited[202]; // marks vertices seen during the current augmenting DFS
vector<vector<node> > adjlist; // adjlist[v] = (neighbour, capacity) edges out of v

int maxflow() {
int flow = 0;
int thisFlow = 0;
memset(visited,false,sizeof(visited));
while ((thisFlow = augment(0, INT_MAX-1, 0)) != 0) {
memset(visited,false,sizeof(visited));
flow += thisFlow;
}
return flow;
}

// DFS for a single augmenting path from vertex `cur` to the sink (sz-1).
// `flow` is the bottleneck capacity found so far on the path; `ret` is
// always passed 0 by callers and becomes the amount of flow pushed (0 if
// no path was found). On success, used[][] is updated along the path:
// forward edges gain flow, reverse traversal cancels existing flow.
int augment(int cur, int flow, int ret) {
// reached the sink: the whole bottleneck can be pushed
if (cur == sz-1) return flow;
visited[cur] = true;
// stop scanning neighbours as soon as a path is found (ret != 0)
for (int k = 0; k < adjlist[cur].size() && ret == 0; k++) {
int i = adjlist[cur][k].num;
int c = adjlist[cur][k].cap;
// forward edge with residual capacity c - used[cur][i];
// note: (ret = augment(...)) assigns as a side effect of the condition
if (c > used[cur][i] && !visited[i] && (ret = augment(i, min(flow,
c-used[cur][i]), 0)) > 0) used[cur][i] += ret;
// otherwise try the residual (backward) edge: cancel flow i -> cur
else if (used[i][cur] > 0 && !visited[i] && (ret = augment(i,
min(flow, used[i][cur]), 0)) > 0)
used[i][cur] -= ret;
}
return ret;
}

// Builds the flow network described in the write-up and returns the
// minimum number of surviving sharks = total sharks - (sharks eaten).
//
// Network layout (n = number of sharks):
//   0                      source
//   1 .. n                 shark "in" nodes
//   n+1 .. 2n              shark "out" nodes (in->out capacity 2: a shark
//                          eats at most 2 others)
//   2n+1 .. 3n             prey nodes (prey->sink capacity 1: a shark is
//                          eaten at most once)
//   3n+1                   sink
//
// Fixes: removed the unused local `res`; loops use size_t to avoid
// signed/unsigned comparison; used[][] is cleared so repeated calls in
// one process don't see stale flow.
int SharksDinner::minSurvivors(vector <int> size, vector <int> speed,
vector <int> intelligence) {
    vector<Shark> sharkData;
    for (size_t i = 0; i < size.size(); i++) {
        sharkData.push_back(Shark(size[i], speed[i], intelligence[i]));
    }
    size_t n = sharkData.size();

    memset(used, 0, sizeof(used)); // reset flow state between invocations
    adjlist = vector<vector<node> >(n * 3 + 2, vector<node>());

    // source -> each shark's "in" node (unbounded)
    for (size_t i = 0; i < n; i++) {
        adjlist[0].push_back(node(i + 1, INF));
    }
    // "in" -> "out" with capacity 2: each shark eats at most 2 others
    for (size_t i = 0; i < n; i++) {
        adjlist[i + 1].push_back(node(i + 1 + n, 2));
    }
    // predator "out" node -> prey node for every (i eats j) pair
    for (size_t i = 0; i < n; i++) {
        for (size_t j = 0; j < n; j++) {
            if (i == j) continue;
            // i must dominate j in every statistic
            if (sharkData[i].size < sharkData[j].size ||
                sharkData[i].speed < sharkData[j].speed ||
                sharkData[i].intel < sharkData[j].intel) continue;
            // identical stats: only the lower-indexed shark may eat,
            // breaking mutual/cyclic "eats" relations
            if (sharkData[i].size == sharkData[j].size &&
                sharkData[i].speed == sharkData[j].speed &&
                sharkData[i].intel == sharkData[j].intel && i > j) continue;
            adjlist[i + 1 + n].push_back(node(j + 1 + n * 2, INF));
            adjlist[j + 1 + n * 2].push_back(node(i + 1 + n, 0)); // residual
        }
    }
    // prey node -> sink with capacity 1: each shark eaten at most once
    for (size_t i = 0; i < n; i++) {
        adjlist[i + 1 + n * 2].push_back(node(3 * n + 1, 1));
    }
    sz = 3 * n + 2;

    // survivors = total sharks - maximum number eaten (= max flow)
    return (int)n - maxflow();
}

No comments:

Post a Comment