这题的关键在于将 a+b+c=d 转换为 a+b=d-c，然后把所有 a+b 的情况全部求出来并排序，再从大到小枚举 d 和 c，二分查找 d-c。注意相同和值的配对可能不止一个，查找命中后需要在整段相等区间内寻找与 d、c 都不同的配对。
Sumsets
Description Given S, a set of integers, find the largest d such that a + b + c = d where a, b, c, and d are distinct elements of S.
Input Several S, each consisting of a line containing an integer 1 <= n <= 1000 indicating the number of elements in S, followed by the elements of S, one per line. Each element of S is a distinct integer between -536870912 and +536870911 inclusive. The last line of input contains 0.
Output For each S, a single line containing d, or a single line containing "no solution".
Sample Input

5
2 3 5 7 12
5
2 16 64 256 1024
0

Sample Output

12
no solution
#include <stdio.h>
#include <algorithm>
#include <iostream>
using namespace std;

/*
 * POJ 2549 "Sumsets": given a set S of distinct integers, find the largest
 * d in S such that a + b + c = d for distinct a, b, c, d in S.
 *
 * Idea: rewrite a + b + c = d as a + b = d - c.  Precompute every pairwise
 * sum a + b (remembering the two addends), sort the sums, then try each d
 * from largest to smallest and each c, binary-searching the sum table for
 * d - c and accepting any pair whose addends differ from both d and c.
 */

// One precomputed pair: addends x, y and their sum key.
struct node {
    int x, y, key;
} k[1001000];          // n*(n-1)/2 <= 499500 pairs for n <= 1000

int g[1010];           // the elements of S
int s;                 // |S|

// Strict weak ordering by sum, for sort() and equal_range().
static bool cmp(const node &a, const node &b) { return a.key < b.key; }

// Does pair p avoid both values d and c?  Elements of S are distinct, so
// comparing values is equivalent to comparing identities.
static bool usable(int d, int c, const node &p) {
    return p.x != d && p.y != d && p.x != c && p.y != c;
}

// Search for the largest d with a + b + c = d.  Returns 1 and stores the
// answer in *ans on success, 0 if no solution exists.  Reads the globals
// s and g[0..s-1]; sorts g ascending and rebuilds the pair table k.
static int solve(int *ans) {
    int cnt = 0;
    for (int i = 0; i < s - 1; i++)
        for (int j = i + 1; j < s; j++) {
            k[cnt].x = g[i];
            k[cnt].y = g[j];
            k[cnt].key = g[i] + g[j];
            cnt++;
        }
    sort(k, k + cnt, cmp);
    sort(g, g + s);

    for (int i = s - 1; i >= 0; i--) {          // candidate d = g[i], largest first
        for (int j = s - 1; j >= 0; j--) {      // candidate c = g[j]
            if (i == j) continue;
            node probe = {0, 0, g[i] - g[j]};   // need a + b = d - c
            // BUG FIX: the old code binary-searched by hand and then only
            // inspected k[mid-1..mid+1].  The run of pairs with this exact
            // sum can be longer than three entries, and the valid pair can
            // sit outside that window.  Scan the whole equal-sum run.  At
            // most two pairs in the run can be unusable (the unique pair
            // containing d and the unique pair containing c), so this loop
            // performs at most three iterations before succeeding or the
            // run ends.
            pair<node *, node *> r = equal_range(k, k + cnt, probe, cmp);
            for (node *p = r.first; p != r.second; ++p) {
                if (usable(g[i], g[j], *p)) {
                    *ans = g[i];
                    return 1;
                }
            }
        }
    }
    return 0;
}

int main() {
    // Check scanf's result explicitly so malformed or truncated input
    // terminates the loop instead of reusing a stale value of s.
    while (scanf("%d", &s) == 1 && s) {
        for (int i = 0; i < s; i++)
            scanf("%d", &g[i]);
        int d;
        if (solve(&d))
            printf("%d\n", d);
        else
            printf("no solution\n");
    }
    return 0;
}