question:
Design an O(n^2) time algorithm to find the longest monotonically increasing subsequence of a sequence of n numbers.
Method 1: Longest common subsequence.
Ideas: 1: Copy the array a to b;
2: sort b;
3: Remove duplicates from array b (de-duplication is required because a strictly increasing subsequence can never repeat a value); a hash/presence table is used here to de-duplicate.
4: Find the longest common subsequence of a and b arrays.
See https://blog.csdn.net/m0_38015368/article/details/79835163 for the longest common subsequence method
Code:
// Longest (strictly) increasing subsequence in O(n^2):
// LIS(a) == LCS(a, sort+dedup(a)). Reads n and n numbers from stdin,
// prints one LIS to stdout.
#include <bits/stdc++.h>
using namespace std;

int a[1024];          // original sequence (1-indexed)
int b[1024];          // sorted, de-duplicated copy of a (1-indexed)
int ans[1024][1024];  // backtracking table: which sub-problem c[i][j] came from
int c[1024][1024];    // c[i][j] = LCS length of a[1..i] and b[1..j]
int n;                // length of a
int m;                // length of b after de-duplication

// Reconstruct and print one LCS by walking the backtracking table.
void LCS(int i, int j) {
    if (i == 0 || j == 0)
        return;
    if (ans[i][j] == 1) {          // a[i] == b[j]: this element is in the answer
        LCS(i - 1, j - 1);
        cout << a[i] << " ";
    } else if (ans[i][j] == 2) {   // value came from c[i-1][j]
        LCS(i - 1, j);
    } else {                       // value came from c[i][j-1]
        LCS(i, j - 1);
    }
}

// Fill the DP tables c and ans. O(n*m).
// (Renamed from the original typo 'LCSLegth'; internal helper only.)
void LCSLength() {
    memset(ans, 0, sizeof(ans));
    memset(c, 0, sizeof(c));
    for (int i = 1; i <= n; ++i) {
        for (int j = 1; j <= m; ++j) {
            if (a[i] == b[j]) {
                c[i][j] = c[i - 1][j - 1] + 1;
                ans[i][j] = 1;
            } else if (c[i - 1][j] > c[i][j - 1]) {
                c[i][j] = c[i - 1][j];
                ans[i][j] = 2;     // BUG FIX: was 'yrs[i][j]' — undeclared name
            } else {
                c[i][j] = c[i][j - 1];
                ans[i][j] = 3;     // BUG FIX: was 'yrs[i][j]' — undeclared name
            }
        }
    }
}

// De-duplicate the sorted array b in place using a presence table.
// NOTE(review): assumes all values are non-negative and not huge — they are
// used directly as indices into the presence table. Confirm input range.
void HashTable() {
    int max_ = b[n];   // b is fully sorted, so b[n] is the maximum value
    m = 0;
    // BUG FIX: the original did 'new int(max_)' (a SINGLE int initialized to
    // max_) followed by memset(p, 0, sizeof(p)) (zeroing only sizeof(int*)
    // bytes) and 'delete []p' (mismatched delete). Allocate max_+1
    // value-initialized (zeroed) slots instead.
    int *p = new int[max_ + 1]();
    for (int i = 1; i <= n; ++i)
        p[b[i]] = 1;
    memset(b, 0, sizeof(b));
    // Scanning the table in index order rewrites b sorted and duplicate-free.
    for (int i = 0; i <= max_; ++i)
        if (p[i] == 1)
            b[++m] = i;
    delete[] p;
    p = NULL;
}

// Sort + dedup b, run the LCS DP, then print the recovered subsequence.
void solve() {
    // BUG FIX: was sort(b + 1, b + n), which leaves b[n] out of the sort
    // (the end iterator must point one past the last element).
    sort(b + 1, b + n + 1);
    HashTable();   // remove duplicates (strict increase forbids repeats)
    LCSLength();
    LCS(n, m);
}

// Read n followed by n numbers into a (1-indexed), mirroring them into b.
void inPut() {
    scanf("%d", &n);
    for (int i = 1; i <= n; ++i) {
        scanf("%d", &a[i]);
        b[i] = a[i];
    }
}

// BUG FIX: was 'intmain()' — missing space, so the program had no entry
// point and did not compile.
int main() {
    inPut();
    solve();
    return 0;
}
Method Two