summarylogtreecommitdiffstats
diff options
context:
space:
mode:
author    JustKidding  2023-01-29 15:22:45 -0500
committer JustKidding  2023-01-29 15:22:45 -0500
commit    6c4a0f626e4cd5eff7d62a88ba295631f3015f1e (patch)
tree      61ec2dbf0d09618a62df228cf8aef2988ca28610
parent    077abeadd4f40804e96052fa4708aea3a0af861a (diff)
download  aur-6c4a0f626e4cd5eff7d62a88ba295631f3015f1e.tar.gz
add patch
-rw-r--r--  .SRCINFO                            2
-rw-r--r--  PKGBUILD                           18
-rw-r--r--  mongodb-4.4.10-boost-1.81.patch   317
3 files changed, 331 insertions, 6 deletions
diff --git a/.SRCINFO b/.SRCINFO
index 731234c6eee3..2c96168eaeb3 100644
--- a/.SRCINFO
+++ b/.SRCINFO
@@ -35,6 +35,7 @@ pkgbase = mongodb50
source = mongodb-5.0.2-skip-reqs-check.patch
source = mongodb-5.0.2-boost-1.79.patch
source = mongodb-5.0.5-no-force-lld.patch
+ source = mongodb-4.4.10-boost-1.81.patch
sha256sums = 097152eb18f28aae04e523eb1e1421c391032f3d213ff674657dc610b13c38ae
sha256sums = 3757d548cfb0e697f59b9104f39a344bb3d15f802608085f838cb2495c065795
sha256sums = b7d18726225cd447e353007f896ff7e4cbedb2f641077bce70ab9d292e8f8d39
@@ -46,5 +47,6 @@ pkgbase = mongodb50
sha256sums = 4ff40320e04bf8c3e05cbc662f8ea549a6b8494d1fda64b1de190c88587bfafd
sha256sums = a04aec4f8bd99ad213e31eb45a9e1658695442082e7c4f8c4044f6326eaa1acd
sha256sums = f79f65824f81753d41d2274a6904930db11b06fe08f1442a24c30060cab27e32
+ sha256sums = 7bfeadf2fb7e13bd93c4515faada070410ddd8e276cc947b5b2b2292539051b7
pkgname = mongodb50
diff --git a/PKGBUILD b/PKGBUILD
index cb7ceb2cc13d..d14b447f36f9 100644
--- a/PKGBUILD
+++ b/PKGBUILD
@@ -26,7 +26,8 @@ source=(https://fastdl.mongodb.org/src/mongodb-src-r$pkgver.tar.gz
mongodb-5.0.2-skip-no-exceptions.patch
mongodb-5.0.2-skip-reqs-check.patch
mongodb-5.0.2-boost-1.79.patch
- mongodb-5.0.5-no-force-lld.patch)
+ mongodb-5.0.5-no-force-lld.patch
+ mongodb-4.4.10-boost-1.81.patch)
sha256sums=('097152eb18f28aae04e523eb1e1421c391032f3d213ff674657dc610b13c38ae'
'3757d548cfb0e697f59b9104f39a344bb3d15f802608085f838cb2495c065795'
'b7d18726225cd447e353007f896ff7e4cbedb2f641077bce70ab9d292e8f8d39'
@@ -37,18 +38,23 @@ sha256sums=('097152eb18f28aae04e523eb1e1421c391032f3d213ff674657dc610b13c38ae'
'5b81ebc3ed68b307df76277aca3226feee33a00d8bb396206bdc7a8a1f58f3e4'
'4ff40320e04bf8c3e05cbc662f8ea549a6b8494d1fda64b1de190c88587bfafd'
'a04aec4f8bd99ad213e31eb45a9e1658695442082e7c4f8c4044f6326eaa1acd'
- 'f79f65824f81753d41d2274a6904930db11b06fe08f1442a24c30060cab27e32')
+ 'f79f65824f81753d41d2274a6904930db11b06fe08f1442a24c30060cab27e32'
+ '7bfeadf2fb7e13bd93c4515faada070410ddd8e276cc947b5b2b2292539051b7')
_scons_args=(
- --use-system-pcre # wait for pcre 8.44+ https://jira.mongodb.org/browse/SERVER-40836 and https://jira.mongodb.org/browse/SERVER-42990
+ CC="${CC:-gcc}"
+ CXX="${CXX:-g++}"
+ AR="${AR:-ar}"
+
+ --use-system-pcre
--use-system-snappy
- --use-system-yaml # https://jira.mongodb.org/browse/SERVER-43980
+ --use-system-yaml
--use-system-zlib
--use-system-stemmer
--use-sasl-client
--ssl
--disable-warnings-as-errors
- --use-system-boost # Doesn't compile
+ --use-system-boost
--use-system-zstd
--runtime-hardening=off
)
@@ -125,7 +131,7 @@ build() {
fi
export SCONSFLAGS="$MAKEFLAGS"
- ./buildscripts/scons.py install-devcore "${_scons_args[@]}"
+ ./buildscripts/scons.py "${_scons_args[@]}" install-devcore
}
package() {
diff --git a/mongodb-4.4.10-boost-1.81.patch b/mongodb-4.4.10-boost-1.81.patch
new file mode 100644
index 000000000000..331f0c5b922a
--- /dev/null
+++ b/mongodb-4.4.10-boost-1.81.patch
@@ -0,0 +1,317 @@
+https://bugs.gentoo.org/887037
+
+Workaround https://github.com/boostorg/container/commit/99091420ae553b27345e04279fd19fe24fb684c1
+in Boost 1.81.
+
+Upstream s2 (as in real upstream, not MongoDB) has deviated substantially
+from the version vendored.
+--- a/src/third_party/s2/base/stl_decl_msvc.h
++++ b/src/third_party/s2/base/stl_decl_msvc.h
+@@ -118,8 +118,8 @@ namespace msvchash {
+ class hash_multimap;
+ } // end namespace msvchash
+
+-using msvchash::hash_set;
+-using msvchash::hash_map;
++#define my_hash_set msvchash::hash_set
++#define my_hash_map msvchash::hash_map
+ using msvchash::hash;
+ using msvchash::hash_multimap;
+ using msvchash::hash_multiset;
+--- a/src/third_party/s2/base/stl_decl_osx.h
++++ b/src/third_party/s2/base/stl_decl_osx.h
+@@ -68,8 +68,8 @@ using std::string;
+
+ using namespace std;
+ using __gnu_cxx::hash;
+-using __gnu_cxx::hash_set;
+-using __gnu_cxx::hash_map;
++#define my_hash_set __gnu_cxx::hash_set
++#define my_hash_map __gnu_cxx::hash_map
+ using __gnu_cxx::select1st;
+
+ /* On Linux (and gdrive on OSX), this comes from places like
+--- a/src/third_party/s2/hash.h
++++ b/src/third_party/s2/hash.h
+@@ -2,10 +2,10 @@
+ #define THIRD_PARTY_S2_HASH_H_
+
+ #include <unordered_map>
+-#define hash_map std::unordered_map
++#define my_hash_map std::unordered_map
+
+ #include <unordered_set>
+-#define hash_set std::unordered_set
++#define my_hash_set std::unordered_set
+
+ #define HASH_NAMESPACE_START namespace std {
+ #define HASH_NAMESPACE_END }
+--- a/src/third_party/s2/s2_test.cc
++++ b/src/third_party/s2/s2_test.cc
+@@ -10,7 +10,7 @@ using std::reverse;
+
+ #include <hash_set>
+ #include <hash_map>
+-using __gnu_cxx::hash_set;
++#define my_hash_set __gnu_cxx::hash_set
+
+ #include "s2.h"
+ #include "base/logging.h"
+@@ -709,8 +709,8 @@ TEST(S2, Frames) {
+ #if 0
+ TEST(S2, S2PointHashSpreads) {
+ int kTestPoints = 1 << 16;
+- hash_set<size_t> set;
+- hash_set<S2Point> points;
++ my_hash_set<size_t> set;
++ my_hash_set<S2Point> points;
+ hash<S2Point> hasher;
+ S2Point base = S2Point(1, 1, 1);
+ for (int i = 0; i < kTestPoints; ++i) {
+@@ -733,7 +733,7 @@ TEST(S2, S2PointHashCollapsesZero) {
+ double minus_zero = -zero;
+ EXPECT_NE(*reinterpret_cast<uint64 const*>(&zero),
+ *reinterpret_cast<uint64 const*>(&minus_zero));
+- hash_map<S2Point, int> map;
++ my_hash_map<S2Point, int> map;
+ S2Point zero_pt(zero, zero, zero);
+ S2Point minus_zero_pt(minus_zero, minus_zero, minus_zero);
+
+--- a/src/third_party/s2/s2cellid_test.cc
++++ b/src/third_party/s2/s2cellid_test.cc
+@@ -10,7 +10,7 @@ using std::reverse;
+
+ #include <cstdio>
+ #include <hash_map>
+-using __gnu_cxx::hash_map;
++#define my_hash_map __gnu_cxx::hash_map
+
+ #include <sstream>
+ #include <vector>
+@@ -170,7 +170,7 @@ TEST(S2CellId, Tokens) {
+ static const int kMaxExpandLevel = 3;
+
+ static void ExpandCell(S2CellId const& parent, vector<S2CellId>* cells,
+- hash_map<S2CellId, S2CellId>* parent_map) {
++ my_hash_map<S2CellId, S2CellId>* parent_map) {
+ cells->push_back(parent);
+ if (parent.level() == kMaxExpandLevel) return;
+ int i, j, orientation;
+@@ -194,7 +194,7 @@ static void ExpandCell(S2CellId const& parent, vector<S2CellId>* cells,
+
+ TEST(S2CellId, Containment) {
+ // Test contains() and intersects().
+- hash_map<S2CellId, S2CellId> parent_map;
++ my_hash_map<S2CellId, S2CellId> parent_map;
+ vector<S2CellId> cells;
+ for (int face = 0; face < 6; ++face) {
+ ExpandCell(S2CellId::FromFacePosLevel(face, 0, 0), &cells, &parent_map);
+--- a/src/third_party/s2/s2loop.cc
++++ b/src/third_party/s2/s2loop.cc
+@@ -120,7 +120,7 @@ bool S2Loop::IsValid(string* err) const {
+ }
+ }
+ // Loops are not allowed to have any duplicate vertices.
+- hash_map<S2Point, int> vmap;
++ my_hash_map<S2Point, int> vmap;
+ for (int i = 0; i < num_vertices(); ++i) {
+ if (!vmap.insert(make_pair(vertex(i), i)).second) {
+ VLOG(2) << "Duplicate vertices: " << vmap[vertex(i)] << " and " << i;
+--- a/src/third_party/s2/s2polygon.cc
++++ b/src/third_party/s2/s2polygon.cc
+@@ -117,7 +117,7 @@ HASH_NAMESPACE_END
+ bool S2Polygon::IsValid(const vector<S2Loop*>& loops, string* err) {
+ // If a loop contains an edge AB, then no other loop may contain AB or BA.
+ if (loops.size() > 1) {
+- hash_map<S2PointPair, pair<int, int> > edges;
++ my_hash_map<S2PointPair, pair<int, int> > edges;
+ for (size_t i = 0; i < loops.size(); ++i) {
+ S2Loop* lp = loops[i];
+ for (int j = 0; j < lp->num_vertices(); ++j) {
+--- a/src/third_party/s2/s2polygonbuilder.cc
++++ b/src/third_party/s2/s2polygonbuilder.cc
+@@ -175,7 +175,7 @@ S2Loop* S2PolygonBuilder::AssembleLoop(S2Point const& v0, S2Point const& v1,
+ // This ensures that only CCW loops are constructed when possible.
+
+ vector<S2Point> path; // The path so far.
+- hash_map<S2Point, int> index; // Maps a vertex to its index in "path".
++ my_hash_map<S2Point, int> index; // Maps a vertex to its index in "path".
+ path.push_back(v0);
+ path.push_back(v1);
+ index[v1] = 1;
+@@ -361,7 +361,7 @@ void S2PolygonBuilder::BuildMergeMap(PointIndex* index, MergeMap* merge_map) {
+
+ // First, we build the set of all the distinct vertices in the input.
+ // We need to include the source and destination of every edge.
+- hash_set<S2Point> vertices;
++ my_hash_set<S2Point> vertices;
+ for (EdgeSet::const_iterator i = edges_->begin(); i != edges_->end(); ++i) {
+ vertices.insert(i->first);
+ VertexSet const& vset = i->second;
+@@ -370,7 +370,7 @@ void S2PolygonBuilder::BuildMergeMap(PointIndex* index, MergeMap* merge_map) {
+ }
+
+ // Build a spatial index containing all the distinct vertices.
+- for (hash_set<S2Point>::const_iterator i = vertices.begin();
++ for (my_hash_set<S2Point>::const_iterator i = vertices.begin();
+ i != vertices.end(); ++i) {
+ index->Insert(*i);
+ }
+@@ -378,7 +378,7 @@ void S2PolygonBuilder::BuildMergeMap(PointIndex* index, MergeMap* merge_map) {
+ // Next, we loop through all the vertices and attempt to grow a maximial
+ // mergeable group starting from each vertex.
+ vector<S2Point> frontier, mergeable;
+- for (hash_set<S2Point>::const_iterator vstart = vertices.begin();
++ for (my_hash_set<S2Point>::const_iterator vstart = vertices.begin();
+ vstart != vertices.end(); ++vstart) {
+ // Skip any vertices that have already been merged with another vertex.
+ if (merge_map->find(*vstart) != merge_map->end()) continue;
+--- a/src/third_party/s2/s2polygonbuilder.h
++++ b/src/third_party/s2/s2polygonbuilder.h
+@@ -262,7 +262,7 @@ class S2PolygonBuilder {
+ // current position to a new position, and also returns a spatial index
+ // containing all of the vertices that do not need to be moved.
+ class PointIndex;
+- typedef hash_map<S2Point, S2Point> MergeMap;
++ typedef my_hash_map<S2Point, S2Point> MergeMap;
+ void BuildMergeMap(PointIndex* index, MergeMap* merge_map);
+
+ // Moves a set of vertices from old to new positions.
+@@ -282,7 +282,7 @@ class S2PolygonBuilder {
+ // once. We could have also used a multiset<pair<S2Point, S2Point> >,
+ // but this representation is a bit more convenient.
+ typedef multiset<S2Point> VertexSet;
+- typedef hash_map<S2Point, VertexSet> EdgeSet;
++ typedef my_hash_map<S2Point, VertexSet> EdgeSet;
+ scoped_ptr<EdgeSet> edges_;
+
+ // Unique collection of the starting (first) vertex of all edges,
+--- a/src/third_party/s2/s2regioncoverer.cc
++++ b/src/third_party/s2/s2regioncoverer.cc
+@@ -321,7 +321,7 @@ void S2RegionCoverer::GetInteriorCellUnion(S2Region const& region,
+
+ void S2RegionCoverer::FloodFill(
+ S2Region const& region, S2CellId const& start, vector<S2CellId>* output) {
+- hash_set<S2CellId> all;
++ my_hash_set<S2CellId> all;
+ vector<S2CellId> frontier;
+ output->clear();
+ all.insert(start);
+--- a/src/third_party/s2/s2regioncoverer_test.cc
++++ b/src/third_party/s2/s2regioncoverer_test.cc
+@@ -11,7 +11,7 @@ using std::swap;
+ using std::reverse;
+
+ #include <hash_map>
+-using __gnu_cxx::hash_map;
++#define my_hash_map __gnu_cxx::hash_map
+
+ #include <queue>
+ using std::priority_queue;
+@@ -65,7 +65,7 @@ static void CheckCovering(S2RegionCoverer const& coverer,
+ vector<S2CellId> const& covering,
+ bool interior) {
+ // Keep track of how many cells have the same coverer.min_level() ancestor.
+- hash_map<S2CellId, int> min_level_cells;
++ my_hash_map<S2CellId, int> min_level_cells;
+ for (int i = 0; i < covering.size(); ++i) {
+ int level = covering[i].level();
+ EXPECT_GE(level, coverer.min_level());
+@@ -76,7 +76,7 @@ static void CheckCovering(S2RegionCoverer const& coverer,
+ if (covering.size() > coverer.max_cells()) {
+ // If the covering has more than the requested number of cells, then check
+ // that the cell count cannot be reduced by using the parent of some cell.
+- for (hash_map<S2CellId, int>::const_iterator i = min_level_cells.begin();
++ for (my_hash_map<S2CellId, int>::const_iterator i = min_level_cells.begin();
+ i != min_level_cells.end(); ++i) {
+ EXPECT_EQ(i->second, 1);
+ }
+--- a/src/third_party/s2/strings/split.cc
++++ b/src/third_party/s2/strings/split.cc
+@@ -156,7 +156,7 @@ struct simple_insert_iterator {
+ // SplitStringToIterator{Using|AllowEmpty}().
+ template <typename T>
+ struct simple_hash_map_iterator {
+- typedef hash_map<T, T> hashmap;
++ typedef my_hash_map<T, T> hashmap;
+ hashmap* t;
+ bool even;
+ typename hashmap::iterator curr;
+@@ -246,8 +246,8 @@ void SplitStringAllowEmpty(const string& full, const char* delim,
+ }
+
+ void SplitStringToHashsetAllowEmpty(const string& full, const char* delim,
+- hash_set<string>* result) {
+- simple_insert_iterator<hash_set<string> > it(result);
++ my_hash_set<string>* result) {
++ simple_insert_iterator<my_hash_set<string> > it(result);
+ SplitStringToIteratorAllowEmpty(full, delim, 0, it);
+ }
+
+@@ -258,7 +258,7 @@ void SplitStringToSetAllowEmpty(const string& full, const char* delim,
+ }
+
+ void SplitStringToHashmapAllowEmpty(const string& full, const char* delim,
+- hash_map<string, string>* result) {
++ my_hash_map<string, string>* result) {
+ simple_hash_map_iterator<string> it(result);
+ SplitStringToIteratorAllowEmpty(full, delim, 0, it);
+ }
+@@ -352,8 +352,8 @@ void SplitStringUsing(const string& full,
+ }
+
+ void SplitStringToHashsetUsing(const string& full, const char* delim,
+- hash_set<string>* result) {
+- simple_insert_iterator<hash_set<string> > it(result);
++ my_hash_set<string>* result) {
++ simple_insert_iterator<my_hash_set<string> > it(result);
+ SplitStringToIteratorUsing(full, delim, it);
+ }
+
+@@ -364,7 +364,7 @@ void SplitStringToSetUsing(const string& full, const char* delim,
+ }
+
+ void SplitStringToHashmapUsing(const string& full, const char* delim,
+- hash_map<string, string>* result) {
++ my_hash_map<string, string>* result) {
+ simple_hash_map_iterator<string> it(result);
+ SplitStringToIteratorUsing(full, delim, it);
+ }
+--- a/src/third_party/s2/strings/split.h
++++ b/src/third_party/s2/strings/split.h
+@@ -41,7 +41,7 @@ using namespace std;
+ void SplitStringAllowEmpty(const string& full, const char* delim,
+ vector<string>* res);
+ void SplitStringToHashsetAllowEmpty(const string& full, const char* delim,
+- hash_set<string>* res);
++ my_hash_set<string>* res);
+ void SplitStringToSetAllowEmpty(const string& full, const char* delim,
+ set<string>* res);
+ // The even-positioned (0-based) components become the keys for the
+@@ -50,7 +50,7 @@ void SplitStringToSetAllowEmpty(const string& full, const char* delim,
+ // if the key was already present in the hash table, or will be the
+ // empty string if the key is a newly inserted key.
+ void SplitStringToHashmapAllowEmpty(const string& full, const char* delim,
+- hash_map<string, string>* result);
++ my_hash_map<string, string>* result);
+
+ // ----------------------------------------------------------------------
+ // SplitStringUsing()
+@@ -66,7 +66,7 @@ void SplitStringToHashmapAllowEmpty(const string& full, const char* delim,
+ void SplitStringUsing(const string& full, const char* delim,
+ vector<string>* res);
+ void SplitStringToHashsetUsing(const string& full, const char* delim,
+- hash_set<string>* res);
++ my_hash_set<string>* res);
+ void SplitStringToSetUsing(const string& full, const char* delim,
+ set<string>* res);
+ // The even-positioned (0-based) components become the keys for the
+@@ -75,7 +75,7 @@ void SplitStringToSetUsing(const string& full, const char* delim,
+ // if the key was already present in the hash table, or will be the
+ // empty string if the key is a newly inserted key.
+ void SplitStringToHashmapUsing(const string& full, const char* delim,
+- hash_map<string, string>* result);
++ my_hash_map<string, string>* result);
+
+ // ----------------------------------------------------------------------
+ // SplitOneIntToken()