author    JustKidding    2023-01-29 19:46:44 -0500
committer JustKidding    2023-01-29 19:46:44 -0500
commit    f54018d32c57648b6a9e83ab6f5e167f843553be (patch)
tree      9a3e286d9dc562ee5c425fa758c6240ca0a1e378
parent    34df14cbcc4d0f2692babd673cf22379257326ba (diff)
download  aur-f54018d32c57648b6a9e83ab6f5e167f843553be.tar.gz
upgpkg: mongodb44 4.4.18-2
-rw-r--r--   .SRCINFO                                                         |   6
-rw-r--r--   PKGBUILD                                                         |  65
-rw-r--r--   mongodb-4.4.10-boost-1.81.patch                                  | 317
-rw-r--r--   mongodb-4.4.15-adjust-cache-alignment-assumptions.patch.arm64    |   0
             (renamed from mongodb-4.4.15-adjust-cache-alignment-assumptions.patch)
4 files changed, 371 insertions(+), 17 deletions(-)
diff --git a/.SRCINFO b/.SRCINFO
index 19c459a73b09..832eb8b314c3 100644
--- a/.SRCINFO
+++ b/.SRCINFO
@@ -1,7 +1,7 @@
pkgbase = mongodb44
pkgdesc = A high-performance, open source, schema-free document-oriented database (last version to support non-avx CPUs)
pkgver = 4.4.18
- pkgrel = 1
+ pkgrel = 2
url = https://www.mongodb.com/
arch = x86_64
arch = aarch64
@@ -34,7 +34,8 @@ pkgbase = mongodb44
source = mongodb-4.4.1-gcc11.patch
source = mongodb-4.4.10-boost-1.79.patch
source = mongodb-4.4.10-no-force-lld.patch
- source = mongodb-4.4.15-adjust-cache-alignment-assumptions.patch
+ source = mongodb-4.4.15-adjust-cache-alignment-assumptions.patch.arm64
+ source = mongodb-4.4.10-boost-1.81.patch
sha256sums = 03723468a175ea77c67ede4b941f1c27e45d0b086c697a8201d12581a09d1713
sha256sums = 3757d548cfb0e697f59b9104f39a344bb3d15f802608085f838cb2495c065795
sha256sums = b7d18726225cd447e353007f896ff7e4cbedb2f641077bce70ab9d292e8f8d39
@@ -45,5 +46,6 @@ pkgbase = mongodb44
sha256sums = 4202e039944fde80daa1bd3a5f332c522d8db96b4c3cf7c764355c5fc9089137
sha256sums = 76e61d1d4f5b4e7c8cd760b1fc0dc86978a8e180d184cdfc7f61fba7d5543a95
sha256sums = e748b669bca526a08c06e5d8ec2bd371b938e57f83a2339d62e38a4527810e47
+ sha256sums = 7bfeadf2fb7e13bd93c4515faada070410ddd8e276cc947b5b2b2292539051b7
pkgname = mongodb44
diff --git a/PKGBUILD b/PKGBUILD
index d5a6060c94e4..5c2291c1d4ce 100644
--- a/PKGBUILD
+++ b/PKGBUILD
@@ -4,7 +4,7 @@ pkgname=mongodb44
_pkgname=mongodb
# #.<odd number>.# releases are unstable development/testing
pkgver=4.4.18
-pkgrel=1
+pkgrel=2
pkgdesc="A high-performance, open source, schema-free document-oriented database (last version to support non-avx CPUs)"
arch=("x86_64" "aarch64")
url="https://www.mongodb.com/"
@@ -25,7 +25,8 @@ source=(https://fastdl.mongodb.org/src/mongodb-src-r$pkgver.tar.gz
mongodb-4.4.1-gcc11.patch
mongodb-4.4.10-boost-1.79.patch
mongodb-4.4.10-no-force-lld.patch
- mongodb-4.4.15-adjust-cache-alignment-assumptions.patch)
+ mongodb-4.4.15-adjust-cache-alignment-assumptions.patch.arm64
+ mongodb-4.4.10-boost-1.81.patch)
sha256sums=('03723468a175ea77c67ede4b941f1c27e45d0b086c697a8201d12581a09d1713'
'3757d548cfb0e697f59b9104f39a344bb3d15f802608085f838cb2495c065795'
'b7d18726225cd447e353007f896ff7e4cbedb2f641077bce70ab9d292e8f8d39'
@@ -35,22 +36,53 @@ sha256sums=('03723468a175ea77c67ede4b941f1c27e45d0b086c697a8201d12581a09d1713'
'f7e6d87b68f7703cdbd45e255962ed5a4f6d583aa76d6fcf4fdc7005211fbf06'
'4202e039944fde80daa1bd3a5f332c522d8db96b4c3cf7c764355c5fc9089137'
'76e61d1d4f5b4e7c8cd760b1fc0dc86978a8e180d184cdfc7f61fba7d5543a95'
- 'e748b669bca526a08c06e5d8ec2bd371b938e57f83a2339d62e38a4527810e47')
-
+ 'e748b669bca526a08c06e5d8ec2bd371b938e57f83a2339d62e38a4527810e47'
+ '7bfeadf2fb7e13bd93c4515faada070410ddd8e276cc947b5b2b2292539051b7')
+
_scons_args=(
- --use-system-pcre # wait for pcre 8.44+ https://jira.mongodb.org/browse/SERVER-40836 and https://jira.mongodb.org/browse/SERVER-42990
+ CC="${CC:-gcc}"
+ CXX="${CXX:-g++}"
+ AR="${AR:-ar}"
+
+ --use-system-pcre
--use-system-snappy
- --use-system-yaml # https://jira.mongodb.org/browse/SERVER-43980
+ --use-system-yaml
--use-system-zlib
--use-system-stemmer
--use-sasl-client
--ssl
--disable-warnings-as-errors
- --use-system-boost # Doesn't compile
+ --use-system-boost
--use-system-zstd
--runtime-hardening=off
)
+all-flag-vars() {
+ echo {C,CXX}FLAGS
+}
+
+_filter-var() {
+ local f x var=$1 new=()
+ shift
+
+ for f in ${!var} ; do
+ for x in "$@" ; do
+ # Note this should work with globs like -O*
+ [[ ${f} == ${x} ]] && continue 2
+ done
+ new+=( "${f}" )
+ done
+ export ${var}="${new[*]}"
+}
+
+filter-flags() {
+ local v
+ for v in $(all-flag-vars) ; do
+ _filter-var ${v} "$@"
+ done
+ return 0
+}
+
prepare() {
cd "${srcdir}/${_pkgname}-src-r${pkgver}"
@@ -75,7 +107,7 @@ prepare() {
if [[ $CARCH == "aarch64" ]]; then
_scons_args+=(--use-hardware-crc32=off)
- patch -Np1 -i ../mongodb-4.4.15-adjust-cache-alignment-assumptions.patch
+ patch -Np1 -i ../mongodb-4.4.15-adjust-cache-alignment-assumptions.patch.arm64
fi
if check_option debug y; then
@@ -87,19 +119,22 @@ prepare() {
fi
# apply gentoo patches
- patch -Np1 -i ../mongodb-4.4.1-fix-scons.patch
- patch -Np1 -i ../mongodb-4.4.8-no-compass.patch
- patch -Np1 -i ../mongodb-4.4.1-boost.patch
- patch -Np1 -i ../mongodb-4.4.1-gcc11.patch
- patch -Np1 -i ../mongodb-4.4.10-boost-1.79.patch
- patch -Np1 -i ../mongodb-4.4.10-no-force-lld.patch
+ for file in "$srcdir"/*.patch; do
+ echo "Applying patch $file..."
+ patch -Np1 -i "$file"
+ done
}
build() {
cd "${srcdir}/${_pkgname}-src-r${pkgver}"
+ if check_option debug n; then
+ filter-flags '-m*'
+ filter-flags '-O?'
+ fi
+
export SCONSFLAGS="$MAKEFLAGS"
- ./buildscripts/scons.py install-devcore "${_scons_args[@]}"
+ ./buildscripts/scons.py "${_scons_args[@]}" install-devcore
}
package() {
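
Aside: the helpers added above look like a trimmed-down copy of Gentoo's flag-o-matic: filter-flags walks CFLAGS and CXXFLAGS and drops every word matching one of the given glob patterns. A minimal sketch of the behaviour, with made-up flag values rather than anything taken from this PKGBUILD:

    CFLAGS='-march=x86-64 -mtune=generic -O2 -pipe'   # hypothetical input
    CXXFLAGS="$CFLAGS"

    filter-flags '-m*'   # drops -march=x86-64 and -mtune=generic
    filter-flags '-O?'   # drops -O2, leaving the optimization level to scons

    echo "$CFLAGS"       # prints: -pipe

The patterns are quoted at the call site so the shell does not expand them against filenames; the unquoted ${x} inside [[ ${f} == ${x} ]] is what makes them behave as globs.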
diff --git a/mongodb-4.4.10-boost-1.81.patch b/mongodb-4.4.10-boost-1.81.patch
new file mode 100644
index 000000000000..331f0c5b922a
--- /dev/null
+++ b/mongodb-4.4.10-boost-1.81.patch
@@ -0,0 +1,317 @@
+https://bugs.gentoo.org/887037
+
+Workaround https://github.com/boostorg/container/commit/99091420ae553b27345e04279fd19fe24fb684c1
+in Boost 1.81.
+
+Upstream s2 (as in real upstream, not MongoDB) has deviated substantially
+from the version vendored.
+--- a/src/third_party/s2/base/stl_decl_msvc.h
++++ b/src/third_party/s2/base/stl_decl_msvc.h
+@@ -118,8 +118,8 @@ namespace msvchash {
+ class hash_multimap;
+ } // end namespace msvchash
+
+-using msvchash::hash_set;
+-using msvchash::hash_map;
++#define my_hash_set msvchash::hash_set
++#define my_hash_map msvchash::hash_map
+ using msvchash::hash;
+ using msvchash::hash_multimap;
+ using msvchash::hash_multiset;
+--- a/src/third_party/s2/base/stl_decl_osx.h
++++ b/src/third_party/s2/base/stl_decl_osx.h
+@@ -68,8 +68,8 @@ using std::string;
+
+ using namespace std;
+ using __gnu_cxx::hash;
+-using __gnu_cxx::hash_set;
+-using __gnu_cxx::hash_map;
++#define my_hash_set __gnu_cxx::hash_set
++#define my_hash_map __gnu_cxx::hash_map
+ using __gnu_cxx::select1st;
+
+ /* On Linux (and gdrive on OSX), this comes from places like
+--- a/src/third_party/s2/hash.h
++++ b/src/third_party/s2/hash.h
+@@ -2,10 +2,10 @@
+ #define THIRD_PARTY_S2_HASH_H_
+
+ #include <unordered_map>
+-#define hash_map std::unordered_map
++#define my_hash_map std::unordered_map
+
+ #include <unordered_set>
+-#define hash_set std::unordered_set
++#define my_hash_set std::unordered_set
+
+ #define HASH_NAMESPACE_START namespace std {
+ #define HASH_NAMESPACE_END }
+--- a/src/third_party/s2/s2_test.cc
++++ b/src/third_party/s2/s2_test.cc
+@@ -10,7 +10,7 @@ using std::reverse;
+
+ #include <hash_set>
+ #include <hash_map>
+-using __gnu_cxx::hash_set;
++#define my_hash_set __gnu_cxx::hash_set
+
+ #include "s2.h"
+ #include "base/logging.h"
+@@ -709,8 +709,8 @@ TEST(S2, Frames) {
+ #if 0
+ TEST(S2, S2PointHashSpreads) {
+ int kTestPoints = 1 << 16;
+- hash_set<size_t> set;
+- hash_set<S2Point> points;
++ my_hash_set<size_t> set;
++ my_hash_set<S2Point> points;
+ hash<S2Point> hasher;
+ S2Point base = S2Point(1, 1, 1);
+ for (int i = 0; i < kTestPoints; ++i) {
+@@ -733,7 +733,7 @@ TEST(S2, S2PointHashCollapsesZero) {
+ double minus_zero = -zero;
+ EXPECT_NE(*reinterpret_cast<uint64 const*>(&zero),
+ *reinterpret_cast<uint64 const*>(&minus_zero));
+- hash_map<S2Point, int> map;
++ my_hash_map<S2Point, int> map;
+ S2Point zero_pt(zero, zero, zero);
+ S2Point minus_zero_pt(minus_zero, minus_zero, minus_zero);
+
+--- a/src/third_party/s2/s2cellid_test.cc
++++ b/src/third_party/s2/s2cellid_test.cc
+@@ -10,7 +10,7 @@ using std::reverse;
+
+ #include <cstdio>
+ #include <hash_map>
+-using __gnu_cxx::hash_map;
++#define my_hash_map __gnu_cxx::hash_map
+
+ #include <sstream>
+ #include <vector>
+@@ -170,7 +170,7 @@ TEST(S2CellId, Tokens) {
+ static const int kMaxExpandLevel = 3;
+
+ static void ExpandCell(S2CellId const& parent, vector<S2CellId>* cells,
+- hash_map<S2CellId, S2CellId>* parent_map) {
++ my_hash_map<S2CellId, S2CellId>* parent_map) {
+ cells->push_back(parent);
+ if (parent.level() == kMaxExpandLevel) return;
+ int i, j, orientation;
+@@ -194,7 +194,7 @@ static void ExpandCell(S2CellId const& parent, vector<S2CellId>* cells,
+
+ TEST(S2CellId, Containment) {
+ // Test contains() and intersects().
+- hash_map<S2CellId, S2CellId> parent_map;
++ my_hash_map<S2CellId, S2CellId> parent_map;
+ vector<S2CellId> cells;
+ for (int face = 0; face < 6; ++face) {
+ ExpandCell(S2CellId::FromFacePosLevel(face, 0, 0), &cells, &parent_map);
+--- a/src/third_party/s2/s2loop.cc
++++ b/src/third_party/s2/s2loop.cc
+@@ -120,7 +120,7 @@ bool S2Loop::IsValid(string* err) const {
+ }
+ }
+ // Loops are not allowed to have any duplicate vertices.
+- hash_map<S2Point, int> vmap;
++ my_hash_map<S2Point, int> vmap;
+ for (int i = 0; i < num_vertices(); ++i) {
+ if (!vmap.insert(make_pair(vertex(i), i)).second) {
+ VLOG(2) << "Duplicate vertices: " << vmap[vertex(i)] << " and " << i;
+--- a/src/third_party/s2/s2polygon.cc
++++ b/src/third_party/s2/s2polygon.cc
+@@ -117,7 +117,7 @@ HASH_NAMESPACE_END
+ bool S2Polygon::IsValid(const vector<S2Loop*>& loops, string* err) {
+ // If a loop contains an edge AB, then no other loop may contain AB or BA.
+ if (loops.size() > 1) {
+- hash_map<S2PointPair, pair<int, int> > edges;
++ my_hash_map<S2PointPair, pair<int, int> > edges;
+ for (size_t i = 0; i < loops.size(); ++i) {
+ S2Loop* lp = loops[i];
+ for (int j = 0; j < lp->num_vertices(); ++j) {
+--- a/src/third_party/s2/s2polygonbuilder.cc
++++ b/src/third_party/s2/s2polygonbuilder.cc
+@@ -175,7 +175,7 @@ S2Loop* S2PolygonBuilder::AssembleLoop(S2Point const& v0, S2Point const& v1,
+ // This ensures that only CCW loops are constructed when possible.
+
+ vector<S2Point> path; // The path so far.
+- hash_map<S2Point, int> index; // Maps a vertex to its index in "path".
++ my_hash_map<S2Point, int> index; // Maps a vertex to its index in "path".
+ path.push_back(v0);
+ path.push_back(v1);
+ index[v1] = 1;
+@@ -361,7 +361,7 @@ void S2PolygonBuilder::BuildMergeMap(PointIndex* index, MergeMap* merge_map) {
+
+ // First, we build the set of all the distinct vertices in the input.
+ // We need to include the source and destination of every edge.
+- hash_set<S2Point> vertices;
++ my_hash_set<S2Point> vertices;
+ for (EdgeSet::const_iterator i = edges_->begin(); i != edges_->end(); ++i) {
+ vertices.insert(i->first);
+ VertexSet const& vset = i->second;
+@@ -370,7 +370,7 @@ void S2PolygonBuilder::BuildMergeMap(PointIndex* index, MergeMap* merge_map) {
+ }
+
+ // Build a spatial index containing all the distinct vertices.
+- for (hash_set<S2Point>::const_iterator i = vertices.begin();
++ for (my_hash_set<S2Point>::const_iterator i = vertices.begin();
+ i != vertices.end(); ++i) {
+ index->Insert(*i);
+ }
+@@ -378,7 +378,7 @@ void S2PolygonBuilder::BuildMergeMap(PointIndex* index, MergeMap* merge_map) {
+ // Next, we loop through all the vertices and attempt to grow a maximial
+ // mergeable group starting from each vertex.
+ vector<S2Point> frontier, mergeable;
+- for (hash_set<S2Point>::const_iterator vstart = vertices.begin();
++ for (my_hash_set<S2Point>::const_iterator vstart = vertices.begin();
+ vstart != vertices.end(); ++vstart) {
+ // Skip any vertices that have already been merged with another vertex.
+ if (merge_map->find(*vstart) != merge_map->end()) continue;
+--- a/src/third_party/s2/s2polygonbuilder.h
++++ b/src/third_party/s2/s2polygonbuilder.h
+@@ -262,7 +262,7 @@ class S2PolygonBuilder {
+ // current position to a new position, and also returns a spatial index
+ // containing all of the vertices that do not need to be moved.
+ class PointIndex;
+- typedef hash_map<S2Point, S2Point> MergeMap;
++ typedef my_hash_map<S2Point, S2Point> MergeMap;
+ void BuildMergeMap(PointIndex* index, MergeMap* merge_map);
+
+ // Moves a set of vertices from old to new positions.
+@@ -282,7 +282,7 @@ class S2PolygonBuilder {
+ // once. We could have also used a multiset<pair<S2Point, S2Point> >,
+ // but this representation is a bit more convenient.
+ typedef multiset<S2Point> VertexSet;
+- typedef hash_map<S2Point, VertexSet> EdgeSet;
++ typedef my_hash_map<S2Point, VertexSet> EdgeSet;
+ scoped_ptr<EdgeSet> edges_;
+
+ // Unique collection of the starting (first) vertex of all edges,
+--- a/src/third_party/s2/s2regioncoverer.cc
++++ b/src/third_party/s2/s2regioncoverer.cc
+@@ -321,7 +321,7 @@ void S2RegionCoverer::GetInteriorCellUnion(S2Region const& region,
+
+ void S2RegionCoverer::FloodFill(
+ S2Region const& region, S2CellId const& start, vector<S2CellId>* output) {
+- hash_set<S2CellId> all;
++ my_hash_set<S2CellId> all;
+ vector<S2CellId> frontier;
+ output->clear();
+ all.insert(start);
+--- a/src/third_party/s2/s2regioncoverer_test.cc
++++ b/src/third_party/s2/s2regioncoverer_test.cc
+@@ -11,7 +11,7 @@ using std::swap;
+ using std::reverse;
+
+ #include <hash_map>
+-using __gnu_cxx::hash_map;
++#define my_hash_map __gnu_cxx::hash_map
+
+ #include <queue>
+ using std::priority_queue;
+@@ -65,7 +65,7 @@ static void CheckCovering(S2RegionCoverer const& coverer,
+ vector<S2CellId> const& covering,
+ bool interior) {
+ // Keep track of how many cells have the same coverer.min_level() ancestor.
+- hash_map<S2CellId, int> min_level_cells;
++ my_hash_map<S2CellId, int> min_level_cells;
+ for (int i = 0; i < covering.size(); ++i) {
+ int level = covering[i].level();
+ EXPECT_GE(level, coverer.min_level());
+@@ -76,7 +76,7 @@ static void CheckCovering(S2RegionCoverer const& coverer,
+ if (covering.size() > coverer.max_cells()) {
+ // If the covering has more than the requested number of cells, then check
+ // that the cell count cannot be reduced by using the parent of some cell.
+- for (hash_map<S2CellId, int>::const_iterator i = min_level_cells.begin();
++ for (my_hash_map<S2CellId, int>::const_iterator i = min_level_cells.begin();
+ i != min_level_cells.end(); ++i) {
+ EXPECT_EQ(i->second, 1);
+ }
+--- a/src/third_party/s2/strings/split.cc
++++ b/src/third_party/s2/strings/split.cc
+@@ -156,7 +156,7 @@ struct simple_insert_iterator {
+ // SplitStringToIterator{Using|AllowEmpty}().
+ template <typename T>
+ struct simple_hash_map_iterator {
+- typedef hash_map<T, T> hashmap;
++ typedef my_hash_map<T, T> hashmap;
+ hashmap* t;
+ bool even;
+ typename hashmap::iterator curr;
+@@ -246,8 +246,8 @@ void SplitStringAllowEmpty(const string& full, const char* delim,
+ }
+
+ void SplitStringToHashsetAllowEmpty(const string& full, const char* delim,
+- hash_set<string>* result) {
+- simple_insert_iterator<hash_set<string> > it(result);
++ my_hash_set<string>* result) {
++ simple_insert_iterator<my_hash_set<string> > it(result);
+ SplitStringToIteratorAllowEmpty(full, delim, 0, it);
+ }
+
+@@ -258,7 +258,7 @@ void SplitStringToSetAllowEmpty(const string& full, const char* delim,
+ }
+
+ void SplitStringToHashmapAllowEmpty(const string& full, const char* delim,
+- hash_map<string, string>* result) {
++ my_hash_map<string, string>* result) {
+ simple_hash_map_iterator<string> it(result);
+ SplitStringToIteratorAllowEmpty(full, delim, 0, it);
+ }
+@@ -352,8 +352,8 @@ void SplitStringUsing(const string& full,
+ }
+
+ void SplitStringToHashsetUsing(const string& full, const char* delim,
+- hash_set<string>* result) {
+- simple_insert_iterator<hash_set<string> > it(result);
++ my_hash_set<string>* result) {
++ simple_insert_iterator<my_hash_set<string> > it(result);
+ SplitStringToIteratorUsing(full, delim, it);
+ }
+
+@@ -364,7 +364,7 @@ void SplitStringToSetUsing(const string& full, const char* delim,
+ }
+
+ void SplitStringToHashmapUsing(const string& full, const char* delim,
+- hash_map<string, string>* result) {
++ my_hash_map<string, string>* result) {
+ simple_hash_map_iterator<string> it(result);
+ SplitStringToIteratorUsing(full, delim, it);
+ }
+--- a/src/third_party/s2/strings/split.h
++++ b/src/third_party/s2/strings/split.h
+@@ -41,7 +41,7 @@ using namespace std;
+ void SplitStringAllowEmpty(const string& full, const char* delim,
+ vector<string>* res);
+ void SplitStringToHashsetAllowEmpty(const string& full, const char* delim,
+- hash_set<string>* res);
++ my_hash_set<string>* res);
+ void SplitStringToSetAllowEmpty(const string& full, const char* delim,
+ set<string>* res);
+ // The even-positioned (0-based) components become the keys for the
+@@ -50,7 +50,7 @@ void SplitStringToSetAllowEmpty(const string& full, const char* delim,
+ // if the key was already present in the hash table, or will be the
+ // empty string if the key is a newly inserted key.
+ void SplitStringToHashmapAllowEmpty(const string& full, const char* delim,
+- hash_map<string, string>* result);
++ my_hash_map<string, string>* result);
+
+ // ----------------------------------------------------------------------
+ // SplitStringUsing()
+@@ -66,7 +66,7 @@ void SplitStringToHashmapAllowEmpty(const string& full, const char* delim,
+ void SplitStringUsing(const string& full, const char* delim,
+ vector<string>* res);
+ void SplitStringToHashsetUsing(const string& full, const char* delim,
+- hash_set<string>* res);
++ my_hash_set<string>* res);
+ void SplitStringToSetUsing(const string& full, const char* delim,
+ set<string>* res);
+ // The even-positioned (0-based) components become the keys for the
+@@ -75,7 +75,7 @@ void SplitStringToSetUsing(const string& full, const char* delim,
+ // if the key was already present in the hash table, or will be the
+ // empty string if the key is a newly inserted key.
+ void SplitStringToHashmapUsing(const string& full, const char* delim,
+- hash_map<string, string>* result);
++ my_hash_map<string, string>* result);
+
+ // ----------------------------------------------------------------------
+ // SplitOneIntToken()
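
The failure mode this new patch works around: s2's vendored hash.h turned hash_map and hash_set into plain preprocessor macros, so those tokens in any header included afterwards, Boost 1.81's among them, were rewritten into std::unordered_map/std::unordered_set. Prefixing the macros as my_hash_map/my_hash_set keeps them out of foreign headers. A self-contained sketch of the idea; otherlib and the values below are illustrative, not taken from Boost or MongoDB:

    #include <string>
    #include <unordered_map>
    #include <unordered_set>

    // The renamed s2-style shortcuts (previously `hash_map`/`hash_set`).
    #define my_hash_map std::unordered_map
    #define my_hash_set std::unordered_set

    // A header included later may declare its own hash_map; under the old
    // macro name this declaration would have been rewritten into
    // `struct std::unordered_map`, a hard compile error.
    namespace otherlib {
        template <class K, class V> struct hash_map {};
    }

    int main() {
        my_hash_map<std::string, int> counts;  // expands to std::unordered_map
        counts["mongodb"] = 44;
        my_hash_set<int> ids{4};               // expands to std::unordered_set
        otherlib::hash_map<int, int> theirs;   // unaffected by the macros
        (void)theirs;
        return 0;
    }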
diff --git a/mongodb-4.4.15-adjust-cache-alignment-assumptions.patch b/mongodb-4.4.15-adjust-cache-alignment-assumptions.patch.arm64
index 27c4a2da4d4a..27c4a2da4d4a 100644
--- a/mongodb-4.4.15-adjust-cache-alignment-assumptions.patch
+++ b/mongodb-4.4.15-adjust-cache-alignment-assumptions.patch.arm64
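
The zero-line rename above is load-bearing: prepare() now applies every file matching $srcdir/*.patch unconditionally, while the cache-alignment patch must only be applied on aarch64. The .arm64 suffix presumably exists to take the file out of that glob, with the aarch64 branch applying it by its full name. The flow, restated from the PKGBUILD above:

    # prepare(): the glob no longer matches the renamed file...
    for file in "$srcdir"/*.patch; do
        patch -Np1 -i "$file"    # gentoo/boost/scons patches only
    done

    # ...so only the aarch64 branch applies it, explicitly.
    if [[ $CARCH == "aarch64" ]]; then
        patch -Np1 -i ../mongodb-4.4.15-adjust-cache-alignment-assumptions.patch.arm64
    fi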