// SPDX-License-Identifier: GPL-2.0
/* divesite.c */
#include "divesite.h"
#include "dive.h"
#include "divelist.h"
#include "errorhelper.h"
#include "format.h"
#include "membuffer.h"
#include "subsurface-string.h"
#include "sha1.h"

#include <algorithm>	// std::find_if, std::lower_bound, std::find and std::any_of used below
#include <math.h>

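/* Three-way comparison of two dive sites by uuid: returns -1, 0 or 1 */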
int divesite_comp_uuid(const dive_site &ds1, const dive_site &ds2)
{
	if (ds1.uuid == ds2.uuid)
		return 0;
	return ds1.uuid < ds2.uuid ? -1 : 1;
}

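/* Helper: return the first dive site in the table that satisfies the given predicate, or NULL if there is none */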
template <typename PRED>
dive_site *get_by_predicate(const dive_site_table &ds_table, PRED pred)
{
	auto it = std::find_if(ds_table.begin(), ds_table.end(), pred);
	return it != ds_table.end() ? it->get() : NULL;
}

dive_site *dive_site_table::get_by_uuid(uint32_t uuid) const
{
	// The table is sorted by uuid
	auto it = std::lower_bound(begin(), end(), uuid,
				   [] (const auto &ds, auto uuid) { return ds->uuid < uuid; });
	return it != end() && (*it)->uuid == uuid ? it->get() : NULL;
}

/* there could be multiple sites of the same name - return the first one */
dive_site *dive_site_table::get_by_name(const std::string &name) const
{
	return get_by_predicate(*this, [&name](const auto &ds) { return ds->name == name; });
}

/* there could be multiple sites at the same GPS fix - return the first one */
dive_site *dive_site_table::get_by_gps(const location_t *loc) const
{
	return get_by_predicate(*this, [loc](const auto &ds) { return ds->location == *loc; });
}

/* to avoid a bug where we have two dive sites with different names and the same GPS coordinates
 * and first get the gps coordinates (reading a V2 file) and happen to get back "the other" name,
 * this function allows us to verify if a very specific name/GPS combination already exists */
dive_site *dive_site_table::get_by_gps_and_name(const std::string &name, const location_t loc) const
{
	return get_by_predicate(*this, [&name, loc](const auto &ds) { return ds->location == loc &&
								      ds->name == name; });
}

/* find the closest one, no more than distance meters away - if more than one at same distance, pick the first */
dive_site *dive_site_table::get_by_gps_proximity(location_t loc, int distance) const
{
	struct dive_site *res = nullptr;
	unsigned int cur_distance, min_distance = distance;
	for (const auto &ds: *this) {
		if (ds->has_gps_location() &&
		    (cur_distance = get_distance(ds->location, loc)) < min_distance) {
			min_distance = cur_distance;
			res = ds.get();
		}
	}
	return res;
}

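/* Add a dive site to the table. If the site has no uuid yet, generate a deterministic one
 * from its textual data and make sure that it does not collide with an existing entry. */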
dive_site_table::put_result dive_site_table::register_site(std::unique_ptr<dive_site> ds)
{
	/* If the site doesn't yet have a UUID, create a new one.
	 * Make this deterministic for testing. */
	if (!ds->uuid) {
		SHA1 sha;
		if (!ds->name.empty())
			sha.update(ds->name);
		if (!ds->description.empty())
			sha.update(ds->description);
		if (!ds->notes.empty())
			sha.update(ds->notes);
		ds->uuid = sha.hash_uint32();
	}

	/* Take care to never have the same uuid twice. This could happen on
	 * reimport of a log where the dive sites have diverged */
	while (ds->uuid == 0 || get_by_uuid(ds->uuid) != NULL)
		++ds->uuid;

	return put(std::move(ds));
}

dive_site::dive_site()
{
}

dive_site::dive_site(const std::string &name) : name(name)
{
}

dive_site::dive_site(const std::string &name, const location_t loc) : name(name), location(loc)
{
}

dive_site::dive_site(uint32_t uuid) : uuid(uuid)
{
}

dive_site::~dive_site()
{
}

/* when parsing, dive sites are identified by uuid */
dive_site *dive_site_table::alloc_or_get(uint32_t uuid)
{
	struct dive_site *ds;

	if (uuid && (ds = get_by_uuid(uuid)) != NULL)
		return ds;

	return register_site(std::make_unique<dive_site>(uuid)).ptr;
}

size_t dive_site::nr_of_dives() const
{
	return dives.size();
}

bool dive_site::is_selected() const
{
	return std::any_of(dives.begin(), dives.end(),
			   [](dive *dive) { return dive->selected; });
}

bool dive_site::has_gps_location() const
{
	return has_location(&location);
}

/* allocate a new site and add it to the table */
dive_site *dive_site_table::create(const std::string &name)
{
	return register_site(std::make_unique<dive_site>(name)).ptr;
}

/* same as before, but with GPS data */
dive_site *dive_site_table::create(const std::string &name, const location_t loc)
{
	return register_site(std::make_unique<dive_site>(name, loc)).ptr;
}

/* if all fields are empty, the dive site is pointless */
bool dive_site::is_empty() const
{
	return name.empty() &&
	       description.empty() &&
	       notes.empty() &&
	       !has_location(&location);
}

static void merge_string(std::string &a, const std::string &b)
{
	if (b.empty())
		return;

	if (a == b)
		return;

	if (a.empty()) {
		a = b;
		return;
	}

	a = format_string_std("(%s) or (%s)", a.c_str(), b.c_str());
}

/* Used to check on import if two dive sites are equivalent.
 * Since currently no merging is performed, be very conservative
 * and only consider dive sites equal if they are exactly the same.
 * Taxonomy is not compared, as no taxonomy is generated on
 * import.
 */
static bool same(const struct dive_site &a, const struct dive_site &b)
{
	return a.name == b.name
	    && a.location == b.location
	    && a.description == b.description
	    && a.notes == b.notes;
}

dive_site *dive_site_table::get_same(const struct dive_site &site) const
{
	return get_by_predicate(*this, [site](const auto &ds) { return same(*ds, site); });
}

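/* Merge the data of a second dive site into this one: the location is only taken if we
 * don't have one yet, textual fields are combined, and the taxonomy of "b" may be moved away. */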
void dive_site::merge(dive_site &b)
{
	if (!has_location(&location)) location = b.location;
	merge_string(name, b.name);
	merge_string(notes, b.notes);
	merge_string(description, b.description);

	if (taxonomy.empty())
		taxonomy = std::move(b.taxonomy);
}

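/* Return the first dive site with the given name or create a new one if none exists */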
dive_site *dive_site_table::find_or_create(const std::string &name)
{
	struct dive_site *ds = get_by_name(name);
	if (ds)
		return ds;
	return create(name);
}

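/* Detach all dives from dive sites that carry no data */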
void dive_site_table::purge_empty()
{
	for (const auto &ds: *this) {
		if (!ds->is_empty())
			continue;
		while (!ds->dives.empty()) {
			struct dive *d = ds->dives.back();
			if (d->dive_site != ds.get()) {
				report_info("Warning: dive %d registered to wrong dive site in %s", d->number, __func__);
				ds->dives.pop_back();
			} else {
				unregister_dive_from_dive_site(d);
			}
		}
	}
}

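/* Register a dive with this dive site; if it currently belongs to a different site, it is moved here */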
void dive_site::add_dive(struct dive *d)
{
	if (!d) {
		report_info("Warning: dive_site::add_dive() called with NULL dive");
		return;
	}
	if (d->dive_site == this)
		return;
	if (d->dive_site) {
		report_info("Warning: adding dive that already belongs to a dive site to a different site");
		unregister_dive_from_dive_site(d);
	}
	dives.push_back(d);
	d->dive_site = this;
}

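/* Remove a dive from the dive site it is registered to and return that site (or nullptr if it had none) */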
struct dive_site *unregister_dive_from_dive_site(struct dive *d)
{
	struct dive_site *ds = d->dive_site;
	if (!ds)
		return nullptr;
	auto it = std::find(ds->dives.begin(), ds->dives.end(), d);
	if (it != ds->dives.end())
		ds->dives.erase(it);
	else
		report_info("Warning: dive not found in divesite table, even though it should be registered there.");
	d->dive_site = nullptr;
	return ds;
}