2017-04-27 20:24:53 +02:00
|
|
|
// SPDX-License-Identifier: GPL-2.0
|
2015-02-11 11:22:00 -08:00
|
|
|
/* divesite.c */
|
|
|
|
#include "divesite.h"
|
2015-02-12 11:19:05 -08:00
|
|
|
#include "dive.h"
|
2015-08-31 21:45:31 -03:00
|
|
|
#include "divelist.h"
|
core: introduce divelog structure
The parser API was very annoying, as a number of tables
to-be-filled were passed in as pointers. The goal of this
commit is to collect all these tables in a single struct.
This should make it (more or less) clear what is actually
written into the divelog files.
Moreover, it should now be rather easy to search for
instances, where the global logfile is accessed (and it
turns out that there are many!).
The divelog struct does not contain the tables as substructs,
but only collects pointers. The idea is that the "divelog.h"
file can be included without all the other files describing
the numerous tables.
To make it easier to use from C++ parts of the code, the
struct implements a constructor and a destructor. Sadly,
we can't use smart pointers, since the pointers are accessed
from C code. Therefore the constructor and destructor are
quite complex.
The whole commit is large, but was mostly an automatic
conversion.
One oddity of note: the divelog structure also contains
the "autogroup" flag, since that is saved in the divelog.
This actually fixes a bug: Before, when importing dives
from a different log, the autogroup flag was overwritten.
This was probably not intended and does not happen anymore.
Signed-off-by: Berthold Stoeger <bstoeger@mail.tuwien.ac.at>
2022-11-08 21:31:08 +01:00
|
|
|
#include "divelog.h"
|
2024-03-24 21:03:08 +01:00
|
|
|
#include "errorhelper.h"
|
2024-05-04 17:18:08 +02:00
|
|
|
#include "format.h"
|
2024-05-04 14:55:10 +02:00
|
|
|
#include "gettextfromc.h"
|
2017-02-19 14:11:37 -08:00
|
|
|
#include "membuffer.h"
|
2024-05-04 14:55:10 +02:00
|
|
|
#include "pref.h"
|
core: introduce divelog structure
The parser API was very annoying, as a number of tables
to-be-filled were passed in as pointers. The goal of this
commit is to collect all these tables in a single struct.
This should make it (more or less) clear what is actually
written into the divelog files.
Moreover, it should now be rather easy to search for
instances, where the global logfile is accessed (and it
turns out that there are many!).
The divelog struct does not contain the tables as substructs,
but only collects pointers. The idea is that the "divelog.h"
file can be included without all the other files describing
the numerous tables.
To make it easier to use from C++ parts of the code, the
struct implements a constructor and a destructor. Sadly,
we can't use smart pointers, since the pointers are accessed
from C code. Therefore the constructor and destructor are
quite complex.
The whole commit is large, but was mostly an automatic
conversion.
One oddity of note: the divelog structure also contains
the "autogroup" flag, since that is saved in the divelog.
This actually fixes a bug: Before, when importing dives
from a different log, the autogroup flag was overwritten.
This was probably not intended and does not happen anymore.
Signed-off-by: Berthold Stoeger <bstoeger@mail.tuwien.ac.at>
2022-11-08 21:31:08 +01:00
|
|
|
#include "subsurface-string.h"
|
2019-04-05 21:33:27 +02:00
|
|
|
#include "sha1.h"
|
2015-02-12 11:19:05 -08:00
|
|
|
|
2015-06-13 11:54:33 +02:00
|
|
|
#include <algorithm>
#include <math.h>
|
|
|
|
|
2024-05-11 11:47:45 +02:00
|
|
|
int get_divesite_idx(const struct dive_site *ds, dive_site_table &ds_table)
|
2019-02-26 21:14:48 +01:00
|
|
|
{
|
2024-05-11 11:47:45 +02:00
|
|
|
auto it = std::find_if(ds_table.begin(), ds_table.end(), [ds] (const auto &ds2) { return ds2.get() == ds; });
|
|
|
|
return it != ds_table.end() ? it - ds_table.begin() : -1;
|
2019-02-26 21:14:48 +01:00
|
|
|
}
|
|
|
|
|
2024-05-11 11:47:45 +02:00
|
|
|
template <typename PRED>
|
|
|
|
struct dive_site *get_dive_site_by_predicate(dive_site_table &ds_table, PRED pred)
|
2019-02-26 11:03:57 +01:00
|
|
|
{
|
2024-05-11 11:47:45 +02:00
|
|
|
auto it = std::find_if(ds_table.begin(), ds_table.end(), pred);
|
|
|
|
return it != ds_table.end() ? it->get() : NULL;
|
|
|
|
}
|
|
|
|
|
|
|
|
struct dive_site *get_dive_site_by_uuid(uint32_t uuid, dive_site_table &ds_table)
|
|
|
|
{
|
|
|
|
// The table is sorted by uuid
|
|
|
|
auto it = std::lower_bound(ds_table.begin(), ds_table.end(), uuid,
|
|
|
|
[] (const auto &ds, auto uuid) { return ds->uuid < uuid; });
|
|
|
|
return it != ds_table.end() && (*it)->uuid == uuid ? it->get() : NULL;
|
2019-02-26 11:03:57 +01:00
|
|
|
}
|
|
|
|
|
2015-02-13 01:14:33 -08:00
|
|
|
/* there could be multiple sites of the same name - return the first one */
|
2024-05-11 11:47:45 +02:00
|
|
|
struct dive_site *get_dive_site_by_name(const std::string &name, dive_site_table &ds_table)
|
2015-02-13 01:14:33 -08:00
|
|
|
{
|
2024-05-11 11:47:45 +02:00
|
|
|
return get_dive_site_by_predicate(ds_table,
|
|
|
|
[&name](const auto &ds) { return ds->name == name; });
|
2015-02-13 01:14:33 -08:00
|
|
|
}
|
|
|
|
|
|
|
|
/* there could be multiple sites at the same GPS fix - return the first one */
|
2024-05-11 11:47:45 +02:00
|
|
|
struct dive_site *get_dive_site_by_gps(const location_t *loc, dive_site_table &ds_table)
|
2015-02-13 01:14:33 -08:00
|
|
|
{
|
2024-05-11 11:47:45 +02:00
|
|
|
return get_dive_site_by_predicate(ds_table,
|
|
|
|
[loc](const auto &ds) { return ds->location == *loc; });
|
2015-02-13 01:14:33 -08:00
|
|
|
}
|
|
|
|
|
2015-08-30 10:10:07 -07:00
|
|
|
/* to avoid a bug where we have two dive sites with different name and the same GPS coordinates
|
|
|
|
* and first get the gps coordinates (reading a V2 file) and happen to get back "the other" name,
|
|
|
|
* this function allows us to verify if a very specific name/GPS combination already exists */
|
2024-05-11 11:47:45 +02:00
|
|
|
struct dive_site *get_dive_site_by_gps_and_name(const std::string &name, const location_t *loc, dive_site_table &ds_table)
|
2015-08-30 10:10:07 -07:00
|
|
|
{
|
2024-05-11 11:47:45 +02:00
|
|
|
return get_dive_site_by_predicate(ds_table,
|
|
|
|
[&name, loc](const auto &ds) { return ds->location == *loc &&
|
|
|
|
ds->name == name; });
|
2015-08-30 10:10:07 -07:00
|
|
|
}
|
|
|
|
|
2015-06-13 11:54:33 +02:00
|
|
|
// Calculate the distance in meters between two coordinates.
|
2024-05-04 13:39:04 +02:00
|
|
|
unsigned int get_distance(const location_t *loc1, const location_t *loc2)
|
2015-06-13 11:54:33 +02:00
|
|
|
{
|
2019-04-30 06:53:31 -07:00
|
|
|
double lat1_r = udeg_to_radians(loc1->lat.udeg);
|
2018-10-20 14:12:15 -04:00
|
|
|
double lat2_r = udeg_to_radians(loc2->lat.udeg);
|
|
|
|
double lat_d_r = udeg_to_radians(loc2->lat.udeg - loc1->lat.udeg);
|
|
|
|
double lon_d_r = udeg_to_radians(loc2->lon.udeg - loc1->lon.udeg);
|
2015-06-13 11:54:33 +02:00
|
|
|
|
|
|
|
double a = sin(lat_d_r/2) * sin(lat_d_r/2) +
|
2019-04-30 06:53:31 -07:00
|
|
|
cos(lat1_r) * cos(lat2_r) * sin(lon_d_r/2) * sin(lon_d_r/2);
|
|
|
|
if (a < 0.0) a = 0.0;
|
|
|
|
if (a > 1.0) a = 1.0;
|
2015-06-13 11:54:33 +02:00
|
|
|
double c = 2 * atan2(sqrt(a), sqrt(1.0 - a));
|
|
|
|
|
2019-04-30 06:53:31 -07:00
|
|
|
// Earth radius in metres
|
2017-03-09 23:07:30 +07:00
|
|
|
return lrint(6371000 * c);
|
2015-06-13 11:54:33 +02:00
|
|
|
}
|
2015-06-10 11:45:34 -07:00
|
|
|
|
|
|
|
/* find the closest one, no more than distance meters away - if more than one at same distance, pick the first */
|
2024-05-11 11:47:45 +02:00
|
|
|
struct dive_site *get_dive_site_by_gps_proximity(const location_t *loc, int distance, dive_site_table &ds_table)
|
2015-06-10 11:45:34 -07:00
|
|
|
{
|
2024-05-11 11:47:45 +02:00
|
|
|
struct dive_site *res = nullptr;
|
2015-06-13 11:54:33 +02:00
|
|
|
unsigned int cur_distance, min_distance = distance;
|
2024-05-11 11:47:45 +02:00
|
|
|
for (const auto &ds: ds_table) {
|
|
|
|
if (dive_site_has_gps_location(ds.get()) &&
|
2018-10-20 14:12:15 -04:00
|
|
|
(cur_distance = get_distance(&ds->location, loc)) < min_distance) {
|
2015-06-10 11:45:34 -07:00
|
|
|
min_distance = cur_distance;
|
2024-05-11 11:47:45 +02:00
|
|
|
res = ds.get();
|
2015-06-10 11:45:34 -07:00
|
|
|
}
|
|
|
|
}
|
2018-10-23 12:42:01 +02:00
|
|
|
return res;
|
2015-06-10 11:45:34 -07:00
|
|
|
}
|
|
|
|
|
2024-05-11 11:47:45 +02:00
|
|
|
/* Insert a dive site into the table, taking ownership.
 * Gives the site a uuid if it doesn't have one yet and guarantees
 * that the uuid is unique within the table. */
dive_site_table::put_result dive_site_table::register_site(std::unique_ptr<dive_site> ds)
{
	/* If the site doesn't yet have an UUID, create a new one.
	 * Derive it from the site's text fields (hashed in the order
	 * name, description, notes) so it is deterministic for testing. */
	if (!ds->uuid) {
		SHA1 sha;
		for (const std::string *field: { &ds->name, &ds->description, &ds->notes }) {
			if (!field->empty())
				sha.update(*field);
		}
		ds->uuid = sha.hash_uint32();
	}

	/* Take care to never have the same uuid twice. This could happen on
	 * reimport of a log where the dive sites have diverged */
	while (ds->uuid == 0 || get_dive_site_by_uuid(ds->uuid, *this) != NULL)
		++ds->uuid;

	return put(std::move(ds));
}
|
|
|
|
|
2024-05-04 13:39:04 +02:00
|
|
|
// Default constructor: all members take their in-class default values.
dive_site::dive_site()
{
}
|
|
|
|
|
2024-05-04 17:18:08 +02:00
|
|
|
// Construct a dive site with only its name set; all other fields default.
dive_site::dive_site(const std::string &name) : name(name)
{
}
|
|
|
|
|
2024-05-04 17:18:08 +02:00
|
|
|
// Construct a dive site with a name and a position.
// loc must be non-null: it is dereferenced unconditionally.
dive_site::dive_site(const std::string &name, const location_t *loc) : name(name), location(*loc)
{
}
|
2020-03-17 21:26:57 +01:00
|
|
|
|
2024-05-11 11:47:45 +02:00
|
|
|
// Construct a dive site with a given uuid and otherwise default fields
// (used by alloc_or_get_dive_site(), where sites are identified by uuid).
dive_site::dive_site(uint32_t uuid) : uuid(uuid)
{
}
|
|
|
|
|
2024-05-04 13:39:04 +02:00
|
|
|
// Empty destructor, defined out of line.
// NOTE(review): presumably to keep member destructors out of the header — confirm.
dive_site::~dive_site()
{
}
|
|
|
|
|
2019-03-03 18:39:12 +01:00
|
|
|
/* when parsing, dive sites are identified by uuid */
|
2024-05-11 11:47:45 +02:00
|
|
|
struct dive_site *alloc_or_get_dive_site(uint32_t uuid, dive_site_table &ds_table)
|
2019-03-03 15:12:22 +01:00
|
|
|
{
|
|
|
|
struct dive_site *ds;
|
|
|
|
|
|
|
|
if (uuid && (ds = get_dive_site_by_uuid(uuid, ds_table)) != NULL)
|
|
|
|
return ds;
|
|
|
|
|
2024-05-11 11:47:45 +02:00
|
|
|
return ds_table.register_site(std::make_unique<dive_site>(uuid)).ptr;
|
2015-02-12 11:19:05 -08:00
|
|
|
}
|
|
|
|
|
2024-05-04 14:41:04 +02:00
|
|
|
// Number of dives currently registered at the given dive site.
size_t nr_of_dives_at_dive_site(const dive_site &ds)
{
	return ds.dives.size();
}
|
|
|
|
|
2024-05-04 14:41:04 +02:00
|
|
|
bool is_dive_site_selected(const struct dive_site &ds)
|
2015-07-15 21:25:26 -07:00
|
|
|
{
|
2024-05-04 14:41:04 +02:00
|
|
|
return any_of(ds.dives.begin(), ds.dives.end(),
|
|
|
|
[](dive *dive) { return dive->selected; });
|
2015-07-15 21:25:26 -07:00
|
|
|
}
|
|
|
|
|
2015-02-12 11:19:05 -08:00
|
|
|
/* allocate a new site and add it to the table */
|
2024-05-11 11:47:45 +02:00
|
|
|
struct dive_site *create_dive_site(const std::string &name, dive_site_table &ds_table)
|
2015-02-12 01:59:16 -08:00
|
|
|
{
|
2024-05-11 11:47:45 +02:00
|
|
|
return ds_table.register_site(std::make_unique<dive_site>(name)).ptr;
|
2015-02-12 01:59:16 -08:00
|
|
|
}
|
|
|
|
|
|
|
|
/* same as before, but with GPS data */
|
2024-05-11 11:47:45 +02:00
|
|
|
struct dive_site *create_dive_site_with_gps(const std::string &name, const location_t *loc, dive_site_table &ds_table)
|
2015-02-12 11:19:05 -08:00
|
|
|
{
|
2024-05-11 11:47:45 +02:00
|
|
|
return ds_table.register_site(std::make_unique<dive_site>(name, loc)).ptr;
|
2015-02-12 11:19:05 -08:00
|
|
|
}
|
2015-02-13 22:53:03 -08:00
|
|
|
|
2019-03-03 18:39:12 +01:00
|
|
|
/* if all fields are empty, the dive site is pointless */
|
2024-05-04 13:39:04 +02:00
|
|
|
bool dive_site_is_empty(struct dive_site *ds)
|
2015-02-13 22:53:03 -08:00
|
|
|
{
|
2018-07-06 22:38:01 -07:00
|
|
|
return !ds ||
|
2024-05-04 17:18:08 +02:00
|
|
|
(ds->name.empty() &&
|
|
|
|
ds->description.empty() &&
|
|
|
|
ds->notes.empty() &&
|
|
|
|
!has_location(&ds->location));
|
2015-02-13 22:53:03 -08:00
|
|
|
}
|
2015-06-26 14:40:12 -03:00
|
|
|
|
2024-05-04 17:18:08 +02:00
|
|
|
static void merge_string(std::string &a, const std::string &b)
|
2017-10-02 22:57:26 -07:00
|
|
|
{
|
2024-05-04 17:18:08 +02:00
|
|
|
if (b.empty())
|
2017-02-19 17:20:09 -08:00
|
|
|
return;
|
|
|
|
|
2024-05-04 17:18:08 +02:00
|
|
|
if (a == b)
|
2017-02-19 14:11:37 -08:00
|
|
|
return;
|
|
|
|
|
2024-05-04 17:18:08 +02:00
|
|
|
if (a.empty()) {
|
|
|
|
a = b;
|
2017-02-19 14:11:37 -08:00
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
2024-05-04 17:18:08 +02:00
|
|
|
a = format_string_std("(%s) or (%s)", a.c_str(), b.c_str());
|
2017-02-19 14:11:37 -08:00
|
|
|
}
|
|
|
|
|
2019-03-03 15:12:22 +01:00
|
|
|
/* Used to check on import if two dive sites are equivalent.
|
|
|
|
* Since currently no merging is performed, be very conservative
|
|
|
|
* and only consider equal dive sites that are exactly the same.
|
|
|
|
* Taxonomy is not compared, as no taxonomy is generated on
|
|
|
|
* import.
|
|
|
|
*/
|
2024-05-11 11:47:45 +02:00
|
|
|
static bool same_dive_site(const struct dive_site &a, const struct dive_site &b)
|
2019-03-03 15:12:22 +01:00
|
|
|
{
|
2024-05-11 11:47:45 +02:00
|
|
|
return a.name == b.name
|
|
|
|
&& a.location == b.location
|
|
|
|
&& a.description == b.description
|
|
|
|
&& a.notes == b.notes;
|
2019-03-03 15:12:22 +01:00
|
|
|
}
|
|
|
|
|
2024-05-11 11:47:45 +02:00
|
|
|
struct dive_site *get_same_dive_site(const struct dive_site &site)
|
2019-03-03 15:12:22 +01:00
|
|
|
{
|
2024-05-11 11:47:45 +02:00
|
|
|
return get_dive_site_by_predicate(*divelog.sites,
|
|
|
|
[site](const auto &ds) { return same_dive_site(*ds, site); });
|
2019-03-03 15:12:22 +01:00
|
|
|
}
|
|
|
|
|
2024-05-04 13:39:04 +02:00
|
|
|
void merge_dive_site(struct dive_site *a, struct dive_site *b)
|
2017-02-19 14:11:37 -08:00
|
|
|
{
|
2018-10-20 14:12:15 -04:00
|
|
|
if (!has_location(&a->location)) a->location = b->location;
|
2024-05-04 17:18:08 +02:00
|
|
|
merge_string(a->name, b->name);
|
|
|
|
merge_string(a->notes, b->notes);
|
|
|
|
merge_string(a->description, b->description);
|
2017-02-19 14:11:37 -08:00
|
|
|
|
2024-05-04 13:39:04 +02:00
|
|
|
if (a->taxonomy.empty())
|
|
|
|
a->taxonomy = std::move(b->taxonomy);
|
2017-02-19 14:11:37 -08:00
|
|
|
}
|
|
|
|
|
2024-05-11 11:47:45 +02:00
|
|
|
struct dive_site *find_or_create_dive_site_with_name(const std::string &name, dive_site_table &ds_table)
|
2015-07-13 15:13:48 -03:00
|
|
|
{
|
2024-05-11 11:47:45 +02:00
|
|
|
struct dive_site *ds = get_dive_site_by_name(name, ds_table);
|
2015-07-13 15:13:48 -03:00
|
|
|
if (ds)
|
2018-10-23 13:29:04 +02:00
|
|
|
return ds;
|
2019-03-03 18:39:12 +01:00
|
|
|
return create_dive_site(name, ds_table);
|
2015-07-13 15:13:48 -03:00
|
|
|
}
|
2015-08-24 11:07:57 -07:00
|
|
|
|
2024-05-11 11:47:45 +02:00
|
|
|
/* Clear the dive <-> dive-site links of all empty dive sites (see
 * dive_site_is_empty()). The sites themselves stay in the table. */
void purge_empty_dive_sites(dive_site_table &ds_table)
{
	for (const auto &ds: ds_table) {
		if (!dive_site_is_empty(ds.get()))
			continue;
		/* Drain the site's dive list; each iteration removes exactly
		 * one entry, either directly or via unregister below. */
		while (!ds->dives.empty()) {
			struct dive *d = ds->dives.back();
			if (d->dive_site != ds.get()) {
				/* Inconsistent back-link: the dive thinks it belongs
				 * to a different site. Drop the stale entry only. */
				report_info("Warning: dive %d registered to wrong dive site in %s", d->number, __func__);
				ds->dives.pop_back();
			} else {
				unregister_dive_from_dive_site(d);
			}
		}
	}
}
|
|
|
|
|
2024-05-04 13:39:04 +02:00
|
|
|
/* Register dive d at dive site ds: add it to the site's dive list and
 * set the dive's back-pointer. If the dive already belongs to another
 * site, it is unregistered from that site first (with a warning).
 * NULL arguments are rejected with a warning. */
void add_dive_to_dive_site(struct dive *d, struct dive_site *ds)
{
	if (!d) {
		report_info("Warning: add_dive_to_dive_site called with NULL dive");
		return;
	}
	if (!ds) {
		report_info("Warning: add_dive_to_dive_site called with NULL dive site");
		return;
	}
	/* Already registered here: nothing to do. */
	if (d->dive_site == ds)
		return;
	if (d->dive_site) {
		report_info("Warning: adding dive that already belongs to a dive site to a different site");
		unregister_dive_from_dive_site(d);
	}
	ds->dives.push_back(d);
	d->dive_site = ds;
}
|
|
|
|
|
2024-05-04 13:39:04 +02:00
|
|
|
struct dive_site *unregister_dive_from_dive_site(struct dive *d)
|
2019-03-04 23:20:29 +01:00
|
|
|
{
|
|
|
|
struct dive_site *ds = d->dive_site;
|
|
|
|
if (!ds)
|
2024-05-04 14:41:04 +02:00
|
|
|
return nullptr;
|
|
|
|
auto it = std::find(ds->dives.begin(), ds->dives.end(), d);
|
|
|
|
if (it != ds->dives.end())
|
|
|
|
ds->dives.erase(it);
|
|
|
|
else
|
|
|
|
report_info("Warning: dive not found in divesite table, even though it should be registered there.");
|
|
|
|
d->dive_site = nullptr;
|
2019-03-04 23:20:29 +01:00
|
|
|
return ds;
|
|
|
|
}
|
2024-05-04 14:55:10 +02:00
|
|
|
|
|
|
|
std::string constructLocationTags(const taxonomy_data &taxonomy, bool for_maintab)
|
|
|
|
{
|
|
|
|
using namespace std::string_literals;
|
|
|
|
std::string locationTag;
|
|
|
|
|
|
|
|
if (taxonomy.empty())
|
|
|
|
return locationTag;
|
|
|
|
|
|
|
|
/* Check if the user set any of the 3 geocoding categories */
|
|
|
|
bool prefs_set = false;
|
|
|
|
for (int i = 0; i < 3; i++) {
|
|
|
|
if (prefs.geocoding.category[i] != TC_NONE)
|
|
|
|
prefs_set = true;
|
|
|
|
}
|
|
|
|
|
|
|
|
if (!prefs_set && !for_maintab) {
|
|
|
|
locationTag = "<small><small>" + gettextFromC::tr("No dive site layout categories set in preferences!").toStdString() +
|
|
|
|
"</small></small>"s;
|
|
|
|
return locationTag;
|
|
|
|
}
|
|
|
|
else if (!prefs_set)
|
|
|
|
return locationTag;
|
|
|
|
|
|
|
|
if (for_maintab)
|
|
|
|
locationTag = "<small><small>("s + gettextFromC::tr("Tags").toStdString() + ": "s;
|
|
|
|
else
|
|
|
|
locationTag = "<small><small>"s;
|
|
|
|
std::string connector;
|
|
|
|
for (int i = 0; i < 3; i++) {
|
|
|
|
if (prefs.geocoding.category[i] == TC_NONE)
|
|
|
|
continue;
|
|
|
|
for (auto const &t: taxonomy) {
|
|
|
|
if (t.category == prefs.geocoding.category[i]) {
|
|
|
|
if (!t.value.empty()) {
|
|
|
|
locationTag += connector + t.value;
|
|
|
|
connector = " / "s;
|
|
|
|
}
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
if (for_maintab)
|
|
|
|
locationTag += ")</small></small>"s;
|
|
|
|
else
|
|
|
|
locationTag += "</small></small>"s;
|
|
|
|
return locationTag;
|
|
|
|
}
|