2019-03-12 21:35:43 +00:00
|
|
|
// SPDX-License-Identifier: GPL-2.0
|
|
|
|
|
|
|
|
#include "command_divesite.h"
|
core: introduce divelog structure
The parser API was very annoying, as a number of tables
to-be-filled were passed in as pointers. The goal of this
commit is to collect all these tables in a single struct.
This should make it (more or less) clear what is actually
written into the divelog files.
Moreover, it should now be rather easy to search for
instances, where the global logfile is accessed (and it
turns out that there are many!).
The divelog struct does not contain the tables as substructs,
but only collects pointers. The idea is that the "divelog.h"
file can be included without all the other files describing
the numerous tables.
To make it easier to use from C++ parts of the code, the
struct implements a constructor and a destructor. Sadly,
we can't use smart pointers, since the pointers are accessed
from C code. Therfore the constructor and destructor are
quite complex.
The whole commit is large, but was mostly an automatic
conversion.
One oddity of note: the divelog structure also contains
the "autogroup" flag, since that is saved in the divelog.
This actually fixes a bug: Before, when importing dives
from a different log, the autogroup flag was overwritten.
This was probably not intended and does not happen anymore.
Signed-off-by: Berthold Stoeger <bstoeger@mail.tuwien.ac.at>
2022-11-08 20:31:08 +00:00
|
|
|
#include "core/divelog.h"
|
2019-03-12 21:35:43 +00:00
|
|
|
#include "core/divesite.h"
|
2020-02-03 18:33:06 +00:00
|
|
|
#include "core/subsurface-qt/divelistnotifier.h"
|
2019-03-12 22:51:39 +00:00
|
|
|
#include "core/qthelper.h"
|
2019-03-14 07:26:50 +00:00
|
|
|
#include "core/subsurface-string.h"
|
2019-03-12 22:51:39 +00:00
|
|
|
#include "qt-models/divelocationmodel.h"
|
2019-03-22 19:55:05 +00:00
|
|
|
#include "qt-models/filtermodels.h"
|
2019-03-12 21:35:43 +00:00
|
|
|
|
|
|
|
namespace Command {
|
|
|
|
|
|
|
|
// Helper functions to add / remove a set of dive sites
|
|
|
|
|
|
|
|
// Add a set of dive sites to the core. The dives that were associated with
|
|
|
|
// that dive site will be restored to that dive site.
|
2024-05-04 11:39:04 +00:00
|
|
|
static std::vector<dive_site *> addDiveSites(std::vector<std::unique_ptr<dive_site>> &sites)
{
	std::vector<dive_site *> res;
	QVector<dive *> changedDives;	// dives whose dive_site field is set here; reported in one batched signal below
	res.reserve(sites.size());

	for (std::unique_ptr<dive_site> &ds: sites) {
		// Readd the dives that belonged to this site
		for (dive *d: ds->dives) {
			// TODO: send dive site changed signal
			d->dive_site = ds.get();
			changedDives.push_back(d);
		}

		// Add dive site to core, but remember a non-owning pointer first.
		res.push_back(ds.get());
		int idx = register_dive_site(ds.release()); // Return ownership to backend.
		emit diveListNotifier.diveSiteAdded(res.back(), idx); // Inform frontend of new dive site.
	}

	// Send a single signal for all affected dives instead of one per dive.
	emit diveListNotifier.divesChanged(changedDives, DiveField::DIVESITE);

	// Clear vector of unused owning pointers
	sites.clear();

	return res;
}
|
|
|
|
|
|
|
|
// Remove a set of dive sites. Get owning pointers to them. The dives are set to
|
|
|
|
// being at no dive site, but the dive site will retain a list of dives, so
|
|
|
|
// that the dives can be readded to the site on undo.
|
2024-05-04 11:39:04 +00:00
|
|
|
static std::vector<std::unique_ptr<dive_site>> removeDiveSites(std::vector<dive_site *> &sites)
{
	std::vector<std::unique_ptr<dive_site>> res;
	QVector<dive *> changedDives;	// dives whose dive_site field is reset here; reported in one batched signal below
	res.reserve(sites.size());

	for (dive_site *ds: sites) {
		// Reset the dive_site field of the affected dives
		for (dive *d: ds->dives) {
			d->dive_site = nullptr;
			changedDives.push_back(d);
		}

		// Remove dive site from core and take ownership.
		int idx = unregister_dive_site(ds);
		res.emplace_back(ds);
		emit diveListNotifier.diveSiteDeleted(ds, idx); // Inform frontend of removed dive site.
	}

	// Send a single signal for all affected dives instead of one per dive.
	emit diveListNotifier.divesChanged(changedDives, DiveField::DIVESITE);

	// The input pointers are now owned by res; drop the stale non-owning copies.
	sites.clear();

	return res;
}
|
|
|
|
|
2019-03-13 19:58:25 +00:00
|
|
|
// Create an undo command that adds one new dive site with the given name.
// The site is owned by this command until redo() hands it to the core.
AddDiveSite::AddDiveSite(const QString &name)
{
	setText(Command::Base::tr("add dive site"));
	auto site = std::make_unique<dive_site>();
	site->name = name.toStdString();
	sitesToAdd.push_back(std::move(site));
}
|
|
|
|
|
|
|
|
// Adding a freshly created dive site is always an effective change.
bool AddDiveSite::workToBeDone()
{
	return true;
}
|
|
|
|
|
|
|
|
void AddDiveSite::redo()
{
	// Hand the site over to the core; keep non-owning pointers for undo.
	sitesToRemove = addDiveSites(sitesToAdd);
}
|
|
|
|
|
|
|
|
void AddDiveSite::undo()
{
	// Take the site back out of the core; regain ownership for a later redo.
	sitesToAdd = removeDiveSites(sitesToRemove);
}
|
|
|
|
|
2019-05-05 03:40:27 +00:00
|
|
|
// Import dive sites from a raw dive_site_table. Every entry of the table is
// consumed: it is either adopted by this command (ownership taken) or deleted
// when an equivalent site already exists. The table is emptied afterwards.
ImportDiveSites::ImportDiveSites(struct dive_site_table *sites, const QString &source)
{
	setText(Command::Base::tr("import dive sites from %1").arg(source));

	for (int i = 0; i < sites->nr; ++i) {
		struct dive_site *new_ds = sites->dive_sites[i];

		// Don't import dive sites that already exist. Currently we only check for
		// the same name. We might want to be smarter here and merge dive site data, etc.
		struct dive_site *old_ds = get_same_dive_site(new_ds);
		if (old_ds) {
			delete new_ds;
			continue;
		}
		sitesToAdd.emplace_back(new_ds); // take ownership of the imported site
	}

	// All site have been consumed
	sites->nr = 0;
}
|
|
|
|
|
|
|
|
// Only an effective change if at least one site was actually imported.
bool ImportDiveSites::workToBeDone()
{
	return !sitesToAdd.empty();
}
|
|
|
|
|
|
|
|
void ImportDiveSites::redo()
{
	// Hand the imported sites to the core; keep non-owning pointers for undo.
	sitesToRemove = addDiveSites(sitesToAdd);
}
|
|
|
|
|
|
|
|
void ImportDiveSites::undo()
{
	// Take the imported sites back out of the core; regain ownership.
	sitesToAdd = removeDiveSites(sitesToRemove);
}
|
|
|
|
|
2020-01-06 20:47:53 +00:00
|
|
|
// Create an undo command that deletes the given dive sites. The QVector is
// copied into the sitesToRemove member via its iterator range.
DeleteDiveSites::DeleteDiveSites(const QVector<dive_site *> &sites) : sitesToRemove(sites.begin(), sites.end())
{
	setText(Command::Base::tr("delete %n dive site(s)", "", sites.size()));
}
|
|
|
|
|
|
|
|
// Only an effective change if there is at least one site to delete.
bool DeleteDiveSites::workToBeDone()
{
	return !sitesToRemove.empty();
}
|
|
|
|
|
|
|
|
void DeleteDiveSites::redo()
{
	// Remove the sites from the core; keep the owning pointers for undo.
	sitesToAdd = removeDiveSites(sitesToRemove);
}
|
|
|
|
|
|
|
|
void DeleteDiveSites::undo()
{
	// Readd the deleted sites to the core; ownership goes back to the backend.
	sitesToRemove = addDiveSites(sitesToAdd);
}
|
|
|
|
|
2019-03-19 18:52:54 +00:00
|
|
|
// Collect every dive site in the global dive log that no dive refers to,
// so that redo() can remove them in one go.
PurgeUnusedDiveSites::PurgeUnusedDiveSites()
{
	setText(Command::Base::tr("purge unused dive sites"));
	for (int i = 0; i < divelog.sites->nr; ++i) {
		dive_site *ds = divelog.sites->dive_sites[i];
		if (ds->dives.empty())
			sitesToRemove.push_back(ds);
	}
}
|
|
|
|
|
|
|
|
// Only an effective change if an unused site was found by the constructor.
bool PurgeUnusedDiveSites::workToBeDone()
{
	return !sitesToRemove.empty();
}
|
|
|
|
|
|
|
|
void PurgeUnusedDiveSites::redo()
{
	// Remove the unused sites from the core; keep owning pointers for undo.
	sitesToAdd = removeDiveSites(sitesToRemove);
}
|
|
|
|
|
|
|
|
void PurgeUnusedDiveSites::undo()
{
	// Readd the purged sites to the core; ownership goes back to the backend.
	sitesToRemove = addDiveSites(sitesToAdd);
}
|
|
|
|
|
2019-03-12 22:51:39 +00:00
|
|
|
EditDiveSiteName::EditDiveSiteName(dive_site *dsIn, const QString &name) : ds(dsIn),
	value(name.toStdString()) // name to set; after redo() it holds the previous name
{
	setText(Command::Base::tr("Edit dive site name"));
}
|
|
|
|
|
|
|
|
// Only an effective change if the new name differs from the current one.
bool EditDiveSiteName::workToBeDone()
{
	return value != ds->name;
}
|
|
|
|
|
|
|
|
void EditDiveSiteName::redo()
{
	// Exchange stored and current name; the swap makes redo() and undo() symmetric.
	swap(ds->name, value);
	emit diveListNotifier.diveSiteChanged(ds, LocationInformationModel::NAME); // Inform frontend of changed dive site.
}
|
|
|
|
|
|
|
|
void EditDiveSiteName::undo()
{
	// Undo and redo do the same, since redo() swaps the stored and current value.
	redo();
}
|
|
|
|
|
2019-03-13 19:10:22 +00:00
|
|
|
EditDiveSiteDescription::EditDiveSiteDescription(dive_site *dsIn, const QString &description) : ds(dsIn),
	value(description.toStdString()) // description to set; after redo() it holds the previous one
{
	setText(Command::Base::tr("Edit dive site description"));
}
|
|
|
|
|
|
|
|
// Only an effective change if the new description differs from the current one.
bool EditDiveSiteDescription::workToBeDone()
{
	return value != ds->description;
}
|
|
|
|
|
|
|
|
void EditDiveSiteDescription::redo()
{
	// Exchange stored and current description; the swap makes redo() and undo() symmetric.
	swap(ds->description, value);
	emit diveListNotifier.diveSiteChanged(ds, LocationInformationModel::DESCRIPTION); // Inform frontend of changed dive site.
}
|
|
|
|
|
|
|
|
void EditDiveSiteDescription::undo()
{
	// Undo and redo do the same, since redo() swaps the stored and current value.
	redo();
}
|
|
|
|
|
2019-03-13 23:00:54 +00:00
|
|
|
EditDiveSiteNotes::EditDiveSiteNotes(dive_site *dsIn, const QString &notes) : ds(dsIn),
	value(notes.toStdString()) // notes to set; after redo() it holds the previous notes
{
	setText(Command::Base::tr("Edit dive site notes"));
}
|
|
|
|
|
|
|
|
// Only an effective change if the new notes differ from the current ones.
bool EditDiveSiteNotes::workToBeDone()
{
	return value != ds->notes;
}
|
|
|
|
|
|
|
|
void EditDiveSiteNotes::redo()
{
	// Exchange stored and current notes; the swap makes redo() and undo() symmetric.
	swap(ds->notes, value);
	emit diveListNotifier.diveSiteChanged(ds, LocationInformationModel::NOTES); // Inform frontend of changed dive site.
}
|
|
|
|
|
|
|
|
void EditDiveSiteNotes::undo()
{
	// Undo and redo do the same, since redo() swaps the stored and current value.
	redo();
}
|
|
|
|
|
2019-03-14 07:26:50 +00:00
|
|
|
EditDiveSiteCountry::EditDiveSiteCountry(dive_site *dsIn, const QString &country) : ds(dsIn),
	value(country.toStdString()) // country to set; after redo() it holds the previous country
{
	setText(Command::Base::tr("Edit dive site country"));
}
|
|
|
|
|
|
|
|
bool EditDiveSiteCountry::workToBeDone()
|
|
|
|
{
|
2024-05-04 11:39:04 +00:00
|
|
|
return value == taxonomy_get_country(ds->taxonomy);
|
2019-03-14 07:26:50 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
void EditDiveSiteCountry::redo()
{
	// Remember the current country, apply the stored one, then keep the old
	// country in "value" so that redo() and undo() are symmetric.
	std::string old = taxonomy_get_country(ds->taxonomy);
	taxonomy_set_country(ds->taxonomy, value, taxonomy_origin::GEOMANUAL);
	value = old;
	emit diveListNotifier.diveSiteChanged(ds, LocationInformationModel::TAXONOMY); // Inform frontend of changed dive site.
}
|
|
|
|
|
|
|
|
void EditDiveSiteCountry::undo()
{
	// Undo and redo do the same, since redo() exchanges stored and current value.
	redo();
}
|
|
|
|
|
2019-03-14 22:28:45 +00:00
|
|
|
EditDiveSiteLocation::EditDiveSiteLocation(dive_site *dsIn, const location_t location) : ds(dsIn),
	value(location) // location to set; after redo() it holds the previous location
{
	setText(Command::Base::tr("Edit dive site location"));
}
|
|
|
|
|
|
|
|
bool EditDiveSiteLocation::workToBeDone()
|
|
|
|
{
|
|
|
|
bool ok = has_location(&value);
|
|
|
|
bool old_ok = has_location(&ds->location);
|
|
|
|
if (ok != old_ok)
|
|
|
|
return true;
|
|
|
|
return ok && !same_location(&value, &ds->location);
|
|
|
|
}
|
|
|
|
|
|
|
|
void EditDiveSiteLocation::redo()
{
	// Exchange stored and current location; the swap makes redo() and undo() symmetric.
	std::swap(value, ds->location);
	emit diveListNotifier.diveSiteChanged(ds, LocationInformationModel::LOCATION); // Inform frontend of changed dive site.
}
|
|
|
|
|
|
|
|
void EditDiveSiteLocation::undo()
{
	// Undo and redo do the same, since redo() swaps the stored and current value.
	redo();
}
|
|
|
|
|
2019-03-15 13:32:55 +00:00
|
|
|
EditDiveSiteTaxonomy::EditDiveSiteTaxonomy(dive_site *dsIn, taxonomy_data &taxonomy) : ds(dsIn),
	value(taxonomy) // copy of the taxonomy to set; after redo() it holds the previous taxonomy
{
	setText(Command::Base::tr("Edit dive site taxonomy"));
}
|
|
|
|
|
|
|
|
// Out-of-line destructor; no explicit cleanup needed here — presumably kept
// out-of-line so the header does not need the full taxonomy_data definition
// (TODO: confirm against the header).
EditDiveSiteTaxonomy::~EditDiveSiteTaxonomy()
{
}
|
|
|
|
|
|
|
|
bool EditDiveSiteTaxonomy::workToBeDone()
{
	// TODO: Apparently we have no way of comparing taxonomies?
	// Conservatively assume the edit changes something.
	return true;
}
|
|
|
|
|
|
|
|
void EditDiveSiteTaxonomy::redo()
{
	// Exchange stored and current taxonomy; the swap makes redo() and undo() symmetric.
	std::swap(value, ds->taxonomy);
	emit diveListNotifier.diveSiteChanged(ds, LocationInformationModel::TAXONOMY); // Inform frontend of changed dive site.
}
|
|
|
|
|
|
|
|
void EditDiveSiteTaxonomy::undo()
{
	// Undo and redo do the same, since redo() swaps the stored and current value.
	redo();
}
|
|
|
|
|
2019-03-15 16:41:31 +00:00
|
|
|
// Create an undo command that merges the given sites into ds. Only sites
// other than the merge target itself are scheduled for removal.
MergeDiveSites::MergeDiveSites(dive_site *dsIn, const QVector<dive_site *> &sites) : ds(dsIn)
{
	setText(Command::Base::tr("merge dive sites"));
	sitesToRemove.reserve(sites.size());
	for (dive_site *candidate: sites) {
		if (candidate == ds)
			continue; // never remove the site we merge into
		sitesToRemove.push_back(candidate);
	}
}
|
|
|
|
|
|
|
|
// Only an effective change if there is at least one site besides the merge target.
bool MergeDiveSites::workToBeDone()
{
	return !sitesToRemove.empty();
}
|
|
|
|
|
|
|
|
void MergeDiveSites::redo()
{
	// First, remove all dive sites
	sitesToAdd = removeDiveSites(sitesToRemove);

	// Remember which dives changed so that we can send a single dives-edited signal
	QVector<dive *> divesChanged;

	// The dives of the above dive sites were reset to no dive sites.
	// Add them to the merged-into dive site. Thankfully, we remember
	// the dives in the sitesToAdd vector.
	for (const std::unique_ptr<dive_site> &site: sitesToAdd) {
		for (dive *d: site->dives) {
			add_dive_to_dive_site(d, ds);
			divesChanged.push_back(d);
		}
	}
	emit diveListNotifier.divesChanged(divesChanged, DiveField::DIVESITE);
}
|
|
|
|
|
|
|
|
void MergeDiveSites::undo()
{
	// Remember which dives changed so that we can send a single dives-edited signal
	QVector<dive *> divesChanged;

	// Before readding the dive sites, unregister the corresponding dives so that they can be
	// readded to their old dive sites.
	for (const std::unique_ptr<dive_site> &site: sitesToAdd) {
		for (dive *d: site->dives) {
			unregister_dive_from_dive_site(d);
			divesChanged.push_back(d);
		}
	}

	// Readd the previously removed sites; their dives are reattached by addDiveSites().
	sitesToRemove = addDiveSites(sitesToAdd);

	emit diveListNotifier.divesChanged(divesChanged, DiveField::DIVESITE);
}
|
|
|
|
|
2019-11-16 20:35:26 +00:00
|
|
|
// Sort the GPS fixes into two groups: for dives that already have a dive site,
// the site's location will be edited; for dives without a site, a new site is
// created here and scheduled for addition on redo().
ApplyGPSFixes::ApplyGPSFixes(const std::vector<DiveAndLocation> &fixes)
{
	setText(Command::Base::tr("apply GPS fixes"));

	for (const DiveAndLocation &dl: fixes) {
		struct dive_site *ds = dl.d->dive_site;
		if (ds) {
			// Arbitrary choice: if we find multiple fixes for the same dive site, we use the first one.
			if (std::find_if(siteLocations.begin(), siteLocations.end(),
					 [ds] (const SiteAndLocation &sl) { return sl.ds == ds; }) == siteLocations.end()) {
				siteLocations.push_back({ ds, dl.location });
			}
		} else {
			ds = create_dive_site(dl.name.toStdString(), divelog.sites);
			ds->location = dl.location;
			add_dive_to_dive_site(dl.d, ds);
			dl.d->dive_site = nullptr; // This will be set on redo()
			sitesToAdd.emplace_back(ds); // take ownership until the site is added to the core
		}
	}
}
|
|
|
|
|
|
|
|
// Only an effective change if a site is to be added or an existing site edited.
bool ApplyGPSFixes::workToBeDone()
{
	return !sitesToAdd.empty() || !siteLocations.empty();
}
|
|
|
|
|
|
|
|
// Exchange the stored locations with the sites' current ones. Since this
// swaps, calling it twice restores the original state; redo() and undo()
// both use it.
void ApplyGPSFixes::editDiveSites()
{
	for (SiteAndLocation &sl: siteLocations) {
		std::swap(sl.location, sl.ds->location);
		emit diveListNotifier.diveSiteChanged(sl.ds, LocationInformationModel::LOCATION); // Inform frontend of changed dive site.
	}
}
|
|
|
|
|
|
|
|
void ApplyGPSFixes::redo()
{
	sitesToRemove = addDiveSites(sitesToAdd);	// add the newly created sites to the core
	editDiveSites();				// apply the stored locations to existing sites
}
|
|
|
|
|
|
|
|
void ApplyGPSFixes::undo()
{
	sitesToAdd = removeDiveSites(sitesToRemove);	// take the created sites back out of the core
	editDiveSites();				// swap the locations back to their previous values
}
|
|
|
|
|
2019-03-12 21:35:43 +00:00
|
|
|
} // namespace Command
|