// SPDX-License-Identifier: GPL-2.0
#include "desktop-widgets/divelogimportdialog.h"
#include "desktop-widgets/mainwindow.h"
#include "commands/command.h"
#include "core/color.h"
#include "ui_divelogimportdialog.h"
#include <QShortcut>
#include <QDrag>
#include <QMimeData>
#include <QRegularExpression>
#include <QUndoStack>
#include <QPainter>
#include <QFile>
#include "core/filterpreset.h"
#include "core/qthelper.h"
#include "core/divesite.h"
#include "core/divelog.h"
#include "core/device.h"
#include "core/trip.h"
#include "core/import-csv.h"
#include "core/xmlparams.h"

static QString subsurface_mimedata = "subsurface/csvcolumns";
static QString subsurface_index = "subsurface/csvindex";

#define SILENCE_WARNING 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ""

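// CSVAppConfig describes one known CSV dive log layout: a display name, the
// zero-based index of each supported column (-1 when the column is absent)
// and the field separator. SILENCE_WARNING above fills every field of the
// plain "Manual import" entry with zeroes and an empty separator, presumably
// just to keep missing-initializer warnings quiet.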
struct CSVAppConfig {
	QString name;
	int time;
	int depth;
	int temperature;
	int po2;
	int sensor1;
	int sensor2;
	int sensor3;
	int cns;
	int ndl;
	int tts;
	int stopdepth;
	int pressure;
	int setpoint;
	QString separator;
};

static const CSVAppConfig CSVApps[] = {
	// time, depth, temperature, po2, sensor1, sensor2, sensor3, cns, ndl, tts, stopdepth, pressure, setpoint
	// indices are 0 based, -1 means the column doesn't exist
	{ "Manual import", SILENCE_WARNING },
	{ "APD Log Viewer - DC1", 0, 1, 15, 6, 3, 4, 5, 17, -1, -1, 18, -1, 2, "Tab" },
	{ "APD Log Viewer - DC2", 0, 1, 15, 6, 7, 8, 9, 17, -1, -1, 18, -1, 2, "Tab" },
	{ "DL7", 1, 2, -1, -1, -1, -1, -1, -1, -1, 8, -1, 10, -1, "|" },
	{ "XP5", 0, 1, 9, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, "Tab" },
	{ "SensusCSV", 9, 10, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, "," },
	{ "Seabear CSV", 0, 1, 5, -1, -1, -1, -1, -1, 2, 3, 4, 6, -1, ";" },
	{ "SubsurfaceCSV", -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, "Tab" },
	{ "AV1", 0, 1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, " " },
	{ "Poseidon MkVI", 0, 2, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, "," },
};

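// Indices into CSVApps[]: the enum order must match the table above, since the
// "known imports" combobox is filled from CSVApps[] and its current index is
// used to look up the matching row.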
enum Known {
	MANUAL,
	APD,
	APD2,
	DL7,
	XP5,
	SENSUS,
	SEABEAR,
	SUBSURFACE,
	AV1,
	POSEIDON
};

ColumnNameProvider::ColumnNameProvider(QObject *parent) : QAbstractListModel(parent)
{
	columnNames << tr("Dive #") << tr("Date") << tr("Time") << tr("Duration") << tr("Mode") << tr("Location") << tr("GPS") << tr("Weight") << tr("Cyl. size") << tr("Start pressure") <<
		tr("End pressure") << tr("Max. depth") << tr("Avg. depth") << tr("Divemaster") << tr("Buddy") << tr("Suit") << tr("Notes") << tr("Tags") << tr("Air temp.") << tr("Water temp.") <<
		tr("O₂") << tr("He") << tr("Sample time") << tr("Sample depth") << tr("Sample temperature") << tr("Sample pO₂") << tr("Sample CNS") << tr("Sample NDL") <<
		tr("Sample TTS") << tr("Sample stopdepth") << tr("Sample pressure") <<
		tr("Sample sensor1 pO₂") << tr("Sample sensor2 pO₂") << tr("Sample sensor3 pO₂") <<
		tr("Sample setpoint") << tr("Visibility") << tr("Rating") << tr("Sample heartrate");
}

bool ColumnNameProvider::insertRows(int row, int, const QModelIndex&)
{
	beginInsertRows(QModelIndex(), row, row);
	columnNames.append(QString());
	endInsertRows();
	return true;
}

bool ColumnNameProvider::removeRows(int row, int, const QModelIndex&)
{
	beginRemoveRows(QModelIndex(), row, row);
	columnNames.removeAt(row);
	endRemoveRows();
	return true;
}

bool ColumnNameProvider::setData(const QModelIndex &index, const QVariant &value, int role)
{
	if (role == Qt::EditRole) {
		columnNames[index.row()] = value.toString();
	}
	dataChanged(index, index);
	return true;
}

QVariant ColumnNameProvider::data(const QModelIndex &index, int role) const
{
	if (!index.isValid())
		return QVariant();
	if (role != Qt::DisplayRole)
		return QVariant();

	return QVariant(columnNames[index.row()]);
}

int ColumnNameProvider::rowCount(const QModelIndex&) const
{
	return columnNames.count();
}

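// Find the known column name that matches a CSV header field: both sides are
// lower-cased and stripped of quotes, parenthesized units, spaces, dots and
// newlines before comparing. Returns the index into columnNames, or -1.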
int ColumnNameProvider::mymatch(QString value) const
{
	QString searchString = value.toLower();
	QRegularExpression re(" \\(.*\\)");

	searchString.replace("\"", "").replace(re, "").replace(" ", "").replace(".", "").replace("\n","");
	for (int i = 0; i < columnNames.count(); i++) {
		QString name = columnNames.at(i).toLower();
		name.replace("\"", "").replace(" ", "").replace(".", "").replace("\n","");
		if (searchString == name.toLower())
			return i;
	}
	return -1;
}

ColumnNameView::ColumnNameView(QWidget*)
{
	setAcceptDrops(true);
	setDragEnabled(true);
}

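// Start a drag carrying the clicked column name as custom MIME data. The name
// is removed from the available list while the drag is in flight; if the drop
// ends up being ignored, it is re-appended at the end of the list.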
void ColumnNameView::mousePressEvent(QMouseEvent *press)
{
	QModelIndex atClick = indexAt(press->pos());
	if (!atClick.isValid())
		return;

	QRect indexRect = visualRect(atClick);
	QPixmap pix(indexRect.width(), indexRect.height());
	pix.fill(QColor(0,0,0,0));
	render(&pix, QPoint(0, 0),QRegion(indexRect));

	QDrag *drag = new QDrag(this);
	QMimeData *mimeData = new QMimeData;
	mimeData->setData(subsurface_mimedata, atClick.data().toByteArray());
	model()->removeRow(atClick.row());
	drag->setPixmap(pix);
	drag->setMimeData(mimeData);
	if (drag->exec() == Qt::IgnoreAction){
		model()->insertRow(model()->rowCount());
		QModelIndex idx = model()->index(model()->rowCount()-1, 0);
		model()->setData(idx, mimeData->data(subsurface_mimedata));
	}
}

void ColumnNameView::dragLeaveEvent(QDragLeaveEvent*)
{
}

void ColumnNameView::dragEnterEvent(QDragEnterEvent *event)
{
	event->acceptProposedAction();
}

void ColumnNameView::dragMoveEvent(QDragMoveEvent *event)
{
	QModelIndex curr = indexAt(event->pos());
	if (!curr.isValid() || curr.row() != 0)
		return;
	event->acceptProposedAction();
}

void ColumnNameView::dropEvent(QDropEvent *event)
{
	const QMimeData *mimeData = event->mimeData();
	if (mimeData->data(subsurface_mimedata).count()) {
		if (event->source() != this) {
			event->acceptProposedAction();
			QVariant value = QString(mimeData->data(subsurface_mimedata));
			model()->insertRow(model()->rowCount());
			model()->setData(model()->index(model()->rowCount()-1, 0), value);
		}
	}
}

ColumnDropCSVView::ColumnDropCSVView(QWidget*)
{
	setAcceptDrops(true);
}

void ColumnDropCSVView::dragLeaveEvent(QDragLeaveEvent*)
{
}

void ColumnDropCSVView::dragEnterEvent(QDragEnterEvent *event)
{
	event->acceptProposedAction();
}

void ColumnDropCSVView::dragMoveEvent(QDragMoveEvent *event)
{
	QModelIndex curr = indexAt(event->pos());
	if (!curr.isValid() || curr.row() != 0)
		return;
	event->acceptProposedAction();
}

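// A drop onto the header row either swaps two already-assigned headers (when
// the drag started inside this table) or assigns the name dragged in from the
// available-columns list to an empty header cell.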
void ColumnDropCSVView::dropEvent(QDropEvent *event)
{
	QModelIndex curr = indexAt(event->pos());
	if (!curr.isValid() || curr.row() != 0)
		return;

	const QMimeData *mimeData = event->mimeData();
	if (!mimeData->data(subsurface_mimedata).count())
		return;

	if (event->source() == this) {
		int value_old = mimeData->data(subsurface_index).toInt();
		int value_new = curr.column();
		ColumnNameResult *m = qobject_cast<ColumnNameResult*>(model());
		m->swapValues(value_old, value_new);
		event->acceptProposedAction();
		return;
	}

	if (curr.data().toString().isEmpty()) {
		QVariant value = QString(mimeData->data(subsurface_mimedata));
		model()->setData(curr, value);
		event->acceptProposedAction();
	}
}

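// Table model backing the import preview: row 0 holds the (editable) column
// headers assigned by the user, the following rows hold the sample values read
// from the file. Row 0 is highlighted with a translucent background.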
ColumnNameResult::ColumnNameResult(QObject *parent) : QAbstractTableModel(parent)
{
}

void ColumnNameResult::swapValues(int firstIndex, int secondIndex)
{
	QString one = columnNames[firstIndex];
	QString two = columnNames[secondIndex];
	setData(index(0, firstIndex), QVariant(two), Qt::EditRole);
	setData(index(0, secondIndex), QVariant(one), Qt::EditRole);
}

bool ColumnNameResult::setData(const QModelIndex &index, const QVariant &value, int role)
{
	if (!index.isValid() || index.row() != 0) {
		return false;
	}
	if (role == Qt::EditRole) {
		columnNames[index.column()] = value.toString();
		dataChanged(index, index);
	}
	return true;
}

QVariant ColumnNameResult::data(const QModelIndex &index, int role) const
{
	if (!index.isValid())
		return QVariant();
	if (role == Qt::BackgroundRole)
		if (index.row() == 0)
			return QVariant(AIR_BLUE_TRANS);

	if (role != Qt::DisplayRole)
		return QVariant();

	if (index.row() == 0) {
		return columnNames[index.column()];
	}
	// make sure the element exists before returning it - this might get called before the
	// model is correctly set up again (e.g., when changing separators)
	if (columnValues.count() > index.row() - 1 && columnValues[index.row() - 1].count() > index.column())
		return QVariant(columnValues[index.row() - 1][index.column()]);
	else
		return QVariant();
}

int ColumnNameResult::rowCount(const QModelIndex&) const
{
	return columnValues.count() + 1; // +1 == the header.
}

int ColumnNameResult::columnCount(const QModelIndex&) const
{
	return columnNames.count();
}

QStringList ColumnNameResult::result() const
{
	return columnNames;
}

void ColumnNameResult::setColumnValues(QList<QStringList> columns)
{
	if (rowCount() != 1) {
		beginRemoveRows(QModelIndex(), 1, rowCount()-1);
		columnValues.clear();
		endRemoveRows();
	}
	if (columnCount() != 0) {
		beginRemoveColumns(QModelIndex(), 0, columnCount()-1);
		columnNames.clear();
		endRemoveColumns();
	}

	QStringList first = columns.first();
	beginInsertColumns(QModelIndex(), 0, first.count()-1);
	for(int i = 0; i < first.count(); i++)
		columnNames.append(QString());

	endInsertColumns();

	beginInsertRows(QModelIndex(), 0, columns.count()-1);
	columnValues = columns;
	endInsertRows();
}

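// Dragging from the header row of the result table: the MIME data carries both
// the column name and its column index, so a drop back onto the table can swap
// two columns. If the drop is accepted by a different widget, the source cell
// is cleared.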
void ColumnDropCSVView::mousePressEvent(QMouseEvent *press)
{
	QModelIndex atClick = indexAt(press->pos());
	if (!atClick.isValid() || atClick.row())
		return;

	QRect indexRect = visualRect(atClick);
	QPixmap pix(indexRect.width(), indexRect.height());
	pix.fill(QColor(0,0,0,0));
	render(&pix, QPoint(0, 0),QRegion(indexRect));

	QDrag *drag = new QDrag(this);
	QMimeData *mimeData = new QMimeData;
	mimeData->setData(subsurface_mimedata, atClick.data().toByteArray());
	mimeData->setData(subsurface_index, QString::number(atClick.column()).toUtf8());
	drag->setPixmap(pix);
	drag->setMimeData(mimeData);
	if (drag->exec() != Qt::IgnoreAction){
		QObject *target = drag->target();
		if (target->objectName() == "qt_scrollarea_viewport")
			target = target->parent();
		if (target != drag->source())
			model()->setData(atClick, QString());
	}
}

DiveLogImportDialog::DiveLogImportDialog(QStringList fn, QWidget *parent) : QDialog(parent),
	selector(true),
	ui(new Ui::DiveLogImportDialog)
{
	ui->setupUi(this);
	fileNames = fn;
	column = 0;
	delta = "0";
	hw = "";
	txtLog = false;

	/* Add indices of XSLTs requiring special handling to the list */
	specialCSV << SENSUS;
	specialCSV << SUBSURFACE;
	specialCSV << DL7;
	specialCSV << AV1;
	specialCSV << POSEIDON;

	for (const CSVAppConfig &conf: CSVApps)
		ui->knownImports->addItem(conf.name);

	ui->CSVSeparator->addItems(QStringList() << tr("Tab") << "," << ";" << "|");

	loadFileContents(-1, INITIAL);

	/* manually import CSV file */
	QShortcut *close = new QShortcut(QKeySequence(Qt::CTRL | Qt::Key_W), this);
	connect(close, SIGNAL(activated()), this, SLOT(close()));
	QShortcut *quit = new QShortcut(QKeySequence(Qt::CTRL | Qt::Key_Q), this);
	connect(quit, SIGNAL(activated()), parent, SLOT(close()));

	connect(ui->CSVSeparator, SIGNAL(currentIndexChanged(int)), this, SLOT(loadFileContentsSeperatorSelected(int)));
	connect(ui->knownImports, SIGNAL(currentIndexChanged(int)), this, SLOT(loadFileContentsKnownTypesSelected(int)));
}

DiveLogImportDialog::~DiveLogImportDialog()
{
	delete ui;
}

void DiveLogImportDialog::loadFileContentsSeperatorSelected(int value)
{
	loadFileContents(value, SEPARATOR);
}

void DiveLogImportDialog::loadFileContentsKnownTypesSelected(int value)
{
	loadFileContents(value, KNOWNTYPES);
}

// Turn a "*.csv" or "*.txt" filename into a pair of both, "*.csv" and "*.txt".
// If the input wasn't either "*.csv" or "*.txt", then both returned strings
// are empty
static QPair<QString, QString> poseidonFileNames(const QString &fn)
{
	if (fn.endsWith(".csv", Qt::CaseInsensitive)) {
		QString txt = fn.left(fn.size() - 3) + "txt";
		return { fn, txt };
	} else if (fn.endsWith(".txt", Qt::CaseInsensitive)) {
		QString csv = fn.left(fn.size() - 3) + "csv";
		return { csv, fn };
	} else {
		return { QString(), QString() };
	}
}

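// Read the beginning of the selected file, detect known log formats from their
// signature in the first line, guess the field separator on the initial pass,
// pre-populate the header row with matching column names and fill the preview
// table with up to ten rows of sample data.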
void DiveLogImportDialog::loadFileContents(int value, whatChanged triggeredBy)
{
	QList<QStringList> fileColumns;
	QStringList currColumns;
	QStringList headers;
	bool matchedSome = false;
	bool seabear = false;
	bool xp5 = false;
	bool apd = false;
	bool dl7 = false;
	bool poseidon = false;

	// reset everything
	ColumnNameProvider *provider = new ColumnNameProvider(this);
	ui->avaliableColumns->setModel(provider);
	ui->avaliableColumns->setItemDelegate(new TagDragDelegate(ui->avaliableColumns));
	resultModel = new ColumnNameResult(this);
	ui->tableView->setModel(resultModel);

	// Poseidon MkVI is special: it is made up of a .csv *and* a .txt file.
	// If the user specified one, we'll try to check for the other.
	QString fileName = fileNames.first();
	QPair<QString, QString> pair = poseidonFileNames(fileName);
	if (!pair.second.isEmpty()) {
		QFile f_txt(pair.second);
		f_txt.open(QFile::ReadOnly);
		QString firstLine = f_txt.readLine();
		if (firstLine.startsWith("MkVI_Config ")) {
			poseidon = true;
			fileName = pair.first; // Read data from CSV
			headers.append("Time");
			headers.append("Depth");
			blockSignals(true);
			ui->knownImports->setCurrentText("Poseidon MkVI");
			blockSignals(false);
		}
	}

	QFile f(fileName);
	f.open(QFile::ReadOnly);
	QString firstLine = f.readLine();
	if (firstLine.contains("SEABEAR")) {
		seabear = true;

		/*
		 * Parse header - currently only interested in sample
		 * interval and hardware version. If we have the old format,
		 * the interval value is missing from the header.
		 */

		while ((firstLine = f.readLine().trimmed()).length() > 0 && !f.atEnd()) {
			if (firstLine.contains("//Hardware Version: ")) {
				hw = firstLine.replace(QString::fromLatin1("//Hardware Version: "), QString::fromLatin1("\"Seabear ")).trimmed().append("\"");
				break;
			}
		}

		/*
		 * Note that we scan over the "Log interval" on purpose
		 */

		while ((firstLine = f.readLine().trimmed()).length() > 0 && !f.atEnd()) {
			if (firstLine.contains("//Log interval: "))
				delta = firstLine.remove(QString::fromLatin1("//Log interval: ")).trimmed().remove(QString::fromLatin1(" s"));
		}

		/*
		 * Parse CSV fields
		 */

		firstLine = f.readLine().trimmed();

		currColumns = firstLine.split(';');
		Q_FOREACH (QString columnText, currColumns) {
			if (columnText == "Time") {
				headers.append("Sample time");
			} else if (columnText == "Depth") {
				headers.append("Sample depth");
			} else if (columnText == "Temperature") {
				headers.append("Sample temperature");
			} else if (columnText == "NDT") {
				headers.append("Sample NDL");
			} else if (columnText == "TTS") {
				headers.append("Sample TTS");
			} else if (columnText == "pO2_1") {
				headers.append("Sample sensor1 pO₂");
			} else if (columnText == "pO2_2") {
				headers.append("Sample sensor2 pO₂");
			} else if (columnText == "pO2_3") {
				headers.append("Sample sensor3 pO₂");
			} else if (columnText == "Ceiling") {
				headers.append("Sample ceiling");
			} else if (columnText == "Tank pressure") {
				headers.append("Sample pressure");
			} else {
				// We do not know about this value
				qDebug() << "Seabear import found an un-handled field: " << columnText;
				headers.append("");
			}
		}

		firstLine = headers.join(";");
		blockSignals(true);
		ui->knownImports->setCurrentText("Seabear CSV");
		blockSignals(false);
	} else if (firstLine.contains("Tauchgangs-Nr.:")) {
		xp5 = true;
		//"Abgelaufene Tauchzeit (Std:Min.)\tTiefe\tStickstoff Balkenanzeige\tSauerstoff Balkenanzeige\tAufstiegsgeschwindigkeit\tRestluftzeit\tRestliche Tauchzeit\tDekompressionszeit (Std:Min)\tDekostopp-Tiefe\tTemperatur\tPO2\tPressluftflasche\tLesen des Druckes\tStatus der Verbindung\tTauchstatus";
		firstLine = "Sample time\tSample depth\t\t\t\t\t\t\t\tSample temperature\t";
		blockSignals(true);
		ui->knownImports->setCurrentText("XP5");
		blockSignals(false);
	} else if (firstLine.contains("FSH")) {
		QString units = "Metric";
		dl7 = true;
		while ((firstLine = f.readLine().trimmed()).length() > 0 && !f.atEnd()) {
			/* DL7 actually defines individual units (e.g. depth, temperature,
			 * pressure, etc.) and there are quite a few other options as well,
			 * but let's use metric unless depth unit is clearly Imperial. */

			if (firstLine.contains("ThFt")) {
				units = "Imperial";
			}
		}
		firstLine = "|Sample time|Sample depth||||||Sample temperature||Sample pressure";
		blockSignals(true);
		ui->knownImports->setCurrentText("DL7");
		ui->CSVUnits->setCurrentText(units);
		blockSignals(false);
	} else if (firstLine.contains("Life Time Dive")) {
		txtLog = true;

		while ((firstLine = f.readLine().trimmed()).length() >= 0 && !f.atEnd()) {
			if (firstLine.contains("Dive Profile")) {
				f.readLine();
				break;
			}
		}
		firstLine = f.readLine().trimmed();
	}

	// Special handling for APD Log Viewer
	if ((triggeredBy == KNOWNTYPES && (value == APD || value == APD2)) || (triggeredBy == INITIAL && fileNames.first().endsWith(".apd", Qt::CaseInsensitive))) {
		QString apdseparator;
		int tabs = firstLine.count('\t');
		int commas = firstLine.count(',');
		if (tabs > commas)
			apdseparator = "\t";
		else
			apdseparator = ",";

		apd = true;

		firstLine = tr("Sample time") + apdseparator + tr("Sample depth") + apdseparator + tr("Sample setpoint") + apdseparator + tr("Sample sensor1 pO₂") + apdseparator + tr("Sample sensor2 pO₂") + apdseparator + tr("Sample sensor3 pO₂") + apdseparator + tr("Sample pO₂") + apdseparator + "" + apdseparator + "" + apdseparator + "" + apdseparator + "" + apdseparator + "" + apdseparator + "" + apdseparator + "" + apdseparator + "" + apdseparator + tr("Sample temperature") + apdseparator + "" + apdseparator + tr("Sample CNS") + apdseparator + tr("Sample stopdepth");
		blockSignals(true);
		ui->CSVSeparator->setCurrentText(apdseparator);
		if (triggeredBy == INITIAL && fileNames.first().contains(".apd", Qt::CaseInsensitive))
			ui->knownImports->setCurrentText("APD Log Viewer - DC1");
		blockSignals(false);
	}

	QString separator = ui->CSVSeparator->currentText() == tr("Tab") ? "\t" : ui->CSVSeparator->currentText();
	currColumns = firstLine.split(separator);
	if (triggeredBy == INITIAL) {
		// guess the separator
		int tabs = firstLine.count('\t');
		int commas = firstLine.count(',');
		int semis = firstLine.count(';');
		int pipes = firstLine.count('|');
		if (tabs > commas && tabs > semis && tabs > pipes)
			separator = "\t";
		else if (commas > tabs && commas > semis && commas > pipes)
			separator = ",";
		else if (pipes > tabs && pipes > commas && pipes > semis)
			separator = "|";
		else if (semis > tabs && semis > commas && semis > pipes)
			separator = ";";
		if (ui->CSVSeparator->currentText() != separator) {
			blockSignals(true);
			ui->CSVSeparator->setCurrentText(separator);
			blockSignals(false);
			currColumns = firstLine.split(separator);
		}
	}
	if (triggeredBy == INITIAL || (triggeredBy == KNOWNTYPES && value == MANUAL) || triggeredBy == SEPARATOR) {
		int count = -1;
		QString line = f.readLine().trimmed();
		QStringList columns;
		if (line.length() > 0)
			columns = line.split(separator);
		// now try and guess the columns
		Q_FOREACH (QString columnText, currColumns) {
			count++;
			/*
			 * We have to skip the conversion of 2 to ₂ for APD Log
			 * viewer as that would mess up the sensor numbering. We
			 * also know that the column headers do not need this
			 * conversion.
			 */
			if (apd == false) {
				columnText.replace("\"", "");
				columnText.replace("number", "#", Qt::CaseInsensitive);
				columnText.replace("2", "₂", Qt::CaseInsensitive);
				columnText.replace("cylinder", "cyl.", Qt::CaseInsensitive);
			}
			int idx = provider->mymatch(columnText.trimmed());
			if (idx >= 0) {
				QString foundHeading = provider->data(provider->index(idx, 0), Qt::DisplayRole).toString();
				provider->removeRow(idx);
				headers.append(foundHeading);
				matchedSome = true;
				if (foundHeading == QString::fromLatin1("Date") && columns.count() >= count) {
					QString date = columns.at(count);
					if (date.contains('-')) {
						ui->DateFormat->setCurrentText("yyyy-mm-dd");
					} else if (date.contains('/')) {
						ui->DateFormat->setCurrentText("mm/dd/yyyy");
					}
				} else if (foundHeading == QString::fromLatin1("Time") && columns.count() >= count) {
					QString time = columns.at(count);
					if (time.contains(':')) {
						ui->DurationFormat->setCurrentText("Minutes:seconds");
					}
				}
			} else {
				headers.append("");
			}
		}
		if (matchedSome) {
			ui->dragInstructions->setText(tr("Some column headers were pre-populated; please drag and drop the headers so they match the column they are in."));
			if (triggeredBy != KNOWNTYPES && !seabear && !xp5 && !apd && !dl7) {
				blockSignals(true);
				ui->knownImports->setCurrentIndex(0); // <- that's "Manual import"
				blockSignals(false);
			}
		}
	}
	if (triggeredBy == KNOWNTYPES && value != MANUAL) {
		// an actual known type
		if (value == SUBSURFACE || value == APD || value == APD2) {
			/*
			 * Subsurface CSV file needs separator detection
			 * as we used to default to comma but switched
			 * to tab.
			 */
			int tabs = firstLine.count('\t');
			int commas = firstLine.count(',');
			if (tabs > commas)
				separator = "Tab";
			else
				separator = ",";
		} else {
			separator = CSVApps[value].separator;
		}

		if (ui->CSVSeparator->currentText() != separator || separator == "Tab") {
			ui->CSVSeparator->blockSignals(true);
			ui->CSVSeparator->setCurrentText(separator);
			ui->CSVSeparator->blockSignals(false);
			if (separator == "Tab")
				separator = "\t";
			currColumns = firstLine.split(separator);
		}
		// now set up time, depth, temperature, po2, cns, ndl, tts, stopdepth, pressure, setpoint
		for (int i = 0; i < currColumns.count(); i++)
			headers.append("");
		if (CSVApps[value].time > -1 && CSVApps[value].time < currColumns.count())
			headers.replace(CSVApps[value].time, tr("Sample time"));
		if (CSVApps[value].depth > -1 && CSVApps[value].depth < currColumns.count())
			headers.replace(CSVApps[value].depth, tr("Sample depth"));
		if (CSVApps[value].temperature > -1 && CSVApps[value].temperature < currColumns.count())
			headers.replace(CSVApps[value].temperature, tr("Sample temperature"));
		if (CSVApps[value].po2 > -1 && CSVApps[value].po2 < currColumns.count())
			headers.replace(CSVApps[value].po2, tr("Sample pO₂"));
		if (CSVApps[value].sensor1 > -1 && CSVApps[value].sensor1 < currColumns.count())
			headers.replace(CSVApps[value].sensor1, tr("Sample sensor1 pO₂"));
		if (CSVApps[value].sensor2 > -1 && CSVApps[value].sensor2 < currColumns.count())
			headers.replace(CSVApps[value].sensor2, tr("Sample sensor2 pO₂"));
		if (CSVApps[value].sensor3 > -1 && CSVApps[value].sensor3 < currColumns.count())
			headers.replace(CSVApps[value].sensor3, tr("Sample sensor3 pO₂"));
		if (CSVApps[value].cns > -1 && CSVApps[value].cns < currColumns.count())
			headers.replace(CSVApps[value].cns, tr("Sample CNS"));
		if (CSVApps[value].ndl > -1 && CSVApps[value].ndl < currColumns.count())
			headers.replace(CSVApps[value].ndl, tr("Sample NDL"));
		if (CSVApps[value].tts > -1 && CSVApps[value].tts < currColumns.count())
			headers.replace(CSVApps[value].tts, tr("Sample TTS"));
		if (CSVApps[value].stopdepth > -1 && CSVApps[value].stopdepth < currColumns.count())
			headers.replace(CSVApps[value].stopdepth, tr("Sample stopdepth"));
		if (CSVApps[value].pressure > -1 && CSVApps[value].pressure < currColumns.count())
			headers.replace(CSVApps[value].pressure, tr("Sample pressure"));
		if (CSVApps[value].setpoint > -1 && CSVApps[value].setpoint < currColumns.count())
			headers.replace(CSVApps[value].setpoint, tr("Sample setpoint"));

		/* Show the Subsurface CSV column headers */
		if (value == SUBSURFACE && currColumns.count() >= 23) {
			headers.replace(0, tr("Dive #"));
			headers.replace(1, tr("Date"));
			headers.replace(2, tr("Time"));
			headers.replace(3, tr("Duration"));
			headers.replace(4, tr("Max. depth"));
			headers.replace(5, tr("Avg. depth"));
			headers.replace(6, tr("Mode"));
			headers.replace(7, tr("Air temp."));
			headers.replace(8, tr("Water temp."));
			headers.replace(9, tr("Cyl. size"));
			headers.replace(10, tr("Start pressure"));
			headers.replace(11, tr("End pressure"));
			headers.replace(12, tr("O₂"));
			headers.replace(13, tr("He"));
			headers.replace(14, tr("Location"));
			headers.replace(15, tr("GPS"));
			headers.replace(16, tr("Divemaster"));
			headers.replace(17, tr("Buddy"));
			headers.replace(18, tr("Suit"));
			headers.replace(19, tr("Rating"));
			headers.replace(20, tr("Visibility"));
			headers.replace(21, tr("Notes"));
			headers.replace(22, tr("Weight"));
			headers.replace(23, tr("Tags"));

			blockSignals(true);
			ui->CSVSeparator->setCurrentText(separator);
			ui->DateFormat->setCurrentText("yyyy-mm-dd");
			ui->DurationFormat->setCurrentText("Minutes:seconds");
			blockSignals(false);
		}
	}

	f.reset();
	int rows = 0;

	/* Skipping the header of Seabear and XP5 CSV files. */
	if (seabear || xp5) {
		/*
		 * First set of data on Seabear CSV file is metadata
		 * that is separated by an empty line (windows line
		 * termination might be encountered).
		 */
		while (strlen(f.readLine()) > 3 && !f.atEnd());
		/*
		 * Next we have description of the fields and two dummy
		 * lines. Separated again with an empty line from the
		 * actual data.
		 */
		while (strlen(f.readLine()) > 3 && !f.atEnd());
	} else if (dl7) {
		while ((firstLine = f.readLine().trimmed()).length() > 0 && !f.atEnd()) {
			if (firstLine.contains("ZDP")) {
				firstLine = f.readLine().trimmed();
				break;
			}
		}
	} else if (txtLog) {
		while ((firstLine = f.readLine().trimmed()).length() >= 0 && !f.atEnd()) {
			if (firstLine.contains("Dive Profile")) {
				firstLine = f.readLine().trimmed();
				break;
			}
		}
	}

	while (rows < 10 && !f.atEnd()) {
		QString currLine = f.readLine().trimmed();
		currColumns = currLine.split(separator);
		// For Poseidon, read only columns where the second value is 8 (=depth)
		if (poseidon) {
			if (currColumns.size() < 3 || currColumns[1] != "8")
				continue;
			currColumns.removeAt(1);
		}
		fileColumns.append(currColumns);
		rows += 1;
	}

	if (rows > 0)
		resultModel->setColumnValues(fileColumns);
	for (int i = 0; i < headers.count(); i++)
		if (!headers.at(i).isEmpty())
			resultModel->setData(resultModel->index(0, i), headers.at(i), Qt::EditRole);
}

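// Translate the user's column assignment (r holds the header name chosen for
// each column, or an empty string) into the field-index parameters that the
// CSV import XSLT expects.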
void DiveLogImportDialog::setup_csv_params(QStringList r, xml_params &params)
{
	xml_params_add_int(&params, "dateField", r.indexOf(tr("Date")));
	xml_params_add_int(&params, "datefmt", ui->DateFormat->currentIndex());
	xml_params_add_int(&params, "starttimeField", r.indexOf(tr("Time")));
	xml_params_add_int(&params, "numberField", r.indexOf(tr("Dive #")));
	xml_params_add_int(&params, "timeField", r.indexOf(tr("Sample time")));
	xml_params_add_int(&params, "depthField", r.indexOf(tr("Sample depth")));
	xml_params_add_int(&params, "tempField", r.indexOf(tr("Sample temperature")));
	xml_params_add_int(&params, "po2Field", r.indexOf(tr("Sample pO₂")));
	xml_params_add_int(&params, "o2sensor1Field", r.indexOf(tr("Sample sensor1 pO₂")));
	xml_params_add_int(&params, "o2sensor2Field", r.indexOf(tr("Sample sensor2 pO₂")));
	xml_params_add_int(&params, "o2sensor3Field", r.indexOf(tr("Sample sensor3 pO₂")));
	xml_params_add_int(&params, "cnsField", r.indexOf(tr("Sample CNS")));
	xml_params_add_int(&params, "ndlField", r.indexOf(tr("Sample NDL")));
	xml_params_add_int(&params, "ttsField", r.indexOf(tr("Sample TTS")));
	xml_params_add_int(&params, "stopdepthField", r.indexOf(tr("Sample stopdepth")));
	xml_params_add_int(&params, "pressureField", r.indexOf(tr("Sample pressure")));
	xml_params_add_int(&params, "heartBeat", r.indexOf(tr("Sample heartrate")));
	xml_params_add_int(&params, "setpointField", r.indexOf(tr("Sample setpoint")));
	xml_params_add_int(&params, "visibilityField", r.indexOf(tr("Visibility")));
	xml_params_add_int(&params, "ratingField", r.indexOf(tr("Rating")));
	xml_params_add_int(&params, "separatorIndex", ui->CSVSeparator->currentIndex());
	xml_params_add_int(&params, "units", ui->CSVUnits->currentIndex());
	if (hw.length())
		xml_params_add(&params, "hw", qPrintable(hw));
	else if (ui->knownImports->currentText().length() > 0)
		xml_params_add(&params, "hw", qPrintable(ui->knownImports->currentText().prepend("\"").append("\"")));
}

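// Scan the header of a txt-style dive log (the "Life Time Dive" format
// detected in loadFileContents) for the dive date and time and pass them on as
// parser parameters; the date is normalized to yyyymmdd and the time to hhmm.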
void DiveLogImportDialog::parseTxtHeader(QString fileName, xml_params &params)
{
	QFile f(fileName);
	QString date;
	QString time;
	QString line;

	f.open(QFile::ReadOnly);
	while ((line = f.readLine().trimmed()).length() >= 0 && !f.atEnd()) {
		if (line.contains("Dive Profile")) {
			f.readLine();
			break;
		} else if (line.contains("Dive Date: ")) {
			date = line.replace(QString::fromLatin1("Dive Date: "), QString::fromLatin1(""));

			if (date.contains('-')) {
				QStringList fmtDate = date.split('-');
				date = fmtDate[0] + fmtDate[1] + fmtDate[2];
			} else if (date.contains('/')) {
				QStringList fmtDate = date.split('/');
				date = fmtDate[2] + fmtDate[0] + fmtDate[1];
			} else {
				QStringList fmtDate = date.split('.');
				date = fmtDate[2] + fmtDate[1] + fmtDate[0];
			}
		} else if (line.contains("Elapsed Dive Time: ")) {
			// Skipping dive duration for now
		} else if (line.contains("Dive Time: ")) {
			time = line.replace(QString::fromLatin1("Dive Time: "), QString::fromLatin1(""));

			if (time.contains(':')) {
				QStringList fmtTime = time.split(':');
				time = fmtTime[0] + fmtTime[1];
			}
		}
	}
	f.close();

	xml_params_add(&params, "date", qPrintable(date));
	xml_params_add(&params, "time", qPrintable(time));
}

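// Run the actual import: known formats go through their dedicated parsers
// (Seabear, Poseidon MkVI) or through the CSV parser with the preconfigured
// column mapping; manual imports without a "Sample time" column are parsed
// with parse_manual_file(), everything else with parse_csv_file(). The parsed
// dives are then handed to the undo machinery as a single import command.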
void DiveLogImportDialog::on_buttonBox_accepted()
{
	struct divelog log;
	QStringList r = resultModel->result();
	if (ui->knownImports->currentText() != "Manual import") {
		for (int i = 0; i < fileNames.size(); ++i) {
			if (ui->knownImports->currentText() == "Seabear CSV") {
				parse_seabear_log(qPrintable(fileNames[i]), &log);
			} else if (ui->knownImports->currentText() == "Poseidon MkVI") {
				QPair<QString, QString> pair = poseidonFileNames(fileNames[i]);
				parse_txt_file(qPrintable(pair.second), qPrintable(pair.first), &log);
			} else {
				xml_params params;

				if (txtLog) {
					parseTxtHeader(fileNames[i], params);
				} else {
					QRegularExpression apdRe("^.*[/\\][0-9a-zA-Z]*_([0-9]{6})_([0-9]{6})\\.apd\\z");
					QRegularExpressionMatch match = apdRe.match(fileNames[i]);
					if (match.hasMatch()) {
						xml_params_add(&params, "date", qPrintable("20" + match.captured(1)));
						xml_params_add(&params, "time", qPrintable("1" + match.captured(2)));
					}
				}
				setup_csv_params(r, params);
				parse_csv_file(qPrintable(fileNames[i]), &params,
					specialCSV.contains(ui->knownImports->currentIndex()) ? qPrintable(CSVApps[ui->knownImports->currentIndex()].name) : "csv",
					&log);
			}
		}
	} else {
		for (int i = 0; i < fileNames.size(); ++i) {
			if (r.indexOf(tr("Sample time")) < 0) {
				xml_params params;
				xml_params_add_int(&params, "numberField", r.indexOf(tr("Dive #")));
				xml_params_add_int(&params, "dateField", r.indexOf(tr("Date")));
				xml_params_add_int(&params, "timeField", r.indexOf(tr("Time")));
				xml_params_add_int(&params, "durationField", r.indexOf(tr("Duration")));
				xml_params_add_int(&params, "modeField", r.indexOf(tr("Mode")));
				xml_params_add_int(&params, "locationField", r.indexOf(tr("Location")));
				xml_params_add_int(&params, "gpsField", r.indexOf(tr("GPS")));
				xml_params_add_int(&params, "maxDepthField", r.indexOf(tr("Max. depth")));
				xml_params_add_int(&params, "meanDepthField", r.indexOf(tr("Avg. depth")));
				xml_params_add_int(&params, "divemasterField", r.indexOf(tr("Divemaster")));
				xml_params_add_int(&params, "buddyField", r.indexOf(tr("Buddy")));
				xml_params_add_int(&params, "suitField", r.indexOf(tr("Suit")));
				xml_params_add_int(&params, "notesField", r.indexOf(tr("Notes")));
				xml_params_add_int(&params, "weightField", r.indexOf(tr("Weight")));
				xml_params_add_int(&params, "tagsField", r.indexOf(tr("Tags")));
				xml_params_add_int(&params, "separatorIndex", ui->CSVSeparator->currentIndex());
				xml_params_add_int(&params, "units", ui->CSVUnits->currentIndex());
				xml_params_add_int(&params, "datefmt", ui->DateFormat->currentIndex());
				xml_params_add_int(&params, "durationfmt", ui->DurationFormat->currentIndex());
				xml_params_add_int(&params, "cylindersizeField", r.indexOf(tr("Cyl. size")));
				xml_params_add_int(&params, "startpressureField", r.indexOf(tr("Start pressure")));
				xml_params_add_int(&params, "endpressureField", r.indexOf(tr("End pressure")));
				xml_params_add_int(&params, "o2Field", r.indexOf(tr("O₂")));
				xml_params_add_int(&params, "heField", r.indexOf(tr("He")));
				xml_params_add_int(&params, "airtempField", r.indexOf(tr("Air temp.")));
				xml_params_add_int(&params, "watertempField", r.indexOf(tr("Water temp.")));
				xml_params_add_int(&params, "visibilityField", r.indexOf(tr("Visibility")));
				xml_params_add_int(&params, "ratingField", r.indexOf(tr("Rating")));

				parse_manual_file(qPrintable(fileNames[i]), &params, &log);
			} else {
				xml_params params;

				if (txtLog) {
					parseTxtHeader(fileNames[i], params);
				} else {
					QRegularExpression apdRe("\\A^.*[/\\][0-9a-zA-Z]*_([0-9]{6})_([0-9]{6})\\.apd\\z");
					QRegularExpressionMatch match = apdRe.match(fileNames[i]);
					if (match.hasMatch()) {
						xml_params_add(&params, "date", qPrintable("20" + match.captured(1)));
						xml_params_add(&params, "time", qPrintable("1" + match.captured(2)));
					}
				}
				setup_csv_params(r, params);
				parse_csv_file(qPrintable(fileNames[i]), &params,
					specialCSV.contains(ui->knownImports->currentIndex()) ? qPrintable(CSVApps[ui->knownImports->currentIndex()].name) : "csv",
					&log);
			}
		}
	}

	QString source = fileNames.size() == 1 ? fileNames[0] : tr("multiple files");
	Command::importDives(&log, IMPORT_MERGE_ALL_TRIPS, source);
}

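// Delegate that renders the entries of the available-columns list as rounded
// "tag" chips (translucent AIR_BLUE background) and pads the size hint a bit
// so the tags don't touch.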
TagDragDelegate::TagDragDelegate(QObject *parent) : QStyledItemDelegate(parent)
{
}

QSize TagDragDelegate::sizeHint(const QStyleOptionViewItem & option, const QModelIndex & index) const
{
	QSize originalSize = QStyledItemDelegate::sizeHint(option, index);
	return originalSize + QSize(5,5);
}

void TagDragDelegate::paint(QPainter * painter, const QStyleOptionViewItem & option, const QModelIndex & index) const
{
	painter->save();
	painter->setRenderHints(QPainter::Antialiasing);
	painter->setBrush(QBrush(AIR_BLUE_TRANS));
	painter->drawRoundedRect(option.rect.adjusted(2,2,-2,-2), 5, 5);
	painter->restore();
	QStyledItemDelegate::paint(painter, option, index);
}