Compare commits

...

5 commits

Author SHA1 Message Date
Morten Borup Petersen
16c7495cc9
Merge 407beefad6 into 46c439c443 2024-11-26 22:35:58 +13:00
rmultan
46c439c443 Bluetooth: Add Mares Sirius to known devices
The app already supports the Mares Sirius,
but the user has to allow scanning for
unknown devices in the app settings.
This adds the Mares Sirius to the list of known devices.

Signed-off-by: rmultan <multan.rafal.k@gmail.com>
2024-11-26 22:29:13 +13:00
Michael Keller
015ac0e459 CICD: Build Release Versions on Push.
Change the builds on push (i.e. pull request merges) to build release
versions. This seems to be the correct way to build these, as they are
now distributed as the official builds on
https://subsurface-divelog.org/.
For users wanting to help with debugging, build artifacts on pull
requests are available, and these are debug builds.
Also add the option to manually trigger builds, so that debug builds can be
run on `master` if needed.

Signed-off-by: Michael Keller <github@ike.ch>
2024-11-26 19:37:45 +13:00
Morten Borup Petersen
407beefad6 fix default mem_csv value
Signed-off-by: Morten Borup Petersen <morten_bp@live.dk>
2024-11-11 16:59:48 +01:00
Morten Borup Petersen
1d2a430b80 Slight refactor of DAN parse code
Mostly NFC (no functional change); this commit is mainly to get familiar with
the codebase and to meet the people who will review these changes.

I hope to make some changes to the DAN parsing code to eventually extract more
metadata from my Aqualung dive computer's `.zxu` formatted logs. To do so, and
to be able to work on this efficiently, I've refactored the DAN parsing code in
a somewhat more modern C++ style and made it more true to spec with respect to
the (...ancient) DAN file format documentation that I could dig up... hopefully
that's an acceptable tradeoff for the project.

This more true-to-spec parsing also fixed a bug where the dive number was parsed
from the incorrect index in the ZDH vector (or at least I consider it a bug -
the "Export sequence" number was being used as the dive number instead of the
"Internal Dive Sequence" number; the latter is described in the spec as `The
sequence number assigned to the dive by the recording computer`).
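
To make the two sequence numbers concrete, here is a minimal standalone sketch
(not part of this PR; `split_fields` is a hypothetical helper) that splits the
ZDH record from the updated DL7.zxu test data and prints both fields:

#include <cstdio>
#include <string>
#include <vector>

// Split the '|'-separated fields of a DL7 record, skipping the leading tag.
static std::vector<std::string> split_fields(const std::string &line, char delim)
{
	std::vector<std::string> out;
	size_t start = line.find(delim);
	while (start != std::string::npos) {
		size_t end = line.find(delim, start + 1);
		out.emplace_back(line, start + 1,
				 end == std::string::npos ? std::string::npos : end - start - 1);
		start = end;
	}
	return out;
}

int main()
{
	std::vector<std::string> f = split_fields("ZDH|1|3|I|QS|20180103101000|28|11|FO2|||", '|');
	// f[0] is the "Export sequence" (here "1"), f[1] the "Internal Dive Sequence"
	// (here "3"); the dive number has to come from the latter.
	std::printf("export=%s internal=%s\n", f[0].c_str(), f[1].c_str());
	return 0;
}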

This also contains some unrelated formatting changes; I tried to keep these
minimal (I presume these files haven't been touched by `clang-format` in a
while).

Signed-off-by: Morten Borup Petersen <morten_bp@live.dk>
2024-11-02 18:31:56 +01:00
20 changed files with 315 additions and 155 deletions

View file

@@ -11,6 +11,7 @@ on:
- scripts/docker/**
branches:
- master
workflow_dispatch:
jobs:
build:
@@ -49,6 +50,12 @@ jobs:
VERSION: ${{ steps.version_number.outputs.version }}
VERSION_4: ${{ steps.version_number.outputs.version_4 }}
run: |
BUILD_EXTRA_ARGS=""
if [ "${GITHUB_EVENT_NAME}" = "push" ]; then
echo "Building a release version"
BUILD_EXTRA_ARGS="${BUILD_EXTRA_ARGS} release"
fi
# this is rather awkward, but it allows us to use the preinstalled
# Android and Qt versions with relative paths
cd ..
@@ -65,7 +72,7 @@ jobs:
git config --global --add safe.directory $GITHUB_WORKSPACE/libdivecomputer
# get the build number via curl so this works both for a pull request as well as a push
export OUTPUT_DIR="$GITHUB_WORKSPACE"
bash -x ./subsurface/packaging/android/qmake-build.sh -buildnr $BUILDNR -canonicalversion $VERSION -canonicalversion_4 $VERSION_4
bash -x ./subsurface/packaging/android/qmake-build.sh ${BUILD_EXTRA_ARGS} -buildnr $BUILDNR -canonicalversion $VERSION -canonicalversion_4 $VERSION_4
- name: delete the keystore
if: github.event_name == 'push'

View file

@@ -11,6 +11,7 @@ on:
- scripts/docker/**
branches:
- master
workflow_dispatch:
jobs:
build:
@@ -45,12 +46,18 @@ jobs:
env:
VERSION: ${{ steps.version_number.outputs.version }}
run: |
BUILD_EXTRA_ARGS=""
if [ "${GITHUB_EVENT_NAME}" = "push" ]; then
echo "Building a release version"
BUILD_EXTRA_ARGS="${BUILD_EXTRA_ARGS} -release"
fi
cd ..
git config --global --add safe.directory $GITHUB_WORKSPACE
git config --global --add safe.directory $GITHUB_WORKSPACE/libdivecomputer
export IOS_QT=$GITHUB_WORKSPACE/qt-ios
echo "build for simulator"
bash -x $GITHUB_WORKSPACE/packaging/ios/build.sh -simulator
bash -x $GITHUB_WORKSPACE/packaging/ios/build.sh -simulator ${BUILD_EXTRA_ARGS}
# We need this in order to be able to access the file and publish it
mv build-Subsurface-mobile-Qt_5_14_1_for_iOS-Release/Release-iphonesimulator/Subsurface-mobile.app $GITHUB_WORKSPACE/Subsurface-mobile-$VERSION.app

View file

@@ -71,9 +71,15 @@ jobs:
echo "--------------------------------------------------------------"
echo "building desktop"
BUILD_EXTRA_ARGS=""
if [ "${GITHUB_EVENT_NAME}" = "push" ]; then
echo "Building a release version"
BUILD_EXTRA_ARGS="${BUILD_EXTRA_ARGS} -release"
fi
# now build for the desktop version (including WebKit)
cd ..
bash -e -x subsurface/scripts/build.sh -desktop -build-with-webkit
bash -e -x subsurface/scripts/build.sh -desktop -build-with-webkit ${BUILD_EXTRA_ARGS}
- name: test desktop build
run: |

View file

@@ -11,6 +11,7 @@ on:
- scripts/docker/**
branches:
- master
workflow_dispatch:
jobs:
do-build-test:

View file

@@ -11,6 +11,7 @@ on:
- scripts/docker/**
branches:
- master
workflow_dispatch:
jobs:
build:
@@ -52,12 +53,18 @@ jobs:
echo "--------------------------------------------------------------"
echo "building desktop"
BUILD_EXTRA_ARGS=""
if [ "${GITHUB_EVENT_NAME}" = "push" ]; then
echo "Building a release version"
BUILD_EXTRA_ARGS="${BUILD_EXTRA_ARGS} -release"
fi
# now build for the desktop version (without WebKit)
cd ..
git config --global --add safe.directory $GITHUB_WORKSPACE
git config --global --add safe.directory $GITHUB_WORKSPACE/libdivecomputer
git config --global --get-all safe.directory
bash -e -x subsurface/scripts/build.sh -desktop -build-with-qt6
bash -e -x subsurface/scripts/build.sh -desktop -build-with-qt6 ${BUILD_EXTRA_ARGS}
- name: test desktop build
run: |

View file

@@ -11,6 +11,7 @@ on:
- scripts/docker/**
branches:
- master
workflow_dispatch:
jobs:
build:
@@ -60,9 +61,15 @@ jobs:
echo "--------------------------------------------------------------"
echo "building desktop"
BUILD_EXTRA_ARGS=""
if [ "${GITHUB_EVENT_NAME}" = "push" ]; then
echo "Building a release version"
BUILD_EXTRA_ARGS="${BUILD_EXTRA_ARGS} -release"
fi
# now build the appimage
cd ..
bash -e -x subsurface/scripts/build.sh -desktop -create-appdir -build-with-webkit
bash -e -x subsurface/scripts/build.sh -desktop -create-appdir -build-with-webkit ${BUILD_EXTRA_ARGS}
- name: test desktop build
run: |

View file

@@ -11,6 +11,7 @@ on:
- scripts/docker/**
branches:
- master
workflow_dispatch:
jobs:
do-build-test:

View file

@@ -11,6 +11,7 @@ on:
- scripts/docker/**
branches:
- master
workflow_dispatch:
jobs:
do-build-test:

View file

@@ -11,6 +11,7 @@ on:
- scripts/docker/**
branches:
- master
workflow_dispatch:
jobs:
do-build-test:

View file

@@ -44,6 +44,12 @@ jobs:
env:
CANONICALVERSION: ${{ steps.version_number.outputs.version }}
run: |
BUILD_EXTRA_ARGS=""
if [ "${GITHUB_EVENT_NAME}" = "push" ]; then
echo "Building a release version"
BUILD_EXTRA_ARGS="${BUILD_EXTRA_ARGS} -release"
fi
cd ${GITHUB_WORKSPACE}/..
export QT_ROOT=${GITHUB_WORKSPACE}/qt-mac/Qt5.15.15
export QT_QPA_PLATFORM_PLUGIN_PATH=$QT_ROOT/plugins
@@ -51,7 +57,7 @@ jobs:
export CMAKE_PREFIX_PATH=$QT_ROOT/lib/cmake
# now setup Subsurface with WebKit and build the dependencies, using the generic build script
bash -e -x ./subsurface/scripts/build.sh -desktop -build-with-webkit -release -build-deps -ftdi -prep-only
bash -e -x ./subsurface/scripts/build.sh -desktop -build-with-webkit -build-deps -ftdi -prep-only ${BUILD_EXTRA_ARGS}
echo "finished initial cmake setup of Subsurface - next build the package"
cd subsurface/build

View file

@@ -47,9 +47,15 @@ jobs:
CANONICALVERSION: ${{ steps.version_number.outputs.version }}
CANONICALVERSION_4: ${{ steps.version_number.outputs.version_4 }}
run: |
BUILD_EXTRA_ARGS=""
if [ "${GITHUB_EVENT_NAME}" = "push" ]; then
echo "Building a release version"
BUILD_EXTRA_ARGS="${BUILD_EXTRA_ARGS} -release"
fi
export OUTPUT_DIR="$GITHUB_WORKSPACE"
cd /win
bash -x subsurface/packaging/windows/in-container-build.sh 2>&1 | tee build.log
bash -x subsurface/packaging/windows/in-container-build.sh ${BUILD_EXTRA_ARGS} 2>&1 | tee build.log
grep "Built target installer" build.log
- name: publish pull request artifacts

View file

@@ -78,6 +78,7 @@ static struct namePattern name[] = {
{ "Luna 2.0", "Scubapro", "Luna 2.0" },
// Mares dive computers
{ "Mares Genius", "Mares", "Genius" },
{ "Sirius", "Mares", "Sirius" },
{ "Mares", "Mares", "Quad" }, // we actually don't know and just pick a common one - user needs to fix in UI
// Cressi dive computers
{ "CARESIO_", "Cressi", "Cartesio" },

View file

@@ -1,21 +1,21 @@
#include <unistd.h>
#include <stdlib.h>
#include <errno.h>
#include <libdivecomputer/parser.h>
#include <map>
#include <stdlib.h>
#include <unistd.h>
#include "dive.h"
#include "errorhelper.h"
#include "subsurface-string.h"
#include "divelist.h"
#include "divelog.h"
#include "errorhelper.h"
#include "file.h"
#include "format.h"
#include "parse.h"
#include "sample.h"
#include "divelist.h"
#include "gettext.h"
#include "import-csv.h"
#include "parse.h"
#include "qthelper.h"
#include "sample.h"
#include "subsurface-string.h"
#include "xmlparams.h"
#define MATCH(buffer, pattern) \
@@ -107,18 +107,205 @@ static char *parse_dan_new_line(char *buf, const char *NL)
}
static int try_to_xslt_open_csv(const char *filename, std::string &mem, const char *tag);
static int parse_csv_line(char *&ptr, const char *NL, char delim, std::vector<std::string> &fields)
{
char *line_end = strstr(ptr, NL); // Find the end of the line using the newline string
bool withNL = line_end;
if (!line_end) {
// EOF - set line_end to end of 'ptr'
line_end = ptr + strlen(ptr);
}
// Create a temporary pointer to traverse the line
char *field_start = ptr;
char *field_end = nullptr;
// Skip leading delimiter
if (*field_start == delim) {
field_start++;
} else {
return report_error("DEBUG: No leading delimiter found");
}
while (field_start < line_end) {
// Find the next delimiter or end of line
field_end = static_cast<char *>(memchr(field_start, delim, line_end - field_start));
if (field_end) {
// If we found a delimiter, extract the field
fields.emplace_back(field_start, field_end - field_start);
// Move to the next character after the delimiter
field_start = field_end + 1;
} else {
// If no more delimiters, add the last field
fields.emplace_back(field_start, line_end - field_start);
break;
}
}
// Update the pointer to point to the next line
ptr = line_end;
if (withNL)
ptr += strlen(NL);
return 0;
}
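// Illustrative example (not part of the patch): with delim '|' and NL "\n",
// parsing the line "|28|11|FO2||" yields the fields {"28", "11", "FO2", ""}
// and leaves 'ptr' at the start of the following line.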
// Parses a line of DAN data fields (| separated). The provided 'fields' mapping
// will get filled with as many fields as are found in the line.
static int parse_dan_fields(
const char *NL,
std::map<unsigned, std::string> &fields,
char *&ptr)
{
std::vector<std::string> csv_fields;
if (parse_csv_line(ptr, NL, '|', csv_fields) < 0)
return -1;
if (csv_fields.size() > fields.size()) {
report_info("DEBUG: More DAN fields than expected");
return -1;
}
for (size_t i = 0; i < csv_fields.size(); i++) {
fields[i] = csv_fields[i];
}
return 0;
}
// Parses the DAN ZDH dive header.
static int parse_dan_zdh(const char *NL, struct xml_params *params, char *&ptr)
{
// Skip the leading 'ZDH'
ptr += 3;
std::string temp;
// Parse all fields - we only use a subset of them, but parse all of them for maintainability and debuggability.
enum ZDH_FIELD {
EXPORT_SEQUENCE,
INTERNAL_DIVE_SEQUENCE,
RECORD_TYPE,
RECORDING_INTERVAL,
LEAVE_SURFACE,
AIR_TEMPERATURE,
TANK_VOLUME,
O2_MODE,
REBREATHER_DILUENT_GAS,
ALTITUDE,
};
std::map<unsigned, std::string> fields = {
{EXPORT_SEQUENCE, ""},
{INTERNAL_DIVE_SEQUENCE, ""},
{RECORD_TYPE, ""},
{RECORDING_INTERVAL, ""},
{LEAVE_SURFACE, ""},
{AIR_TEMPERATURE, ""},
{TANK_VOLUME, ""},
{O2_MODE, ""},
{REBREATHER_DILUENT_GAS, ""},
{ALTITUDE, ""},
};
if (parse_dan_fields(NL, fields, ptr) < 0)
return -1;
// Add relevant fields to the XML parameters.
// Parse date. 'leaveSurface' should (per the spec) be provided in
// the format "YYYYMMDDHHMMSS", but old code used to allow for just parsing
// the date... so we'll do that here as well.
auto &leaveSurface = fields[LEAVE_SURFACE];
if (leaveSurface.length() >= 8) {
xml_params_add(params, "date", leaveSurface.substr(0, 8));
}
// Parse time with "1" prefix
if (leaveSurface.length() >= 14) {
std::string time_str = "1" + leaveSurface.substr(8, 6);
xml_params_add(params, "time", time_str);
}
xml_params_add(params, "airTemp", fields[AIR_TEMPERATURE]);
xml_params_add(params, "diveNro", fields[INTERNAL_DIVE_SEQUENCE]);
return 0;
}
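// Illustrative example (not part of the patch): for the header line
// "ZDH|1|3|I|QS|20180103101000|28|11|FO2|||" this adds date="20180103",
// time="1101000" (the leading '1' avoids octal parsing and is stripped again in
// the XSLT), airTemp="28" and diveNro="3" to the XML parameters.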
// Parse the DAN ZDT dive trailer.
static int parse_dan_zdt(const char *NL, struct xml_params *params, char *&ptr)
{
// Skip the leading 'ZDT'
ptr += 3;
enum ZDT_FIELD {
EXPORT_SEQUENCE,
INTERNAL_DIVE_SEQUENCE,
MAX_DEPTH,
REACH_SURFACE,
MIN_WATER_TEMP,
PRESSURE_DROP,
};
std::map<unsigned, std::string> fields = {
{EXPORT_SEQUENCE, ""},
{INTERNAL_DIVE_SEQUENCE, ""},
{MAX_DEPTH, ""},
{REACH_SURFACE, ""},
{MIN_WATER_TEMP, ""},
{PRESSURE_DROP, ""},
};
if (parse_dan_fields(NL, fields, ptr) < 0)
return -1;
// Add relevant fields to the XML parameters.
xml_params_add(params, "waterTemp", fields[MIN_WATER_TEMP]);
return 0;
}
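// Illustrative example (not part of the patch): for the trailer line
// "ZDT|1|2|10.0|20180102110000|25||" this adds waterTemp="25" to the XML parameters.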
static int parse_dan_zdp(const char *NL, const char *filename, struct xml_params *params, char *&ptr, std::string &mem_csv)
{
if (strncmp(ptr, "ZDP{", 4) != 0)
return report_error("DEBUG: Failed to find start of ZDP");
if (ptr && ptr[4] == '}')
return report_error(translate("gettextFromC", "No dive profile found from '%s'"), filename);
ptr = parse_dan_new_line(ptr, NL);
if (!ptr)
return -1;
// We're now in the ZDP segment. Look for the end of it.
char *end_ptr = strstr(ptr, "ZDP}");
if (!end_ptr) {
return report_error("DEBUG: failed to find end of ZDP");
}
/* Copy the current dive data to start of mem_csv buffer */
mem_csv = std::string(ptr, end_ptr - ptr);
// Skip the trailing 'ZDP}' line.
ptr = end_ptr;
ptr = parse_dan_new_line(end_ptr, NL);
return 0;
}
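// Illustrative note (not part of the patch): for a buffer starting with "ZDP{",
// followed by sample lines and a closing "ZDP}" line (see the DL7.zxu test data),
// mem_csv receives just the sample lines and 'ptr' ends up at the line after "ZDP}".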
static int parse_dan_format(const char *filename, struct xml_params *params, struct divelog *log)
{
int ret = 0, i;
size_t end_ptr = 0;
char tmpbuf[MAXCOLDIGITS];
int ret = 0;
int params_orig_size = xml_params_count(params);
char *ptr = NULL;
const char *NL = NULL;
char *iter = NULL;
auto [mem, err] = readfile(filename);
const char *end = mem.data() + mem.size();
if (err < 0)
return report_error(translate("gettextFromC", "Failed to read '%s'"), filename);
@@ -132,136 +319,43 @@ static int parse_dan_format(const char *filename, struct xml_params *params, str
return -1;
}
while ((end_ptr < mem.size()) && (ptr = strstr(mem.data() + end_ptr, "ZDH"))) {
xml_params_resize(params, params_orig_size); // restart with original parameter block
char *iter_end = NULL;
iter = ptr + 4;
iter = strchr(iter, '|');
if (iter) {
memcpy(tmpbuf, ptr + 4, iter - ptr - 4);
tmpbuf[iter - ptr - 4] = 0;
xml_params_add(params, "diveNro", tmpbuf);
}
// Iteratively parse ZDH, ZDP and ZDT fields, which together comprise a list of dives.
while (ptr < end) {
xml_params_resize(params, params_orig_size); // Restart with original parameter block
//report_info("DEBUG: BEGIN end_ptr %d round %d <%s>", end_ptr, j++, ptr);
iter = ptr + 1;
for (i = 0; i <= 4 && iter; ++i) {
iter = strchr(iter, '|');
if (iter)
++iter;
}
if (!iter) {
report_info("DEBUG: Data corrupt");
return -1;
}
/* Setting date */
memcpy(tmpbuf, iter, 8);
tmpbuf[8] = 0;
xml_params_add(params, "date", tmpbuf);
/* Setting time, gotta prepend it with 1 to
* avoid octal parsing (this is stripped out in
* XSLT */
tmpbuf[0] = '1';
memcpy(tmpbuf + 1, iter + 8, 6);
tmpbuf[7] = 0;
xml_params_add(params, "time", tmpbuf);
/* Air temperature */
memset(tmpbuf, 0, sizeof(tmpbuf));
iter = strchr(iter, '|');
if (iter) {
iter = iter + 1;
iter_end = strchr(iter, '|');
if (iter_end) {
memcpy(tmpbuf, iter, iter_end - iter);
xml_params_add(params, "airTemp", tmpbuf);
}
}
/* Search for the next line */
if (iter)
iter = parse_dan_new_line(iter, NL);
if (!iter)
return -1;
/* We got a trailer, no samples on this dive */
if (strncmp(iter, "ZDT", 3) == 0) {
end_ptr = iter - mem.data();
/* Water temperature */
memset(tmpbuf, 0, sizeof(tmpbuf));
for (i = 0; i < 5 && iter; ++i)
iter = strchr(iter + 1, '|');
if (iter) {
iter = iter + 1;
iter_end = strchr(iter, '|');
if (iter_end) {
memcpy(tmpbuf, iter, iter_end - iter);
xml_params_add(params, "waterTemp", tmpbuf);
}
}
ret |= parse_xml_buffer(filename, "<csv></csv>", 11, log, params);
continue;
}
/* After ZDH we should get either ZDT (above) or ZDP */
if (strncmp(iter, "ZDP{", 4) != 0) {
report_info("DEBUG: Input appears to violate DL7 specification");
end_ptr = iter - mem.data();
continue;
}
if (ptr && ptr[4] == '}')
return report_error(translate("gettextFromC", "No dive profile found from '%s'"), filename);
if (ptr)
// Locate the ZDH header.
while (strncmp(ptr, "ZDH", 3) != 0) {
ptr = parse_dan_new_line(ptr, NL);
if (!ptr)
return -1;
return report_error("Expected ZDH header not found");
}
end_ptr = ptr - mem.data();
if (int ret = parse_dan_zdh(NL, params, ptr); ret < 0)
return ret;
/* Copy the current dive data to start of mem_csv buffer */
std::string mem_csv(ptr, mem.size() - (ptr - mem.data()));
// Attempt to parse the ZDP field (optional)
std::string mem_csv;
if (strncmp(ptr, "ZDP", 3) == 0) {
if (int ret = parse_dan_zdp(NL, filename, params, ptr, mem_csv); ret < 0)
return ret;
}
ptr = strstr(mem_csv.data(), "ZDP}");
if (ptr) {
*ptr = 0;
// Parse the mandatory ZDT field
if (strncmp(ptr, "ZDT", 3) == 0) {
if (int ret = parse_dan_zdt(NL, params, ptr); ret < 0)
return ret;
} else {
report_info("DEBUG: failed to find end ZDP");
return -1;
}
mem_csv.resize(ptr - mem_csv.data());
end_ptr += ptr - mem_csv.data();
iter = parse_dan_new_line(ptr + 1, NL);
if (iter && strncmp(iter, "ZDT", 3) == 0) {
/* Water temperature */
memset(tmpbuf, 0, sizeof(tmpbuf));
for (i = 0; i < 5 && iter; ++i)
iter = strchr(iter + 1, '|');
if (iter) {
iter = iter + 1;
iter_end = strchr(iter, '|');
if (iter_end) {
memcpy(tmpbuf, iter, iter_end - iter);
xml_params_add(params, "waterTemp", tmpbuf);
}
}
return report_error("Expected ZDT trailer not found");
}
if (mem_csv.empty()) {
mem_csv = "<csv></csv>";
} else {
if (try_to_xslt_open_csv(filename, mem_csv, "csv"))
return -1;
}
ret |= parse_xml_buffer(filename, mem_csv.data(), mem_csv.size(), log, params);
}

View file

@@ -18,7 +18,12 @@ void xml_params_resize(struct xml_params *params, int count)
void xml_params_add(struct xml_params *params, const char *key, const char *value)
{
params->items.push_back({ std::string(key), std::string(value) });
xml_params_add(params, std::string(key), std::string(value));
}
void xml_params_add(struct xml_params *params, const std::string &key, const std::string &value)
{
params->items.push_back({key, value});
}
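// Illustrative note (not part of the patch): this overload lets the new DAN code
// pass parsed std::string fields directly, e.g.
//   xml_params_add(params, "airTemp", fields[AIR_TEMPERATURE]);
// instead of converting through c_str() for the const char * overload.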
void xml_params_add_int(struct xml_params *params, const char *key, int value)

View file

@@ -18,6 +18,7 @@ extern struct xml_params *alloc_xml_params();
extern void free_xml_params(struct xml_params *params);
extern void xml_params_resize(struct xml_params *params, int count);
extern void xml_params_add(struct xml_params *params, const char *key, const char *value);
extern void xml_params_add(struct xml_params *params, const std::string &key, const std::string &value);
extern void xml_params_add_int(struct xml_params *params, const char *key, int value);
extern int xml_params_count(const struct xml_params *params);
extern const char *xml_params_get_key(const struct xml_params *params, int idx); // not stable

View file

@@ -9,6 +9,6 @@ ZDP{
|3300|10|||||
|3600|0|||||
ZDP}
ZDT|2|2|10.0|20180102110000|25||
ZDH|3|3|I|QS|20180103101000|28|11|FO2|||
ZDT|3|3|10.0|20180103102000|26||
ZDT|1|2|10.0|20180102110000|25||
ZDH|1|3|I|QS|20180103101000|28|11|FO2|||
ZDT|1|3|10.0|20180103102000|26||

View file

@@ -13,9 +13,17 @@ set -e
mkdir -p win32
cd win32
BUILD_EXTRA_ARGS="debug"
SMTK2SSRF_BUILD_EXTRA_ARGS="-b debug"
if [[ "$1" == "-release" ]]; then
BUILD_EXTRA_ARGS=""
SMTK2SSRF_BUILD_EXTRA_ARGS="-b release"
shift
fi
# build Subsurface
export MXEBUILDTYPE=x86_64-w64-mingw32.shared
bash -ex ../subsurface/packaging/windows/mxe-based-build.sh installer
bash -ex ../subsurface/packaging/windows/mxe-based-build.sh ${BUILD_EXTRA_ARGS} installer
# the strange two step move is in order to get predictable names to use
# in the publish step of the GitHub Action
@@ -27,7 +35,7 @@ mv subsurface/"$fullname" ${OUTPUT_DIR}/"${fullname%.exe}-installer.exe"
bash -ex ../subsurface/packaging/windows/mxe-based-build.sh -noftdi -nolibraw subsurface
bash -ex ../subsurface/packaging/windows/smtk2ssrf-mxe-build.sh -a -i
bash -ex ../subsurface/packaging/windows/smtk2ssrf-mxe-build.sh ${SMTK2SSRF_BUILD_EXTRA_ARGS} -a -i
# the strange two step move is in order to get predictable names to use
# in the publish step of the GitHub Action

View file

@@ -131,7 +131,6 @@ if [[ "$1" == "debug" ]] ; then
RELEASE="Debug"
RELEASE_MAIN="Debug"
RELEASE_GM="debug"
DLL_SUFFIX="d"
shift
if [[ -f Release ]] ; then
rm -rf *
@@ -141,7 +140,6 @@ else
RELEASE="Release"
RELEASE_MAIN="RelWithDebInfo"
RELEASE_GM="release"
DLL_SUFFIX=""
if [[ -f Debug ]] ; then
rm -rf *
fi
@@ -281,9 +279,9 @@ done
# for some reason we aren't installing Qt5Xml.dll and Qt5Location.dll
# I need to figure out why and fix that, but for now just manually copy that as well
EXTRA_MANUAL_DEPENDENCIES="$BASEDIR/"$MXEDIR"/usr/"$MXEBUILDTYPE"/qt5/bin/Qt5Xml$DLL_SUFFIX.dll \
$BASEDIR/"$MXEDIR"/usr/"$MXEBUILDTYPE"/qt5/bin/Qt5Location$DLL_SUFFIX.dll \
$BASEDIR/"$MXEDIR"/usr/"$MXEBUILDTYPE"/qt5/bin/Qt5QmlWorkerScript$DLL_SUFFIX.dll"
EXTRA_MANUAL_DEPENDENCIES="$BASEDIR/"$MXEDIR"/usr/"$MXEBUILDTYPE"/qt5/bin/Qt5Xml.dll \
$BASEDIR/"$MXEDIR"/usr/"$MXEBUILDTYPE"/qt5/bin/Qt5Location.dll \
$BASEDIR/"$MXEDIR"/usr/"$MXEBUILDTYPE"/qt5/bin/Qt5QmlWorkerScript.dll"
for f in $EXTRA_MANUAL_DEPENDENCIES
do

View file

@@ -124,12 +124,10 @@ else
fi
case "$RELEASE" in
debug|Debug) RELEASE=Debug
DLL_SUFFIX="d"
[[ -f Release ]] && rm -rf ./*
touch Debug
;;
release|Release) RELEASE=Release
DLL_SUFFIX=""
[[ -f Debug ]] && rm -rf ./*
touch Release
;;
@@ -175,7 +173,7 @@ $BASEDIR/mxe/usr/x86_64-w64-mingw32.shared/qt5/plugins/platforms"
# This comes from subsurface's mxe-based-build.sh. I'm not sure it is necessary
# but, well, it doesn't hurt.
EXTRA_MANUAL_DEPENDENCIES="$BASEDIR/mxe/usr/x86_64-w64-mingw32.shared/qt5/bin/Qt5Xml$DLL_SUFFIX.dll"
EXTRA_MANUAL_DEPENDENCIES="$BASEDIR/mxe/usr/x86_64-w64-mingw32.shared/qt5/bin/Qt5Xml.dll"
STAGING_DIR=$BUILDDIR/smtk-import/staging

View file

@@ -5,12 +5,12 @@
#include "core/divelog.h"
#include "core/divesite.h"
#include "core/errorhelper.h"
#include "core/trip.h"
#include "core/file.h"
#include "core/import-csv.h"
#include "core/parse.h"
#include "core/qthelper.h"
#include "core/subsurface-string.h"
#include "core/trip.h"
#include "core/xmlparams.h"
#include <QTextStream>
@@ -224,7 +224,8 @@ void TestParse::testParseNewFormat()
"/dives/")
.append(files.at(i))
.toLatin1()
.data(), &divelog),
.data(),
&divelog),
0);
QCOMPARE(divelog.dives.size(), i + 1);
}
@@ -452,7 +453,11 @@ void TestParse::parseDL7()
QCOMPARE(parse_csv_file(SUBSURFACE_TEST_DATA "/dives/DL7.zxu",
&params, "DL7", &divelog),
0);
QCOMPARE(divelog.dives.size(), 3);
QCOMPARE(divelog.dives[0]->number, 1);
QCOMPARE(divelog.dives[1]->number, 2);
QCOMPARE(divelog.dives[2]->number, 3);
QCOMPARE(save_dives("./testdl7out.ssrf"), 0);
FILE_COMPARE("./testdl7out.ssrf",