Remove final remnants of 'exported' (#1146)
* Remove final remnants of 'exported'

* incorporate review comments in duplicate filter, and remove deleted exported keyword from unicsv doc.

---------

Co-authored-by: Robert Lipe <[email protected]>
tsteven4 and robertlipe authored Sep 25, 2023
1 parent dfc770a commit bdcb6b1
Showing 11 changed files with 2 additions and 97 deletions.
47 changes: 2 additions & 45 deletions duplicate.cc
@@ -21,52 +21,16 @@

#include "duplicate.h"

#include <algorithm> // for stable_sort

#include <QDateTime> // for QDateTime
#include <QList> // for QList, QList<>::iterator, QList<>::const_iterator
#include <QMultiHash> // for QMultiHash
#include <QtCore> // for qAsConst

#include "defs.h"
#include "geocache.h" // for Geocache
#include "src/core/datetime.h" // for DateTime


#if FILTERS_ENABLED

#define MYNAME "duplicate"
/*
It looks odd that we have different comparisons for date and index.
  If exported   if a < b return 1
  if index      if a < b return -1
The reason is that we want to sort in reverse order by date, but forward
order by index. So if we have four records:
    date     index
    June 24  0
    June 25  1
    June 25  2
    June 24  3
we want to sort them like this:
    date     index
    June 25  1
    June 25  2
    June 24  0
    June 24  3
Thus, the first point we come across is the latest point, but if we
have two points with the same export date/time, we will first see the
one with the smaller index (i.e. the first of those two points that we
came across while importing waypoints.)
In the (common) case that we have no exported dates, the dates will all
be zero so the sort will end up being an expensive no-op. However, the
complexity of this filter is dominated by other concerns.
*/

void DuplicateFilter::init()
{
@@ -77,15 +41,8 @@ void DuplicateFilter::init()

void DuplicateFilter::process()
{
auto wptlist = *global_waypoint_list;

auto compare_lambda = [](const Waypoint* wa, const Waypoint* wb)->bool {
return wa->gc_data->exported > wb->gc_data->exported;
};
std::stable_sort(wptlist.begin(), wptlist.end(), compare_lambda);

QMultiHash<QString, Waypoint*> wpthash;
for (Waypoint* waypointp : wptlist) {
for (Waypoint* waypointp : qAsConst(*global_waypoint_list)) {

QString key;
if (lcopt) {
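
The comment removed above describes the ordering the old code relied on: newest export date first, with the original index order preserved among records that share a date. The following is a minimal, self-contained sketch of that behaviour; the Record struct, the std::string dates, and the sample values are illustrative stand-ins for Waypoint and gc_data->exported, not code from the repository.

// Illustrative only: Record and the sample dates stand in for the
// Waypoint / gc_data->exported fields that this commit removes.
#include <algorithm>  // for stable_sort
#include <iostream>
#include <string>
#include <vector>

struct Record {
  std::string date;  // ISO dates compare correctly as plain strings
  int index;
};

int main()
{
  std::vector<Record> recs = {{"2023-06-24", 0}, {"2023-06-25", 1},
                              {"2023-06-25", 2}, {"2023-06-24", 3}};
  // '>' puts later dates first; stable_sort keeps the original relative
  // (index) order for records whose dates compare equal.
  std::stable_sort(recs.begin(), recs.end(),
                   [](const Record& a, const Record& b) {
                     return a.date > b.date;
                   });
  for (const Record& r : recs) {
    std::cout << r.date << " " << r.index << "\n";  // 25/1, 25/2, 24/0, 24/3
  }
  return 0;
}

Once the exported field is gone there is no date left to order by, which is why the commit drops the std::stable_sort call entirely and simply iterates global_waypoint_list via qAsConst, as shown in the diff above.
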
1 change: 0 additions & 1 deletion geocache.h
@@ -114,7 +114,6 @@ class Geocache
status_t is_available:2;
status_t is_memberonly:2;
status_t has_customcoords:2;
gpsbabel::DateTime exported;
gpsbabel::DateTime last_found;
QString placer; /* Placer name */
int placer_id; /* Placer id */
5 changes: 0 additions & 5 deletions gpx.cc
@@ -1080,11 +1080,6 @@ GpxFormat::fprint_xml_chain(XmlTag* tag, const Waypoint* wpt) const
if (tag->child) {
fprint_xml_chain(tag->child, wpt);
}
if (wpt && wpt->gc_data->exported.isValid() &&
tag->tagname.compare(u"groundspeak:cache") == 0) {
writer->writeTextElement(QStringLiteral("time"),
wpt->gc_data->exported.toPrettyString());
}
writer->writeEndElement();
}
if (!tag->parentcdata.isEmpty()) {
5 changes: 0 additions & 5 deletions reference/duplicate_exported_1.csv

This file was deleted.

3 changes: 0 additions & 3 deletions reference/duplicate_exported_1~csv.csv

This file was deleted.

5 changes: 0 additions & 5 deletions reference/duplicate_exported_2.csv

This file was deleted.

3 changes: 0 additions & 3 deletions reference/duplicate_exported_2~csv.csv

This file was deleted.

5 changes: 0 additions & 5 deletions testo.d/duplicate.test
@@ -7,8 +7,3 @@ gpsbabel -i geo -f ${REFERENCE}/geocaching.loc -o csv -F ${TMPDIR}/filterdupe.cs
gpsbabel -i geo -f ${REFERENCE}/geocaching.loc -f ${REFERENCE}/geocaching.loc -x duplicate,shortname \
-o csv -F ${TMPDIR}/filterdupe.csv2
sort_and_compare ${TMPDIR}/filterdupe.csv1 ${TMPDIR}/filterdupe.csv2

gpsbabel -i unicsv,utc -f ${REFERENCE}/duplicate_exported_1.csv -x duplicate,location -o unicsv,utc -F ${TMPDIR}/duplicate_exported_1~csv.csv
compare ${REFERENCE}/duplicate_exported_1~csv.csv ${TMPDIR}/duplicate_exported_1~csv.csv
gpsbabel -i unicsv,utc -f ${REFERENCE}/duplicate_exported_2.csv -x duplicate,location -o unicsv,utc -F ${TMPDIR}/duplicate_exported_2~csv.csv
compare ${REFERENCE}/duplicate_exported_2~csv.csv ${TMPDIR}/duplicate_exported_2~csv.csv
23 changes: 0 additions & 23 deletions unicsv.cc
@@ -156,7 +156,6 @@ const UnicsvFormat::field_t UnicsvFormat::fields_def[] = {
{ "diff", fld_gc_diff, kStrAny },
{ "arch", fld_gc_is_archived, kStrAny },
{ "avail", fld_gc_is_available, kStrAny },
{ "exported", fld_gc_exported, kStrAny },
{ "found", fld_gc_last_found, kStrAny },
{ "placer_id", fld_gc_placer_id, kStrAny },
{ "placer", fld_gc_placer, kStrAny },
@@ -869,7 +868,6 @@ UnicsvFormat::unicsv_parse_one_line(const QString& ibuf)
case fld_gc_diff:
case fld_gc_is_archived:
case fld_gc_is_available:
case fld_gc_exported:
case fld_gc_last_found:
case fld_gc_placer:
case fld_gc_placer_id:
@@ -907,14 +905,6 @@ UnicsvFormat::unicsv_parse_one_line(const QString& ibuf)
case fld_gc_is_available:
gc_data->is_available = unicsv_parse_status(value);
break;
case fld_gc_exported: {
QTime etime;
QDate edate;
etime = unicsv_parse_time(value, edate);
if (edate.isValid() || etime.isValid()) {
gc_data->exported = unicsv_adjust_time(edate, etime, true);
}
}
break;
case fld_gc_last_found: {
QTime ftime;
@@ -1251,9 +1241,6 @@ UnicsvFormat::unicsv_waypt_enum_cb(const Waypoint* wpt)
if (gc_data->is_available != Geocache::status_t::gs_unknown) {
unicsv_outp_flags[fld_gc_is_available] = true;
}
if (gc_data->exported.isValid()) {
unicsv_outp_flags[fld_gc_exported] = true;
}
if (gc_data->last_found.isValid()) {
unicsv_outp_flags[fld_gc_last_found] = true;
}
@@ -1621,13 +1608,6 @@ UnicsvFormat::unicsv_waypt_disp_cb(const Waypoint* wpt)
*fout << unicsv_fieldsep;
}
}
if (unicsv_outp_flags[fld_gc_exported]) {
if (gc_data) {
unicsv_print_date_time(gc_data->exported);
} else {
*fout << unicsv_fieldsep;
}
}
if (unicsv_outp_flags[fld_gc_last_found]) {
if (gc_data) {
unicsv_print_date_time(gc_data->last_found);
@@ -1900,9 +1880,6 @@ UnicsvFormat::write()
if (unicsv_outp_flags[fld_gc_is_available]) {
*fout << unicsv_fieldsep << "Available";
}
if (unicsv_outp_flags[fld_gc_exported]) {
*fout << unicsv_fieldsep << "Exported";
}
if (unicsv_outp_flags[fld_gc_last_found]) {
*fout << unicsv_fieldsep << "Last Found";
}
1 change: 0 additions & 1 deletion unicsv.h
@@ -133,7 +133,6 @@ class UnicsvFormat : public Format
fld_gc_diff,
fld_gc_is_archived,
fld_gc_is_available,
fld_gc_exported,
fld_gc_last_found,
fld_gc_placer,
fld_gc_placer_id,
1 change: 0 additions & 1 deletion xmldoc/formats/unicsv.xml
@@ -29,7 +29,6 @@
diff = <link linkend="style_def_geodiff">Geocache difficulty</link>
ele = Elevation (in meters). For feet use "ele ft", "eleft", "ele feet", or "elefeet".
e/w = 'e' for eastern hemisphere, 'w' for western
exported = Geocache export date
found = <link linkend="style_def_geofound">Geocache last found date</link>
fix = 3d, 2d, etc.
gcid = Geocache cache id. This accepts GC-ID ("575006") and GC-Code ("GC1234G").
