fixed a few forgotten log statements

Jean-Francois Dockes 2016-07-13 07:47:56 +02:00
parent 5af2d79604
commit 8f77b987ca
4 changed files with 38 additions and 44 deletions
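All four files get the same mechanical change: the remaining printf-style log calls, which wrapped a format string and its arguments in an extra pair of parentheses, become the newer single-argument form that builds the message with operator<<. Recoll's real log.h is not part of this diff, so purely as an illustration of why the converted calls are one expression, here is a minimal sketch of a stream-style macro of that shape (the macro body and the sample values are assumptions, not the project's actual implementation):

// Minimal sketch only -- not Recoll's actual log.h. It shows how a
// single-argument, stream-style LOGDEB() can accept a chain of
// operator<< insertions, which is the form the new calls below use.
#include <iostream>
#include <sstream>

#define LOGDEB(X) do { \
    std::ostringstream msg_; \
    msg_ << X; /* X expands to "text" << value << ... */ \
    std::cerr << msg_.str(); \
} while (0)

int main()
{
    long docid = 42;   // hypothetical values, for illustration only
    int imaxoccs = 5;
    // Old printf-style call (double parentheses, format specifiers):
    //   LOGDEB(("makeAbstract: docid %ld imaxoccs %d\n", docid, imaxoccs));
    // New stream-style call, as in the hunks below:
    LOGDEB("makeAbstract: docid " << docid << " imaxoccs " << imaxoccs << "\n");
    return 0;
}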

View File

@@ -135,7 +135,7 @@ static HANDLE eWorkFinished = INVALID_HANDLE_VALUE;
static BOOL WINAPI CtrlHandler(DWORD fdwCtrlType)
{
LOGDEB(("CtrlHandler\n"));
LOGDEB("CtrlHandler\n" );
if (l_sigcleanup == 0)
return FALSE;
@@ -147,12 +147,12 @@ static BOOL WINAPI CtrlHandler(DWORD fdwCtrlType)
case CTRL_SHUTDOWN_EVENT:
{
l_sigcleanup(SIGINT);
LOGDEB0(("CtrlHandler: waiting for exit ready\n"));
LOGDEB0("CtrlHandler: waiting for exit ready\n" );
DWORD res = WaitForSingleObject(eWorkFinished, INFINITE);
if (res != WAIT_OBJECT_0) {
LOGERR(("CtrlHandler: exit ack wait failed\n"));
LOGERR("CtrlHandler: exit ack wait failed\n" );
}
LOGDEB0(("CtrlHandler: got exit ready event, exiting\n"));
LOGDEB0("CtrlHandler: got exit ready event, exiting\n" );
return TRUE;
}
default:
@@ -170,12 +170,12 @@ LRESULT CALLBACK MainWndProc(HWND hwnd , UINT msg , WPARAM wParam,
case WM_CLOSE:
{
l_sigcleanup(SIGINT);
LOGDEB(("MainWndProc: got end message, waiting for work finished\n"));
LOGDEB("MainWndProc: got end message, waiting for work finished\n" );
DWORD res = WaitForSingleObject(eWorkFinished, INFINITE);
if (res != WAIT_OBJECT_0) {
LOGERR(("MainWndProc: exit ack wait failed\n"));
LOGERR("MainWndProc: exit ack wait failed\n" );
}
LOGDEB(("MainWindowProc: got exit ready event, exiting\n"));
LOGDEB("MainWindowProc: got exit ready event, exiting\n" );
return TRUE;
}
default:
@@ -235,14 +235,14 @@ void initAsyncSigs(void (*sigcleanup)(int))
SetConsoleCtrlHandler((PHANDLER_ROUTINE)CtrlHandler, TRUE);
eWorkFinished = CreateEvent(NULL, TRUE, FALSE, NULL);
if (eWorkFinished == INVALID_HANDLE_VALUE) {
LOGERR(("initAsyncSigs: error creating exitready event\n"));
LOGERR("initAsyncSigs: error creating exitready event\n" );
}
}
void recoll_exitready()
{
LOGDEB(("recoll_exitready()\n"));
LOGDEB("recoll_exitready()\n" );
if (!SetEvent(eWorkFinished)) {
LOGERR(("recoll_exitready: SetEvent failed\n"));
LOGERR("recoll_exitready: SetEvent failed\n" );
}
}
@@ -340,18 +340,17 @@ RclConfig *recollinit(RclInitFlags flags,
bool novfork;
config->getConfParam("novfork", &novfork);
if (novfork) {
LOGDEB0(("rclinit: will use fork() for starting commands\n"));
LOGDEB0("rclinit: will use fork() for starting commands\n" );
ExecCmd::useVfork(false);
} else {
LOGDEB0(("rclinit: will use vfork() for starting commands\n"));
LOGDEB0("rclinit: will use vfork() for starting commands\n" );
ExecCmd::useVfork(true);
}
#endif
int flushmb;
if (config->getConfParam("idxflushmb", &flushmb) && flushmb > 0) {
LOGDEB1(("rclinit: idxflushmb=%d, set XAPIAN_FLUSH_THRESHOLD to 10E6\n",
flushmb));
LOGDEB1("rclinit: idxflushmb=" << (flushmb) << ", set XAPIAN_FLUSH_THRESHOLD to 10E6\n" );
static const char *cp = "XAPIAN_FLUSH_THRESHOLD=1000000";
#ifdef PUTENV_ARG_CONST
::putenv(cp);
@@ -390,3 +389,4 @@ bool recoll_ismainthread()
return std::this_thread::get_id() == mainthread_id;
}

View File

@@ -55,8 +55,7 @@ public:
{
string out;
unacmaybefold(in, out, "UTF-8", m_op);
LOGDEB2(("SynTermTransUnac(%d): in [%s] out [%s]\n", int(m_op),
in.c_str(), out.c_str()));
LOGDEB2("SynTermTransUnac(" << (int(m_op)) << "): in [" << (in) << "] out [" << (out) << "]\n" );
return out;
}
UnacOp m_op;
@@ -68,3 +67,4 @@ extern bool createExpansionDbs(Xapian::WritableDatabase& wdb,
}
#endif /* _EXPANSIONDBS_H_INCLUDED_ */

View File

@@ -127,8 +127,7 @@ void Query::Native::setDbWideQTermsFreqs()
for (vector<string>::const_iterator qit = qterms.begin();
qit != qterms.end(); qit++) {
termfreqs[*qit] = xrdb.get_termfreq(*qit) / doccnt;
LOGABS(("setDbWideQTermFreqs: [%s] db freq %.1e\n", qit->c_str(),
termfreqs[*qit]));
LOGABS("setDbWideQTermFreqs: [" << (qit) << "] db freq " << (termfreqs[*qit]) << "\n" );
}
}
@@ -147,7 +146,7 @@ double Query::Native::qualityTerms(Xapian::docid docid,
const vector<string>& terms,
multimap<double, vector<string> >& byQ)
{
LOGABS(("qualityTerms\n"));
LOGABS("qualityTerms\n" );
setDbWideQTermsFreqs();
map<string, double> termQcoefs;
@@ -166,7 +165,7 @@ double Query::Native::qualityTerms(Xapian::docid docid,
{
string deb;
hld.toString(deb);
LOGABS(("qualityTerms: hld: %s\n", deb.c_str()));
LOGABS("qualityTerms: hld: " << (deb) << "\n" );
}
#endif
@@ -195,7 +194,7 @@ double Query::Native::qualityTerms(Xapian::docid docid,
}
byRootstr.append("\n");
}
LOGABS(("\nqualityTerms: uterms to terms: %s\n", byRootstr.c_str()));
LOGABS("\nqualityTerms: uterms to terms: " << (byRootstr) << "\n" );
}
#endif
@@ -243,10 +242,10 @@ double Query::Native::qualityTerms(Xapian::docid docid,
#ifdef DEBUGABSTRACT
for (multimap<double, vector<string> >::reverse_iterator mit= byQ.rbegin();
mit != byQ.rend(); mit++) {
LOGABS(("qualityTerms: group\n"));
LOGABS("qualityTerms: group\n" );
for (vector<string>::const_iterator qit = mit->second.begin();
qit != mit->second.end(); qit++) {
LOGABS(("%.1e->[%s]\n", mit->first, qit->c_str()));
LOGABS("" << (mit->first) << "->[" << (qit) << "]\n" );
}
}
#endif
@@ -315,8 +314,7 @@ int Query::Native::makeAbstract(Xapian::docid docid,
int imaxoccs, int ictxwords)
{
Chrono chron;
LOGABS(("makeAbstract: docid %ld imaxoccs %d ictxwords %d\n",
long(docid), imaxoccs, ictxwords));
LOGABS("makeAbstract: docid " << (long(docid)) << " imaxoccs " << (imaxoccs) << " ictxwords " << (ictxwords) << "\n" );
// The (unprefixed) terms matched by this document
vector<string> matchedTerms;
@@ -339,7 +337,7 @@ int Query::Native::makeAbstract(Xapian::docid docid,
// aggregated by the qualityTerms() routine.
multimap<double, vector<string> > byQ;
double totalweight = qualityTerms(docid, matchedTerms, byQ);
LOGABS(("makeAbstract:%d: computed Qcoefs.\n", chron.ms()));
LOGABS("makeAbstract:" << (chron.ms()) << ": computed Qcoefs.\n" );
// This can't happen, but would crash us
if (totalweight == 0.0) {
LOGERR("makeAbstract: totalweight == 0.0 !\n" );
@@ -376,8 +374,7 @@ int Query::Native::makeAbstract(Xapian::docid docid,
const unsigned int maxtotaloccs = imaxoccs > 0 ? imaxoccs :
m_q->m_db->getAbsLen() /(7 * (m_q->m_db->getAbsCtxLen() + 1));
int ctxwords = ictxwords == -1 ? m_q->m_db->getAbsCtxLen() : ictxwords;
LOGABS(("makeAbstract:%d: mxttloccs %d ctxwords %d\n",
chron.ms(), maxtotaloccs, ctxwords));
LOGABS("makeAbstract:" << (chron.ms()) << ": mxttloccs " << (maxtotaloccs) << " ctxwords " << (ctxwords) << "\n" );
int ret = ABSRES_OK;
@@ -405,8 +402,7 @@ int Query::Native::makeAbstract(Xapian::docid docid,
string qterm = *qit;
LOGABS(("makeAbstract: [%s] %d max grp occs (coef %.2f)\n",
qterm.c_str(), maxgrpoccs, q));
LOGABS("makeAbstract: [" << (qterm) << "] " << (maxgrpoccs) << " max grp occs (coef " << (q) << ")\n" );
// The match term may span several words
int qtrmwrdcnt =
@@ -425,8 +421,7 @@ int Query::Native::makeAbstract(Xapian::docid docid,
int ipos = *pos;
if (ipos < int(baseTextPosition)) // Not in text body
continue;
LOGABS(("makeAbstract: [%s] at pos %d grpoccs %d maxgrpoccs"
" %d\n", qterm.c_str(), ipos, grpoccs, maxgrpoccs));
LOGABS("makeAbstract: [" << (qterm) << "] at pos " << (ipos) << " grpoccs " << (grpoccs) << " maxgrpoccs " << (maxgrpoccs) << "\n" );
totaloccs++;
grpoccs++;
@@ -466,13 +461,13 @@ int Query::Native::makeAbstract(Xapian::docid docid,
// Group done ?
if (grpoccs >= maxgrpoccs) {
ret |= ABSRES_TRUNC;
LOGABS(("Db::makeAbstract: max group occs cutoff\n"));
LOGABS("Db::makeAbstract: max group occs cutoff\n" );
break;
}
// Global done ?
if (totaloccs >= maxtotaloccs) {
ret |= ABSRES_TRUNC;
LOGABS(("Db::makeAbstract: max occurrences cutoff\n"));
LOGABS("Db::makeAbstract: max occurrences cutoff\n" );
break;
}
}
@@ -482,15 +477,14 @@ int Query::Native::makeAbstract(Xapian::docid docid,
if (totaloccs >= maxtotaloccs) {
ret |= ABSRES_TRUNC;
LOGABS(("Db::makeAbstract: max1 occurrences cutoff\n"));
LOGABS("Db::makeAbstract: max1 occurrences cutoff\n" );
break;
}
}
}
maxpos += ctxwords + 1;
LOGABS(("makeAbstract:%d:chosen number of positions %d\n",
chron.millis(), totaloccs));
LOGABS("makeAbstract:" << (chron.millis()) << ":chosen number of positions " << (totaloccs) << "\n" );
// This can happen if there are term occurences in the keywords
// etc. but not elsewhere ?
if (totaloccs == 0) {
@@ -566,8 +560,7 @@ int Query::Native::makeAbstract(Xapian::docid docid,
vector<int> vpbreaks;
ndb->getPagePositions(docid, vpbreaks);
LOGABS(("makeAbstract:%d: extracting. Got %u pages\n", chron.millis(),
vpbreaks.size()));
LOGABS("makeAbstract:" << (chron.millis()) << ": extracting. Got " << (vpbreaks.size()) << " pages\n" );
// Finally build the abstract by walking the map (in order of position)
vabs.clear();
string chunk;
@@ -615,3 +608,5 @@ int Query::Native::makeAbstract(Xapian::docid docid,
}

View File

@@ -129,15 +129,14 @@ public:
m_totalterms++;
string otrm;
if (!unacmaybefold(itrm, otrm, "UTF-8", UNACOP_UNACFOLD)) {
LOGDEB(("splitter::takeword: unac [%s] failed\n", itrm.c_str()));
LOGDEB("splitter::takeword: unac [" << (itrm) << "] failed\n" );
m_unacerrors++;
// We don't generate a fatal error because of a bad term,
// but one has to put the limit somewhere
if (m_unacerrors > 500 &&
(double(m_totalterms) / double(m_unacerrors)) < 2.0) {
// More than 1 error for every other term
LOGERR(("splitter::takeword: too many unac errors %d/%d\n",
m_unacerrors, m_totalterms));
LOGERR("splitter::takeword: too many unac errors " << (m_unacerrors) << "/" << (m_totalterms) << "\n" );
return false;
}
return true;
@@ -227,8 +226,7 @@ public:
virtual bool takeword(const string& term, int pos, int bs, int be)
{
LOGDEB1(("TermProcCom::takeword: pos %d %d %d [%s]\n",
pos, bs, be, term.c_str()));
LOGDEB1("TermProcCom::takeword: pos " << (pos) << " " << (bs) << " " << (be) << " [" << (term) << "]\n" );
bool isstop = m_stops.isStop(term);
bool twogramemit = false;
@@ -300,3 +298,4 @@ private:
} // End namespace Rcl
#endif /* _TERMPROC_H_INCLUDED_ */
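
One detail worth keeping in mind for this kind of conversion (and the reason the converted calls in setDbWideQTermsFreqs() and qualityTerms() above stream *qit rather than the iterator itself): the printf-style calls had to pass a char* for %s, typically via c_str(), whereas the stream form can insert a std::string directly, but an iterator still has to be dereferenced before insertion. A small standalone illustration, using made-up data rather than anything from the diff:

#include <iostream>
#include <string>
#include <vector>

int main()
{
    std::vector<std::string> qterms{"recoll", "xapian"};  // made-up terms
    for (std::vector<std::string>::const_iterator qit = qterms.begin();
         qit != qterms.end(); qit++) {
        // printf style needed a char*:  printf("[%s]\n", qit->c_str());
        // stream style takes the string itself, once the iterator is
        // dereferenced; streaming qit directly would not compile.
        std::cout << "[" << *qit << "]\n";
    }
    return 0;
}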