Generate const keyword_hash maps at compile time

Otherwise the map gets pointlessly recreated thousands of times during footprint loading
Marek Roszko 2022-08-09 00:45:37 -04:00
parent 70cc6e780d
commit f695ae5522
5 changed files with 54 additions and 33 deletions
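
Condensed view of what the commit changes, as a hedged sketch rather than KiCad's exact declarations (KEYWORD is simplified here and std::unordered_map stands in for the real KEYWORD_MAP): previously every lexer object owned a keyword hash table and filled it in its constructor; after this change each generated lexer class owns one static const map, and instances merely keep a pointer to it.

    #include <string>
    #include <unordered_map>

    // Simplified stand-in for KiCad's KEYWORD entry: token name plus enum value.
    struct KEYWORD { const char* name; int token; };

    // Before this commit: each instance owned a map and rebuilt it on construction.
    class LEXER_BEFORE
    {
        std::unordered_map<std::string, int> keyword_hash;
    public:
        LEXER_BEFORE( const KEYWORD* aTable, unsigned aCount )
        {
            for( unsigned i = 0; i < aCount; ++i )
                keyword_hash[ aTable[i].name ] = aTable[i].token;   // paid on every construction
        }
    };

    // After this commit: the generated lexer class carries one const map built a
    // single time at startup; constructing an instance only stores a pointer.
    class LEXER_AFTER
    {
        const std::unordered_map<std::string, int>* keywordsLookup;
    public:
        explicit LEXER_AFTER( const std::unordered_map<std::string, int>* aMap ) :
                keywordsLookup( aMap )
        {}
    };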


@@ -238,6 +238,7 @@ class ${LEXERCLASS} : public DSNLEXER
{
/// Auto generated lexer keywords table and length:
static const KEYWORD keywords[];
static const KEYWORD_MAP keywords_hash;
static const unsigned keyword_count;
public:
@@ -248,7 +249,7 @@ public:
* If left empty, then _(\"clipboard\") is used.
*/
${LEXERCLASS}( const std::string& aSExpression, const wxString& aSource = wxEmptyString ) :
DSNLEXER( keywords, keyword_count, aSExpression, aSource )
DSNLEXER( keywords, keyword_count, &keywords_hash, aSExpression, aSource )
{
}
@@ -262,7 +263,7 @@ public:
* @param aFilename is the name of the opened file, needed for error reporting.
*/
${LEXERCLASS}( FILE* aFile, const wxString& aFilename ) :
DSNLEXER( keywords, keyword_count, aFile, aFilename )
DSNLEXER( keywords, keyword_count, &keywords_hash, aFile, aFilename )
{
}
@@ -278,7 +279,7 @@ public:
* STRING_LINE_READER or FILE_LINE_READER. No ownership is taken of aLineReader.
*/
${LEXERCLASS}( LINE_READER* aLineReader ) :
DSNLEXER( keywords, keyword_count, aLineReader )
DSNLEXER( keywords, keyword_count, &keywords_hash, aLineReader )
{
}
@@ -394,3 +395,30 @@ const char* ${LEXERCLASS}::TokenName( T aTok )
}
"
)
file( APPEND "${outCppFile}"
"
const KEYWORD_MAP ${LEXERCLASS}::keywords_hash({
"
)
set( TOKEN_NUM 0 )
math( EXPR tokensAfter "${tokensAfter} - 1" )
foreach( token ${tokens} )
file(APPEND "${outCppFile}" " { \"${token}\", ${TOKEN_NUM} }" )
if( TOKEN_NUM EQUAL tokensAfter )
file( APPEND "${outCppFile}" "\n" )
else( TOKEN_NUM EQUAL tokensAfter )
file( APPEND "${outCppFile}" ",\n" )
endif()
math( EXPR TOKEN_NUM "${TOKEN_NUM} + 1" )
endforeach()
file( APPEND "${outCppFile}"
"});")


@@ -49,27 +49,11 @@ void DSNLEXER::init()
commentsAreTokens = false;
curOffset = 0;
#if 1
if( keywordCount > 11 )
{
// resize the hashtable bucket count
keyword_hash.reserve( keywordCount );
}
// fill the specialized "C string" hashtable from keywords[]
const KEYWORD* it = keywords;
const KEYWORD* end = it + keywordCount;
for( ; it < end; ++it )
{
keyword_hash[it->name] = it->token;
}
#endif
}
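
The #if 1 ... #endif block shown above is the per-instance work the commit message refers to; it is removed here. Every DSNLEXER construction used to walk keywords[] and insert keywordCount entries into its own keyword_hash. A back-of-the-envelope illustration with purely hypothetical numbers (not measurements from KiCad):

    #include <cstddef>

    int main()
    {
        // Hypothetical orders of magnitude only, to make the commit message concrete.
        const std::size_t keywordCount      = 250;   // entries in one generated token list
        const std::size_t lexersConstructed = 5000;  // e.g. one lexer per footprint loaded

        // Old scheme: the fill loop above ran once per constructed lexer.
        const std::size_t oldInsertions = keywordCount * lexersConstructed;  // 1,250,000

        // New scheme: the const map is built once during static initialization.
        const std::size_t newInsertions = keywordCount;                      // 250

        return oldInsertions >= newInsertions ? 0 : 1;
    }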
DSNLEXER::DSNLEXER( const KEYWORD* aKeywordTable, unsigned aKeywordCount,
const KEYWORD_MAP* aKeywordMap,
FILE* aFile, const wxString& aFilename ) :
iOwnReaders( true ),
start( nullptr ),
@@ -77,7 +61,8 @@ DSNLEXER::DSNLEXER( const KEYWORD* aKeywordTable, unsigned aKeywordCount,
limit( nullptr ),
reader( nullptr ),
keywords( aKeywordTable ),
keywordCount( aKeywordCount )
keywordCount( aKeywordCount ),
keywordsLookup( aKeywordMap )
{
FILE_LINE_READER* fileReader = new FILE_LINE_READER( aFile, aFilename );
PushReader( fileReader );
@@ -86,6 +71,7 @@ DSNLEXER::DSNLEXER( const KEYWORD* aKeywordTable, unsigned aKeywordCount,
DSNLEXER::DSNLEXER( const KEYWORD* aKeywordTable, unsigned aKeywordCount,
const KEYWORD_MAP* aKeywordMap,
const std::string& aClipboardTxt, const wxString& aSource ) :
iOwnReaders( true ),
start( nullptr ),
@@ -93,7 +79,8 @@ DSNLEXER::DSNLEXER( const KEYWORD* aKeywordTable, unsigned aKeywordCount,
limit( nullptr ),
reader( nullptr ),
keywords( aKeywordTable ),
keywordCount( aKeywordCount )
keywordCount( aKeywordCount ),
keywordsLookup( aKeywordMap )
{
STRING_LINE_READER* stringReader = new STRING_LINE_READER( aClipboardTxt, aSource.IsEmpty() ?
wxString( FMT_CLIPBOARD ) : aSource );
@@ -103,6 +90,7 @@ DSNLEXER::DSNLEXER( const KEYWORD* aKeywordTable, unsigned aKeywordCount,
DSNLEXER::DSNLEXER( const KEYWORD* aKeywordTable, unsigned aKeywordCount,
const KEYWORD_MAP* aKeywordMap,
LINE_READER* aLineReader ) :
iOwnReaders( false ),
start( nullptr ),
@@ -110,7 +98,8 @@ DSNLEXER::DSNLEXER( const KEYWORD* aKeywordTable, unsigned aKeywordCount,
limit( nullptr ),
reader( nullptr ),
keywords( aKeywordTable ),
keywordCount( aKeywordCount )
keywordCount( aKeywordCount ),
keywordsLookup( aKeywordMap )
{
if( aLineReader )
PushReader( aLineReader );
@@ -127,7 +116,8 @@ DSNLEXER::DSNLEXER( const std::string& aSExpression, const wxString& aSource ) :
limit( nullptr ),
reader( nullptr ),
keywords( empty_keywords ),
keywordCount( 0 )
keywordCount( 0 ),
keywordsLookup( nullptr )
{
STRING_LINE_READER* stringReader = new STRING_LINE_READER( aSExpression, aSource.IsEmpty() ?
wxString( FMT_CLIPBOARD ) : aSource );
@@ -239,10 +229,13 @@ LINE_READER* DSNLEXER::PopReader()
int DSNLEXER::findToken( const std::string& tok ) const
{
KEYWORD_MAP::const_iterator it = keyword_hash.find( tok.c_str() );
if( keywordsLookup != nullptr )
{
KEYWORD_MAP::const_iterator it = keywordsLookup->find( tok.c_str() );
if( it != keyword_hash.end() )
return it->second;
if( it != keywordsLookup->end() )
return it->second;
}
return DSN_SYMBOL; // not a keyword, some arbitrary symbol.
}
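
Assumed behaviour after this hunk, restated as a self-contained sketch (types simplified, DSN_SYMBOL value is a placeholder): lexers created with a generated keyword map resolve keywords through a single hash lookup, while callers with no keyword table, such as the CADSTAR and ACCEL ASCII (P-CAD) loaders further down that now pass nullptr, get DSN_SYMBOL back for every identifier, as they effectively did before with an empty table.

    #include <string>
    #include <unordered_map>

    // Simplified stand-ins for the types used in the hunk above.
    using KEYWORD_MAP = std::unordered_map<std::string, int>;
    const int DSN_SYMBOL = -1;   // placeholder; the real enumerator is defined in dsnlexer.h

    // Assumed semantics of findToken() after this change: a null lookup table
    // means "no keywords", so every token falls through to DSN_SYMBOL.
    int findToken( const KEYWORD_MAP* aLookup, const std::string& aTok )
    {
        if( aLookup != nullptr )
        {
            KEYWORD_MAP::const_iterator it = aLookup->find( aTok );

            if( it != aLookup->end() )
                return it->second;
        }

        return DSN_SYMBOL;   // not a keyword, some arbitrary symbol
    }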


@@ -2341,7 +2341,7 @@ XNODE* CADSTAR_ARCHIVE_PARSER::LoadArchiveFile( const wxString& aFileName,
long fileSize = ftell( fp );
rewind( fp );
DSNLEXER lexer( emptyKeywords, 0, fp, aFileName );
DSNLEXER lexer( emptyKeywords, 0, nullptr, fp, aFileName );
auto currentProgress = [&]() -> double
{


@@ -89,7 +89,7 @@ public:
* @param aFile is an open file, which will be closed when this is destructed.
* @param aFileName is the name of the file
*/
DSNLEXER( const KEYWORD* aKeywordTable, unsigned aKeywordCount,
DSNLEXER( const KEYWORD* aKeywordTable, unsigned aKeywordCount, const KEYWORD_MAP* aKeywordMap,
FILE* aFile, const wxString& aFileName );
/**
@@ -101,7 +101,7 @@ public:
* @param aSExpression is text to feed through a STRING_LINE_READER
* @param aSource is a description of aSExpression, used for error reporting.
*/
DSNLEXER( const KEYWORD* aKeywordTable, unsigned aKeywordCount,
DSNLEXER( const KEYWORD* aKeywordTable, unsigned aKeywordCount, const KEYWORD_MAP* aKeywordMap,
const std::string& aSExpression, const wxString& aSource = wxEmptyString );
/**
@@ -126,7 +126,7 @@ public:
* @param aLineReader is any subclassed instance of LINE_READER, such as
* #STRING_LINE_READER or #FILE_LINE_READER. No ownership is taken.
*/
DSNLEXER( const KEYWORD* aKeywordTable, unsigned aKeywordCount,
DSNLEXER( const KEYWORD* aKeywordTable, unsigned aKeywordCount, const KEYWORD_MAP* aKeywordMap,
LINE_READER* aLineReader = nullptr );
virtual ~DSNLEXER();
@@ -530,7 +530,7 @@ protected:
const KEYWORD* keywords; ///< table sorted by CMake for bsearch()
unsigned keywordCount; ///< count of keywords table
KEYWORD_MAP keyword_hash; ///< fast, specialized "C string" hashtable
const KEYWORD_MAP* keywordsLookup; ///< fast, specialized "C string" hashtable
#endif // SWIG
};
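
The member comment calls KEYWORD_MAP a fast, specialized "C string" hashtable. The essential ingredient of such a table, sketched below under the assumption that keys are plain const char* (KiCad's actual hash and equality functors may well differ), is that keys must be hashed and compared by their contents rather than by pointer value:

    #include <cstddef>
    #include <cstring>
    #include <unordered_map>

    // Content-based hashing for C-string keys (a simple polynomial hash; the
    // project's real functor is likely different but serves the same purpose).
    struct C_STR_HASH
    {
        std::size_t operator()( const char* aKey ) const
        {
            std::size_t hash = 2166136261u;

            while( *aKey )
                hash = hash * 31 + static_cast<unsigned char>( *aKey++ );

            return hash;
        }
    };

    // Content-based equality, so a key such as "pad" matches a stored "pad"
    // even when the two string literals live at different addresses.
    struct C_STR_EQUAL
    {
        bool operator()( const char* aLhs, const char* aRhs ) const
        {
            return std::strcmp( aLhs, aRhs ) == 0;
        }
    };

    using KEYWORD_MAP_SKETCH = std::unordered_map<const char*, int, C_STR_HASH, C_STR_EQUAL>;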


@@ -60,7 +60,7 @@ static const char ACCEL_ASCII_KEYWORD[] = "ACCEL_ASCII";
fseek( fp, 0, SEEK_SET );
// lexer now owns fp, will close on exception or return
DSNLEXER lexer( empty_keywords, 0, fp, aFileName );
DSNLEXER lexer( empty_keywords, 0, nullptr, fp, aFileName );
iNode = new XNODE( wxXML_ELEMENT_NODE, wxT( "www.lura.sk" ) );