Fix incorrect usage of a UTF8 char in a C string in code.

Using a UTF-8 char inside a narrow C string can work on Unix, but not on Windows.
It must be explicitly converted into a Unicode value inside a wide string
(like a wxString).
This commit is contained in:
jean-pierre charras 2020-02-16 10:21:36 +01:00
parent 0ae3d0216a
commit 021eb2f3a8
3 changed files with 12 additions and 2 deletions

View File

@ -336,6 +336,16 @@ protected:
unsigned int aCol,
wxDataViewItemAttr& aAttr ) const override;
/**
 * @return a Unicode string used to mark a pinned node name (e.g. a
 * pinned library name) in the tree view.
 * The marker is not a 7-bit ASCII char.  It is built from an explicit
 * Unicode code point (wxUniChar) rather than a UTF-8 byte sequence in a
 * narrow C string, because the latter depends on the compiler's
 * source/execution charset and breaks on Windows (MSVC).
 */
const wxString GetPinningSymbol() const
{
    // U+21EE — NOTE(review): the original UTF-8 literal was lost in this
    // view of the diff; confirm the exact code point against upstream.
    return wxString( wxUniChar( 0x21EE ) );
}
private:
EDA_BASE_FRAME* m_parent;

View File

@ -216,7 +216,7 @@ void SYMBOL_TREE_SYNCHRONIZING_ADAPTER::GetValue( wxVariant& aVariant, wxDataVie
aVariant = node->m_Name;
if( node->m_Pinned )
aVariant = "" + node->m_Name;
aVariant = GetPinningSymbol() + node->m_Name;
// mark modified libs with an asterisk
if( node->m_Type == LIB_TREE_NODE::LIB && m_libMgr->IsLibraryModified( node->m_Name ) )

View File

@ -184,7 +184,7 @@ void FP_TREE_SYNCHRONIZING_ADAPTER::GetValue( wxVariant& aVariant, wxDataViewIte
aVariant = currentFPName;
}
else if( node->m_Pinned )
aVariant = "" + node->m_Name;
aVariant = GetPinningSymbol() + node->m_Name;
else
aVariant = node->m_Name;
break;