Overview
Comment: [graphspell][core][build][lo] remove extended dictionary
SHA3-256: 51a40c07e38502d8732c022b17231a85
User & Date: olr on 2019-04-04 07:23:07
Context
2019-04-04
08:03  [graphspell] suggestions with trailing numbers: avoid repeating the split for each dictionary (check-in: e32c092585, user: olr, tags: graphspell, trunk)
07:23  [graphspell][core][build][lo] remove extended dictionary (check-in: 51a40c07e3, user: olr, tags: build, core, graphspell, lo, trunk)
2019-04-03
20:04  [fr] phonet_simil: quota/cota (check-in: 8fced24da3, user: olr, tags: fr, trunk)
Changes
Modified gc_core/js/lang_core/gc_engine.js from [65c6c687b8] to [827604c5fa].
//// Initialization

load: function (sContext="JavaScript", sColorType="aRGB", sPath="") {
    try {
        if(typeof(process) !== 'undefined') {
            var spellchecker = require("../graphspell/spellchecker.js");
-            _oSpellChecker = new spellchecker.SpellChecker("${lang}", "", "${dic_main_filename_js}", "${dic_extended_filename_js}", "${dic_community_filename_js}", "${dic_personal_filename_js}");
+            _oSpellChecker = new spellchecker.SpellChecker("${lang}", "", "${dic_main_filename_js}", "${dic_community_filename_js}", "${dic_personal_filename_js}");
        } else if (typeof(require) !== 'undefined') {
            var spellchecker = require("resource://grammalecte/graphspell/spellchecker.js");
-            _oSpellChecker = new spellchecker.SpellChecker("${lang}", "", "${dic_main_filename_js}", "${dic_extended_filename_js}", "${dic_community_filename_js}", "${dic_personal_filename_js}");
+            _oSpellChecker = new spellchecker.SpellChecker("${lang}", "", "${dic_main_filename_js}", "${dic_community_filename_js}", "${dic_personal_filename_js}");
        } else {
-            _oSpellChecker = new SpellChecker("${lang}", sPath, "${dic_main_filename_js}", "${dic_extended_filename_js}", "${dic_community_filename_js}", "${dic_personal_filename_js}");
+            _oSpellChecker = new SpellChecker("${lang}", sPath, "${dic_main_filename_js}", "${dic_community_filename_js}", "${dic_personal_filename_js}");
        }
        _sAppContext = sContext;
        _dOptions = gc_options.getOptions(sContext).gl_shallowCopy();     // duplication necessary, to be able to reset to default
        _dOptionsColors = gc_options.getOptionsColors(sContext, sColorType);
        _oTokenizer = _oSpellChecker.getTokenizer();
        _oSpellChecker.activateStorage();
    }
Modified gc_core/py/lang_core/gc_engine.py from [7f3fb3a1a4] to [731ea086d0].
    "initialization of the grammar checker"
    global _oSpellChecker
    global _sAppContext
    global _dOptions
    global _dOptionsColors
    global _oTokenizer
    try:
-        _oSpellChecker = SpellChecker("${lang}", "${dic_main_filename_py}", "${dic_extended_filename_py}", "${dic_community_filename_py}", "${dic_personal_filename_py}")
+        _oSpellChecker = SpellChecker("${lang}", "${dic_main_filename_py}", "${dic_community_filename_py}", "${dic_personal_filename_py}")
        _sAppContext = sContext
        _dOptions = dict(gc_options.getOptions(sContext))     # duplication necessary, to be able to reset to default
        _dOptionsColors = gc_options.getOptionsColors(sContext, sColorType)
        _oTokenizer = _oSpellChecker.getTokenizer()
        _oSpellChecker.activateStorage()
    except:
        traceback.print_exc()
Modified gc_lang/fr/config.ini from [eb65238ea7] to [7cdb20c7c4].
lexicon_src = lexicons/French.lex
dic_filenames = fr-allvars,fr-classic,fr-reform
dic_name = fr-allvars,fr-classic,fr-reform
dic_description = Français (Toutes variantes),Français (Classique),Français (Réforme 1990)
dic_filter = ,[*CMPX]$,[*RPX]$
dic_default_filename_py = fr-allvars
dic_default_filename_js = fr-allvars
-# extended dictionary
-lexicon_extended_src = lexicons/French.extended.lex
-dic_extended_filename = fr.extended
-dic_extended_name = fr.extended
-dic_extended_description = Français - dictionnaire étendu
# community dictionary
lexicon_community_src = lexicons/French.community.lex
dic_community_filename = fr.community
dic_community_name = fr.community
dic_community_description = Français - dictionnaire communautaire
# personal dictionary
lexicon_personal_src = lexicons/French.personal.lex
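The build script reads this [args] section as a plain mapping, and an empty lexicon_*_src value disables the matching build option (see the make.py diff below). A minimal sketch of that lookup, assuming a standard configparser read of gc_lang/fr/config.ini and the section name used by make.py; it is illustrative only, not part of the check-in:

    # Minimal sketch: consuming the keys left after the extended block was removed.
    import configparser

    xConfig = configparser.ConfigParser()
    xConfig.read("gc_lang/fr/config.ini", encoding="utf-8")
    dVars = xConfig._sections['args']          # section name assumed, as in make.py

    # An empty source path means the optional dictionary cannot be built.
    if not dVars["lexicon_community_src"]:
        print("no community lexicon configured")
    if not dVars["lexicon_personal_src"]:
        print("no personal lexicon configured")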
Modified gc_lang/fr/oxt/Graphspell.py from [d8a06b777e] to [b9945ec9c3].
            sPersonalDicJSON = self.xOptionNode.getPropertyValue("personal_dic")
            if sPersonalDicJSON:
                try:
                    personal_dic = json.loads(sPersonalDicJSON)
                except:
                    print("Graphspell: wrong personal_dic")
                    traceback.print_exc()
-            self.oGraphspell = SpellChecker("fr", "fr-"+sMainDicName+".bdic", "", "", personal_dic)
+            self.oGraphspell = SpellChecker("fr", "fr-"+sMainDicName+".bdic", "", personal_dic)
            self.loadHunspell()
            # print("Graphspell: init done")
        except:
            print("Graphspell: init failed")
            traceback.print_exc()

    def loadHunspell (self):
        # Hunspell is a fallback spellchecker
        try:
            self.xHunspell = self.xSvMgr.createInstance("org.openoffice.lingu.MySpellSpellChecker")
        except:
            print("Hunspell: init failed")
            traceback.print_exc()
................
    def hasLocale (self, aLocale):
        if aLocale in self.locales:
            return True
        for e in self.locales:
            if aLocale.Language == e.Language and (e.Country == aLocale.Country or e.Country == ""):
                return True
        return False

    def getLocales (self):
        return self.locales

    # XSpellChecker
    # http://www.openoffice.org/api/docs/common/ref/com/sun/star/linguistic2/XSpellChecker.html
    def isValid (self, aWord, rLocale, aProperties):
        try:
            aWord = zElidedWords.sub("", aWord.rstrip("."), count=1)
            return self.oGraphspell.isValidToken(aWord)
            # return self.xHunspell.isValid(aWord, self.xHunspellLocale, aProperties)
................
        #self.xFallbackSpellChecker = self.xSvMgr.createInstance(sSpellchecker)
        #if self.xFallbackSpellChecker:
        #    print("Spell checker: %s" % xSpellChecker)
        #    break


class SpellAlternatives (unohelper.Base, XSpellAlternatives):

    def __init__ (self, sWord, lSugg):
        try:
            self.sWord = sWord
            self.lSugg = lSugg
            self.xLocale = Locale('fr', 'FR', '')
        except:
            traceback.print_exc()

    # XSpellAlternatives
    # http://www.openoffice.org/api/docs/common/ref/com/sun/star/linguistic2/XSpellAlternatives.html
    def getWord (self):
        return self.sWord

    def getLocale (self):
        return self.xLocale

    def getFailureType (self):
        return 4
        # IS_NEGATIVE_WORD = 2     # The word is a negative one, that is, it should not be used.
        # CAPTION_ERROR = 3        # The capitalization of the word is wrong.
        # SPELLING_ERROR = 4       # The spelling of the word is wrong (or at least not known to be correct).
        # No difference -> red underline

    def getAlternativesCount (self):
        return len(self.lSugg)

    def getAlternatives (self):
        return self.lSugg


g_ImplementationHelper = unohelper.ImplementationHelper()
g_ImplementationHelper.addImplementation(Graphspell, "net.grammalecte.graphspell", ("com.sun.star.linguistic2.SpellChecker",),)
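Because SpellChecker is called here with positional arguments, dropping the extended slot changes what each placeholder means; a short sketch of the mapping under the new signature (the import path and main dictionary file name are illustrative, not part of the check-in):

    # New signature: SpellChecker(sLangCode, sfMainDic="", sfCommunityDic="", sfPersonalDic="")
    from graphspell.spellchecker import SpellChecker   # illustrative import path

    personal_dic = {}   # stands in for the JSON-decoded personal dictionary above
    # Keeping the old five positional arguments would now pass personal_dic as
    # sfCommunityDic, which is why one "" placeholder is removed in Graphspell.py.
    oGraphspell = SpellChecker("fr", "fr-allvars.bdic", "", personal_dic)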
Modified graphspell-js/spellchecker.js from [a3d5cf0515] to [2783c2d059].
// Spellchecker
// Wrapper for the IBDAWG class.
// Useful to check several dictionaries at once.

// To avoid iterating over a pile of dictionaries, it is assumed that 3 are enough:
// - the main dictionary, bundled with the package
-// - the extended dictionary
-// - the community dictionary, added by an organization
+// - the community dictionary, a merge of different external dictionaries
// - the personal dictionary, created by the user for its own convenience

/* jshint esversion:6, -W097 */
/* jslint esversion:6 */
/* global require, exports, console, IBDAWG, Tokenizer */

"use strict";
................
    ["fr", "fr-allvars.json"],
    ["en", "en.json"]
]);


class SpellChecker {

-    constructor (sLangCode, sPath="", mainDic="", extentedDic="", communityDic="", personalDic="") {
+    constructor (sLangCode, sPath="", mainDic="", communityDic="", personalDic="") {
        // returns true if the main dictionary is loaded
        this.sLangCode = sLangCode;
        if (!mainDic) {
            mainDic = dDefaultDictionaries.gl_get(sLangCode, "");
        }
        this.oMainDic = this._loadDictionary(mainDic, sPath, true);
-        this.oExtendedDic = this._loadDictionary(extentedDic, sPath);
        this.oCommunityDic = this._loadDictionary(communityDic, sPath);
        this.oPersonalDic = this._loadDictionary(personalDic, sPath);
-        this.bExtendedDic = Boolean(this.oExtendedDic);
        this.bCommunityDic = Boolean(this.oCommunityDic);
        this.bPersonalDic = Boolean(this.oPersonalDic);
        this.oTokenizer = null;
        // storage
        this.bStorage = false;
        this._dMorphologies = new Map();            // key: flexion, value: list of morphologies
        this._dLemmas = new Map();                  // key: flexion, value: list of lemmas
................
    setMainDictionary (dictionary, sPath="") {
        // returns true if the dictionary is loaded
        this.oMainDic = this._loadDictionary(dictionary, sPath, true);
        return Boolean(this.oMainDic);
    }

-    setExtendedDictionary (dictionary, sPath="", bActivate=true) {
-        // returns true if the dictionary is loaded
-        this.oExtendedDic = this._loadDictionary(dictionary, sPath);
-        this.bExtendedDic = (bActivate) ? Boolean(this.oExtendedDic) : false;
-        return Boolean(this.oExtendedDic);
-    }
-
    setCommunityDictionary (dictionary, sPath="", bActivate=true) {
        // returns true if the dictionary is loaded
        this.oCommunityDic = this._loadDictionary(dictionary, sPath);
        this.bCommunityDic = (bActivate) ? Boolean(this.oCommunityDic) : false;
        return Boolean(this.oCommunityDic);
    }
................
    setPersonalDictionary (dictionary, sPath="", bActivate=true) {
        // returns true if the dictionary is loaded
        this.oPersonalDic = this._loadDictionary(dictionary, sPath);
        this.bPersonalDic = (bActivate) ? Boolean(this.oPersonalDic) : false;
        return Boolean(this.oPersonalDic);
    }

-    activateExtendedDictionary () {
-        this.bExtendedDic = Boolean(this.oExtendedDic);
-    }
-
    activateCommunityDictionary () {
        this.bCommunityDic = Boolean(this.oCommunityDic);
    }

    activatePersonalDictionary () {
        this.bPersonalDic = Boolean(this.oPersonalDic);
    }

-    deactivateExtendedDictionary () {
-        this.bExtendedDic = false;
-    }
-
    deactivateCommunityDictionary () {
        this.bCommunityDic = false;
    }

    deactivatePersonalDictionary () {
        this.bPersonalDic = false;
    }
................
    // IBDAWG functions

    isValidToken (sToken) {
        // checks if sToken is valid (if there is hyphens in sToken, sToken is split, each part is checked)
        if (this.oMainDic.isValidToken(sToken)) {
            return true;
        }
-        if (this.bExtendedDic && this.oExtendedDic.isValidToken(sToken)) {
-            return true;
-        }
        if (this.bCommunityDic && this.oCommunityDic.isValidToken(sToken)) {
            return true;
        }
        if (this.bPersonalDic && this.oPersonalDic.isValidToken(sToken)) {
            return true;
        }
        return false;
................
    }

    isValid (sWord) {
        // checks if sWord is valid (different casing tested if the first letter is a capital)
        if (this.oMainDic.isValid(sWord)) {
            return true;
        }
-        if (this.bExtendedDic && this.oExtendedDic.isValid(sWord)) {
-            return true;
-        }
        if (this.bCommunityDic && this.oCommunityDic.isValid(sWord)) {
            return true;
        }
        if (this.bPersonalDic && this.oPersonalDic.isValid(sWord)) {
            return true;
        }
        return false;
................
    }

    lookup (sWord) {
        // checks if sWord is in dictionary as is (strict verification)
        if (this.oMainDic.lookup(sWord)) {
            return true;
        }
-        if (this.bExtendedDic && this.oExtendedDic.lookup(sWord)) {
-            return true;
-        }
        if (this.bCommunityDic && this.oCommunityDic.lookup(sWord)) {
            return true;
        }
        if (this.bPersonalDic && this.oPersonalDic.lookup(sWord)) {
            return true;
        }
        return false;
................
    getMorph (sWord) {
        // retrieves morphologies list, different casing allowed
        if (this.bStorage && this._dMorphologies.has(sWord)) {
            return this._dMorphologies.get(sWord);
        }
        let lMorph = this.oMainDic.getMorph(sWord);
-        if (this.bExtendedDic) {
-            lMorph.push(...this.oExtendedDic.getMorph(sWord));
-        }
        if (this.bCommunityDic) {
            lMorph.push(...this.oCommunityDic.getMorph(sWord));
        }
        if (this.bPersonalDic) {
            lMorph.push(...this.oPersonalDic.getMorph(sWord));
        }
        if (this.bStorage) {
................
        }
        return Array.from(new Set(this.getMorph(sWord).map((sMorph) => { return sMorph.slice(1, sMorph.indexOf("/")); })));
    }

    * suggest (sWord, nSuggLimit=10) {
        // generator: returns 1, 2 or 3 lists of suggestions
        yield this.oMainDic.suggest(sWord, nSuggLimit);
-        if (this.bExtendedDic) {
-            yield this.oExtendedDic.suggest(sWord, nSuggLimit);
-        }
        if (this.bCommunityDic) {
            yield this.oCommunityDic.suggest(sWord, nSuggLimit);
        }
        if (this.bPersonalDic) {
            yield this.oPersonalDic.suggest(sWord, nSuggLimit);
        }
    }

    * select (sFlexPattern="", sTagsPattern="") {
        // generator: returns all entries which flexion fits <sFlexPattern> and morphology fits <sTagsPattern>
        yield* this.oMainDic.select(sFlexPattern, sTagsPattern);
-        if (this.bExtendedDic) {
-            yield* this.oExtendedDic.select(sFlexPattern, sTagsPattern);
-        }
        if (this.bCommunityDic) {
            yield* this.oCommunityDic.select(sFlexPattern, sTagsPattern);
        }
        if (this.bPersonalDic) {
            yield* this.oPersonalDic.select(sFlexPattern, sTagsPattern);
        }
    }

    getSimilarEntries (sWord, nSuggLimit=10) {
        // return a list of tuples (similar word, stem, morphology)
        let lResult = this.oMainDic.getSimilarEntries(sWord, nSuggLimit);
-        if (this.bExtendedDic) {
-            lResult.push(...this.oExtendedDic.getSimilarEntries(sWord, nSuggLimit));
-        }
        if (this.bCommunityDic) {
            lResult.push(...this.oCommunityDic.getSimilarEntries(sWord, nSuggLimit));
        }
        if (this.bPersonalDic) {
            lResult.push(...this.oPersonalDic.getSimilarEntries(sWord, nSuggLimit));
        }
        return lResult;
    }
}


if (typeof(exports) !== 'undefined') {
    exports.SpellChecker = SpellChecker;
}
Modified graphspell/spellchecker.py from [2e8bbcc172] to [fe2798d7b3].
    "en": "en.bdic"
}


class SpellChecker ():
    "SpellChecker: wrapper for the IBDAWG class"

-    def __init__ (self, sLangCode, sfMainDic="", sfExtendedDic="", sfCommunityDic="", sfPersonalDic=""):
+    def __init__ (self, sLangCode, sfMainDic="", sfCommunityDic="", sfPersonalDic=""):
        "returns True if the main dictionary is loaded"
        self.sLangCode = sLangCode
        if not sfMainDic:
            sfMainDic = dDefaultDictionaries.get(sLangCode, "")
        self.oMainDic = self._loadDictionary(sfMainDic, True)
-        self.oExtendedDic = self._loadDictionary(sfExtendedDic)
        self.oCommunityDic = self._loadDictionary(sfCommunityDic)
        self.oPersonalDic = self._loadDictionary(sfPersonalDic)
-        self.bExtendedDic = bool(self.oExtendedDic)
        self.bCommunityDic = bool(self.oCommunityDic)
        self.bPersonalDic = bool(self.oPersonalDic)
        self.oTokenizer = None
        # Default suggestions
        self.dDefaultSugg = None
        self.loadSuggestions(sLangCode)
        # storage
................
        return self.oTokenizer

    def setMainDictionary (self, source):
        "returns True if the dictionary is loaded"
        self.oMainDic = self._loadDictionary(source, True)
        return bool(self.oMainDic)

-    def setExtendedDictionary (self, source, bActivate=True):
-        "returns True if the dictionary is loaded"
-        self.oExtendedDic = self._loadDictionary(source)
-        self.bExtendedDic = False if not bActivate else bool(self.oExtendedDic)
-        return bool(self.oExtendedDic)
-
    def setCommunityDictionary (self, source, bActivate=True):
        "returns True if the dictionary is loaded"
        self.oCommunityDic = self._loadDictionary(source)
        self.bCommunityDic = False if not bActivate else bool(self.oCommunityDic)
        return bool(self.oCommunityDic)

    def setPersonalDictionary (self, source, bActivate=True):
        "returns True if the dictionary is loaded"
        self.oPersonalDic = self._loadDictionary(source)
        self.bPersonalDic = False if not bActivate else bool(self.oPersonalDic)
        return bool(self.oPersonalDic)

-    def activateExtendedDictionary (self):
-        "activate extended dictionary (if available)"
-        self.bExtendedDic = bool(self.oExtendedDic)
-
    def activateCommunityDictionary (self):
        "activate community dictionary (if available)"
        self.bCommunityDic = bool(self.oCommunityDic)

    def activatePersonalDictionary (self):
        "activate personal dictionary (if available)"
        self.bPersonalDic = bool(self.oPersonalDic)

-    def deactivateExtendedDictionary (self):
-        "deactivate extended dictionary"
-        self.bExtendedDic = False
-
    def deactivateCommunityDictionary (self):
        "deactivate community dictionary"
        self.bCommunityDic = False

    def deactivatePersonalDictionary (self):
        "deactivate personal dictionary"
        self.bPersonalDic = False
................
    # IBDAWG functions

    def isValidToken (self, sToken):
        "checks if sToken is valid (if there is hyphens in sToken, sToken is split, each part is checked)"
        if self.oMainDic.isValidToken(sToken):
            return True
-        if self.bExtendedDic and self.oExtendedDic.isValidToken(sToken):
-            return True
        if self.bCommunityDic and self.oCommunityDic.isValidToken(sToken):
            return True
        if self.bPersonalDic and self.oPersonalDic.isValidToken(sToken):
            return True
        return False

    def isValid (self, sWord):
        "checks if sWord is valid (different casing tested if the first letter is a capital)"
        if self.oMainDic.isValid(sWord):
            return True
-        if self.bExtendedDic and self.oExtendedDic.isValid(sWord):
-            return True
        if self.bCommunityDic and self.oCommunityDic.isValid(sWord):
            return True
        if self.bPersonalDic and self.oPersonalDic.isValid(sWord):
            return True
        return False

    def lookup (self, sWord):
        "checks if sWord is in dictionary as is (strict verification)"
        if self.oMainDic.lookup(sWord):
            return True
-        if self.bExtendedDic and self.oExtendedDic.lookup(sWord):
-            return True
        if self.bCommunityDic and self.oCommunityDic.lookup(sWord):
            return True
        if self.bPersonalDic and self.oPersonalDic.lookup(sWord):
            return True
        return False

    def getMorph (self, sWord):
        "retrieves morphologies list, different casing allowed"
        if self.bStorage and sWord in self._dMorphologies:
            return self._dMorphologies[sWord]
        lMorph = self.oMainDic.getMorph(sWord)
-        if self.bExtendedDic:
-            lMorph.extend(self.oExtendedDic.getMorph(sWord))
        if self.bCommunityDic:
            lMorph.extend(self.oCommunityDic.getMorph(sWord))
        if self.bPersonalDic:
            lMorph.extend(self.oPersonalDic.getMorph(sWord))
        if self.bStorage:
            self._dMorphologies[sWord] = lMorph
            self._dLemmas[sWord] = set([ s[1:s.find("/")] for s in lMorph ])
................
            elif sWord.istitle() and sWord.lower() in self.dDefaultSugg:
                lRes = self.dDefaultSugg[sWord.lower()].split("|")
                yield list(map(lambda sSugg: sSugg[0:1].upper()+sSugg[1:], lRes))
            else:
                yield self.oMainDic.suggest(sWord, nSuggLimit)
        else:
            yield self.oMainDic.suggest(sWord, nSuggLimit)
-        if self.bExtendedDic:
-            yield self.oExtendedDic.suggest(sWord, nSuggLimit)
        if self.bCommunityDic:
            yield self.oCommunityDic.suggest(sWord, nSuggLimit)
        if self.bPersonalDic:
            yield self.oPersonalDic.suggest(sWord, nSuggLimit)

    def select (self, sFlexPattern="", sTagsPattern=""):
        "generator: returns all entries which flexion fits <sFlexPattern> and morphology fits <sTagsPattern>"
        yield from self.oMainDic.select(sFlexPattern, sTagsPattern)
-        if self.bExtendedDic:
-            yield from self.oExtendedDic.select(sFlexPattern, sTagsPattern)
        if self.bCommunityDic:
            yield from self.oCommunityDic.select(sFlexPattern, sTagsPattern)
        if self.bPersonalDic:
            yield from self.oPersonalDic.select(sFlexPattern, sTagsPattern)

    def drawPath (self, sWord):
        "draw the path taken by <sWord> within the word graph: display matching nodes and their arcs"
        self.oMainDic.drawPath(sWord)
-        if self.bExtendedDic:
-            print("-----")
-            self.oExtendedDic.drawPath(sWord)
        if self.bCommunityDic:
            print("-----")
            self.oCommunityDic.drawPath(sWord)
        if self.bPersonalDic:
            print("-----")
            self.oPersonalDic.drawPath(sWord)

    def getSimilarEntries (self, sWord, nSuggLimit=10):
        "return a list of tuples (similar word, stem, morphology)"
        lResult = self.oMainDic.getSimilarEntries(sWord, nSuggLimit)
-        if self.bExtendedDic:
-            lResult.extend(self.oExtendedDic.getSimilarEntries(sWord, nSuggLimit))
        if self.bCommunityDic:
            lResult.extend(self.oCommunityDic.getSimilarEntries(sWord, nSuggLimit))
        if self.bPersonalDic:
            lResult.extend(self.oPersonalDic.getSimilarEntries(sWord, nSuggLimit))
        return lResult
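For reference, a minimal usage sketch of the Python API as it stands after this change; the import path and dictionary file names are illustrative only and depend on how graphspell is installed:

    from graphspell.spellchecker import SpellChecker   # illustrative import path

    oSC = SpellChecker("fr")                            # falls back to the default main dictionary for "fr"
    oSC.setCommunityDictionary("fr.community.bdic")     # optional; returns True if the dictionary is loaded
    oSC.setPersonalDictionary("fr.personal.bdic")       # optional; returns True if the dictionary is loaded

    print(oSC.isValid("bonjour"))          # checked against main, then community, then personal
    for lSugg in oSC.suggest("bonjou"):    # generator: one list of suggestions per active dictionary
        print(lSugg)

    oSC.deactivatePersonalDictionary()     # dictionaries can be toggled without reloading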
Modified make.py from [c862a44ad1] to [5247125cb0].
        dVars[sf[:-3]] = open("js_extension/"+sf, "r", encoding="utf-8").read()
    for sf in os.listdir("graphspell-js"):
        if not os.path.isdir("graphspell-js/"+sf):
            file_util.copy_file("graphspell-js/"+sf, "grammalecte-js/graphspell")
            helpers.copyAndFileTemplate("graphspell-js/"+sf, "grammalecte-js/graphspell/"+sf, dVars)


-def copyGraphspellDictionaries (dVars, bJavaScript=False, bExtendedDict=False, bCommunityDict=False, bPersonalDict=False):
+def copyGraphspellDictionaries (dVars, bJavaScript=False, bCommunityDict=False, bPersonalDict=False):
    "copy requested Graphspell dictionaries in Grammalecte package"
    dVars["dic_main_filename_py"] = ""
    dVars["dic_main_filename_js"] = ""
-    dVars["dic_extended_filename_py"] = ""
-    dVars["dic_extended_filename_js"] = ""
    dVars["dic_community_filename_py"] = ""
    dVars["dic_community_filename_js"] = ""
    dVars["dic_personal_filename_py"] = ""
    dVars["dic_personal_filename_js"] = ""
    lDict = [ ("main", s)  for s in dVars['dic_filenames'].split(",") ]
-    if bExtendedDict:
-        lDict.append(("extended", dVars['dic_extended_filename']))
    if bCommunityDict:
        lDict.append(("community", dVars['dic_community_filename']))
    if bPersonalDict:
        lDict.append(("personal", dVars['dic_personal_filename']))
    for sType, sFileName in lDict:
        spfPyDic = "graphspell/_dictionaries/" + sFileName + ".bdic"
        spfJSDic = "graphspell-js/_dictionaries/" + sFileName + ".json"
................
        lSfDictDst = dVars['dic_filenames'].split(",")
        lDicName = dVars['dic_name'].split(",")
        lDescription = dVars['dic_description'].split(",")
        lFilter = dVars['dic_filter'].split(",")
        for sfDictDst, sDicName, sDescription, sFilter in zip(lSfDictDst, lDicName, lDescription, lFilter):
            lex_build.build(spfLexSrc, dVars['lang'], dVars['lang_name'], sfDictDst, bJavaScript, sDicName, sDescription, sFilter, dVars['stemming_method'], int(dVars['fsa_method']))
    else:
-        if sType == "extended":
-            spfLexSrc = dVars['lexicon_extended_src']
-            sfDictDst = dVars['dic_extended_filename']
-            sDicName = dVars['dic_extended_name']
-            sDescription = dVars['dic_extended_description']
-        elif sType == "community":
+        if sType == "community":
            spfLexSrc = dVars['lexicon_community_src']
            sfDictDst = dVars['dic_community_filename']
            sDicName = dVars['dic_community_name']
            sDescription = dVars['dic_community_description']
        elif sType == "personal":
            spfLexSrc = dVars['lexicon_personal_src']
            sfDictDst = dVars['dic_personal_filename']
................
    xParser.add_argument("-bb", "--build_data_before", help="launch build_data.py (only part 1: before dictionary building)", action="store_true")
    xParser.add_argument("-ba", "--build_data_after", help="launch build_data.py (only part 2: before dictionary building)", action="store_true")
    xParser.add_argument("-d", "--dict", help="generate FSA dictionary", action="store_true")
    xParser.add_argument("-t", "--tests", help="run unit tests", action="store_true")
    xParser.add_argument("-p", "--perf", help="run performance tests", action="store_true")
    xParser.add_argument("-pm", "--perf_memo", help="run performance tests and store results in perf_memo.txt", action="store_true")
    xParser.add_argument("-js", "--javascript", help="JavaScript build for Firefox", action="store_true")
-    xParser.add_argument("-aed", "--add_extended_dictionary", help="add extended dictionary to the build", action="store_true")
    xParser.add_argument("-acd", "--add_community_dictionary", help="add community dictionary to the build", action="store_true")
    xParser.add_argument("-apd", "--add_personal_dictionary", help="add personal dictionary to the build", action="store_true")
    xParser.add_argument("-fx", "--firefox", help="Launch Firefox Developper for WebExtension testing", action="store_true")
    xParser.add_argument("-we", "--web_ext", help="Launch Firefox Nightly for WebExtension testing", action="store_true")
    xParser.add_argument("-l", "--lint_web_ext", help="web-ext lint on the WebExtension", action="store_true")
    xParser.add_argument("-tb", "--thunderbird", help="Launch Thunderbird", action="store_true")
    xParser.add_argument("-tbb", "--thunderbird_beta", help="Launch Thunderbird Beta", action="store_true")
................
    copyGraphspellCore(xArgs.javascript)

    for sLang in xArgs.lang:
        if os.path.exists("gc_lang/"+sLang) and os.path.isdir("gc_lang/"+sLang):
            xConfig = getConfig(sLang)
            dVars = xConfig._sections['args']

-            if not dVars["lexicon_extended_src"]:
-                xArgs.add_extended_dictionary = False
            if not dVars["lexicon_community_src"]:
                xArgs.add_community_dictionary = False
            if not dVars["lexicon_personal_src"]:
                xArgs.add_personal_dictionary = False

            # build data
            databuild = None
................
                databuild = importlib.import_module("gc_lang."+sLang+".build_data")
            except ImportError:
                print("# Error. Couldn’t import file build_data.py in folder gc_lang/"+sLang)
            if databuild and xArgs.build_data_before:
                databuild.before('gc_lang/'+sLang, dVars, xArgs.javascript)
            if xArgs.dict:
                buildDictionary(dVars, "main", xArgs.javascript)
-                if xArgs.add_extended_dictionary:
-                    buildDictionary(dVars, "extended", xArgs.javascript)
                if xArgs.add_community_dictionary:
                    buildDictionary(dVars, "community", xArgs.javascript)
                if xArgs.add_personal_dictionary:
                    buildDictionary(dVars, "personal", xArgs.javascript)
            if databuild and xArgs.build_data_after:
                databuild.after('gc_lang/'+sLang, dVars, xArgs.javascript)

            # copy dictionaries from Graphspell
-            copyGraphspellDictionaries(dVars, xArgs.javascript, xArgs.add_extended_dictionary, xArgs.add_community_dictionary, xArgs.add_personal_dictionary)
+            copyGraphspellDictionaries(dVars, xArgs.javascript, xArgs.add_community_dictionary, xArgs.add_personal_dictionary)

            # make
            sVersion = create(sLang, xConfig, xArgs.install, xArgs.javascript, xArgs.use_cache)

            # tests
            if xArgs.tests or xArgs.perf or xArgs.perf_memo:
                print("> Running tests")
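With -aed/--add_extended_dictionary gone, a build that bundles the optional dictionaries is now invoked with the remaining flags only; an illustrative invocation (argparse would reject -aed as an unrecognized argument):

    python make.py fr -d -js -acd -apd

Here -d generates the FSA dictionaries, -js produces the JavaScript build, and -acd/-apd add the community and personal dictionaries to the package, matching the options shown in the argparse hunk above.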
xParser.add_argument("-bb", "--build_data_before", help="launch build_data.py (only part 1: before dictionary building)", action="store_true") xParser.add_argument("-ba", "--build_data_after", help="launch build_data.py (only part 2: before dictionary building)", action="store_true") xParser.add_argument("-d", "--dict", help="generate FSA dictionary", action="store_true") xParser.add_argument("-t", "--tests", help="run unit tests", action="store_true") xParser.add_argument("-p", "--perf", help="run performance tests", action="store_true") xParser.add_argument("-pm", "--perf_memo", help="run performance tests and store results in perf_memo.txt", action="store_true") xParser.add_argument("-js", "--javascript", help="JavaScript build for Firefox", action="store_true") xParser.add_argument("-acd", "--add_community_dictionary", help="add community dictionary to the build", action="store_true") xParser.add_argument("-apd", "--add_personal_dictionary", help="add personal dictionary to the build", action="store_true") xParser.add_argument("-fx", "--firefox", help="Launch Firefox Developper for WebExtension testing", action="store_true") xParser.add_argument("-we", "--web_ext", help="Launch Firefox Nightly for WebExtension testing", action="store_true") xParser.add_argument("-l", "--lint_web_ext", help="web-ext lint on the WebExtension", action="store_true") xParser.add_argument("-tb", "--thunderbird", help="Launch Thunderbird", action="store_true") xParser.add_argument("-tbb", "--thunderbird_beta", help="Launch Thunderbird Beta", action="store_true") ................................................................................ copyGraphspellCore(xArgs.javascript) for sLang in xArgs.lang: if os.path.exists("gc_lang/"+sLang) and os.path.isdir("gc_lang/"+sLang): xConfig = getConfig(sLang) dVars = xConfig._sections['args'] if not dVars["lexicon_community_src"]: xArgs.add_community_dictionary = False if not dVars["lexicon_personal_src"]: xArgs.add_personal_dictionary = False # build data databuild = None ................................................................................ databuild = importlib.import_module("gc_lang."+sLang+".build_data") except ImportError: print("# Error. Couldn’t import file build_data.py in folder gc_lang/"+sLang) if databuild and xArgs.build_data_before: databuild.before('gc_lang/'+sLang, dVars, xArgs.javascript) if xArgs.dict: buildDictionary(dVars, "main", xArgs.javascript) if xArgs.add_community_dictionary: buildDictionary(dVars, "community", xArgs.javascript) if xArgs.add_personal_dictionary: buildDictionary(dVars, "personal", xArgs.javascript) if databuild and xArgs.build_data_after: databuild.after('gc_lang/'+sLang, dVars, xArgs.javascript) # copy dictionaries from Graphspell copyGraphspellDictionaries(dVars, xArgs.javascript, xArgs.add_community_dictionary, xArgs.add_personal_dictionary) # make sVersion = create(sLang, xConfig, xArgs.install, xArgs.javascript, xArgs.use_cache) # tests if xArgs.tests or xArgs.perf or xArgs.perf_memo: print("> Running tests") |