[Libreoffice-commits] .: 2 commits - dictionaries/en dictionaries/hu_HU
Andras Timar
timar at kemper.freedesktop.org
Tue Dec 6 09:20:35 PST 2011
dictionaries/en/Lightproof.py | 410 ++++++++++++++
dictionaries/en/Linguistic.xcu | 18
dictionaries/en/README_lightproof_en.txt | 3
dictionaries/en/description.xml | 2
dictionaries/en/dialog/OptionsDialog.xcs | 74 ++
dictionaries/en/dialog/OptionsDialog.xcu | 37 +
dictionaries/en/dialog/en.xdl | 26
dictionaries/en/dialog/en_en_US.properties | 37 +
dictionaries/en/makefile.mk | 14
dictionaries/en/manifest.xml | 9
dictionaries/en/pythonpath/lightproof_en.py | 1
dictionaries/en/pythonpath/lightproof_handler_en.py | 121 ++++
dictionaries/en/pythonpath/lightproof_impl_en.py | 6
dictionaries/en/pythonpath/lightproof_opts_en.py | 4
dictionaries/hu_HU/Lightproof.py | 323 +++++++++++
dictionaries/hu_HU/Linguistic.xcu | 18
dictionaries/hu_HU/README_lightproof_hu_HU.txt | 8
dictionaries/hu_HU/description.xml | 2
dictionaries/hu_HU/dialog/OptionsDialog.xcs | 107 +++
dictionaries/hu_HU/dialog/OptionsDialog.xcu | 39 +
dictionaries/hu_HU/dialog/hu_HU.xdl | 37 +
dictionaries/hu_HU/dialog/hu_HU_en_US.properties | 32 +
dictionaries/hu_HU/dialog/hu_HU_hu_HU.properties | 53 +
dictionaries/hu_HU/makefile.mk | 15
dictionaries/hu_HU/manifest.xml | 9
dictionaries/hu_HU/pythonpath/lightproof_handler_hu_HU.py | 121 ++++
dictionaries/hu_HU/pythonpath/lightproof_hu_HU.py | 1
dictionaries/hu_HU/pythonpath/lightproof_impl_hu_HU.py | 6
dictionaries/hu_HU/pythonpath/lightproof_opts_hu_HU.py | 4
29 files changed, 1533 insertions(+), 4 deletions(-)
New commits:
commit 95445a70a29c6a1166a19f483566f2c9cfc7563b
Author: László Németh <nemeth at numbertext.org>
Date: Mon Dec 5 19:18:07 2011 +0100
English sentence checking
diff --git a/dictionaries/en/Lightproof.py b/dictionaries/en/Lightproof.py
new file mode 100644
index 0000000..a6b669c
--- /dev/null
+++ b/dictionaries/en/Lightproof.py
@@ -0,0 +1,410 @@
+# -*- encoding: UTF-8 -*-
+# Lightproof grammar checker for LibreOffice and OpenOffice.org
+# http://launchpad.net/lightproof
+# version 1.4.3 (2011-12-05)
+#
+# 2009-2011 (c) László Németh (nemeth at numbertext org), license: MPL 1.1 / GPLv3+ / LGPLv3+
+
+import uno, unohelper, sys, traceback, re
+from lightproof_impl_en import locales
+from lightproof_impl_en import pkg
+import lightproof_handler_en
+from string import join
+
+from com.sun.star.linguistic2 import XProofreader, XSupportedLocales
+from com.sun.star.linguistic2 import ProofreadingResult, SingleProofreadingError
+from com.sun.star.lang import XServiceInfo, XServiceName, XServiceDisplayName
+from com.sun.star.lang import Locale
+from com.sun.star.text.TextMarkupType import PROOFREADING
+
+# loaded rules
+langrule = {}
+# ignored rules
+ignore = {}
+
+# cache for morphological analyses
+analyses = {}
+stems = {}
+suggestions = {}
+
+# assign Calc functions
+calcfunc = None
+
+# check settings
+def option(lang, opt):
+ return lightproof_handler_en.get_option(lang.Language + "_" + lang.Country, opt)
+
+# filtering affix fields (ds, is, ts etc.)
+def onlymorph(st):
+ if st != None:
+ st = re.sub(r"^.*(st:|po:)", r"\1", st) # keep last word part
+ st = re.sub(r"\b(?=[dit][sp]:)","@", st) # and its affixes
+ st = re.sub(r"(?<!@)\b\w\w:\w+","", st).replace('@','').strip()
+ return st
+
+# if the pattern matches all analyses of the input word,
+# return the last matched substring
+def _morph(rLoc, word, pattern, all, onlyaffix):
+ global analyses
+ if word == None:
+ return None
+ if word not in analyses:
+ x = spellchecker.spell(u"<?xml?><query type='analyze'><word>" + word + "</word></query>", rLoc, ())
+ if not x:
+ return None
+ t = x.getAlternatives()
+ if not t:
+ t = [""]
+ analyses[word] = t[0]
+ a = analyses[word].split("</a>")[:-1]
+ result = None
+ p = re.compile(pattern)
+ for i in a:
+ if onlyaffix:
+ i = onlymorph(i)
+ result = p.search(i)
+ if result:
+ result = result.group(0)
+ if not all:
+ return result
+ elif all:
+ return None
+ return result
+
+def morph(rLoc, word, pattern, all=True):
+ return _morph(rLoc, word, pattern, all, False)
+
+def affix(rLoc, word, pattern, all=True):
+ return _morph(rLoc, word, pattern, all, True)
+
+def spell(rLoc, word):
+ if word == None:
+ return None
+ return spellchecker.isValid(word, rLoc, ())
+
+# get the tuple of the stem of the word or an empty array
+def stem(rLoc, word):
+ global stems
+ if word == None:
+ return []
+ if not word in stems:
+ x = spellchecker.spell(u"<?xml?><query type='stem'><word>" + word + "</word></query>", rLoc, ())
+ if not x:
+ return []
+ t = x.getAlternatives()
+ if not t:
+ t = []
+ stems[word] = list(t)
+ return stems[word]
+
+# get the tuple of the morphological generation of a word or an empty array
+def generate(rLoc, word, example):
+ if word == None:
+ return []
+ x = spellchecker.spell(u"<?xml?><query type='generate'><word>" + word + "</word><word>" + example + "</word></query>", rLoc, ())
+ if not x:
+ return []
+ t = x.getAlternatives()
+ if not t:
+ t = []
+ return list(t)
+
+# get suggestions
+def suggest(rLoc, word):
+ global suggestions
+ if word == None:
+ return word
+ if word not in suggestions:
+ x = spellchecker.spell("_" + word, rLoc, ())
+ if not x:
+ return word
+ t = x.getAlternatives()
+ suggestions[word] = join(t, "\n")
+ return suggestions[word]
+
+# get the nth word of the input string or None
+def word(s, n):
+ a = re.match("(?u)( [-.\w%]+){" + str(n-1) + "}( [-.\w%]+)", s)
+ if not a:
+ return None
+ return a.group(2)[1:]
+
+# get the (-)nth word of the input string or None
+def wordmin(s, n):
+ a = re.search("(?u)([-.\w%]+ )([-.\w%]+ ){" + str(n-1) + "}$", s)
+ if not a:
+ return None
+ return a.group(1)[:-1]
+
+def calc(funcname, par):
+ global calcfunc
+ global SMGR
+ if calcfunc == None:
+ calcfunc = SMGR.createInstance( "com.sun.star.sheet.FunctionAccess")
+ if calcfunc == None:
+ return None
+ return calcfunc.callFunction(funcname, par)
+
+def proofread( nDocId, TEXT, LOCALE, nStartOfSentencePos, nSuggestedSentenceEndPos, rProperties ):
+ global ignore
+ aErrs = []
+ s = TEXT[nStartOfSentencePos:nSuggestedSentenceEndPos]
+ for i in get_rule(LOCALE):
+ if i[0] and not str(i[0]) in ignore:
+ for m in i[0].finditer(s):
+ if not i[3] or eval(i[3]):
+ aErr = uno.createUnoStruct( "com.sun.star.linguistic2.SingleProofreadingError" )
+ aErr.nErrorStart = nStartOfSentencePos + m.start(0) # nStartOfSentencePos
+ aErr.nErrorLength = m.end(0) - m.start(0)
+ aErr.nErrorType = PROOFREADING
+ aErr.aRuleIdentifier = str(i[0])
+ iscap = (i[4] and m.group(0)[0:1].isupper())
+ if i[1][0:1] == "=":
+ aErr.aSuggestions = tuple(cap(eval(i[1][1:]).split("\n"), iscap, LOCALE))
+ else:
+ aErr.aSuggestions = tuple(cap(m.expand(i[1]).split("\n"), iscap, LOCALE))
+ comment = i[2]
+ if comment[0:1] == "=":
+ comment = eval(comment[1:])
+ aErr.aShortComment = comment.split("\n")[0]
+ aErr.aFullComment = comment.split("\n")[-1]
+ aErr.aProperties = ()
+ aErrs = aErrs + [aErr]
+ return tuple(aErrs)
+
+def cap(a, iscap, rLoc):
+ if iscap:
+ for i in range(0, len(a)):
+ if a[i][0:1] == "i":
+ if rLoc.Language == "tr" or rLoc.Language == "az":
+ a[i] = u"\u0130" + a[i][1:]
+ elif a[i][1:2] == "j" and rLoc.Language == "nl":
+ a[i] = "IJ" + a[i][2:]
+ else:
+ a[i] = "I" + a[i][1:]
+ else:
+ a[i] = a[i].capitalize()
+ return a
+
+def get_rule(rLocale):
+ module = rLocale.Language
+ if rLocale.Country != "":
+ module = module + "_" + rLocale.Country
+ try:
+ return langrule[module]
+ except:
+ try:
+ module = rLocale.Language
+ return langrule[module]
+ except:
+ try:
+ d = __import__("lightproof_" + pkg)
+ except:
+ print "Error: missing language data: " + module
+ return None
+ # compile regular expressions
+ for i in d.dic:
+ try:
+ if re.compile("[(][?]iu[)]").match(i[0]):
+ i += [True]
+ i[0] = re.sub("[(][?]iu[)]", "(?u)", i[0])
+ else:
+ i += [False]
+ i[0] = re.compile(i[0])
+ except:
+ print "Lightproof: bad rule -- ", i[0]
+ i[0] = None
+ langrule[module] = d.dic
+ return langrule[module]
+
+class Lightproof( unohelper.Base, XProofreader, XServiceInfo, XServiceName, XServiceDisplayName, XSupportedLocales):
+
+ def __init__( self, ctx, *args ):
+ global spellchecker
+ global SMGR
+ self.ctx = ctx
+ self.ServiceName = "com.sun.star.linguistic2.Proofreader"
+ self.ImplementationName = "org.openoffice.comp.pyuno.Lightproof." + pkg
+ self.SupportedServiceNames = (self.ServiceName, )
+ self.locales = []
+ for i in locales:
+ l = locales[i]
+ self.locales += [Locale(l[0], l[1], l[2])]
+ self.locales = tuple(self.locales)
+ currentContext = uno.getComponentContext()
+ SMGR = currentContext.ServiceManager
+ spellchecker = SMGR.createInstanceWithContext("com.sun.star.linguistic2.SpellChecker", currentContext)
+ lightproof_handler_en.load(currentContext)
+
+ # XServiceName method implementations
+ def getServiceName(self):
+ return self.ImplementationName
+
+ # XServiceInfo method implementations
+ def getImplementationName (self):
+ return self.ImplementationName
+
+ def supportsService(self, ServiceName):
+ return (ServiceName in self.SupportedServiceNames)
+
+ def getSupportedServiceNames (self):
+ return self.SupportedServiceNames
+
+ # XSupportedLocales
+ def hasLocale(self, aLocale):
+ if aLocale in self.locales:
+ return True
+ for i in self.locales:
+ if (i.Country == aLocale.Country or i.Country == "") and aLocale.Language == i.Language:
+ return True
+ return False
+
+ def getLocales(self):
+ return self.locales
+
+ # XProofreader
+ def isSpellChecker(self):
+ return False
+
+ def doProofreading(self, nDocId, rText, rLocale, nStartOfSentencePos, \
+ nSuggestedSentenceEndPos, rProperties):
+ aRes = uno.createUnoStruct( "com.sun.star.linguistic2.ProofreadingResult" )
+ aRes.aDocumentIdentifier = nDocId
+ aRes.aText = rText
+ aRes.aLocale = rLocale
+ aRes.nStartOfSentencePosition = nStartOfSentencePos
+ aRes.nStartOfNextSentencePosition = nSuggestedSentenceEndPos
+ l = rText[nSuggestedSentenceEndPos:nSuggestedSentenceEndPos+1]
+ while l == " ":
+ aRes.nStartOfNextSentencePosition = aRes.nStartOfNextSentencePosition + 1
+ l = rText[aRes.nStartOfNextSentencePosition:aRes.nStartOfNextSentencePosition+1]
+ if aRes.nStartOfNextSentencePosition == nSuggestedSentenceEndPos and l!="":
+ aRes.nStartOfNextSentencePosition = nSuggestedSentenceEndPos + 1
+ aRes.nBehindEndOfSentencePosition = aRes.nStartOfNextSentencePosition
+
+ try:
+ aRes.aErrors = proofread( nDocId, rText, rLocale, \
+ nStartOfSentencePos, aRes.nBehindEndOfSentencePosition, rProperties)
+ except:
+ traceback.print_exc(file=sys.stdout)
+ aRes.aErrors = ()
+ aRes.aProperties = ()
+ aRes.xProofreader = self
+ return aRes
+
+ def ignoreRule(self, rid, aLocale):
+ global ignore
+ ignore[rid] = 1
+
+ def resetIgnoreRules(self):
+ global ignore
+ ignore = {}
+
+ # XServiceDisplayName
+ def getServiceDisplayName(self, aLocale):
+ return "Lightproof Grammar Checker (" + pkg + ")"
+
+g_ImplementationHelper = unohelper.ImplementationHelper()
+g_ImplementationHelper.addImplementation( Lightproof, \
+ "org.openoffice.comp.pyuno.Lightproof." + pkg,
+ ("com.sun.star.linguistic2.Proofreader",),)
+
+g_ImplementationHelper.addImplementation( lightproof_handler_en.LightproofOptionsEventHandler, \
+ "org.openoffice.comp.pyuno.LightproofOptionsEventHandler." + pkg,
+ ("com.sun.star.awt.XContainerWindowEventHandler",),)
+# pattern matching for common English abbreviations
+abbrev = re.compile("(?i)\\b([a-z]|acct|approx|appt|apr|apt|assoc|asst|aug|ave|avg|co(nt|rp)?|ct|dec|defn|dept|dr|eg|equip|esp|est|etc|excl|ext|feb|fri|ft|govt?|hrs?|ib(id)?|ie|in(c|t)?|jan|jr|jul|lit|ln|mar|max|mi(n|sc)?|mon|Mrs?|mun|natl?|neg?|no(rm|s|v)?|nw|obj|oct|org|orig|pl|pos|prev|proj|psi|qty|rd|rec|rel|reqd?|resp|rev|sat|sci|se(p|pt)?|spec(if)?|sq|sr|st|subj|sun|sw|temp|thurs|tot|tues|univ|var|vs)\\.")
+
+
+punct = { "?": "question mark", "!": "exclamation mark",
+ ",": "comma", ":": "colon", ";": "semicolon",
+ "(": "opening parenthesis", ")": "closing parenthesis",
+ "[": "opening square bracket", "]": "closing square bracket",
+ u"\u201c": "opening quotation mark", u"\u201d": "closing quotation mark"}
+
+
+aA = set(["eucalypti", "eucalyptus", "Eucharist", "Eucharistic",
+"euchre", "euchred", "euchring", "Euclid", "euclidean", "Eudora",
+"eugene", "Eugenia", "eugenic", "eugenically", "eugenicist",
+"eugenicists", "eugenics", "Eugenio", "eukaryote", "Eula", "eulogies",
+"eulogist", "eulogists", "eulogistic", "eulogized", "eulogizer",
+"eulogizers", "eulogizing", "eulogy", "eulogies", "Eunice", "eunuch",
+"eunuchs", "Euphemia", "euphemism", "euphemisms", "euphemist",
+"euphemists", "euphemistic", "euphemistically", "euphonious",
+"euphoniously", "euphonium", "euphony", "euphoria", "euphoric",
+"Euphrates", "euphuism", "Eurasia", "Eurasian", "Eurasians", "eureka",
+"eurekas", "eurhythmic", "eurhythmy", "Euridyce", "Euripides", "euripus",
+"Euro", "Eurocentric", "Euroclydon", "Eurocommunism", "Eurocrat",
+"eurodollar", "Eurodollar", "Eurodollars", "Euromarket", "Europa",
+"Europe", "European", "Europeanisation", "Europeanise", "Europeanised",
+"Europeanization", "Europeanize", "Europeanized", "Europeans", "europium",
+"Eurovision", "Eustace", "Eustachian", "Eustacia", "euthanasia",
+"Ewart", "ewe", "Ewell", "ewer", "ewers", "Ewing", "once", "one",
+"oneness", "ones", "oneself", "onetime", "oneway", "oneyear", "u",
+"U", "UART", "ubiquitous", "ubiquity", "Udale", "Udall", "UEFA",
+"Uganda", "Ugandan", "ugric", "UK", "ukase", "Ukraine", "Ukrainian",
+"Ukrainians", "ukulele", "Ula", "ululated", "ululation", "Ulysses",
+"UN", "unanimity", "unanimous", "unanimously", "unary", "Unesco",
+"UNESCO", "UNHCR", "uni", "unicameral", "unicameralism", "Unicef",
+"UNICEF", "unicellular", "Unicode", "unicorn", "unicorns", "unicycle",
+"unicyclist", "unicyclists", "unidimensional", "unidirectional",
+"unidirectionality", "unifiable", "unification", "unified", "unifier",
+"unifilar", "uniform", "uniformally", "uniformed", "uniformer",
+"uniforming", "uniformisation", "uniformise", "uniformitarian",
+"uniformitarianism", "uniformity", "uniformly", "uniformness", "uniforms",
+"unify", "unifying", "unijugate", "unilateral", "unilateralisation",
+"unilateralise", "unilateralism", "unilateralist", "unilaterally",
+"unilinear", "unilingual", "uniliteral", "uniliteralism", "uniliteralist",
+"unimodal", "union", "unionism", "unionist", "unionists", "unionisation",
+"unionise", "unionised", "unionising", "unionization", "unionize",
+"unionized", "unionizing", "unions", "unipolar", "uniprocessor",
+"unique", "uniquely", "uniqueness", "uniquer", "Uniroyal", "unisex",
+"unison", "Unisys", "unit", "Unitarian", "Unitarianism", "Unitarians",
+"unitary", "unite", "united", "unitedly", "uniter", "unites", "uniting",
+"unitize", "unitizing", "unitless", "units", "unity", "univ", "Univac",
+"univalent", "univalve", "univariate", "universal", "universalisation",
+"universalise", "universalised", "universaliser", "universalisers",
+"universalising", "universalism", "universalist", "universalistic",
+"universality", "universalisation", "universalization", "universalize",
+"universalized", "universalizer", "universalizers", "universalizing",
+"universally", "universalness", "universe", "universes", "universities",
+"university", "univocal", "Unix", "uracil", "Urals", "uranium", "Uranus",
+"uranyl", "urate", "urea", "uremia", "uremic", "ureter", "urethane",
+"urethra", "urethral", "urethritis", "Urey", "Uri", "uric", "urinal",
+"urinalysis", "urinary", "urinated", "urinating", "urination", "urine",
+"urogenital", "urokinase", "urologist", "urologists", "urology",
+"Uruguay", "Uruguayan", "Uruguayans", "US", "USA", "usable", "usage",
+"usages", "use", "used", "useful", "usefulness", "usefully", "useless",
+"uselessly", "uselessness", "Usenet", "user", "users", "uses", "using",
+"usual", "usually", "usurer", "usurers", "usuress", "usurial", "usurious",
+"usurp", "usurpation", "usurped", "usurper", "usurping", "usurps",
+"usury", "Utah", "utensil", "utensils", "uterine", "uterus", "Utica",
+"utilitarian", "utilitarianism", "utilities", "utility", "utilizable",
+"utilization", "utilize", "utilized", "utilizes", "utilizing", "utopia",
+"utopian", "utopians", "utopias", "Utrecht", "Uttoxeter", "uvula",
+"uvular"])
+
+aAN = set(["f", "F", "FBI", "FDA", "heir", "heirdom", "heired",
+"heirer", "heiress", "heiring", "heirloom", "heirship", "honest",
+"honester", "honestly", "honesty", "honor", "honorable", "honorableness",
+"honorably", "honorarium", "honorary", "honored", "honorer", "honorific",
+"honoring", "honors", "honour", "honourable", "honourableness",
+"honourably", "honourarium", "honourary", "honoured", "honourer",
+"honourific", "honouring", "Honours", "hors", "hour", "hourglass", "hourlong",
+"hourly", "hours", "l", "L", "LCD", "m", "M", "MBA", "MP", "mpg", "mph",
+"MRI", "MSc", "MTV", "n", "N", "NBA", "NBC", "NFL", "NGO", "NHL", "r",
+"R", "s", "S", "SMS", "sos", "SOS", "SPF", "std", "STD", "SUV", "x",
+"X", "XML"])
+
+aB = set(["H", "hallucination", "haute", "hauteur", "herb", "herbaceous", "herbal",
+"herbalist", "herbalism", "heroic", "hilarious", "historian", "historic", "historical",
+"homage", "homophone", "horrendous", "hospitable", "horrific", "hotel", "hypothesis", "Xmas"])
+
+def measurement(mnum, min, mout, mstr, decimal, remove):
+ if min == "ft" or min == "in" or min == "mi":
+ mnum = mnum.replace(" 1/2", ".5").replace(u" \xbd", ".5").replace(u"\xbd",".5")
+ m = calc("CONVERT_ADD", (float(eval(mnum.replace(remove, "").replace(decimal, ".").replace(u"\u2212", "-"))), min, mout))
+ a = list(set([str(calc("ROUND", (m, 0)))[:-2], str(calc("ROUND", (m, 1))), str(calc("ROUND", (m, 2))), str(m)])) # remove duplicated rounded items
+ a.sort(lambda x, y: len(x) - len(y)) # sort by string length
+ return join(a, mstr + "\n").replace(".", decimal).replace("-", u"\u2212") + mstr
+
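
For orientation: the rule entries that get_rule() compiles and proofread() evaluates above are four-element lists -- a regular expression, a replacement (or a "="-prefixed expression to evaluate), a comment string whose first and last "\n"-separated parts become the short and full comments, and a condition (False or an expression). The following standalone sketch, with a hypothetical rule and sample text that are not part of the committed files, shows how one match is expanded into suggestions:

    # Standalone illustration only; a pre-compiled pattern is used for brevity,
    # and the rule and text below are made up for this example.
    import re

    sample_rule = [re.compile(u"(?u)(?<![-\\w])and and(?![-\\w])"), u"and",
                   u"Did you mean:", False]

    text = u"Peter and and Paul went fishing."
    for m in sample_rule[0].finditer(text):
        # m.expand() applies backreferences; "\n" separates alternative suggestions
        suggestions = tuple(m.expand(sample_rule[1]).split("\n"))
        print(suggestions)             # (u'and',)
        # the offsets below are what proofread() turns into nErrorStart/nErrorLength
        print((m.start(0), m.end(0)))  # (6, 13)
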
diff --git a/dictionaries/en/Linguistic.xcu b/dictionaries/en/Linguistic.xcu
new file mode 100644
index 0000000..78fdd21
--- /dev/null
+++ b/dictionaries/en/Linguistic.xcu
@@ -0,0 +1,18 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<oor:component-data oor:name="Linguistic"
+ oor:package="org.openoffice.Office" xmlns:oor="http://openoffice.org/2001/registry"
+ xmlns:xs="http://www.w3.org/2001/XMLSchema">
+ <node oor:name="ServiceManager">
+
+ <node oor:name="GrammarCheckers">
+ <node oor:name="org.openoffice.comp.pyuno.Lightproof.en"
+ oor:op="fuse">
+ <prop oor:name="Locales" oor:type="oor:string-list">
+ <value>en-GB en-US en-PH en-ZA en-NA en-ZW en-AU en-CA en-IE en-IN en-BZ en-BS en-GH en-JM en-NZ en-TT</value>
+ </prop>
+ </node>
+ </node>
+
+ </node>
+
+</oor:component-data>
diff --git a/dictionaries/en/README_lightproof_en.txt b/dictionaries/en/README_lightproof_en.txt
new file mode 100644
index 0000000..0e6f33b
--- /dev/null
+++ b/dictionaries/en/README_lightproof_en.txt
@@ -0,0 +1,3 @@
+English sentence checker for LibreOffice
+see http://launchpad.net/lightproof and http://numbertext.org/lightproof
+2011 (c) László Németh, license: MPL 1.1 / GPLv3+ / LGPLv3+
diff --git a/dictionaries/en/description.xml b/dictionaries/en/description.xml
index 0ace9f5..f34df5b 100644
--- a/dictionaries/en/description.xml
+++ b/dictionaries/en/description.xml
@@ -1,6 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<description xmlns="http://openoffice.org/extensions/description/2006" xmlns:d="http://openoffice.org/extensions/description/2006" xmlns:xlink="http://www.w3.org/1999/xlink">
- <version value="2010.03.16" />
+ <version value="2011.12.05" />
<identifier value="org.openoffice.en.hunspell.dictionaries" />
<display-name>
<name lang="en-US">English (Australia, Canada, South Africa, UK, USA) spelling dictionaries, hyphenation rules, and thesaurus</name>
diff --git a/dictionaries/en/dialog/OptionsDialog.xcs b/dictionaries/en/dialog/OptionsDialog.xcs
new file mode 100644
index 0000000..a5a9963
--- /dev/null
+++ b/dictionaries/en/dialog/OptionsDialog.xcs
@@ -0,0 +1,74 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<oor:component-schema xmlns:oor="http://openoffice.org/2001/registry" xmlns:xs="http://www.w3.org/2001/XMLSchema" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+oor:name="Lightproof_en" oor:package="org.openoffice" xml:lang="en-US">
+<info>
+<author></author>
+<desc>Contains the options data used for the test extensions.</desc>
+</info>
+<templates>
+
+ <group oor:name="en">
+ <info>
+ <desc>The data for one leaf.</desc>
+ </info>
+<prop oor:name="grammar" oor:type="xs:string">
+ <value></value>
+</prop>
+<prop oor:name="cap" oor:type="xs:string">
+ <value></value>
+</prop>
+<prop oor:name="dup" oor:type="xs:string">
+ <value></value>
+</prop>
+<prop oor:name="pair" oor:type="xs:string">
+ <value></value>
+</prop>
+<prop oor:name="spaces" oor:type="xs:string">
+ <value></value>
+</prop>
+<prop oor:name="mdash" oor:type="xs:string">
+ <value></value>
+</prop>
+<prop oor:name="quotation" oor:type="xs:string">
+ <value></value>
+</prop>
+<prop oor:name="times" oor:type="xs:string">
+ <value></value>
+</prop>
+<prop oor:name="spaces2" oor:type="xs:string">
+ <value></value>
+</prop>
+<prop oor:name="ndash" oor:type="xs:string">
+ <value></value>
+</prop>
+<prop oor:name="apostrophe" oor:type="xs:string">
+ <value></value>
+</prop>
+<prop oor:name="ellipsis" oor:type="xs:string">
+ <value></value>
+</prop>
+<prop oor:name="spaces3" oor:type="xs:string">
+ <value></value>
+</prop>
+<prop oor:name="minus" oor:type="xs:string">
+ <value></value>
+</prop>
+<prop oor:name="metric" oor:type="xs:string">
+ <value></value>
+</prop>
+<prop oor:name="numsep" oor:type="xs:string">
+ <value></value>
+</prop>
+<prop oor:name="nonmetric" oor:type="xs:string">
+ <value></value>
+</prop>
+ </group>
+ </templates>
+ <component>
+ <group oor:name="Leaves">
+
+ <node-ref oor:name="en" oor:node-type="en"/>
+ </group>
+ </component>
+
+</oor:component-schema>
diff --git a/dictionaries/en/dialog/OptionsDialog.xcu b/dictionaries/en/dialog/OptionsDialog.xcu
new file mode 100644
index 0000000..2ccb6db
--- /dev/null
+++ b/dictionaries/en/dialog/OptionsDialog.xcu
@@ -0,0 +1,37 @@
+<?xml version='1.0' encoding='UTF-8'?>
+<!DOCTYPE oor:component-data SYSTEM "../../../../component-update.dtd">
+<oor:component-data oor:name="OptionsDialog" oor:package="org.openoffice.Office" xmlns:oor="http://openoffice.org/2001/registry" xmlns:xs="http://www.w3.org/2001/XMLSchema" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
+ <node oor:name="Nodes">
+ <node oor:name="org.openoffice.lightproof" oor:op="fuse">
+ <prop oor:name="Label">
+ <value xml:lang="en">Dictionaries</value>
+ <value xml:lang="hu">Szótárak</value>
+ </prop>
+ <node oor:name="Leaves">
+
+ <node oor:name="org.openoffice.lightproof.en" oor:op="fuse">
+
+ <prop oor:name="Id">
+ <value>org.openoffice.en.hunspell.dictionaries</value>
+ </prop>
+
+ <prop oor:name="Label">
+
+ <value xml:lang="en">English sentence checking</value>
+
+ </prop>
+
+ <prop oor:name="OptionsPage">
+ <value>%origin%/en.xdl</value>
+ </prop>
+
+ <prop oor:name="EventHandlerService">
+ <value>org.openoffice.comp.pyuno.LightproofOptionsEventHandler.en</value>
+ </prop>
+
+ </node>
+
+ </node>
+ </node>
+ </node>
+</oor:component-data>
diff --git a/dictionaries/en/dialog/en.xdl b/dictionaries/en/dialog/en.xdl
new file mode 100644
index 0000000..6e161bd
--- /dev/null
+++ b/dictionaries/en/dialog/en.xdl
@@ -0,0 +1,26 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE dlg:window PUBLIC "-//OpenOffice.org//DTD OfficeDocument 1.0//EN" "dialog.dtd">
+<dlg:window xmlns:dlg="http://openoffice.org/2000/dialog" xmlns:script="http://openoffice.org/2000/script" dlg:id="en" dlg:left="101" dlg:top="52" dlg:width="196" dlg:height="72" dlg:closeable="true" dlg:moveable="true" dlg:withtitlebar="false">
+ <dlg:bulletinboard>
+<dlg:fixedline dlg:id="spelling" dlg:tab-index="0" dlg:left="5" dlg:top="5" dlg:width="240" dlg:height="10" dlg:value="&spelling"/>
+<dlg:checkbox dlg:id="grammar" dlg:tab-index="1" dlg:left="10" dlg:top="15" dlg:width="60" dlg:height="10" dlg:value="&grammar" dlg:checked="false" dlg:help-text="&hlp_grammar"/>
+<dlg:checkbox dlg:id="cap" dlg:tab-index="2" dlg:left="70" dlg:top="15" dlg:width="60" dlg:height="10" dlg:value="&cap" dlg:checked="false" dlg:help-text="&hlp_cap"/>
+<dlg:checkbox dlg:id="dup" dlg:tab-index="3" dlg:left="130" dlg:top="15" dlg:width="60" dlg:height="10" dlg:value="&dup" dlg:checked="false" dlg:help-text="&hlp_dup"/>
+<dlg:checkbox dlg:id="pair" dlg:tab-index="4" dlg:left="190" dlg:top="15" dlg:width="60" dlg:height="10" dlg:value="&pair" dlg:checked="false" dlg:help-text="&hlp_pair"/>
+<dlg:fixedline dlg:id="punctuation" dlg:tab-index="6" dlg:left="5" dlg:top="25" dlg:width="240" dlg:height="10" dlg:value="&punctuation"/>
+<dlg:checkbox dlg:id="spaces" dlg:tab-index="7" dlg:left="10" dlg:top="35" dlg:width="60" dlg:height="10" dlg:value="&spaces" dlg:checked="true" dlg:help-text="&hlp_spaces"/>
+<dlg:checkbox dlg:id="mdash" dlg:tab-index="8" dlg:left="70" dlg:top="35" dlg:width="60" dlg:height="10" dlg:value="&mdash" dlg:checked="false" dlg:help-text="&hlp_mdash"/>
+<dlg:checkbox dlg:id="quotation" dlg:tab-index="9" dlg:left="130" dlg:top="35" dlg:width="60" dlg:height="10" dlg:value="&quotation" dlg:checked="false" dlg:help-text="&hlp_quotation"/>
+<dlg:checkbox dlg:id="times" dlg:tab-index="10" dlg:left="190" dlg:top="35" dlg:width="60" dlg:height="10" dlg:value="&times" dlg:checked="true" dlg:help-text="&hlp_times"/>
+<dlg:checkbox dlg:id="spaces2" dlg:tab-index="12" dlg:left="10" dlg:top="45" dlg:width="60" dlg:height="10" dlg:value="&spaces2" dlg:checked="false" dlg:help-text="&hlp_spaces2"/>
+<dlg:checkbox dlg:id="ndash" dlg:tab-index="13" dlg:left="70" dlg:top="45" dlg:width="60" dlg:height="10" dlg:value="&ndash" dlg:checked="false" dlg:help-text="&hlp_ndash"/>
+<dlg:checkbox dlg:id="apostrophe" dlg:tab-index="14" dlg:left="130" dlg:top="45" dlg:width="60" dlg:height="10" dlg:value="&apostrophe" dlg:checked="false" dlg:help-text="&hlp_apostrophe"/>
+<dlg:checkbox dlg:id="ellipsis" dlg:tab-index="15" dlg:left="190" dlg:top="45" dlg:width="60" dlg:height="10" dlg:value="&ellipsis" dlg:checked="false" dlg:help-text="&hlp_ellipsis"/>
+<dlg:checkbox dlg:id="spaces3" dlg:tab-index="17" dlg:left="10" dlg:top="55" dlg:width="60" dlg:height="10" dlg:value="&spaces3" dlg:checked="false" dlg:help-text="&hlp_spaces3"/>
+<dlg:checkbox dlg:id="minus" dlg:tab-index="18" dlg:left="70" dlg:top="55" dlg:width="60" dlg:height="10" dlg:value="&minus" dlg:checked="false" dlg:help-text="&hlp_minus"/>
+<dlg:fixedline dlg:id="others" dlg:tab-index="20" dlg:left="5" dlg:top="65" dlg:width="240" dlg:height="10" dlg:value="&others"/>
+<dlg:checkbox dlg:id="metric" dlg:tab-index="21" dlg:left="10" dlg:top="75" dlg:width="120" dlg:height="10" dlg:value="&metric" dlg:checked="false" dlg:help-text="&hlp_metric"/>
+<dlg:checkbox dlg:id="numsep" dlg:tab-index="22" dlg:left="130" dlg:top="75" dlg:width="120" dlg:height="10" dlg:value="&numsep" dlg:checked="false" dlg:help-text="&hlp_numsep"/>
+<dlg:checkbox dlg:id="nonmetric" dlg:tab-index="24" dlg:left="10" dlg:top="85" dlg:width="240" dlg:height="10" dlg:value="&nonmetric" dlg:checked="false" dlg:help-text="&hlp_nonmetric"/>
+</dlg:bulletinboard>
+</dlg:window>
diff --git a/dictionaries/en/dialog/en_en_US.default b/dictionaries/en/dialog/en_en_US.default
new file mode 100644
index 0000000..e69de29
diff --git a/dictionaries/en/dialog/en_en_US.properties b/dictionaries/en/dialog/en_en_US.properties
new file mode 100644
index 0000000..6bc215e
--- /dev/null
+++ b/dictionaries/en/dialog/en_en_US.properties
@@ -0,0 +1,37 @@
+spelling=Grammar checking
+hlp_grammar= Check more grammar errors.
+grammar=Possible mistakes
+hlp_cap= Check missing capitalization of sentences.
+cap=Capitalization
+hlp_dup= Check repeated words.
+dup=Word duplication
+hlp_pair= Check missing or extra parentheses and quotation marks.
+pair=Parentheses
+punctuation=Punctuation
+hlp_spaces=Check single spaces between words.
+spaces=Word spacing
+hlp_mdash=Force unspaced em dash instead of spaced en dash.
+mdash=Em dash
+hlp_ndash=Force spaced en dash instead of unspaced em dash.
+ndash=En dash
+hlp_quotation=Check double quotation marks: "x" \u2192 \u201cx\u201d
+quotation=Quotation marks
+hlp_times=Check true multiplication sign: 5x5 \u2192 5\u00d75
+times=Multiplication sign
+hlp_spaces2=Check single spaces between sentences.
+spaces2=Sentence spacing
+hlp_spaces3=Check more than two extra space characters between words and sentences.
+spaces3=More spaces
+hlp_minus=Change hyphen characters to real minus signs.
+minus=Minus sign
+hlp_apostrophe=Replace typewriter apostrophes and single quotation marks, and correct double primes.
+apostrophe=Apostrophe
+hlp_ellipsis=Replace three dots with an ellipsis.
+ellipsis=Ellipsis
+others=Others
+hlp_metric=Measurement conversion from \u00b0F, mph, ft, in, lb, gal and miles.
+metric=Convert to metric (\u00b0C, km/h, m, kg, l)
+hlp_numsep=Common (1000000 \u2192 1,000,000) or ISO (1000000 \u2192 1 000 000).
+numsep=Thousand separation of large numbers
+hlp_nonmetric=Measurement conversion from \u00b0C; km/h; cm, m, km; kg; l.
+nonmetric=Convert to non-metric (\u00b0F, mph, ft, lb, gal)
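
A side note on the numsep option above: it is implemented by rules in pythonpath/lightproof_en.py (further below) that offer both the common and the ISO thousand separator. A minimal standalone sketch of that digit grouping, using an illustrative input string rather than anything taken from the committed rules:

    # Standalone illustration (not shipped): the digit grouping behind the
    # "numsep" rules, suggesting a common (comma) and an ISO
    # (narrow no-break space, U+202F) thousand separator.
    import re

    num = re.compile(u"(?u)\\b(?P<D_1>\\d{1,3})(?P<d_1>\\d\\d\\d)(?P<d_2>\\d\\d\\d)\\b")
    m = num.search(u"about 1000000 users")
    common = m.expand(u"\\g<D_1>,\\g<d_1>,\\g<d_2>")         # u'1,000,000'
    iso = m.expand(u"\\g<D_1>\u202f\\g<d_1>\u202f\\g<d_2>")  # u'1 000 000' with U+202F
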
diff --git a/dictionaries/en/makefile.mk b/dictionaries/en/makefile.mk
index e2c7091..fc51ec1 100644
--- a/dictionaries/en/makefile.mk
+++ b/dictionaries/en/makefile.mk
@@ -77,7 +77,19 @@ COMPONENT_FILES= \
$(EXTENSIONDIR)$/README_hyph_en_US.txt \
$(EXTENSIONDIR)$/README.txt \
$(EXTENSIONDIR)$/th_en_US_v2.dat \
- $(EXTENSIONDIR)$/WordNet_license.txt
+ $(EXTENSIONDIR)$/WordNet_license.txt \
+ $(EXTENSIONDIR)$/README_lightproof_en.txt \
+ $(EXTENSIONDIR)$/Lightproof.py \
+ $(EXTENSIONDIR)$/Linguistic.xcu \
+ $(EXTENSIONDIR)$/pythonpath/lightproof_en.py \
+ $(EXTENSIONDIR)$/pythonpath/lightproof_handler_en.py \
+ $(EXTENSIONDIR)$/pythonpath/lightproof_impl_en.py \
+ $(EXTENSIONDIR)$/pythonpath/lightproof_opts_en.py \
+ $(EXTENSIONDIR)$/dialog/en_en_US.default \
+ $(EXTENSIONDIR)$/dialog/en_en_US.properties \
+ $(EXTENSIONDIR)$/dialog/en.xdl \
+ $(EXTENSIONDIR)$/dialog/OptionsDialog.xcu \
+ $(EXTENSIONDIR)$/dialog/OptionsDialog.xcs
COMPONENT_CONFIGDEST=.
COMPONENT_XCU= \
diff --git a/dictionaries/en/manifest.xml b/dictionaries/en/manifest.xml
index f54199b..9a1b9c3 100644
--- a/dictionaries/en/manifest.xml
+++ b/dictionaries/en/manifest.xml
@@ -3,4 +3,13 @@
<manifest:manifest xmlns:manifest="http://openoffice.org/2001/manifest">
<manifest:file-entry manifest:media-type="application/vnd.sun.star.configuration-data"
manifest:full-path="dictionaries.xcu"/>
+ <manifest:file-entry manifest:full-path="dialog/OptionsDialog.xcs"
+ manifest:media-type="application/vnd.sun.star.configuration-schema" />
+ <manifest:file-entry manifest:full-path="dialog/OptionsDialog.xcu"
+ manifest:media-type="application/vnd.sun.star.configuration-data" />
+ <manifest:file-entry manifest:media-type="application/vnd.sun.star.uno-component;type=Python"
+ manifest:full-path="Lightproof.py"/>
+ <manifest:file-entry
+ manifest:media-type="application/vnd.sun.star.configuration-data"
+ manifest:full-path="Linguistic.xcu" />
</manifest:manifest>
diff --git a/dictionaries/en/pythonpath/lightproof_en.py b/dictionaries/en/pythonpath/lightproof_en.py
new file mode 100644
index 0000000..d0e48b5
--- /dev/null
+++ b/dictionaries/en/pythonpath/lightproof_en.py
@@ -0,0 +1 @@
+dic = [[u'(?u)(?<![-\\w\u2013.,\xad])and and(?![-\\w\u2013\xad])', u'and', u'Did you mean:', False], [u'(?u)(?<![-\\w\u2013.,\xad])or or(?![-\\w\u2013\xad])', u'or', u'Did you mean:', False], [u'(?u)(?<![-\\w\u2013.,\xad])for for(?![-\\w\u2013\xad])', u'for', u'Did you mean:', False], [u'(?u)(?<![-\\w\u2013.,\xad])the the(?![-\\w\u2013\xad])', u'the', u'Did you mean:', False], [u'(?iu)(?<![-\\w\u2013.,\xad])[Yy][Ii][Nn][Gg] [Aa][Nn][Dd] [Yy][Aa][Nn][Gg](?![-\\w\u2013\xad])', u'yin and yang', u'Did you mean:', False], [u'(?iu)(?<![-\\w\u2013.,\xad])[Ss][Cc][Oo][Tt] [Ff][Rr][Ee][Ee](?![-\\w\u2013\xad])', u'scot-free\\nscotfree', u'Did you mean:', False], [u"(?iu)(?<![-\\w\u2013.,\xad])([Yy][Oo][Uu][Rr]|[Hh][Ee][Rr]|[Oo][Uu][Rr]|[Tt][Hh][Ee][Ii][Rr])['\u2019][Ss](?![-\\w\u2013\xad])", u'\\1s', u'Possessive pronoun:', False], [u'(?u)(?<![-\\w\u2013.,\xad])(?P<a_1>[Aa])n(?P<_>[ ][\'\u2018"\u201c]?)(?P<vow_1>[aeiouAEIOU]\\w*)(?P<etc_1>[-\u2013\'\u2019\\w]*)(?![-\\w\u2013\xad])', u
'\\g<a_1>\\g<_>\\g<vow_1>\\g<etc_1>', u'Did you mean:', u'm.group("vow_1") in aA or m.group("vow_1").lower() in aA'], [u'(?u)(?<![-\\w\u2013.,\xad])a(?P<_>[ ][\'\u2018"\u201c]?)(?P<vow_1>[aeiouAEIOU]\\w*)(?P<etc_1>[-\u2013\'\u2019\\w]*)(?![-\\w\u2013\xad])', u'an\\g<_>\\g<vow_1>\\g<etc_1>', u'Bad article?', u'(m.group("vow_1") <> m.group("vow_1").upper()) and not (m.group("vow_1") in aA or m.group("vow_1").lower() in aA) and spell(LOCALE,m.group("vow_1"))'], [u'(?u)(?<![-\\w\u2013.,\xad])a(?P<_>[ ][\'\u2018"\u201c]?)(?P<con_1>[bcdfghj-np-tv-zBCDFGHJ-NP-TV-Z]\\w*)(?P<etc_1>[-\u2013\'\u2019\\w]*)(?![-\\w\u2013\xad])', u'an\\g<_>\\g<con_1>\\g<etc_1>', u'Did you mean:', u'm.group("con_1") in aAN or m.group("con_1").lower() in aAN'], [u'(?u)(?<![-\\w\u2013.,\xad])(?P<a_1>[Aa])n(?P<_>[ ][\'\u2018"\u201c]?)(?P<con_1>[bcdfghj-np-tv-zBCDFGHJ-NP-TV-Z]\\w*)(?P<etc_1>[-\u2013\'\u2019\\w]*)(?![-\\w\u2013\xad])', u'\\g<a_1>\\g<_>\\g<con_1>\\g<etc_1>', u'Bad article?', u'(m.group("con_1")
<> m.group("con_1").upper()) and not (m.group("con_1") in aA or m.group("con_1").lower() in aAN) and not m.group("con_1") in aB and spell(LOCALE,m.group("con_1"))'], [u'(?u)((?<=[!?.] )|^)A(?P<_>[ ][\'\u2018"\u201c]?)(?P<vow_1>[aeiouAEIOU]\\w*)(?P<etc_1>[-\u2013\'\u2019\\w]*)(?![-\\w\u2013\xad])', u'An\\g<_>\\g<vow_1>\\g<etc_1>', u'Bad article?', u'(m.group("vow_1") <> m.group("vow_1").upper()) and not (m.group("vow_1") in aA or m.group("vow_1").lower() in aA) and spell(LOCALE,m.group("vow_1"))'], [u'(?u)((?<=[!?.] )|^)A(?P<_>[ ][\'\u2018"\u201c]?)(?P<con_1>[bcdfghj-np-tv-zBCDFGHJ-NP-TV-Z]\\w*)(?P<etc_1>[-\u2013\'\u2019\\w]*)(?![-\\w\u2013\xad])', u'An\\g<_>\\g<con_1>\\g<etc_1>', u'Did you mean:', u'm.group("con_1") in aAN or m.group("con_1").lower() in aAN'], [u'(?u)(?<![-\\w\u2013.,\xad])a(?P<_>[ ][\'\u2018"\u201c]?)(?P<nvow_1>(8[0-9]*|1[18](000)*)(th)?)(?P<etc_1>[-\u2013\'\u2019\\w]*)(?![-\\w\u2013\xad])', u'an\\g<_>\\g<nvow_1>\\g<etc_1>', u'Did you mean:', False], [u'(?u
)((?<=[!?.] )|^)A(?P<_>[ ][\'\u2018"\u201c]?)(?P<nvow_1>(8[0-9]*|1[18](000)*)(th)?)(?P<etc_1>[-\u2013\'\u2019\\w]*)(?![-\\w\u2013\xad])', u'An\\g<_>\\g<nvow_1>\\g<etc_1>', u'Did you mean:', False], [u'(?u)(?<![-\\w\u2013.,\xad])(?P<a_1>[Aa])n(?P<_>[ ][\'\u2018"\u201c]?)(?P<ncon_1>[0-79][0-9]*)(?P<etc_1>[-\u2013\'\u2019\\w]*)(?![-\\w\u2013\xad])', u'\\g<a_1>\\g<_>\\g<ncon_1>\\g<etc_1>', u'Did you mean:', u'not m.group("ncon_1")[:2] in ["11", "18"]'], [u'(?u)((?<=[!?.] )|^)(?P<low_1>[a-z]+)(?![-\\w\u2013\xad])', u'= m.group("low_1").capitalize()', u'Missing capitalization?', u're.match("[a-z].*[.?!] [A-Z]", TEXT) and not abbrev.search(TEXT)'], [u'(?u)((?<=[!?.] )|^)(?P<low_1>[a-z]+)(?![-\\w\u2013\xad])', u'= m.group("low_1").capitalize()', u'Missing capitalization?', u'option(LOCALE,"cap") and not abbrev.search(TEXT)'], [u'(?u) ([.?!,:;)\u201d\\[\\]])\\b', u'\\1 ', u'Reversed space and punctuation?', False], [u'(?u) +[.]', u'.', u'Extra space before the period?', u'LOCALE.Coun
try == "US"'], [u'(?u) +[.]', u'.', u'Extra space before the full stop?', u'LOCALE.Country != "US"'], [u'(?u) +([?!,:;)\u201d\\]])', u'\\1', u'= "Extra space before the " + punct[m.group(1)] + "?"', False], [u'(?u)([([\u201c]) ', u'\\1', u'= "Extra space after the " + punct[m.group(1)] + "?"', False], [u'(?u)\\b(---?| --? )\\b', u' \u2013 \\n\u2014', u'En dash or em dash:', u'not option(LOCALE,"ndash") and not option(LOCALE,"mdash")'], [u'(?u)\\b(---?| --? |\u2014)\\b', u' \u2013 ', u'En dash:', u'option(LOCALE,"ndash") and not option(LOCALE,"mdash")'], [u'(?u)\\b(---?| --? | \u2013 )\\b', u'\u2014', u'Em dash:', u'option(LOCALE,"mdash")'], [u'(?u)(?P<number_1>\\d+([.]\\d+)?)(x| x )(?P<number_2>\\d+([.]\\d+)?)', u'\\g<number_1>\xd7\\g<number_2>', u'Multiplication sign.', u'option(LOCALE,"times")'], [u'(?u)(?P<Abc_1>[a-zA-Z]+)(?P<pun_1>[?!,:;%\u2030\u2031\u02da\u201c\u201d\u2018])(?P<Abc_2>[a-zA-Z]+)', u'\\g<Abc_1>\\g<pun_1> \\g<Abc_2>', u'Missing space?', False], [u'(?u)(?P<
abc_1>[a-z]+)[.](?P<ABC_1>[A-Z]+)', u'\\g<abc_1>. \\g<ABC_1>', u'Missing space?', False], [u'(?u)[)]', u'', u'Extra closing parenthesis?', u'option(LOCALE,"pair") and not "(" in TEXT'], [u'(?u)[(]', u'', u'Extra opening parenthesis?', u'option(LOCALE,"pair") and TEXT[-1] in u"?!;:\u201d\u2019" and not ")" in TEXT'], [u'(?u)(?<![0-9])\u201d', u'', u'Extra quotation mark?', u'option(LOCALE,"pair") and not u"\u201c" in TEXT'], [u'(?u)(?<=[0-9])\u201d', u'\u2033\\n', u'Bad double prime or extra quotation mark?', u'option(LOCALE,"apostrophe") and not u"\u201c" in TEXT'], [u'(?u)\u201c', u'', u'Extra quotation mark?', u'option(LOCALE,"pair") and TEXT[-1] in u"?!;:\u201d\u2019" and not u"\u201d" in TEXT'], [u'(?u)[.]{3}', u'\u2026', u'Ellipsis.', u'option(LOCALE,"ellipsis")'], [u'(?u)\\b {2,3}(\\b|$)', u'\\1 ', u'Extra space.', u'option(LOCALE,"spaces")'], [u'(?u)(^|\\b|(?P<pun_1>[?!,:;%\u2030\u2031\u02da\u201c\u201d\u2018])|[.]) {2,3}(\\b|$)', u'\\1 ', u'Extra space.', u'option(LO
CALE,"spaces2")'], [u'(?u)(^|\\b|(?P<pun_1>[?!,:;%\u2030\u2031\u02da\u201c\u201d\u2018])|[.]) {4,}(\\b|$)', u'\\1 \\n\t', u'Change multiple spaces to a single space or a tabulator:', u'option(LOCALE,"spaces3")'], [u'(?iu)[\\"\u201c\u201d\u201f\u201e]((?P<abc_1>[a-zA-Z]+)[^\\"\u201c\u201d\u201f\u201e]*)[\\"\u201c\u201f]', u'\u201c\\1\u201d', u'Quotation marks.', u'option(LOCALE,"quotation")'], [u'(?iu)[\\"\u201d\u201f\u201e]((?P<abc_1>[a-zA-Z]+)[^\\"\u201c\u201d\u201f\u201e]*)[\\"\u201c\u201d\u201f]', u'\u201c\\1\u201d', u'Quotation marks.', u'option(LOCALE,"quotation")'], [u"(?iu)'(?P<abc_1>[a-zA-Z]+)'", u'\u2018\\g<abc_1>\u2019', u'Quotation marks.', u'option(LOCALE,"apostrophe")'], [u'(?iu)[\\"\u201d\u201f\u201e]((?P<abc_1>[a-zA-Z]+)[^\\"\u201c\u201d\u201f\u201e]*)[\\"\u201c\u201d\u201f]', u'\u201c\\1\u201d', u'Quotation marks.', u'option(LOCALE,"apostrophe")'], [u"(?iu)(?P<Abc_1>[a-zA-ZA-Z]+)'(?P<w_1>\\w*)", u'\\g<Abc_1>\u2019\\g<w_1>', u'Replace typewriter apostrophe or
quotation mark:', u'option(LOCALE,"apostrophe")'], [u"(?u)(?<= )'(?P<Abc_1>[a-zA-Z]+)", u'\u2018\\g<Abc_1>\\n\u2019\\g<Abc_1>', u'Replace typewriter quotation mark or apostrophe:', u'option(LOCALE,"apostrophe")'], [u"(?u)^'(?P<Abc_1>[a-zA-Z]+)", u'\u2018\\g<Abc_1>\\n\u2019\\g<Abc_1>', u'Replace typewriter quotation mark or apostrophe:', u'option(LOCALE,"apostrophe")'], [u'(?u)\\b(?P<d2_1>\\d\\d)(?P<d_1>\\d\\d\\d)\\b', u'\\g<d2_1>,\\g<d_1>\\n\\g<d2_1>\u202f\\g<d_1>', u'Use thousand separator (common or ISO).', u'option(LOCALE,"numsep")'], [u'(?u)\\b(?P<D_1>\\d{1,3})(?P<d_1>\\d\\d\\d)(?P<d_2>\\d\\d\\d)\\b', u'\\g<D_1>,\\g<d_1>,\\g<d_2>\\n\\g<D_1>\u202f\\g<d_1>\u202f\\g<d_2>', u'Use thousand separators (common or ISO).', u'option(LOCALE,"numsep")'], [u'(?u)\\b(?P<D_1>\\d{1,3})(?P<d_1>\\d\\d\\d)(?P<d_2>\\d\\d\\d)(?P<d_3>\\d\\d\\d)\\b', u'\\g<D_1>,\\g<d_1>,\\g<d_2>,\\g<d_3>\\n\\g<D_1>\u202f\\g<d_1>\u202f\\g<d_2>\u202f\\g<d_3>', u'Use thousand separators (common or ISO).', u'optio
n(LOCALE,"numsep")'], [u'(?u)(?<![-\\w\u2013.,\xad])(?P<Abc_1>[a-zA-Z]+) \\1(?![-\\w\u2013\xad])', u'\\g<Abc_1>', u'Word duplication?', u'option(LOCALE,"dup")'], [u'(?u)(?<![-\\w\u2013.,\xad])([Tt])his (?P<abc_1>[a-z]+)(?![-\\w\u2013\xad])', u'\\1hese \\g<abc_1>\\n\\1his, \\g<abc_1>', u'Did you mean:', u'option(LOCALE,"grammar") and morph(LOCALE,m.group("abc_1"), "Ns")'], [u"(?u)(?<![-\\w\u2013.,\xad])with it['\u2019]s(?![-\\w\u2013\xad])", u'with its\\nwith, it\u2019s', u'Did you mean:', u'option(LOCALE,"grammar")'], [u"(?iu)(?<![-\\w\u2013.,\xad])([Ii][Tt]|[Ss]?[Hh][Ee]) [Dd][Oo][Nn]['\u2019][Tt](?![-\\w\u2013\xad])", u'\\1 doesn\u2019t', u'Did you mean:', u'option(LOCALE,"grammar")'], [u'(?u)(?<![-\\w\u2013.,\xad])([-\u2212]?\\d+(?:[,.]\\d+)*) (\xb0F|Fahrenheit)(?![-\\w\u2013\xad])', u'= measurement(m.group(1), "F", "C", u" \xb0C", ".", ",")', u'Convert to Celsius:', u'option(LOCALE,"metric")'], [u'(?u)(?<![-\\w\u2013.,\xad])([-\u2212]?\\d+(?:[,.]\\d+)*) (\xb0C|Celsius)(?
![-\\w\u2013\xad])', u'= measurement(m.group(1), "C", "F", u" \xb0F", ".", ",")', u'Convert to Fahrenheit:', u'option(LOCALE,"nonmetric")'], [u'(?u)(?<![-\\w\u2013.,\xad])([-\u2212]?\\d+(?:[,.]\\d+)*(?: 1/2| ?\xbd)?) (ft|foot|feet)(?! [1-9])(?![-\\w\u2013\xad])', u'= measurement(m.group(1), "ft", "cm", " cm", ".", ",") + "\\n" + measurement(m.group(1), "ft", "m", " m", ".", ",")', u'Convert to metric:', u'option(LOCALE,"metric")'], [u'(?u)(?<![-\\w\u2013.,\xad])([-\u2212]?\\d+(?:[,.]\\d+)*(?: 1/2| ?\xbd)?) ft[.]? ([0-9]+(?: 1/2| ?\xbd)?) in(?![-\\w\u2013\xad])', u'= measurement(m.group(1) + "*12+" + m.group(2), "in", "cm", " cm", ".", ",") + "\\n" + measurement(m.group(1) + "*12+" + m.group(2), "in", "m", " m", ".", ",")', u'Convert to metric:', u'option(LOCALE,"metric")'], [u'(?u)(?<![-\\w\u2013.,\xad])([-\u2212]?\\d+(?:[,.]\\d+)*(?: 1/2| ?\xbd)?) in(?![-\\w\u2013\xad])', u'= measurement(m.group(1), "in", "mm", " mm", ".", ",") + "\\n" + measurement(m.group(1), "in", "cm",
" cm", ".", ",") + "\\n" + measurement(m.group(1), "in", "m", " m", ".", ",")', u'Convert to metric:', u'option(LOCALE,"metric")'], [u'(?u)(?<![-\\w\u2013.,\xad])([-\u2212]?\\d+(?:[,.]\\d+)*) mm(?![-\\w\u2013\xad])', u'= measurement(m.group(1), "mm", "in", " in", ".", ",")', u'Convert from metric:', u'option(LOCALE,"nonmetric")'], [u'(?u)(?<![-\\w\u2013.,\xad])([-\u2212]?\\d+(?:[,.]\\d+)*) cm(?![-\\w\u2013\xad])', u'= measurement(m.group(1), "cm", "in", " in", ".", ",") + "\\n" + measurement(m.group(1), "cm", "ft", " ft", ".", ",")', u'Convert from metric:', u'option(LOCALE,"nonmetric")'], [u'(?u)(?<![-\\w\u2013.,\xad])([-\u2212]?\\d+(?:[,.]\\d+)*) (m|meter|metre)(?![-\\w\u2013\xad])', u'= measurement(m.group(1), "m", "in", " in", ".", ",") + "\\n" + measurement(m.group(1), "m", "ft", " ft", ".", ",") + "\\n" + measurement(m.group(1), "m", "mi", " mi", ".", ",")', u'Convert from metric:', u'option(LOCALE,"nonmetric")'], [u'(?u)(?<![-\\w\u2013.,\xad])([-\u2212]?\\d+(?:[,.]\\d
+)*(?: 1/2| ?\xbd)?) miles?(?![-\\w\u2013\xad])', u'= measurement(m.group(1), "mi", "m", " m", ".", ",") + "\\n" + measurement(m.group(1), "mi", "km", " km", ".", ",")', u'Convert to metric:', u'option(LOCALE,"metric")'], [u'(?u)(?<![-\\w\u2013.,\xad])([-\u2212]?\\d+(?:[,.]\\d+)*) km(?![-\\w\u2013\xad])', u'= measurement(m.group(1), "km", "mi", " mi", ".", ",")', u'Convert to miles:', u'option(LOCALE,"nonmetric")'], [u'(?u)(?<![-\\w\u2013.,\xad])([-\u2212]?\\d+(?:,\\d+)?) (yd|yards?)(?![-\\w\u2013\xad])', u'= measurement(m.group(1), "yd", "m", " m", ".", ",")', u'Convert to metric:', u'option(LOCALE,"metric")'], [u'(?u)(?<![-\\w\u2013.,\xad])([-\u2212]?\\d+(?:,\\d+)?) (gal(lons?)?)(?![-\\w\u2013\xad])', u'= measurement(m.group(1), "gal", "l", " l", ".", ",") + "\\n" + measurement(m.group(1), "uk_gal", "l", " l (in UK)", ".", ",")', u'Convert to metric:', u'option(LOCALE,"metric")'], [u'(?u)(?<![-\\w\u2013.,\xad])([-\u2212]?\\d+(?:,\\d+)?) (pint)(?![-\\w\u2013\xad])', u'= mea
surement(m.group(1), "pt", "dl", " dl", ".", ",") + "\\n" + measurement(m.group(1), "uk_pt", "dl", " dl (in UK)", ".", ",") + "\\n" + measurement(m.group(1), "pt", "l", " l", ".", ",") + "\\n" + measurement(m.group(1), "uk_pt", "l", " l (in UK)", ".", ",")', u'Convert to metric:', u'option(LOCALE,"metric")'], [u'(?u)(?<![-\\w\u2013.,\xad])([-\u2212]?\\d+(?:,\\d+)?) (l|L|litres?|liters?)(?![-\\w\u2013\xad])', u'= measurement(m.group(1), "l", "gal", " gal", ".", ",") + "\\n" + measurement(m.group(1), "l", "gal", " gal (in UK)", ".", ",")', u'Convert to gallons:', u'option(LOCALE,"nonmetric")'], [u'(?u)(?<![-\\w\u2013.,\xad])([-\u2212]?\\d+(?:[,.]\\d+)*) lbs?[.]?(?![-\\w\u2013\xad])', u'= measurement(m.group(1), "lbm", "kg", " kg", ".", ",")', u'Convert to metric:', u'option(LOCALE,"metric")'], [u'(?u)(?<![-\\w\u2013.,\xad])([-\u2212]?\\d+(?:[,.]\\d+)*) kg[.]?(?![-\\w\u2013\xad])', u'= measurement(m.group(1), "kg", "lbm", " lb", ".", ",")', u'Convert to pounds:', u'option(LOCAL
E,"nonmetric")'], [u'(?u)(?<![-\\w\u2013.,\xad])([-\u2212]?\\d+(?:[,.]\\d+)*) mph(?![-\\w\u2013\xad])', u'= measurement(m.group(1), "mph", "km/h", " km/h", ".", ",")', u'Convert to km/hour:', u'option(LOCALE,"metric")'], [u'(?u)(?<![-\\w\u2013.,\xad])([-\u2212]?\\d+(?:[,.]\\d+)*) km/h(?![-\\w\u2013\xad])', u'= measurement(m.group(1), "km/h", "mph", " mph", ".", ",")', u'Convert to miles/hour:', u'option(LOCALE,"nonmetric")']]
\ No newline at end of file
diff --git a/dictionaries/en/pythonpath/lightproof_handler_en.py b/dictionaries/en/pythonpath/lightproof_handler_en.py
new file mode 100644
index 0000000..e8c458a
--- /dev/null
+++ b/dictionaries/en/pythonpath/lightproof_handler_en.py
@@ -0,0 +1,121 @@
+import uno
+import unohelper
+
+from lightproof_opts_en import lopts
+from lightproof_opts_en import lopts_default
+from lightproof_impl_en import pkg
+
+from com.sun.star.lang import XServiceInfo
+from com.sun.star.awt import XContainerWindowEventHandler
+
+# options
+options = {}
+
+def load(context):
+ try:
+ l = LightproofOptionsEventHandler(context)
+ for i in lopts:
+ l.load(i)
+ except:
+ pass
+
+def get_option(page, option):
+ try:
+ return options[page + "," + option]
+ except:
+ try:
+ return options[page[:2] + "," + option]
+ except:
+ return 0
+
+def set_option(page, option, value):
+ options[page + "," + option] = int(value)
+
+class LightproofOptionsEventHandler( unohelper.Base, XServiceInfo, XContainerWindowEventHandler ):
+ def __init__( self, ctx ):
+ p = uno.createUnoStruct( "com.sun.star.beans.PropertyValue" )
+ p.Name = "nodepath"
+ p.Value = "/org.openoffice.Lightproof_%s/Leaves"%pkg
+ self.xConfig = ctx.ServiceManager.createInstance( 'com.sun.star.configuration.ConfigurationProvider' )
+ self.node = self.xConfig.createInstanceWithArguments( 'com.sun.star.configuration.ConfigurationUpdateAccess', (p, ) )
+ self.service = "org.openoffice.comp.pyuno.LightproofOptionsEventHandler." + pkg
+ self.ImplementationName = self.service
+ self.services = (self.service, )
+
+ # XContainerWindowEventHandler
+ def callHandlerMethod(self, aWindow, aEventObject, sMethod):
+ if sMethod == "external_event":
+ return self.handleExternalEvent(aWindow, aEventObject)
+
+ def getSupportedMethodNames(self):
+ return ("external_event", )
+
+ def handleExternalEvent(self, aWindow, aEventObject):
+ sMethod = aEventObject
+ if sMethod == "ok":
+ self.saveData(aWindow)
+ elif sMethod == "back" or sMethod == "initialize":
+ self.loadData(aWindow)
+ return True
+
+ def load(self, sWindowName):
+ child = self.getChild(sWindowName)
+ for i in lopts[sWindowName]:
+ sValue = child.getPropertyValue(i)
+ if sValue == '':
+ if i in lopts_default[sWindowName]:
+ sValue = 1
+ else:
+ sValue = 0
+ set_option(sWindowName, i, sValue)
+
+ def loadData(self, aWindow):
+ sWindowName = self.getWindowName(aWindow)
+ if (sWindowName == None):
+ return
+ child = self.getChild(sWindowName)
+ for i in lopts[sWindowName]:
+ sValue = child.getPropertyValue(i)
+ if sValue == '':
+ if i in lopts_default[sWindowName]:
+ sValue = 1
+ else:
+ sValue = 0
+ xControl = aWindow.getControl(i)
+ xControl.State = sValue
+ set_option(sWindowName, i, sValue)
+
+ def saveData(self, aWindow):
+ sWindowName = self.getWindowName(aWindow)
+ if (sWindowName == None):
+ return
+ child = self.getChild(sWindowName)
+ for i in lopts[sWindowName]:
+ xControl = aWindow.getControl(i)
+ sValue = xControl.State
+ child.setPropertyValue(i, str(sValue))
+ set_option(sWindowName, i, sValue)
+ self.commitChanges()
+
+ def getWindowName(self, aWindow):
+ sName = aWindow.getModel().Name
+ if sName in lopts:
+ return sName
+ return None
+
+ # XServiceInfo method implementations
+ def getImplementationName (self):
+ return self.ImplementationName
+
+ def supportsService(self, ServiceName):
+ return (ServiceName in self.services)
+
+ def getSupportedServiceNames (self):
+ return self.services
+
+ def getChild(self, name):
+ return self.node.getByName(name)
+
+ def commitChanges(self):
+ self.node.commitChanges()
+ return True
diff --git a/dictionaries/en/pythonpath/lightproof_impl_en.py b/dictionaries/en/pythonpath/lightproof_impl_en.py
new file mode 100644
index 0000000..b09902b
--- /dev/null
+++ b/dictionaries/en/pythonpath/lightproof_impl_en.py
@@ -0,0 +1,6 @@
+# -*- encoding: UTF-8 -*-
+pkg = "en"
+lang = "en"
+locales = {'en-GB': ['en', 'GB', ''], 'en-ZW': ['en', 'ZW', ''], 'en-PH': ['en', 'PH', ''], 'en-TT': ['en', 'TT', ''], 'en-BZ': ['en', 'BZ', ''], 'en-NA': ['en', 'NA', ''], 'en-IE': ['en', 'IE', ''], 'en-GH': ['en', 'GH', ''], 'en-US': ['en', 'US', ''], 'en-IN': ['en', 'IN', ''], 'en-BS': ['en', 'BS', ''], 'en-JM': ['en', 'JM', ''], 'en-AU': ['en', 'AU', ''], 'en-NZ': ['en', 'NZ', ''], 'en-ZA': ['en', 'ZA', ''], 'en-CA': ['en', 'CA', '']}
+version = "0.2"
+author = ""
diff --git a/dictionaries/en/pythonpath/lightproof_opts_en.py b/dictionaries/en/pythonpath/lightproof_opts_en.py
new file mode 100644
index 0000000..aa43156
--- /dev/null
+++ b/dictionaries/en/pythonpath/lightproof_opts_en.py
@@ -0,0 +1,4 @@
+lopts = {}
+lopts_default = {}
+lopts['en'] = [u'grammar', u'cap', u'dup', u'pair', u'spaces', u'mdash', u'quotation', u'times', u'spaces2', u'ndash', u'apostrophe', u'ellipsis', u'spaces3', u'minus', u'metric', u'numsep', u'nonmetric']
+lopts_default['en'] = [u'spaces', u'times']
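
These option lists are read by lightproof_handler_en.py above: an option with no stored value defaults to on only when it is listed in lopts_default, and get_option() falls back from the full locale page name (e.g. "en_US") to the two-letter language code. A standalone sketch of that defaulting behaviour, without the UNO configuration backend (illustrative code, not part of the commit):

    # Mirrors get_option()/set_option() from lightproof_handler_en.py,
    # with a reduced option list and no UNO configuration access.
    lopts = {'en': ['grammar', 'spaces', 'times']}      # subset for illustration
    lopts_default = {'en': ['spaces', 'times']}
    options = {}

    def set_option(page, option, value):
        options[page + "," + option] = int(value)

    def get_option(page, option):
        try:
            return options[page + "," + option]
        except KeyError:
            try:
                return options[page[:2] + "," + option]
            except KeyError:
                return 0

    # options with no stored value default to 1 if listed in lopts_default
    for opt in lopts['en']:
        set_option('en', opt, opt in lopts_default['en'])

    assert get_option('en_US', 'spaces') == 1    # found via the 'en' fallback page
    assert get_option('en_US', 'grammar') == 0   # off until enabled in the dialog
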
commit 2214a3731aaffc4a51d1c712dbee553a0ba35af0
Author: László Németh <nemeth at numbertext.org>
Date: Mon Dec 5 19:11:32 2011 +0100
Hungarian sentence checking
diff --git a/dictionaries/hu_HU/Lightproof.py b/dictionaries/hu_HU/Lightproof.py
new file mode 100644
index 0000000..24eac51
--- /dev/null
+++ b/dictionaries/hu_HU/Lightproof.py
@@ -0,0 +1,323 @@
+# -*- encoding: UTF-8 -*-
+# Lightproof grammar checker for LibreOffice and OpenOffice.org
+# http://launchpad.net/lightproof
+# version 1.4.3 (2011-12-05)
+#
+# 2009-2011 (c) László Németh (nemeth at numbertext org), license: MPL 1.1 / GPLv3+ / LGPLv3+
+
+import uno, unohelper, sys, traceback, re
+from lightproof_impl_hu_HU import locales
+from lightproof_impl_hu_HU import pkg
+import lightproof_handler_hu_HU
+from string import join
+
+from com.sun.star.linguistic2 import XProofreader, XSupportedLocales
+from com.sun.star.linguistic2 import ProofreadingResult, SingleProofreadingError
+from com.sun.star.lang import XServiceInfo, XServiceName, XServiceDisplayName
+from com.sun.star.lang import Locale
+from com.sun.star.text.TextMarkupType import PROOFREADING
+
+# loaded rules
+langrule = {}
+# ignored rules
+ignore = {}
+
+# cache for morphological analyses
+analyses = {}
+stems = {}
+suggestions = {}
+
+# assign Calc functions
+calcfunc = None
+
+# check settings
+def option(lang, opt):
+ return lightproof_handler_hu_HU.get_option(lang.Language + "_" + lang.Country, opt)
+
+# filtering affix fields (ds, is, ts etc.)
+def onlymorph(st):
+ if st != None:
+ st = re.sub(r"^.*(st:|po:)", r"\1", st) # keep last word part
+ st = re.sub(r"\b(?=[dit][sp]:)","@", st) # and its affixes
+ st = re.sub(r"(?<!@)\b\w\w:\w+","", st).replace('@','').strip()
+ return st
+
+# if the pattern matches all analyses of the input word,
+# return the last matched substring
+def _morph(rLoc, word, pattern, all, onlyaffix):
+ global analyses
+ if word == None:
+ return None
+ if word not in analyses:
+ x = spellchecker.spell(u"<?xml?><query type='analyze'><word>" + word + "</word></query>", rLoc, ())
+ if not x:
+ return None
+ t = x.getAlternatives()
+ if not t:
+ t = [""]
+ analyses[word] = t[0]
+ a = analyses[word].split("</a>")[:-1]
+ result = None
+ p = re.compile(pattern)
+ for i in a:
+ if onlyaffix:
+ i = onlymorph(i)
+ result = p.search(i)
+ if result:
+ result = result.group(0)
+ if not all:
+ return result
+ elif all:
+ return None
+ return result
+
+def morph(rLoc, word, pattern, all=True):
+ return _morph(rLoc, word, pattern, all, False)
+
+def affix(rLoc, word, pattern, all=True):
+ return _morph(rLoc, word, pattern, all, True)
+
+def spell(rLoc, word):
+ if word == None:
+ return None
+ return spellchecker.isValid(word, rLoc, ())
+
+# get the tuple of the stem of the word or an empty array
+def stem(rLoc, word):
+ global stems
+ if word == None:
+ return []
+ if not word in stems:
+ x = spellchecker.spell(u"<?xml?><query type='stem'><word>" + word + "</word></query>", rLoc, ())
+ if not x:
+ return []
+ t = x.getAlternatives()
+ if not t:
+ t = []
+ stems[word] = list(t)
+ return stems[word]
+
+# get the tuple of the morphological generation of a word or an empty array
+def generate(rLoc, word, example):
+ if word == None:
+ return []
+ x = spellchecker.spell(u"<?xml?><query type='generate'><word>" + word + "</word><word>" + example + "</word></query>", rLoc, ())
+ if not x:
+ return []
+ t = x.getAlternatives()
+ if not t:
+ t = []
+ return list(t)
+
+# get suggestions
+def suggest(rLoc, word):
+ global suggestions
+ if word == None:
+ return word
+ if word not in suggestions:
+ x = spellchecker.spell("_" + word, rLoc, ())
+ if not x:
+ return word
+ t = x.getAlternatives()
+ suggestions[word] = join(t, "\n")
+ return suggestions[word]
+
+# get the nth word of the input string or None
+def word(s, n):
+ a = re.match("(?u)( [-.\w%]+){" + str(n-1) + "}( [-.\w%]+)", s)
+ if not a:
+ return None
+ return a.group(2)[1:]
+
+# get the (-)nth word of the input string or None
+def wordmin(s, n):
+ a = re.search("(?u)([-.\w%]+ )([-.\w%]+ ){" + str(n-1) + "}$", s)
+ if not a:
+ return None
+ return a.group(1)[:-1]
+
+def calc(funcname, par):
+ global calcfunc
+ global SMGR
+ if calcfunc == None:
+ calcfunc = SMGR.createInstance( "com.sun.star.sheet.FunctionAccess")
+ if calcfunc == None:
+ return None
+ return calcfunc.callFunction(funcname, par)
+
+def proofread( nDocId, TEXT, LOCALE, nStartOfSentencePos, nSuggestedSentenceEndPos, rProperties ):
+ global ignore
+ aErrs = []
+ s = TEXT[nStartOfSentencePos:nSuggestedSentenceEndPos]
+ for i in get_rule(LOCALE):
+ if i[0] and not str(i[0]) in ignore:
+ for m in i[0].finditer(s):
+ if not i[3] or eval(i[3]):
+ aErr = uno.createUnoStruct( "com.sun.star.linguistic2.SingleProofreadingError" )
+ aErr.nErrorStart = nStartOfSentencePos + m.start(0) # nStartOfSentencePos
+ aErr.nErrorLength = m.end(0) - m.start(0)
+ aErr.nErrorType = PROOFREADING
+ aErr.aRuleIdentifier = str(i[0])
+ iscap = (i[4] and m.group(0)[0:1].isupper())
+ if i[1][0:1] == "=":
+ aErr.aSuggestions = tuple(cap(eval(i[1][1:]).split("\n"), iscap, LOCALE))
+ else:
+ aErr.aSuggestions = tuple(cap(m.expand(i[1]).split("\n"), iscap, LOCALE))
+ comment = i[2]
+ if comment[0:1] == "=":
+ comment = eval(comment[1:])
+ aErr.aShortComment = comment.split("\n")[0]
+ aErr.aFullComment = comment.split("\n")[-1]
+ aErr.aProperties = ()
+ aErrs = aErrs + [aErr]
+ return tuple(aErrs)
+
+def cap(a, iscap, rLoc):
+ if iscap:
+ for i in range(0, len(a)):
+ if a[i][0:1] == "i":
+ if rLoc.Language == "tr" or rLoc.Language == "az":
+ a[i] = u"\u0130" + a[i][1:]
+ elif a[i][1:2] == "j" and rLoc.Language == "nl":
+ a[i] = "IJ" + a[i][2:]
+ else:
+ a[i] = "I" + a[i][1:]
+ else:
+ a[i] = a[i].capitalize()
+ return a
+
+def get_rule(rLocale):
+ module = rLocale.Language
+ if rLocale.Country != "":
+ module = module + "_" + rLocale.Country
+ try:
+ return langrule[module]
+ except:
+ try:
+ module = rLocale.Language
+ return langrule[module]
+ except:
+ try:
+ d = __import__("lightproof_" + pkg)
+ except:
+ print "Error: missing language data: " + module
+ return None
+ # compile regular expressions
+ for i in d.dic:
+ try:
+ if re.compile("[(][?]iu[)]").match(i[0]):
+ i += [True]
+ i[0] = re.sub("[(][?]iu[)]", "(?u)", i[0])
+ else:
+ i += [False]
+ i[0] = re.compile(i[0])
+ except:
+ print "Lightproof: bad rule -- ", i[0]
+ i[0] = None
+ langrule[module] = d.dic
+ return langrule[module]
+
+class Lightproof( unohelper.Base, XProofreader, XServiceInfo, XServiceName, XServiceDisplayName, XSupportedLocales):
+
+ def __init__( self, ctx, *args ):
+ global spellchecker
+ global SMGR
+ self.ctx = ctx
+ self.ServiceName = "com.sun.star.linguistic2.Proofreader"
+ self.ImplementationName = "org.openoffice.comp.pyuno.Lightproof." + pkg
+ self.SupportedServiceNames = (self.ServiceName, )
+ self.locales = []
+ for i in locales:
+ l = locales[i]
+ self.locales += [Locale(l[0], l[1], l[2])]
+ self.locales = tuple(self.locales)
+ currentContext = uno.getComponentContext()
+ SMGR = currentContext.ServiceManager
+ spellchecker = SMGR.createInstanceWithContext("com.sun.star.linguistic2.SpellChecker", currentContext)
+ lightproof_handler_hu_HU.load(currentContext)
+
+ # XServiceName method implementations
+ def getServiceName(self):
+ return self.ImplementationName
+
+ # XServiceInfo method implementations
+ def getImplementationName (self):
+ return self.ImplementationName
+
+ def supportsService(self, ServiceName):
+ return (ServiceName in self.SupportedServiceNames)
+
+ def getSupportedServiceNames (self):
+ return self.SupportedServiceNames
+
+ # XSupportedLocales
+ def hasLocale(self, aLocale):
+ if aLocale in self.locales:
+ return True
+ for i in self.locales:
+ if (i.Country == aLocale.Country or i.Country == "") and aLocale.Language == i.Language:
+ return True
+ return False
+
+ def getLocales(self):
+ return self.locales
+
+ # XProofreader
+ def isSpellChecker(self):
+ return False
+
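+    # called by the linguistic framework for each sentence: extend the suggested
+    # sentence end over trailing spaces, then collect the rule matches via proofread()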
+ def doProofreading(self, nDocId, rText, rLocale, nStartOfSentencePos, \
+ nSuggestedSentenceEndPos, rProperties):
+ aRes = uno.createUnoStruct( "com.sun.star.linguistic2.ProofreadingResult" )
+ aRes.aDocumentIdentifier = nDocId
+ aRes.aText = rText
+ aRes.aLocale = rLocale
+ aRes.nStartOfSentencePosition = nStartOfSentencePos
+ aRes.nStartOfNextSentencePosition = nSuggestedSentenceEndPos
+ l = rText[nSuggestedSentenceEndPos:nSuggestedSentenceEndPos+1]
+ while l == " ":
+ aRes.nStartOfNextSentencePosition = aRes.nStartOfNextSentencePosition + 1
+ l = rText[aRes.nStartOfNextSentencePosition:aRes.nStartOfNextSentencePosition+1]
+ if aRes.nStartOfNextSentencePosition == nSuggestedSentenceEndPos and l!="":
+ aRes.nStartOfNextSentencePosition = nSuggestedSentenceEndPos + 1
+ aRes.nBehindEndOfSentencePosition = aRes.nStartOfNextSentencePosition
+
+ try:
+ aRes.aErrors = proofread( nDocId, rText, rLocale, \
+ nStartOfSentencePos, aRes.nBehindEndOfSentencePosition, rProperties)
+ except:
+ traceback.print_exc(file=sys.stdout)
+ aRes.aErrors = ()
+ aRes.aProperties = ()
+ aRes.xProofreader = self
+ return aRes
+
+ def ignoreRule(self, rid, aLocale):
+ global ignore
+ ignore[rid] = 1
+
+ def resetIgnoreRules(self):
+ global ignore
+ ignore = {}
+
+ # XServiceDisplayName
+ def getServiceDisplayName(self, aLocale):
+ return "Lightproof Grammar Checker (" + pkg + ")"
+
+g_ImplementationHelper = unohelper.ImplementationHelper()
+g_ImplementationHelper.addImplementation( Lightproof, \
+ "org.openoffice.comp.pyuno.Lightproof." + pkg,
+ ("com.sun.star.linguistic2.Proofreader",),)
+
+g_ImplementationHelper.addImplementation( lightproof_handler_hu_HU.LightproofOptionsEventHandler, \
+ "org.openoffice.comp.pyuno.LightproofOptionsEventHandler." + pkg,
+ ("com.sun.star.awt.XContainerWindowEventHandler",),)
+
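+# abbreviations ending with a period (single letters, Roman numerals and common
+# Hungarian abbreviations), used by the rules to tell them from sentence-final periods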
+abbrev=re.compile(ur"(?i)\\b([a-z\xf6\xfc\xf3\u0151\xfa\xe9\xe1\u0171\xed\xd6\xdc\xd3\u0150\xda\xc9\xc1\u0170\xcd]|\xc1e|\xc1ht|AkH|al|\xe1lt|\xe1pr|aug|Avtv|bek|Bp|br|bt|Btk|cca|ci(i|ii|v|x)?|cl(i|ii|iii|iv|ix|v|vi|vii|viii|x|xi|xii|xiii|xiv|xix|xv|xvi|xvii|xviii|xx|xxi|xxii|xxiii|xxiv|xxix|xxv|xxvi|xxvii|xxviii|xxx|xxxi|xxxii|xxxiii|xxxiv|xxxix|xxxv|xxxvi|xxxvii|xxxviii)?|Co|cv(i|ii|iii)?|cx(c|ci|cii|ciii|civ|cix|cv|cvi|cvii|cviii|i|ii|iii|iv|ix|l|li|lii|liii|liv|lix|lv|lvi|lvii|lviii|v|vi|vii|viii|x|xi|xii|xiii|xiv|xix|xv|xvi|xvii|xviii|xx|xxi|xxii|xxiii|xxiv|xxix|xxv|xxvi|xxvii|xxviii)?|cs|Csjt|Cstv|cs\xfct|dec|dk|dny|dr|du|dz(s)?|egy|\xe9k|\xc9Ksz|em|\xe9ny|\xc9pt|\xe9rk|etc|Etv|e\xfc|ev|\xe9vf|febr|felv|Flt|ford|f\u0151isk|fsz(la|t)?|Ftv|gimn|g\xf6r|gr|Gt|gy|Gyvt|habil|hg|hiv|Hjt|honv|Hpt|hrsz|hsz|Hszt|htb|id|ifj|ig(h)?|ii(i)?|ill|Inc|ind|isk|iv|ix|izr|jan|jegyz|j\xfal|j\xfan|kat|kb|Kbt|ker|kft|kgy|kht|kir|kiv|Kjt|kk(t)?|koll|korm|k\xf6v|kp|Kr|krt|Kt(v)?|ld|li(i|ii|v|x
)?|Ltd|ltp|Ltv|luth|lv(i|ii|iii)?|lx(i|ii|iii|iv|ix|v|vi|vii|viii|x|xi|xii|xiii|xiv|xix|xv|xvi|xvii|xviii|xx|xxi|xxii|xxiii|xxiv|xxix|xxv|xxvi|xxvii|xxviii)?|ly|m\xe1j|m\xe1rc|mat|max|mb|megh|megj|MHSz|min|mk|Mo|Mt|NB|nov|ny(\xe1)?|Nyilv|nyrt|okl|okt|olv|op|orsz|ort|ov(h)?|\xf6ssz|\xd6tv|\xf6zv|Pf|pl(d)?|prof|prot|Ptk|pu|ref|rk(p)?|r\xf3m|r\xf6v|rt|sgt|spec|stb|sz(ept|erk)?|Szjt|szoc|Szt(v)?|sz\xfcl|Tbj|tc|tel|tkp|tszf|tvr|ty|ua|ui|\xfam|\xfan|uo|Ve|Vhr|vi(i|ii)?|v\xf6|vsz|Vt(v)?|xc(i|ii|iii|iv|ix|v|vi|vii|viii)?|xi(i|ii|v|x)?|xl(i|ii|iii|iv|ix|v|vi|vii|viii)?|xv(i|ii|iii)?|xx(i|ii|iii|iv|ix|v|vi|vii|viii|x|xi|xii|xiii|xiv|xix|xv|xvi|xvii|xviii)?|zrt)\\.")
+
+
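+# convert a measurement with Calc's CONVERT_ADD, round the result to several
+# precisions and return the distinct values (shortest first) as newline-separated
+# suggestions, using decimal commas and the Unicode minus sign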
+def measurement(mnum, min, mout, mstr):
+ m = calc("CONVERT_ADD", (float(mnum.replace(",", ".").replace(u"\u2212", "-")), min, mout))
+ a = list(set([str(calc("ROUND", (m, 0)))[:-2], str(calc("ROUND", (m, 1))), str(calc("ROUND", (m, 2))), str(m)])) # remove duplicated rounded items
+ a.sort(lambda x, y: len(x) - len(y)) # sort by string length
+ return join(a, mstr + "\n").replace(".", ",").replace("-", u"\u2212") + mstr
diff --git a/dictionaries/hu_HU/Linguistic.xcu b/dictionaries/hu_HU/Linguistic.xcu
new file mode 100644
index 0000000..cb327ec
--- /dev/null
+++ b/dictionaries/hu_HU/Linguistic.xcu
@@ -0,0 +1,18 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<oor:component-data oor:name="Linguistic"
+ oor:package="org.openoffice.Office" xmlns:oor="http://openoffice.org/2001/registry"
+ xmlns:xs="http://www.w3.org/2001/XMLSchema">
+ <node oor:name="ServiceManager">
+
+ <node oor:name="GrammarCheckers">
+ <node oor:name="org.openoffice.comp.pyuno.Lightproof.hu_HU"
+ oor:op="fuse">
+ <prop oor:name="Locales" oor:type="oor:string-list">
+ <value>hu-HU</value>
+ </prop>
+ </node>
+ </node>
+
+ </node>
+
+</oor:component-data>
diff --git a/dictionaries/hu_HU/README_lightproof_hu_HU.txt b/dictionaries/hu_HU/README_lightproof_hu_HU.txt
new file mode 100644
index 0000000..e10ee4d
--- /dev/null
+++ b/dictionaries/hu_HU/README_lightproof_hu_HU.txt
@@ -0,0 +1,8 @@
+Hungarian grammar checker extension for OpenOffice.org
+
+(developed by the Lightproof grammar checker extension generator,
+see http://launchpad.net/lightproof)
+
+Hungarian grammar checker rules
+
+2009-2011 (c) László Németh, license: MPL 1.1 / GPLv3+ / LGPLv3+
diff --git a/dictionaries/hu_HU/description.xml b/dictionaries/hu_HU/description.xml
index f4e14cc..9b81bf9 100644
--- a/dictionaries/hu_HU/description.xml
+++ b/dictionaries/hu_HU/description.xml
@@ -1,6 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<description xmlns="http://openoffice.org/extensions/description/2006" xmlns:d="http://openoffice.org/extensions/description/2006" xmlns:xlink="http://www.w3.org/1999/xlink">
- <version value="2010.10.19" />
+ <version value="2011.12.05" />
<identifier value="org.openoffice.hu.hunspell.dictionaries" />
<display-name>
<name lang="en-US">Hungarian spelling dictionary, hyphenation rules, and thesaurus</name>
diff --git a/dictionaries/hu_HU/dialog/OptionsDialog.xcs b/dictionaries/hu_HU/dialog/OptionsDialog.xcs
new file mode 100644
index 0000000..247f312
--- /dev/null
+++ b/dictionaries/hu_HU/dialog/OptionsDialog.xcs
@@ -0,0 +1,107 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<oor:component-schema xmlns:oor="http://openoffice.org/2001/registry" xmlns:xs="http://www.w3.org/2001/XMLSchema" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+oor:name="Lightproof_hu_HU" oor:package="org.openoffice" xml:lang="en-US">
+<info>
+<author></author>
+<desc>Contains the options data used for the test extensions.</desc>
+</info>
+<templates>
+
+ <group oor:name="hu_HU">
+ <info>
+ <desc>The data for one leaf.</desc>
+ </info>
+<prop oor:name="cap" oor:type="xs:string">
+ <value></value>
+</prop>
+<prop oor:name="par" oor:type="xs:string">
+ <value></value>
+</prop>
+<prop oor:name="quot" oor:type="xs:string">
+ <value></value>
+</prop>
+<prop oor:name="wordpart" oor:type="xs:string">
+ <value></value>
+</prop>
+<prop oor:name="dash" oor:type="xs:string">
+ <value></value>
+</prop>
+<prop oor:name="comma" oor:type="xs:string">
+ <value></value>
+</prop>
+<prop oor:name="numpart" oor:type="xs:string">
+ <value></value>
+</prop>
+<prop oor:name="grammar" oor:type="xs:string">
+ <value></value>
+</prop>
+<prop oor:name="style" oor:type="xs:string">
+ <value></value>
+</prop>
+<prop oor:name="dup0" oor:type="xs:string">
+ <value></value>
+</prop>
+<prop oor:name="compound" oor:type="xs:string">
+ <value></value>
+</prop>
+<prop oor:name="dup" oor:type="xs:string">
+ <value></value>
+</prop>
+<prop oor:name="allcompound" oor:type="xs:string">
+ <value></value>
+</prop>
+<prop oor:name="dup2" oor:type="xs:string">
+ <value></value>
+</prop>
+<prop oor:name="money" oor:type="xs:string">
+ <value></value>
+</prop>
+<prop oor:name="dup3" oor:type="xs:string">
+ <value></value>
+</prop>
+<prop oor:name="SI" oor:type="xs:string">
+ <value></value>
+</prop>
+<prop oor:name="hyphen" oor:type="xs:string">
+ <value></value>
+</prop>
+<prop oor:name="apost" oor:type="xs:string">
+ <value></value>
+</prop>
+<prop oor:name="spaces" oor:type="xs:string">
+ <value></value>
+</prop>
+<prop oor:name="frac" oor:type="xs:string">
+ <value></value>
+</prop>
+<prop oor:name="ligature" oor:type="xs:string">
+ <value></value>
+</prop>
+<prop oor:name="elli" oor:type="xs:string">
+ <value></value>
+</prop>
+<prop oor:name="spaces2" oor:type="xs:string">
+ <value></value>
+</prop>
+<prop oor:name="thin" oor:type="xs:string">
+ <value></value>
+</prop>
+<prop oor:name="noligature" oor:type="xs:string">
+ <value></value>
+</prop>
+<prop oor:name="idx" oor:type="xs:string">
+ <value></value>
+</prop>
+<prop oor:name="minus" oor:type="xs:string">
+ <value></value>
+</prop>
+ </group>
+ </templates>
+ <component>
+ <group oor:name="Leaves">
+
+ <node-ref oor:name="hu_HU" oor:node-type="hu_HU"/>
+ </group>
+ </component>
+
+</oor:component-schema>
diff --git a/dictionaries/hu_HU/dialog/OptionsDialog.xcu b/dictionaries/hu_HU/dialog/OptionsDialog.xcu
new file mode 100644
index 0000000..989c254
--- /dev/null
+++ b/dictionaries/hu_HU/dialog/OptionsDialog.xcu
@@ -0,0 +1,39 @@
+<?xml version='1.0' encoding='UTF-8'?>
+<!DOCTYPE oor:component-data SYSTEM "../../../../component-update.dtd">
+<oor:component-data oor:name="OptionsDialog" oor:package="org.openoffice.Office" xmlns:oor="http://openoffice.org/2001/registry" xmlns:xs="http://www.w3.org/2001/XMLSchema" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
+ <node oor:name="Nodes">
+ <node oor:name="org.openoffice.lightproof" oor:op="fuse">
+ <prop oor:name="Label">
+ <value xml:lang="en">Dictionaries</value>
+ <value xml:lang="hu">Szótárak</value>
+ </prop>
+ <node oor:name="Leaves">
+
+ <node oor:name="org.openoffice.lightproof.hu_HU" oor:op="fuse">
+
+ <prop oor:name="Id">
+ <value>org.openoffice.hu.hunspell.dictionaries</value>
+ </prop>
+
+ <prop oor:name="Label">
+
+ <value xml:lang="en">Hungarian sentence checking</value>
+
+                <value xml:lang="hu">Magyar mondatellenőrzés</value>
+
+ </prop>
+
+ <prop oor:name="OptionsPage">
+ <value>%origin%/hu_HU.xdl</value>
+ </prop>
+
+ <prop oor:name="EventHandlerService">
+ <value>org.openoffice.comp.pyuno.LightproofOptionsEventHandler.hu_HU</value>
+ </prop>
+
+ </node>
+
+ </node>
+ </node>
+ </node>
+</oor:component-data>
diff --git a/dictionaries/hu_HU/dialog/hu_HU.xdl b/dictionaries/hu_HU/dialog/hu_HU.xdl
new file mode 100644
index 0000000..c62bd3a
--- /dev/null
+++ b/dictionaries/hu_HU/dialog/hu_HU.xdl
@@ -0,0 +1,37 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE dlg:window PUBLIC "-//OpenOffice.org//DTD OfficeDocument 1.0//EN" "dialog.dtd">
+<dlg:window xmlns:dlg="http://openoffice.org/2000/dialog" xmlns:script="http://openoffice.org/2000/script" dlg:id="hu_HU" dlg:left="101" dlg:top="52" dlg:width="196" dlg:height="72" dlg:closeable="true" dlg:moveable="true" dlg:withtitlebar="false">
+ <dlg:bulletinboard>
+<dlg:fixedline dlg:id="spelling" dlg:tab-index="0" dlg:left="5" dlg:top="5" dlg:width="240" dlg:height="10" dlg:value="&spelling"/>
+<dlg:checkbox dlg:id="cap" dlg:tab-index="1" dlg:left="10" dlg:top="15" dlg:width="120" dlg:height="10" dlg:value="&cap" dlg:checked="false" dlg:help-text="&hlp_cap"/>
+<dlg:checkbox dlg:id="par" dlg:tab-index="2" dlg:left="130" dlg:top="15" dlg:width="120" dlg:height="10" dlg:value="&par" dlg:checked="false" dlg:help-text="&hlp_par"/>
+<dlg:checkbox dlg:id="quot" dlg:tab-index="4" dlg:left="10" dlg:top="25" dlg:width="120" dlg:height="10" dlg:value="&quot" dlg:checked="false" dlg:help-text="&hlp_quot"/>
+<dlg:checkbox dlg:id="wordpart" dlg:tab-index="5" dlg:left="130" dlg:top="25" dlg:width="120" dlg:height="10" dlg:value="&wordpart" dlg:checked="false" />
+<dlg:checkbox dlg:id="dash" dlg:tab-index="7" dlg:left="10" dlg:top="35" dlg:width="120" dlg:height="10" dlg:value="&dash" dlg:checked="true" dlg:help-text="&hlp_dash"/>
+<dlg:checkbox dlg:id="comma" dlg:tab-index="8" dlg:left="130" dlg:top="35" dlg:width="120" dlg:height="10" dlg:value="&comma" dlg:checked="false" dlg:help-text="&hlp_comma"/>
+<dlg:checkbox dlg:id="numpart" dlg:tab-index="10" dlg:left="10" dlg:top="45" dlg:width="120" dlg:height="10" dlg:value="&numpart" dlg:checked="false" dlg:help-text="&hlp_numpart"/>
+<dlg:checkbox dlg:id="grammar" dlg:tab-index="11" dlg:left="130" dlg:top="45" dlg:width="120" dlg:height="10" dlg:value="&grammar" dlg:checked="false" />
+<dlg:fixedline dlg:id="proofreading" dlg:tab-index="13" dlg:left="5" dlg:top="55" dlg:width="240" dlg:height="10" dlg:value="&proofreading"/>
+<dlg:checkbox dlg:id="style" dlg:tab-index="14" dlg:left="10" dlg:top="65" dlg:width="120" dlg:height="10" dlg:value="&style" dlg:checked="false" />
+<dlg:checkbox dlg:id="dup0" dlg:tab-index="15" dlg:left="130" dlg:top="65" dlg:width="120" dlg:height="10" dlg:value="&dup0" dlg:checked="true" dlg:help-text="&hlp_dup0"/>
+<dlg:checkbox dlg:id="compound" dlg:tab-index="17" dlg:left="10" dlg:top="75" dlg:width="120" dlg:height="10" dlg:value="&compound" dlg:checked="false" dlg:help-text="&hlp_compound"/>
+<dlg:checkbox dlg:id="dup" dlg:tab-index="18" dlg:left="130" dlg:top="75" dlg:width="120" dlg:height="10" dlg:value="&dup" dlg:checked="false" />
+<dlg:checkbox dlg:id="allcompound" dlg:tab-index="20" dlg:left="10" dlg:top="85" dlg:width="120" dlg:height="10" dlg:value="&allcompound" dlg:checked="false" dlg:help-text="&hlp_allcompound"/>
+<dlg:checkbox dlg:id="dup2" dlg:tab-index="21" dlg:left="130" dlg:top="85" dlg:width="120" dlg:height="10" dlg:value="&dup2" dlg:checked="false" />
+<dlg:checkbox dlg:id="money" dlg:tab-index="23" dlg:left="10" dlg:top="95" dlg:width="120" dlg:height="10" dlg:value="&money" dlg:checked="true" dlg:help-text="&hlp_money"/>
+<dlg:checkbox dlg:id="dup3" dlg:tab-index="24" dlg:left="130" dlg:top="95" dlg:width="120" dlg:height="10" dlg:value="&dup3" dlg:checked="false" />
+<dlg:checkbox dlg:id="SI" dlg:tab-index="26" dlg:left="10" dlg:top="105" dlg:width="120" dlg:height="10" dlg:value="&SI" dlg:checked="false" dlg:help-text="&hlp_SI"/>
+<dlg:checkbox dlg:id="hyphen" dlg:tab-index="27" dlg:left="130" dlg:top="105" dlg:width="120" dlg:height="10" dlg:value="&hyphen" dlg:checked="false" dlg:help-text="&hlp_hyphen"/>
+<dlg:fixedline dlg:id="typography" dlg:tab-index="29" dlg:left="5" dlg:top="115" dlg:width="240" dlg:height="10" dlg:value="&typography"/>
+<dlg:checkbox dlg:id="apost" dlg:tab-index="30" dlg:left="10" dlg:top="125" dlg:width="60" dlg:height="10" dlg:value="&apost" dlg:checked="true" dlg:help-text="&hlp_apost"/>
+<dlg:checkbox dlg:id="spaces" dlg:tab-index="31" dlg:left="70" dlg:top="125" dlg:width="60" dlg:height="10" dlg:value="&spaces" dlg:checked="true" dlg:help-text="&hlp_spaces"/>
+<dlg:checkbox dlg:id="frac" dlg:tab-index="32" dlg:left="130" dlg:top="125" dlg:width="60" dlg:height="10" dlg:value="&frac" dlg:checked="false" dlg:help-text="&hlp_frac"/>
+<dlg:checkbox dlg:id="ligature" dlg:tab-index="33" dlg:left="190" dlg:top="125" dlg:width="60" dlg:height="10" dlg:value="&ligature" dlg:checked="false" dlg:help-text="&hlp_ligature"/>
+<dlg:checkbox dlg:id="elli" dlg:tab-index="35" dlg:left="10" dlg:top="135" dlg:width="60" dlg:height="10" dlg:value="&elli" dlg:checked="false" dlg:help-text="&hlp_elli"/>
+<dlg:checkbox dlg:id="spaces2" dlg:tab-index="36" dlg:left="70" dlg:top="135" dlg:width="60" dlg:height="10" dlg:value="&spaces2" dlg:checked="false" dlg:help-text="&hlp_spaces2"/>
+<dlg:checkbox dlg:id="thin" dlg:tab-index="37" dlg:left="130" dlg:top="135" dlg:width="60" dlg:height="10" dlg:value="&thin" dlg:checked="false" dlg:help-text="&hlp_thin"/>
+<dlg:checkbox dlg:id="noligature" dlg:tab-index="38" dlg:left="190" dlg:top="135" dlg:width="60" dlg:height="10" dlg:value="&noligature" dlg:checked="false" dlg:help-text="&hlp_noligature"/>
+<dlg:checkbox dlg:id="idx" dlg:tab-index="40" dlg:left="10" dlg:top="145" dlg:width="60" dlg:height="10" dlg:value="&idx" dlg:checked="false" dlg:help-text="&hlp_idx"/>
+<dlg:checkbox dlg:id="minus" dlg:tab-index="41" dlg:left="70" dlg:top="145" dlg:width="60" dlg:height="10" dlg:value="&minus" dlg:checked="false" dlg:help-text="&hlp_minus"/>
+</dlg:bulletinboard>
+</dlg:window>
diff --git a/dictionaries/hu_HU/dialog/hu_HU_en_US.default b/dictionaries/hu_HU/dialog/hu_HU_en_US.default
new file mode 100644
index 0000000..e69de29
diff --git a/dictionaries/hu_HU/dialog/hu_HU_en_US.properties b/dictionaries/hu_HU/dialog/hu_HU_en_US.properties
new file mode 100644
index 0000000..22ba5ee
--- /dev/null
+++ b/dictionaries/hu_HU/dialog/hu_HU_en_US.properties
@@ -0,0 +1,32 @@
+spelling=Spelling
+cap=Capitalization
+par=Parentheses
+wordpart=Word parts of compounds
+comma=Comma usage
+proofreading=Proofreading
+style=Style checking
+compound=Underline typo-like compound words
+allcompound=Underline all generated compound words
+grammar=Possible mistakes
+money=Consistency of money amounts
+duplication=Word duplication
+dup0=Word duplication
+dup=Duplication within clauses
+dup2=Duplication within sentences
+dup3=Allow previous checks with affixes
+numpart=Thousand separation of numbers
+typography=Typography
+quot=Quotation marks
+apost=Apostrophe
+dash=En dash
+elli=Ellipsis
+ligature=Ligature suggestion
+noligature=Underline ligatures
+frac=Fractions
+thin=Thin space
+spaces=Double spaces
+spaces2=More spaces
+idx=Indices
+minus=Minus
+SI=Measurements
+hyphen=Hyphenation of ambiguous words
diff --git a/dictionaries/hu_HU/dialog/hu_HU_hu_HU.properties b/dictionaries/hu_HU/dialog/hu_HU_hu_HU.properties
new file mode 100644
index 0000000..ddf6da1
--- /dev/null
+++ b/dictionaries/hu_HU/dialog/hu_HU_hu_HU.properties
@@ -0,0 +1,53 @@
+spelling=Helyes\u00edr\u00e1s
+hlp_cap=Mondatok nagy kezd\u0151bet\u0171j\u00e9nek ellen\u0151rz\u00e9se.
+cap=Nagy kezd\u0151bet\u0171
+hlp_par=Z\u00e1r\u00f3jelp\u00e1rok ellen\u0151rz\u00e9se.
+par=Z\u00e1r\u00f3jelek
+wordpart=Egybe- \u00e9s k\u00fcl\u00f6n\u00edr\u00e1si javaslatok
+hlp_comma=A val\u00f3sz\u00edn\u0171leg hi\u00e1nyz\u00f3 \u00e9s felesleges vessz\u0151k jelz\u00e9se.
+comma=Vessz\u0151haszn\u00e1latra vonatkoz\u00f3 javaslatok
+hlp_numpart=Ezrestagol\u00e1s nem-t\u00f6r\u0151 sz\u00f3k\u00f6z\u00f6kkel (10000 \u2192 10\u202f000).
+numpart=Nagy sz\u00e1mok tagol\u00e1sa sz\u00f3k\u00f6z\u00f6kkel
+proofreading=Korrekt\u00fara
+style=St\u00edlusellen\u0151rz\u00e9s
+hlp_compound=R\u00f6vid, vagy r\u00f6vid tagot tartalmaz\u00f3, sz\u00f3\u00f6sszet\u00e9teli szab\u00e1lyok alapj\u00e1n gyakran hib\u00e1san elfogadott alakok al\u00e1h\u00faz\u00e1sa.
+compound=Egyszer\u0171 nem sz\u00f3t\u00e1ri \u00f6sszetett szavak
+hlp_allcompound=Minden sz\u00f3\u00f6sszet\u00e9teli szab\u00e1ly alapj\u00e1n elfogadott alak al\u00e1h\u00faz\u00e1sa.
+allcompound=Minden nem sz\u00f3t\u00e1ri \u00f6sszetett sz\u00f3
+grammar=Javaslat kev\u00e9sb\u00e9 egy\u00e9rtelm\u0171 esetben is
+hlp_dup0=Sz\u00f3ism\u00e9tl\u00e9s egym\u00e1st k\u00f6zvetlen\u00fcl k\u00f6vet\u0151 szavak eset\u00e9ben.
+dup0=Sz\u00f3ism\u00e9tl\u00e9s
+dup=Sz\u00f3ism\u00e9tl\u00e9s tagmondaton bel\u00fcl
+dup2=Sz\u00f3ism\u00e9tl\u00e9s mondaton bel\u00fcl
+dup3=Sz\u00f3ism\u00e9tl\u00e9s elt\u00e9r\u0151 toldal\u00e9kok eset\u00e9n is
+hlp_money=A sz\u00e1mmal \u00e9s sz\u00e1mn\u00e9vvel is le\u00edrt mennyis\u00e9g megegyezik-e? (Pl. 10, azaz t\u00edz Ft.)
+money=Sz\u00e1mok \u00e9s \u00e1t\u00edr\u00e1suk
+typography=Tipogr\u00e1fia
+hlp_quot=A magyar \u201ek\u00fcls\u0151\u201d \u00e9s \u00bbbels\u0151\u00ab id\u00e9z\u0151jelek ellen\u0151rz\u00e9se.
+quot=Id\u00e9z\u0151jelek
+hlp_apost=Az \u00edr\u00f3g\u00e9pes aposztr\u00f3f cser\u00e9je a tipogr\u00e1fiailag megfelel\u0151re (' \u2192 \u2019).
+apost=Aposztr\u00f3f
+hlp_dash=A kisk\u00f6t\u0151jel cser\u00e9je a nagyk\u00f6t\u0151jelre (- \u2192 \u2013).
+dash=Nagyk\u00f6t\u0151jel \u00e9s gondolatjel
+hlp_elli=H\u00e1rom pont cser\u00e9je az egaliz\u00e1lt h\u00e1rom pont karakterre (...\u2192\u2026).
+elli=H\u00e1rom pont
+hlp_frac=Perjellel elv\u00e1lasztott t\u00f6rtek cser\u00e9je Unicode karakterre (1/2 \u2192 \u00bd).
+frac=T\u00f6rtek
+hlp_thin=Keskeny sz\u00f3k\u00f6z (sp\u00e1cium) haszn\u00e1lata ezrestagol\u00e1shoz \u00e9s m\u00e1s esetekben.
+thin=Keskeny sz\u00f3k\u00f6z
+hlp_ligature=Csere Unicode f-ligat\u00far\u00e1ra.
+ligature=f-ligat\u00fara javaslata
+hlp_noligature=Unicode f-ligat\u00fara cser\u00e9je k\u00fcl\u00f6n\u00e1ll\u00f3 bet\u0171kre.
+noligature=f-ligat\u00fara tilt\u00e1sa
+hlp_spaces=K\u00e9t vagy h\u00e1rom ism\u00e9tl\u0151d\u0151 sz\u00f3k\u00f6z cser\u00e9je egyre.
+spaces=Dupla sz\u00f3k\u00f6z
+hlp_spaces2=N\u00e9gy vagy t\u00f6bb ism\u00e9tl\u0151d\u0151 sz\u00f3k\u00f6z cser\u00e9je egy tabul\u00e1torra.
+spaces2=Sok sz\u00f3k\u00f6z
+hlp_idx=Sz\u00e1mok cser\u00e9je m\u00e9rt\u00e9kegys\u00e9gekben \u00e9s k\u00e9miai k\u00e9pletekben val\u00f3di indexekre (m2 \u2192 m\u00b2).
+idx=Indexek
+hlp_minus=K\u00f6t\u0151jelek cser\u00e9je Unicode m\u00ednuszjelre a sz\u00e1mok el\u0151tt.
+minus=M\u00ednuszjel
+hlp_SI=Nem SI m\u00e9rt\u00e9kegys\u00e9gek \u00e1talak\u00edt\u00e1sa (\u00b0F, m\u00e9rf\u00f6ld, yard, l\u00e1b, h\u00fcvelyk, gallon, pint, font s\u00faly\u00fa).
+SI=M\u00e9rt\u00e9kegys\u00e9gek
+hlp_hyphen=Nem egy\u00e9rtelm\u0171en elv\u00e1laszthat\u00f3 sz\u00f3alakok elv\u00e1laszt\u00e1s\u00e1nak megad\u00e1sa (pl. megint, f\u00f6l\u00fcl).
+hyphen=Hi\u00e1nyz\u00f3 elv\u00e1laszt\u00e1s megad\u00e1sa
diff --git a/dictionaries/hu_HU/makefile.mk b/dictionaries/hu_HU/makefile.mk
index 879fc85..67916e4 100644
--- a/dictionaries/hu_HU/makefile.mk
+++ b/dictionaries/hu_HU/makefile.mk
@@ -61,7 +61,20 @@ COMPONENT_FILES= \
$(EXTENSIONDIR)$/th_hu_HU_v2.dat \
$(EXTENSIONDIR)$/help/hu/org.openoffice.hu.hunspell.dictionaries/page1.xhp \
$(EXTENSIONDIR)$/help/hu/org.openoffice.hu.hunspell.dictionaries/fsfhu.png \
- $(EXTENSIONDIR)$/help/hu/org.openoffice.hu.hunspell.dictionaries/szinonima.png
+ $(EXTENSIONDIR)$/help/hu/org.openoffice.hu.hunspell.dictionaries/szinonima.png \
+ $(EXTENSIONDIR)$/Lightproof.py \
+ $(EXTENSIONDIR)$/Linguistic.xcu \
+ $(EXTENSIONDIR)$/pythonpath/lightproof_hu_HU.py \
+ $(EXTENSIONDIR)$/pythonpath/lightproof_handler_hu_HU.py \
+ $(EXTENSIONDIR)$/pythonpath/lightproof_impl_hu_HU.py \
+ $(EXTENSIONDIR)$/pythonpath/lightproof_opts_hu_HU.py \
+ $(EXTENSIONDIR)$/dialog/hu_HU_en_US.default \
+ $(EXTENSIONDIR)$/dialog/hu_HU_en_US.properties \
+ $(EXTENSIONDIR)$/dialog/hu_HU_hu_HU.properties \
+ $(EXTENSIONDIR)$/dialog/hu_HU.xdl \
+ $(EXTENSIONDIR)$/dialog/OptionsDialog.xcu \
+ $(EXTENSIONDIR)$/dialog/OptionsDialog.xcs \
+ $(EXTENSIONDIR)$/README_lightproof_hu_HU.txt
COMPONENT_CONFIGDEST=.
COMPONENT_XCU= \
diff --git a/dictionaries/hu_HU/manifest.xml b/dictionaries/hu_HU/manifest.xml
index 5699d5b..a9ff14b 100644
--- a/dictionaries/hu_HU/manifest.xml
+++ b/dictionaries/hu_HU/manifest.xml
@@ -3,4 +3,13 @@
<manifest:manifest xmlns:manifest="http://openoffice.org/2001/manifest">
<manifest:file-entry manifest:media-type="application/vnd.sun.star.configuration-data" manifest:full-path="dictionaries.xcu"/>
<manifest:file-entry manifest:media-type="application/vnd.sun.star.help" manifest:full-path="help"/>
+ <manifest:file-entry manifest:full-path="dialog/OptionsDialog.xcs"
+ manifest:media-type="application/vnd.sun.star.configuration-schema" />
+ <manifest:file-entry manifest:full-path="dialog/OptionsDialog.xcu"
+ manifest:media-type="application/vnd.sun.star.configuration-data" />
+ <manifest:file-entry manifest:media-type="application/vnd.sun.star.uno-component;type=Python"
+ manifest:full-path="Lightproof.py"/>
+ <manifest:file-entry
+ manifest:media-type="application/vnd.sun.star.configuration-data"
+ manifest:full-path="Linguistic.xcu" />
</manifest:manifest>
diff --git a/dictionaries/hu_HU/pythonpath/lightproof_handler_hu_HU.py b/dictionaries/hu_HU/pythonpath/lightproof_handler_hu_HU.py
new file mode 100644
index 0000000..1716afb
--- /dev/null
+++ b/dictionaries/hu_HU/pythonpath/lightproof_handler_hu_HU.py
@@ -0,0 +1,121 @@
+import uno
+import unohelper
+
+from lightproof_opts_hu_HU import lopts
+from lightproof_opts_hu_HU import lopts_default
+from lightproof_impl_hu_HU import pkg
+
+from com.sun.star.lang import XServiceInfo
+from com.sun.star.awt import XContainerWindowEventHandler
+
+# options
+options = {}
+
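+# fill the option cache from the configuration registry when the component is loaded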
+def load(context):
+ try:
+ l = LightproofOptionsEventHandler(context)
+ for i in lopts:
+ l.load(i)
+ except:
+ pass
+
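+# look up a cached option; fall back to the bare language code, default to 0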
+def get_option(page, option):
+ try:
+ return options[page + "," + option]
+ except:
+ try:
+ return options[page[:2] + "," + option]
+ except:
+ return 0
+
+def set_option(page, option, value):
+ options[page + "," + option] = int(value)
+
+class LightproofOptionsEventHandler( unohelper.Base, XServiceInfo, XContainerWindowEventHandler ):
+ def __init__( self, ctx ):
+ p = uno.createUnoStruct( "com.sun.star.beans.PropertyValue" )
+ p.Name = "nodepath"
+ p.Value = "/org.openoffice.Lightproof_%s/Leaves"%pkg
+ self.xConfig = ctx.ServiceManager.createInstance( 'com.sun.star.configuration.ConfigurationProvider' )
+ self.node = self.xConfig.createInstanceWithArguments( 'com.sun.star.configuration.ConfigurationUpdateAccess', (p, ) )
+ self.service = "org.openoffice.comp.pyuno.LightproofOptionsEventHandler." + pkg
+ self.ImplementationName = self.service
+ self.services = (self.service, )
+
+ # XContainerWindowEventHandler
+ def callHandlerMethod(self, aWindow, aEventObject, sMethod):
+ if sMethod == "external_event":
+ return self.handleExternalEvent(aWindow, aEventObject)
+
+ def getSupportedMethodNames(self):
+ return ("external_event", )
+
+ def handleExternalEvent(self, aWindow, aEventObject):
+ sMethod = aEventObject
+ if sMethod == "ok":
+ self.saveData(aWindow)
+ elif sMethod == "back" or sMethod == "initialize":
+ self.loadData(aWindow)
+ return True
+
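+    # read the stored values of one dialog page into the option cache
+    # (empty values fall back to the defaults listed in lopts_default)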
+ def load(self, sWindowName):
+ child = self.getChild(sWindowName)
+ for i in lopts[sWindowName]:
+ sValue = child.getPropertyValue(i)
+ if sValue == '':
+ if i in lopts_default[sWindowName]:
+ sValue = 1
+ else:
+ sValue = 0
+ set_option(sWindowName, i, sValue)
+
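+    # initialize the dialog controls from the configuration node and refresh the cache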
+ def loadData(self, aWindow):
+ sWindowName = self.getWindowName(aWindow)
+ if (sWindowName == None):
+ return
+ child = self.getChild(sWindowName)
+ for i in lopts[sWindowName]:
+ sValue = child.getPropertyValue(i)
+ if sValue == '':
+ if i in lopts_default[sWindowName]:
+ sValue = 1
+ else:
+ sValue = 0
+ xControl = aWindow.getControl(i)
+ xControl.State = sValue
+ set_option(sWindowName, i, sValue)
+
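+    # write the checkbox states back to the configuration node and refresh the cache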
+ def saveData(self, aWindow):
+ sWindowName = self.getWindowName(aWindow)
+ if (sWindowName == None):
+ return
+ child = self.getChild(sWindowName)
+ for i in lopts[sWindowName]:
+ xControl = aWindow.getControl(i)
+ sValue = xControl.State
+ child.setPropertyValue(i, str(sValue))
+ set_option(sWindowName, i, sValue)
+ self.commitChanges()
+
+ def getWindowName(self, aWindow):
+ sName = aWindow.getModel().Name
+ if sName in lopts:
+ return sName
+ return None
+
+ # XServiceInfo method implementations
+ def getImplementationName (self):
+ return self.ImplementationName
+
+ def supportsService(self, ServiceName):
+ return (ServiceName in self.services)
+
+ def getSupportedServiceNames (self):
+ return self.services
+
+ def getChild(self, name):
+ return self.node.getByName(name)
+
+ def commitChanges(self):
+ self.node.commitChanges()
+ return True
diff --git a/dictionaries/hu_HU/pythonpath/lightproof_hu_HU.py b/dictionaries/hu_HU/pythonpath/lightproof_hu_HU.py
new file mode 100644
index 0000000..c857241
--- /dev/null
+++ b/dictionaries/hu_HU/pythonpath/lightproof_hu_HU.py
@@ -0,0 +1 @@
... etc. - the rest is truncated