Compare commits
45 Commits
release/20 ... version/20
| SHA1 |
|---|
| 72aae21420 |
| b8f206f1d4 |
| 69e2ca5ce6 |
| 1020303fec |
| 68dee3f3a4 |
| 64f3f534a2 |
| 4588c2f1e8 |
| 5eb7dd3ef7 |
| c548c55274 |
| fcc7af9991 |
| e79bb4f8be |
| e8d25cd78f |
| 55408ffda6 |
| 803e6ef418 |
| 0b831773af |
| bc4bd61de6 |
| 38426b8a64 |
| e9ebb418f3 |
| f2cbb733a0 |
| 195f123c05 |
| 3408da4f40 |
| ab34ede1ea |
| dddf9dcb83 |
| f6cbce7324 |
| 6034143788 |
| 7ad60c4471 |
| 4f5fd4bea6 |
| 2ab43e6c0a |
| 663eaaa65b |
| e950c89c2f |
| bcb4eb6064 |
| c1a00bb944 |
| 480f8bbb68 |
| f1e8e8b4a2 |
| 4f4a2c9cdd |
| dd510b0286 |
| 018f0907c2 |
| f2a27e592d |
| 89d526e740 |
| 68e2df9db6 |
| 87c11e7e2f |
| 7142621966 |
| 54d6196698 |
| 6b970f8e02 |
| fb28f40b72 |
.gitmodules (vendored, 12 changes)
@@ -1,24 +1,24 @@
[submodule "simgear"]
    path = simgear
    url = https://git.code.sf.net/p/flightgear/simgear
    url = ../simgear
    branch = next
[submodule "flightgear"]
    path = flightgear
    url = https://git.code.sf.net/p/flightgear/flightgear
    url = ../flightgear
    branch = next
[submodule "fgrun"]
    path = fgrun
    url = https://git.code.sf.net/p/flightgear/fgrun
    url = ../fgrun
    branch = next
[submodule "fgdata"]
    path = fgdata
    url = git://git.code.sf.net/p/flightgear/fgdata
    url = ../fgdata
    branch = next
[submodule "windows-3rd-party"]
    path = windows-3rd-party
    url = https://git.code.sf.net/p/flightgear/windows-3rd-party
    url = ../windows-3rd-party
    branch = master
[submodule "getstart"]
    path = getstart
    url = https://git.code.sf.net/p/flightgear/getstart
    url = ../getstart
    branch = next
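The URL change above switches every submodule from an absolute SourceForge URL to a relative one, so each submodule is fetched relative to whatever remote the superproject itself was cloned from (convenient for mirrors and forks). A minimal sketch of roughly how such a relative URL resolves; the remote URL used here is only an example, since the actual value depends on where the superproject was cloned from:

```python
from urllib.parse import urljoin

def resolve_submodule_url(superproject_remote: str, relative_url: str) -> str:
    """Resolve a relative .gitmodules URL against the superproject's remote,
    roughly the way git does it."""
    # urljoin treats the last path component as a "file", so make sure the
    # base ends with a slash before applying the relative path.
    base = superproject_remote.rstrip("/") + "/"
    return urljoin(base, relative_url)

# Example remote only; substitute the remote your clone actually uses.
print(resolve_submodule_url("https://git.code.sf.net/p/flightgear/fgmeta",
                            "../simgear"))
# -> https://git.code.sf.net/p/flightgear/simgear
```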
@@ -20,13 +20,6 @@

#include "InstallConfig.iss"

#if GetEnv("VSINSTALLDIR") == ""
#define VSInstallDir "C:\Program Files (x86)\Microsoft Visual Studio 14.0"
#else
#define VSInstallDir GetEnv("VSINSTALLDIR")
#endif

#define VCInstallDir VSInstallDir + "\VC"
#define InstallDir32 "X:\install\msvc140"
#define OSGInstallDir InstallDir32 + "\OpenSceneGraph"
#define OSGPluginsDir OSGInstallDir + "\bin\osgPlugins-" + OSGVersion
@@ -69,7 +62,7 @@ ArchitecturesAllowed=x86 x64

; Sign tool must be defined in the Inno Setup GUI, to avoid
; exposing the certificate password
; SignTool=fg_code_sign1
; SignTool=fg_code_sign1

[Tasks]
; NOTE: The following entry contains English phrases ("Create a desktop icon" and "Additional icons"). You are free to translate them into another language if required.
@@ -92,7 +85,6 @@ Source: "{#ThirdPartyDir}\3rdParty\bin\libintl-8.dll"; DestDir: "{app}\bin"; Che
Source: "{#ThirdPartyDir}\3rdParty\bin\CrashRpt1403.dll"; DestDir: "{app}\bin"; Check: not Is64BitInstallMode
Source: "{#ThirdPartyDir}\3rdParty\bin\crashrpt_lang.ini"; DestDir: "{app}\bin"; Check: not Is64BitInstallMode
Source: "{#ThirdPartyDir}\3rdParty\bin\CrashSender1403.exe"; DestDir: "{app}\bin"; Check: not Is64BitInstallMode
Source: "{#VCInstallDir}\redist\x86\Microsoft.VC140.CRT\*.dll"; DestDir: "{app}\bin"; Check: not Is64BitInstallMode

; 64 bits install
Source: "{#InstallDir64}\bin\*.*"; DestDir: "{app}\bin"; Excludes: "{#ExcludedBinaries}"; Flags: ignoreversion recursesubdirs; Check: Is64BitInstallMode
@@ -107,7 +99,6 @@ Source: "{#ThirdPartyDir}\3rdParty.x64\bin\libintl-8.dll"; DestDir: "{app}\bin";
Source: "{#ThirdPartyDir}\3rdParty.x64\bin\CrashRpt1403.dll"; DestDir: "{app}\bin"; Check: Is64BitInstallMode
Source: "{#ThirdPartyDir}\3rdParty.x64\bin\crashrpt_lang.ini"; DestDir: "{app}\bin"; Check: Is64BitInstallMode
Source: "{#ThirdPartyDir}\3rdParty.x64\bin\CrashSender1403.exe"; DestDir: "{app}\bin"; Check: Is64BitInstallMode
Source: "{#VCInstallDir}\redist\x64\Microsoft.VC140.CRT\*.dll"; DestDir: "{app}\bin"; Check: Is64BitInstallMode

; Include the base package
#if IncludeData == "TRUE"
@@ -328,6 +319,66 @@ begin
  end;
end;

var
  UninstallCheckCleanPage: TNewNotebookPage;
  UninstallBackButton: TNewButton;
  UninstallNextButton: TNewButton;
  DoCleanCheckbox : TNewCheckBox;
  CleanHelp : TNewStaticText;

procedure InitializeUninstallProgressForm();
begin
  UninstallProgressForm

  UninstallCheckCleanPage := TNewNotebookPage.Create(UninstallProgressForm);
  UninstallCheckCleanPage.Notebook := UninstallProgressForm.InnerNotebook;
  UninstallCheckCleanPage.Parent := UninstallProgressForm.InnerNotebook;
  UninstallCheckCleanPage.Align := alClient

  DoCleanCheckbox := TNewCheckBox.Create(UninstallProgressForm);
  DoCleanCheckbox.Parent := UninstallCheckCleanPage;
  DoCleanCheckbox.Caption := 'Remove all settings, downloaded scenery and aircraft';
  DoCleanCheckbox.Left := ScaleX(10);
  DoCleanCheckbox.Top := ScaleY(10);

  DoCleanCheckbox.Width := UninstallProgressForm.InnerNotebook.Width - ScaleX(20)
  DoCleanCheckbox.Height := ScaleY(30)

  CleanHelp := TNewStaticText.Create(UninstallProgressForm);
  CleanHelp.Parent := UninstallCheckCleanPage;
  CleanHelp.Top := DoCleanCheckbox.Top + DoCleanCheckbox.Height + ScaleY(10);
  CleanHelp.Left := DoCleanCheckbox.Left;
  CleanHelp.Width := DoCleanCheckbox.Width;
  CleanHelp.Height := CleanHelp.AdjustHeight();

  CleanHelp.WordWrap := True;
  CleanHelp.Caption := 'FlightGear stores some settings in your user folder. In addition, ' +
    'scenery or aircraft data may have been downloaded to the download directory. ' +
    'To completely remove all these files, select this option.';

  UninstallProgressForm.InnerNotebook.ActivePage := UninstallCheckCleanPage;

  UninstallNextButton := TNewButton.Create(UninstallProgressForm);
  UninstallNextButton.Caption := 'Next';
  UninstallNextButton.Parent := UninstallProgressForm;
  UninstallNextButton.Left :=
    UninstallProgressForm.CancelButton.Left -
    UninstallProgressForm.CancelButton.Width -
    ScaleX(10);
  UninstallNextButton.Top := UninstallProgressForm.CancelButton.Top;
  UninstallNextButton.Width := UninstallProgressForm.CancelButton.Width;
  UninstallNextButton.Height := UninstallProgressForm.CancelButton.Height;
  UninstallNextButton.ModalResult := mrOk;

  UninstallProgressForm.CancelButton.Enabled := True;
  UninstallProgressForm.CancelButton.ModalResult := mrCancel;

  if UninstallProgressForm.ShowModal = mrCancel then Abort;

  UninstallProgressForm.InnerNotebook.ActivePage := UninstallProgressForm.InstallingPage;
end;


procedure CurStepChanged(CurStep: TSetupStep);
var
  Version: TWindowsVersion;
@@ -353,7 +404,18 @@ begin
end;

procedure CurUninstallStepChanged(CurUninstallStep: TUninstallStep);
var ResultCode: Integer;
begin
  if CurUninstallStep = usUninstall then
  begin
    if DoCleanCheckbox.Checked = True then
    begin
      Log('Running clean uninstall');
      Exec(ExpandConstant('{app}\bin\fgfs.exe'), '--uninstall', '', SW_HIDE, ewWaitUntilTerminated, ResultCode);
      Log('clean uninstall completed');
    end;
  end;

  if CurUninstallStep = usPostUninstall then
  begin
    RemoveFirewallException('FlightGear', ExpandConstant('{app}') + '\bin\fgfs.exe');
@@ -7,6 +7,10 @@ fi

VERSION=`cat flightgear/version`

#####################################################################################
# ensure fgrcc can run when linked against libSimGearCore, for example
export LD_LIBRARY_PATH=$WORKSPACE/dist/lib64:$WORKSPACE/dist/lib:$LD_LIBRARY_PATH

#####################################################################################
# remove old and create fresh build directories
cd $WORKSPACE
@@ -14,7 +18,6 @@ mkdir -p sgBuild
mkdir -p fgBuild
mkdir -p output
rm -rf output/*
rm -rf dist/*

#####################################################################################
echo "Starting on SimGear"
@@ -1,5 +1,6 @@

IF NOT DEFINED WORKSPACE SET WORKSPACE=%~dp0
IF NOT DEFINED IS_NIGHTLY_BUILD SET IS_NIGHTLY_BUILD=1

IF %IS_NIGHTLY_BUILD% EQU 1 (
    SET FGBUILDTYPE=Nightly
@@ -67,8 +68,9 @@ cmake --build . --config RelWithDebInfo --target INSTALL
cd ..

REM Qt5 deployment
%QT5SDK32%\bin\windeployqt --release --list target %WORKSPACE%/install/msvc140/bin/fgfs.exe
%QT5SDK64%\bin\windeployqt --release --list target %WORKSPACE%/install/msvc140-64/bin/fgfs.exe
SET QMLDIR=%WORKSPACE%/flightgear/src/GUI/qml
%QT5SDK32%\bin\windeployqt --release --list target --qmldir %QMLDIR% %WORKSPACE%/install/msvc140/bin/fgfs.exe
%QT5SDK64%\bin\windeployqt --release --list target --qmldir %QMLDIR% %WORKSPACE%/install/msvc140-64/bin/fgfs.exe

REM build setup
ECHO Packaging root is %WORKSPACE%
@@ -10,6 +10,16 @@ import sys
import catalogTags

CATALOG_VERSION = 4
quiet = False
verbose = False

def warning(msg):
    if not quiet:
        print(msg)

def log(msg):
    if verbose:
        print(msg)

# xml node (robust) get text helper
def get_xml_text(e):
@@ -35,11 +45,25 @@ def scan_set_file(aircraft_dir, set_file, includes):
    if sim_node == None:
        return None

    # allow -set.xml files to specifcially exclude themselves from
    # the creation process, by setting <exclude-from-catalog>true</>
    if (sim_node.getValue("exclude-from-catalog", False) == True):
        return None

    variant = {}
    variant['name'] = sim_node.getValue("description", None)
    name = sim_node.getValue("description", None)
    if (name == None or len(name) == 0):
        warning("Set file " + set_file + " is missing a <description>, skipping")
        return None

    variant['name'] = name
    variant['status'] = sim_node.getValue("status", None)

    if sim_node.hasChild('author'):
    if sim_node.hasChild('authors'):
        # aircraft has structured authors data, handle that
        variant['authors'] = extract_authors(sim_node.getChild('authors'))

    elif sim_node.hasChild('author'):
        variant['author'] = sim_node.getValue("author", None)

    if sim_node.hasChild('long-description'):
@@ -84,7 +108,7 @@ def extract_previews(previews_node, aircraft_dir):
        # check path exists in base-name-dir
        fullPath = os.path.join(aircraft_dir, previewPath)
        if not os.path.isfile(fullPath):
            print "Bad preview path, skipping:" + fullPath
            warning("Bad preview path, skipping:" + fullPath)
            continue
        result.append({'type':previewType, 'path':previewPath})

@@ -96,11 +120,25 @@ def extract_tags(tags_node, set_path):
        tag = node.value
        # check tag is in the allowed list
        if not catalogTags.isValidTag(tag):
            print "Unknown tag value:", tag, " in ", set_path
            warning("Unknown tag value:" + tag + " in " + set_path)
        result.append(tag)

    return result

def extract_authors(authors_node):
    result = []
    for author in authors_node.getChildren("author"):
        authorName = author.getValue("name", None)
        if (authorName == None):
            continue

        authorNick = author.getValue("nick", None)
        authorEmail = author.getValue("email", None)
        authorDesc = author.getValue("description", None)

        result.append({'name':authorName, 'nick':authorNick, 'email':authorEmail, 'description':authorDesc})
    return result

# scan all the -set.xml files in an aircraft directory. Returns a
# package dict and a list of variants.
def scan_aircraft_dir(aircraft_dir, includes):
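For illustration only, here is a self-contained sketch of the author structure that the new extract_authors() builds from an <authors> block. It uses plain xml.etree rather than the project's property-tree helpers, and the sample XML mirrors the test data added further down in this compare:

```python
import xml.etree.ElementTree as ET

AUTHORS_XML = """
<authors>
  <author>
    <name>Wilbur Wright</name>
    <email>ww@wright.com</email>
    <nick>wilburw</nick>
    <description>Model, FDM and cockpit</description>
  </author>
  <author>
    <name>Orville Wright</name>
    <description>Testing and systems</description>
  </author>
</authors>
"""

def extract_authors_plain(authors_node):
    # Mirrors the shape produced by catalog.extract_authors(): one dict per
    # <author>, missing children become None, authors without a <name> are
    # skipped entirely.
    result = []
    for author in authors_node.findall("author"):
        name = author.findtext("name")
        if name is None:
            continue
        result.append({
            "name": name,
            "nick": author.findtext("nick"),
            "email": author.findtext("email"),
            "description": author.findtext("description"),
        })
    return result

print(extract_authors_plain(ET.fromstring(AUTHORS_XML)))
```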
@@ -111,6 +149,7 @@ def scan_aircraft_dir(aircraft_dir, includes):
    files = os.listdir(aircraft_dir)
    for file in sorted(files, key=lambda s: s.lower()):
        if file.endswith('-set.xml'):
            # print 'trying:', file
            try:
                d = scan_set_file(aircraft_dir, file, includes)
                if d == None:
@@ -126,8 +165,9 @@ def scan_aircraft_dir(aircraft_dir, includes):
            elif d['variant-of'] == None:
                primaryAircraft.append(d)

    # print setDicts
    if len(setDicts) == 0:
        return None
        return None, None

    # use the first one
    if len(primaryAircraft) == 0:
@@ -175,6 +215,25 @@ def append_tag_nodes(node, variant):
    for tag in variant['tags']:
        node.append(make_xml_leaf('tag', tag))

def append_author_nodes(node, info):
    if 'authors' in info:
        authors_node = ET.Element('authors')
        for a in info['authors']:
            a_node = ET.Element('author')
            a_node.append(make_xml_leaf('name', a['name']))
            if (a['email'] != None):
                a_node.append(make_xml_leaf('email', a['email']))
            if (a['nick'] != None):
                a_node.append(make_xml_leaf('nick', a['nick']))
            if (a['description'] != None):
                a_node.append(make_xml_leaf('description', a['description']))
            authors_node.append(a_node)

        node.append(authors_node)
    elif 'author' in info:
        # traditional single author string
        node.append( make_xml_leaf('author', info['author']) )

def make_aircraft_node(aircraftDirName, package, variants, downloadBase):
    #print "package:", package
    #print "variants:", variants
@@ -182,8 +241,7 @@ def make_aircraft_node(aircraftDirName, package, variants, downloadBase):
    package_node.append( make_xml_leaf('name', package['name']) )
    package_node.append( make_xml_leaf('status', package['status']) )

    if 'author' in package:
        package_node.append( make_xml_leaf('author', package['author']) )
    append_author_nodes(package_node, package)

    if 'description' in package:
        package_node.append( make_xml_leaf('description', package['description']) )
@@ -230,6 +288,7 @@ def make_aircraft_node(aircraftDirName, package, variants, downloadBase):

        append_preview_nodes(variant_node, variant, downloadBase, aircraftDirName)
        append_tag_nodes(variant_node, variant)
        append_author_nodes(variant_node, variant)

    package_node.append( make_xml_leaf('dir', aircraftDirName) )
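Going the other way, a sketch of the catalog XML that append_author_nodes() emits for a package or variant. make_xml_leaf here is a local stand-in for the helper defined elsewhere in catalog.py, so the snippet runs on its own; it is not the project's exact code:

```python
import xml.etree.ElementTree as ET

def make_xml_leaf(tag, text):
    # Stand-in for catalog.make_xml_leaf(): a single element with text content.
    leaf = ET.Element(tag)
    leaf.text = str(text)
    return leaf

def append_author_nodes(node, info):
    # Same structure as the diff above: structured <authors> when present,
    # otherwise the traditional single <author> string.
    if 'authors' in info:
        authors_node = ET.Element('authors')
        for a in info['authors']:
            a_node = ET.Element('author')
            a_node.append(make_xml_leaf('name', a['name']))
            for key in ('email', 'nick', 'description'):
                if a.get(key) is not None:
                    a_node.append(make_xml_leaf(key, a[key]))
            authors_node.append(a_node)
        node.append(authors_node)
    elif 'author' in info:
        node.append(make_xml_leaf('author', info['author']))

package_node = ET.Element('package')
append_author_nodes(package_node, {
    'authors': [{'name': 'Wilbur Wright', 'nick': 'wilburw',
                 'email': 'ww@wright.com', 'description': None}]})
print(ET.tostring(package_node, encoding='unicode'))
# <package><authors><author><name>Wilbur Wright</name>...</author></authors></package>
```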
@@ -6,13 +6,13 @@
  <version n="1">3.5.*</version>
  <version n="2">3.6.*</version>
  <version n="3">3.7.*</version>
  <version n="4">2016.1.*</version>
  <version n="5">2016.*.*</version>
  <version n="6">2017.*.*</version>
  <version n="7">2018.*.*</version>
  <id>org.flightgear.fgaddon</id>
  <license>GPL</license>
  <url>http://mirrors.ibiblio.org/flightgear/ftp/Aircraft/catalog.xml</url>
  <name>FlightGear Aircraft Distribution From fgaddon</name>
  <name>FlightGear aircraft distribution from fgaddon</name>
  <description>This hangar provides aircraft officially supported and maintained by the FlightGear project, under a free-software license.</description>
  <de>
    <description>Auf Deutsch</description>
@@ -133,6 +133,8 @@ class Node(object):
            n.set('type', "bool")
        except UnicodeEncodeError:
            print "Encoding error with", self._value, type(self._value)
        except:
            print "Some other exceptiong in sgprops._createXMLElement()"

        # index in parent
        if (self.index != 0):
catalog/testData/Aircraft/c172/c172-set.xml (Normal file, 9 lines)
@@ -0,0 +1,9 @@
<?xml version='1.0' encoding='UTF-8'?>
<PropertyList>
  <sim>
    <name>c172</name>
    <description>Cessna 172P</description>
    <author>Wilbur Wright</author>
  </sim>

</PropertyList>
@@ -1,7 +1,18 @@
<?xml version='1.0' encoding='UTF-8'?>
<PropertyList>
  <sim include="settings-common.xml">
    <author>Wilbur Wright</author>
    <authors>
      <author n="0">
        <name>Wilbur Wright</name>
        <email>ww@wright.com</email>
        <nick>wilburw</nick>
        <description>Model, FDM and cockpit</description>
      </author>
      <author n="1">
        <name>Orville Wright</name>
        <description>Testing and systems</description>
      </author>
    </authors>
    <tags>
      <tag>fighter</tag>
      <tag>1980s</tag>
catalog/testData/Aircraft/f16/f16-excluded-set.xml (Normal file, 11 lines)
@@ -0,0 +1,11 @@
<?xml version='1.0' encoding='UTF-8'?>
<PropertyList include="f16-common.xml">
  <sim>
    <exclude-from-catalog type="bool">true</exclude-from-catalog>
    <name>f16-excluded</name>
    <description>Fine</description>
    <long-description>Blah blah blah</long-description>
    <variant-of>f16a</variant-of>
  </sim>

</PropertyList>
@@ -6,7 +6,14 @@
    <long-description>The F16-B is an upgraded version of the F16A.</long-description>
    <variant-of>f16a</variant-of>

    <author>James T Kirk</author>
    <authors n="0">
      <author n="0">
        <name>James T Kirk</name>
        <email>shatner@enterprise.com</email>
        <nick>starlover</nick>
        <description>Everything</description>
      </author>
    </authors>
  </sim>

</PropertyList>
catalog/testData/Aircraft/f16/f16broken-set.xml (Normal file, 11 lines)
@@ -0,0 +1,11 @@
<?xml version='1.0' encoding='UTF-8'?>
<PropertyList include="f16-common.xml">
  <sim>
    <name>f16broken</name>
    <!-- <description></description> -->
    <description></description>
    <long-description>Blah blah blah</long-description>
    <variant-of>f16a</variant-of>
  </sim>

</PropertyList>
@@ -6,6 +6,8 @@ import os
import catalog
import lxml.etree as ET

catalog.quiet = True

class UpdateCatalogTests(unittest.TestCase):
    def test_scan_set(self):
        info = catalog.scan_set_file("testData/Aircraft/f16", "f16a-set.xml", ["testData/OtherDir"])
@@ -13,18 +15,30 @@ class UpdateCatalogTests(unittest.TestCase):
        self.assertEqual(info['name'], 'F16-A')
        self.assertEqual(info['primary-set'], True)
        self.assertEqual(info['variant-of'], None)
        self.assertEqual(info['author'], 'Wilbur Wright')
        self.assertEqual(info['rating_FDM'], 3)
        self.assertEqual(info['rating_model'], 5)
        self.assertEqual(len(info['tags']), 3)
        self.assertEqual(info['minimum-fg-version'], '2017.4')

        authorsArray = info['authors']
        self.assertNotIn('author', info)
        self.assertEqual(len(authorsArray), 2)

        author0 = authorsArray[0]
        self.assertEqual(author0['name'], 'Wilbur Wright')
        self.assertEqual(author0['nick'], 'wilburw')
        self.assertEqual(author0['email'], 'ww@wright.com')

        author1 = authorsArray[1]
        self.assertEqual(author1['name'], 'Orville Wright')
        # self.assertNotIn('nick', author1)
        # self.assertNotIn('email', author1)

    def test_scan_dir(self):
        (pkg, variants) = catalog.scan_aircraft_dir("testData/Aircraft/f16", ["testData/OtherDir"])

        self.assertEqual(pkg['id'], 'f16a')
        f16trainer = next(v for v in variants if v['id'] == 'f16-trainer')
        self.assertEqual(pkg['author'], 'Wilbur Wright')
        self.assertEqual(len(variants), 3)
        self.assertEqual(pkg['minimum-fg-version'], '2017.4')

@@ -38,14 +52,29 @@ class UpdateCatalogTests(unittest.TestCase):
        f16b = next(v for v in variants if v['id'] == 'f16b')
        self.assertEqual(f16b['variant-of'], 'f16a')
        self.assertEqual(f16b['primary-set'], False)
        self.assertEqual(f16b['author'], 'James T Kirk')

        authorsArray = f16b['authors']
        self.assertNotIn('author', f16b)
        self.assertEqual(len(authorsArray), 2)

        author0 = authorsArray[0]
        self.assertEqual(author0['name'], 'James T Kirk')
        self.assertEqual(author0['nick'], 'starlover')

        f16c = next(v for v in variants if v['id'] == 'f16c')
        self.assertEqual(f16c['variant-of'], 'f16a')
        self.assertEqual(f16c['primary-set'], False)

        self.assertEqual(f16c['author'], 'Wilbur Wright')
        authorsArray = f16c['authors']
        self.assertNotIn('author', f16c)
        self.assertEqual(len(authorsArray), 2)

    # test some older constructs for compat
    def test_scan_dir_legacy(self):
        (pkg, variants) = catalog.scan_aircraft_dir("testData/Aircraft/c172", [])

        self.assertEqual(pkg['id'], 'c172')
        self.assertEqual(pkg['author'], 'Wilbur Wright')

    def test_extract_previews(self):
        info = catalog.scan_set_file("testData/Aircraft/f16", "f16a-set.xml", ["testData/OtherDir"])
@@ -90,13 +119,25 @@ class UpdateCatalogTests(unittest.TestCase):

        self.assertEqual(parsedPkgNode.getValue('name'), pkg['name']);
        self.assertEqual(parsedPkgNode.getValue('description'), pkg['description']);
        self.assertEqual(parsedPkgNode.getValue('author'), "Wilbur Wright");

        self.assertEqual(parsedPkgNode.getValue('minimum-fg-version'), "2017.4");

        parsedVariants = parsedPkgNode.getChildren("variant")
        self.assertEqual(len(parsedVariants), 3)

        # author data verification
        self.assertFalse(parsedPkgNode.hasChild('author'));
        parsedAuthors = parsedPkgNode.getChild("authors").getChildren('author')

        self.assertEqual(len(parsedAuthors), 2)
        author1 = parsedAuthors[0]
        self.assertEqual(author1.getValue("name"), "Wilbur Wright")
        self.assertEqual(author1.getValue("nick"), "wilburw")
        self.assertEqual(author1.getValue("email"), "ww@wright.com")

        author2 = parsedAuthors[1]
        self.assertEqual(author2.getValue("name"), "Orville Wright")

        f16ANode = parsedPkgNode
        self.assertEqual(f16ANode.getValue('name'), 'F16-A');

@@ -107,11 +148,18 @@ class UpdateCatalogTests(unittest.TestCase):

            if (var['id'] == 'f16-trainer'):
                self.assertEqual(pv.getValue('variant-of'), '_primary_')
                self.assertEqual(pv.getValue('author'), "Wilbur Wright");
                # self.assertEqual(pv.getValue('author'), "Wilbur Wright");
            elif (var['id'] == 'f16b'):
                self.assertEqual(pv.getValue('variant-of'), 'f16a')
                self.assertEqual(pv.getValue('description'), 'The F16-B is an upgraded version of the F16A.')
                self.assertEqual(pv.getValue('author'), "James T Kirk");

                # variant author verification
                parsedAuthors = pv.getChild("authors").getChildren('author')
                author1 = parsedAuthors[0]
                self.assertEqual(author1.getValue("name"), "James T Kirk")
                self.assertEqual(author1.getValue("nick"), "starlover")
                self.assertEqual(author1.getValue("email"), "shatner@enterprise.com")
                self.assertEqual(author1.getValue("description"), "Everything")

    def test_minimalAircraft(self):
        # test an aircraft with a deliberately spartan -set.xml file with
@@ -144,7 +144,10 @@ def process_aircraft_dir(name, repo_path):
    package_node = catalog.make_aircraft_node(name, package, variants, download_base)

    download_url = download_base + name + '.zip'
    thumbnail_url = download_base + 'thumbnails/' + name + '_' + package['thumbnail']
    if 'thumbnail' in package:
        # this is never even used, but breaks the script by assuming
        # all aircraft packages have thumbnails defined?
        thumbnail_url = download_base + 'thumbnails/' + name + '_' + package['thumbnail']

    # get cached md5sum if it exists
    md5sum = get_xml_text(md5sum_root.find(str('aircraft_' + name)))
@@ -301,6 +304,7 @@ for scm in scm_list:
        continue

    # process each aircraft in turn
    # print name, repo_path
    process_aircraft_dir(name, repo_path)

# write out the master catalog file
@@ -26,6 +26,7 @@ VERSION="$(echo "$script_blob_id" | sed 's@\$Id: *\([0-9a-f]\+\) *@\1@')"
# Then remove the trailing '$'
VERSION="${VERSION%\$}"

PROGNAME=$(basename "$0")
FGVERSION="release/$(git ls-remote --heads https://git.code.sf.net/p/flightgear/flightgear|grep '\/release\/'|cut -f4 -d'/'|sort -t . -k 1,1n -k2,2n -k3,3n|tail -1)"

#######################################################
@@ -54,6 +55,8 @@ BUILD_TYPE="RelWithDebInfo"
SG_CMAKEARGS=""
FG_CMAKEARGS=""

declare -a UNMATCHED_OPTIONAL_PKG_ALTERNATIVES

while getopts "shc:p:a:d:r:j:O:ib:" OPTION; do
  case $OPTION in
    s) STABLE="STABLE" ;;
@@ -161,31 +164,74 @@ function _make(){
  fi
}

# Find an available, non-virtual package matching one of the given regexps.
# Add an available, non-virtual package matching one of the given regexps.
#
# Each positional parameter is interpreted as a POSIX extended regular
# expression. These parameters are examined from left to right, and the first
# available matching package is added to the global PKG variable. If no match
# is found, the script aborts.
function _package_alternative(){
  if [[ $# -lt 1 ]]; then
    echo "Empty package alternative: this is a bug in the script, aborting."
    exit 1
  fi

  echo "Considering a package alternative:" "$@"
  _package_alternative_inner "$@"
}

# This function requires the 'dctrl-tools' package
function _package_alternative_inner(){
function _mandatory_pkg_alternative(){
  local pkg

  if [[ $# -lt 1 ]]; then
    echo "No match found for the package alternative, aborting."
    echo "Empty package alternative: this is a bug in the script, aborting." \
      | tee -a "$LOGFILE"
    exit 1
  fi

  echo "Considering a package alternative:" "$@" | tee -a "$LOGFILE"
  pkg=$(_find_package_alternative "$@")

  if [[ -n "$pkg" ]]; then
    echo "Package alternative matched for $pkg" | tee -a "$LOGFILE"
    PKG="$PKG $pkg"
  else
    echo "No match found for the package alternative, aborting." \
      | tee -a "$LOGFILE"
    exit 1
  fi

  return 0
}

# If available, add a non-virtual package matching one of the given regexps.
#
# Returning 0 or 1 on success to indicate whether a match was found could be
# done, but would need to be specifically handled at the calling site,
# since the script is run under 'set -e' regime.
function _optional_pkg_alternative(){
  local pkg

  if [[ $# -lt 1 ]]; then
    echo "Empty optional package alternative: this is a bug in the script," \
      "aborting." | tee -a "$LOGFILE"
    exit 1
  fi

  echo "Considering an optional package alternative:" "$@" | tee -a "$LOGFILE"
  pkg=$(_find_package_alternative "$@")

  if [[ -n "$pkg" ]]; then
    echo "Optional package alternative matched for $pkg" | tee -a "$LOGFILE"
    PKG="$PKG $pkg"
  else
    echo "No match found for the optional package alternative, continuing" \
      "anyway." | tee -a "$LOGFILE"
    # "$*" so that we only add one element to the array in this line
    UNMATCHED_OPTIONAL_PKG_ALTERNATIVES+=("$*")
  fi

  return 0
}

# This function requires the 'dctrl-tools' package
function _find_package_alternative(){
  local pkg

  if [[ $# -lt 1 ]]; then
    return 0 # Nothing could be found
  fi

  # This finds non-virtual packages only (on purpose)
  pkg="$(apt-cache dumpavail | \
    grep-dctrl -e -sPackage -FPackage \
@@ -193,13 +239,12 @@ function _package_alternative_inner(){
    sed -ne '1s/^Package:[[:space:]]*//gp')"

  if [[ -n "$pkg" ]]; then
    echo "Package alternative matched for $pkg"
    PKG="$PKG $pkg"
    echo "$pkg"
    return 0
  else
    # Try with the next regexp
    shift
    _package_alternative_inner "$@"
    _find_package_alternative "$@"
  fi
}

@@ -267,8 +312,8 @@ if [[ "$DOWNLOAD_PACKAGES" = "y" ]] && [[ "$APT_GET_UPDATE" = "y" ]]; then
fi

# Ensure 'dctrl-tools' is installed
if [[ "$(dpkg-query --showformat='${db:Status-Status}\n' --show dctrl-tools \
  2>/dev/null)" != "installed" ]]; then
if [[ "$(dpkg-query --showformat='${Status}\n' --show dctrl-tools \
  2>/dev/null | awk '{print $3}')" != "installed" ]]; then
  if [[ "$DOWNLOAD_PACKAGES" = "y" ]]; then
    _aptInstall dctrl-tools
  else
@@ -280,19 +325,28 @@ fi

# Minimum
PKG="build-essential cmake git"
_mandatory_pkg_alternative libcurl4-openssl-dev libcurl4-gnutls-dev
# cmake
PKG="$PKG libarchive-dev libbz2-dev libcurl4-gnutls-dev libexpat1-dev libjsoncpp-dev liblzma-dev libncurses5-dev procps zlib1g-dev"
PKG="$PKG libarchive-dev libbz2-dev libexpat1-dev libjsoncpp-dev liblzma-dev libncurses5-dev procps zlib1g-dev"
# TG
PKG="$PKG libcgal-dev libgdal-dev libtiff5-dev"
# TGGUI/OpenRTI
PKG="$PKG libqt4-dev"
# SG/FG
PKG="$PKG zlib1g-dev freeglut3-dev libboost-dev"
_package_alternative libopenscenegraph-3.4-dev libopenscenegraph-dev \
  'libopenscenegraph-[0-9]+\.[0-9]+-dev'
_mandatory_pkg_alternative libopenscenegraph-3.4-dev libopenscenegraph-dev \
  'libopenscenegraph-[0-9]+\.[0-9]+-dev'
# FG
PKG="$PKG libopenal-dev libudev-dev qt5-default qtdeclarative5-dev libdbus-1-dev libplib-dev"
_package_alternative libpng-dev libpng12-dev libpng16-dev
_mandatory_pkg_alternative libpng-dev libpng12-dev libpng16-dev
# The following packages are needed for the built-in launcher
_optional_pkg_alternative qml-module-qtquick2
_optional_pkg_alternative qml-module-qtquick-window2
_optional_pkg_alternative qml-module-qtquick-dialogs
# The following packages are only needed for the Qt-based remote Canvas
# (comment written at the time of FG 2018.2).
_optional_pkg_alternative qtbase5-private-dev
_optional_pkg_alternative qtdeclarative5-private-dev
# FGPanel
PKG="$PKG fluid libbz2-dev libfltk1.3-dev libxi-dev libxmu-dev"
# FGAdmin
@@ -863,6 +917,24 @@ if [[ "$(declare -p WHATTOBUILD)" =~ '['([0-9]+)']="TERRAGEARGUI"' ]]; then
  echo "./TerraGUI \$@" >> run_terrageargui.sh
fi

# Print optional package alternatives that didn't match (this helps with
# troubleshooting)
if [[ ${#UNMATCHED_OPTIONAL_PKG_ALTERNATIVES[@]} -gt 0 ]]; then
  echo | tee -a "$LOGFILE"
  printf "The following optional package alternative(s) didn't match:\n\n" \
    | tee -a "$LOGFILE"

  for alt in "${UNMATCHED_OPTIONAL_PKG_ALTERNATIVES[@]}"; do
    printf " %s\n" "$alt" | tee -a "$LOGFILE"
  done

  printf "\nThis could explain missing optional features in FlightGear or \
other software\ninstalled by $PROGNAME.\n" | tee -a "$LOGFILE"
else
  printf "All optional package alternatives have found a matching package.\n" \
    | tee -a "$LOGFILE"
fi

echo ""
echo "download_and_compile.sh has finished to work"
Submodule fgdata updated: 5b4983c716...b6df0ded76
Submodule flightgear updated: c9eff67409...1727af3638
Submodule getstart updated: a75fedfc67...7b216d458f
@@ -40,6 +40,7 @@ puts "Is-release? : ##{$isRelease}"
$prefixDir=Dir.pwd + "/dist"
dmgDir=Dir.pwd + "/image"
srcDir=Dir.pwd + "/flightgear"
qmlDir=srcDir + "/src/GUI/qml"

puts "Erasing previous image dir"
`rm -rf #{dmgDir}`
@@ -48,8 +49,10 @@ bundle=dmgDir + "/FlightGear.app"

# run macdeployt before we rename the bundle, otherwise it
# can't find the bundle executable
# also note if adding options here, the bundle path has to be
# the first argument to macdeployqt
puts "Running macdeployqt on the bundle to copy Qt libraries"
`macdeployqt #{$prefixDir}/fgfs.app`
`macdeployqt #{$prefixDir}/fgfs.app -qmldir=#{qmlDir}`

puts "Moving & renaming app bundle"
`mkdir -p #{dmgDir}`
@@ -1,3 +1,5 @@
-*- coding: utf-8 -*-

Quick start for the localization (l10n) scripts
===============================================

@@ -11,7 +13,11 @@ $FG_ROOT/Translations:
Note: the legacy FlightGear XML localization files are only needed by
'fg-convert-translation-files' when migrating to the XLIFF format. The
other scripts only need the default translation and obviously, for
'fg-update-translation-files', the current XLIFF files.
'fg-update-translation-files', the current XLIFF files[1].


Creating XLIFF files from existing FlightGear legacy XML translation files
--------------------------------------------------------------------------

To get the initial XLIFF files (generated from the default translation in
$FG_ROOT/Translations/default as well as the legacy FlightGear XML
@@ -19,8 +25,10 @@ localization files in $FG_ROOT/Translations/<language_code>):

  languages="de en_US es fr it nl pl pt zh_CN"

  # Your shell must expand $languages as several words. POSIX shell does that,
  # but not zsh for instance. Otherwise, don't use a shell variable.
  # Your shell must expand $languages as several words for the following
  # commands to work. POSIX shell does that, Bash too apparently, but not Zsh
  # (by default). In Zsh, you can use $=languages or ${=languages} to ensure
  # the expansion uses word splitting.
  fg-convert-translation-files --transl-dir="$FG_ROOT/Translations" $languages

  # Add strings found in the default translation but missing in the legacy FG
@@ -28,12 +36,18 @@ localization files in $FG_ROOT/Translations/<language_code>):
  fg-update-translation-files --transl-dir="$FG_ROOT/Translations" \
    merge-new-master $languages

When master strings[1] have changed (in a large sense, i.e.: strings added,
modified or removed, or categories added or removed[2]):
Updating XLIFF files to reflect changes in the default translation
------------------------------------------------------------------

When master strings[2] have changed (in a large sense, i.e.: strings added,
modified or removed, or categories added or removed[3]):

  fg-update-translation-files --transl-dir="$FG_ROOT/Translations" \
    merge-new-master $languages

Updating XLIFF files to mark or remove obsolete translated strings
------------------------------------------------------------------

To remove unused translated strings (not to be done too often in my opinion):

  fg-update-translation-files --transl-dir="$FG_ROOT/Translations" \
@@ -43,6 +57,29 @@ To remove unused translated strings (not to be done too often in my opinion):
  as not-to-be-translated, however 'merge-new-master' presented above already
  does that)

Merging contents from an XLIFF file into another one
----------------------------------------------------

Suppose a translator has been working on a particular translation file, and
meanwhile the official XLIFF file for this translation has been updated in
FGData (new translatable strings added, obsolete strings marked or removed,
etc.). In such a case, 'fg-merge-xliff-into-xliff' can be used to merge the
translator's work into the project file. Essentially, this means that for all
strings that have the same source text, plural status, number of plural forms
and of course target language, the target texts, “approved” status and
translator comments will be taken from the first file passed in the following
command:

  fg-merge-xliff-into-xliff TRANSLATOR_FILE PROJECT_FILE

Used like this, PROJECT_FILE will be updated with data from TRANSLATOR_FILE.
If you don't want to modify PROJECT_FILE, use the -o (--output) option. If '-'
is passed as argument to this option, then the result is written to the
standard output.

Creating skeleton XLIFF files for new translations
--------------------------------------------------

To create skeleton translations for new languages (e.g., for fr_BE, en_AU and
ca):

@@ -62,17 +99,23 @@ ca):
  fg-new-translations chooses an appropriate place based on the value
  specified for --transl-dir)

fg-convert-translation-files, fg-update-translation-files and
fg-new-translations all support the --help option for more detailed
information.
Getting more information on the scripts
---------------------------------------

fg-convert-translation-files, fg-update-translation-files,
fg-merge-xliff-into-xliff and fg-new-translations all support the --help
option for more detailed information.


Footnotes
---------

[1] Strings in the default translation.
[1] Except for the fg-merge-xliff-into-xliff script, which doesn't have any
    of these requirements.

[2] Only empty categories are removed by this command. An obsolete category
[2] Strings in the default translation.

[3] Only empty categories are removed by this command. An obsolete category
    can be made empty by manual editing (easy, just locate the right
    <group>) or this way:
@@ -24,11 +24,6 @@ import locale
import os
import sys

try:
    import xml.etree.ElementTree as et
except ImportError:
    import elementtree.ElementTree as et

import flightgear.meta.logging
import flightgear.meta.i18n as fg_i18n
python3-flightgear/fg-merge-xliff-into-xliff (Executable file, 123 lines)
@@ -0,0 +1,123 @@
#! /usr/bin/env python3
# -*- coding: utf-8 -*-

# fg-merge-xliff-into-xliff --- Merge translations from one XLIFF file into
# another one
# Copyright (C) 2017 Florent Rougon
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import argparse
import locale
import os
import sys

import flightgear.meta.logging
import flightgear.meta.i18n as fg_i18n


PROGNAME = os.path.basename(sys.argv[0])

# Only messages with severity >= info will be printed to the terminal (it's
# possible to also log all messages to a file regardless of their level, see
# the Logger class). Of course, there is also the standard logging module...
logger = flightgear.meta.logging.Logger(
    progname=PROGNAME,
    logLevel=flightgear.meta.logging.LogLevel.info,
    defaultOutputStream=sys.stderr)


def processCommandLine():
    params = argparse.Namespace()

    parser = argparse.ArgumentParser(
        usage="""\
%(prog)s [OPTION ...] SOURCE INTO
Merge strings from a FlightGear XLIFF localization file into another one.""",
        description="""\
This program merges a FlightGear XLIFF localization file into another one.
This means that every translatable string that:

(1) exists in both SOURCE and INTO;

(2) has the same target language, source text, plural status and number of
plural forms in SOURCE and in INTO;

is updated from SOURCE, i.e.: the target texts, 'approved' status and
translator comments are copied from SOURCE.

The result is written to INTO unless the -o (--output) option is given.

Note that this program is different from fg-update-translation-files's
'merge-new-master' command, which is for updating an XLIFF file according to
the default translation ("master").

Expected use case: suppose that a translator is working on a translation
file, and meanwhile the official XLIFF file for this translation is updated
in the project repository (new translatable strings added, obsolete strings
marked or removed, etc.). This program can then be used to merge the
translator work into the project file for all strings for which it makes
sense (source text unchanged, same plural status, etc.).""",
        formatter_class=argparse.RawDescriptionHelpFormatter,
        # I want --help but not -h (it might be useful for something else)
        add_help=False)

    parser.add_argument("source", metavar="SOURCE",
                        help="""\
input XLIFF file; read updated translated strings
from this file""")
    parser.add_argument("into", metavar="INTO",
                        help="""\
XLIFF file to compare to SOURCE in order to decide
which translated strings to update; unless the -o
option is used, updated strings are written to this
file""")
    parser.add_argument("-o", "--output",
                        help="""\
write the XLIFF merged output to OUTPUT instead of
INTO. When this option is used, INTO is read but not
modified. If OUTPUT is '-', write the XLIFF merged
output to the standard output.""")
    parser.add_argument("--help", action="help",
                        help="display this message and exit")

    return parser.parse_args(namespace=params)


def mergeXliffIntoXliff(source, into, output):
    formatHandler = fg_i18n.XliffFormatHandler()

    srcTransl = formatHandler.readTranslation(source)
    transl = formatHandler.readTranslation(into)
    # Merge 'srcTransl' into 'transl'
    transl.mergeNonMasterTransl(srcTransl, logger=logger)

    # File path, or '-' for the standard output
    outputFile = into if output is None else output
    formatHandler.writeTranslation(transl, outputFile)


def main():
    global params

    locale.setlocale(locale.LC_ALL, '')
    params = processCommandLine()

    mergeXliffIntoXliff(params.source, params.into, params.output)

    sys.exit(0)


if __name__ == "__main__": main()
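The same merge can also be driven programmatically with the classes used by the script above, assuming the python3-flightgear package from this repository is on PYTHONPATH. The file names below are hypothetical; every API call is taken directly from the code shown in this compare:

```python
import sys
import flightgear.meta.logging
import flightgear.meta.i18n as fg_i18n

logger = flightgear.meta.logging.Logger(
    progname="merge-example",
    logLevel=flightgear.meta.logging.LogLevel.info,
    defaultOutputStream=sys.stderr)

formatHandler = fg_i18n.XliffFormatHandler()
# Hypothetical paths: a translator's working copy and the project's file.
srcTransl = formatHandler.readTranslation("FlightGear-translator-fr.xlf")
transl = formatHandler.readTranslation("FlightGear-fr.xlf")
transl.mergeNonMasterTransl(srcTransl, logger=logger)
formatHandler.writeTranslation(transl, "FlightGear-fr-merged.xlf")
```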
@@ -24,11 +24,6 @@ import locale
import os
import sys

try:
    import xml.etree.ElementTree as et
except ImportError:
    import elementtree.ElementTree as et

import flightgear.meta.logging
import flightgear.meta.i18n as fg_i18n
@@ -25,11 +25,6 @@ import locale
import os
import sys

try:
    import xml.etree.ElementTree as et
except ImportError:
    import elementtree.ElementTree as et

import flightgear.meta.logging
import flightgear.meta.i18n as fg_i18n
@@ -684,8 +684,7 @@ class Translation:

    def mergeMasterForCategory(self, masterTransl, cat, logger=dummyLogger):
        if cat not in masterTransl:
            raise BadAPIUse("Bad API use: category {!r} not in " \
                            "'masterTransl'".format(cat))
            raise BadAPIUse("category {!r} not in 'masterTransl'".format(cat))
        elif cat not in self:
            # Category appeared in 'masterTransl' that wasn't in 'self'
            self.resetCategory(cat)
@@ -747,6 +746,110 @@ class Translation:
                        .format(lang=self.targetLanguage, cat=cat))
            del self[cat]

    # Helper method for mergeNonMasterTranslForCategory()
    def _mergeNonMasterTranslForCategory_CheckMatchingParams(
            self, cat, tid, srcTu, logger):
        translUnit = self.translations[cat][tid]

        if srcTu.targetLanguage != translUnit.targetLanguage:
            logger.warning(
                "ignoring translatable string '{id}', because the target "
                "languages don't match between the two translations"
                .format(id=tid))
            return False

        if srcTu.sourceText != translUnit.sourceText:
            logger.warning(
                "ignoring translatable string '{id}', because the source "
                "texts differ between the two translations"
                .format(id=tid))
            return False

        if len(srcTu.targetTexts) != len(translUnit.targetTexts):
            logger.warning(
                "ignoring translatable string '{id}', because the lists "
                "of target texts (= number of singular + plural forms) differ "
                "between the two translations".format(id=tid))
            return False

        if srcTu.isPlural != translUnit.isPlural:
            logger.warning(
                "ignoring translatable string '{id}', because the plural "
                "statuses don't match".format(id=tid))
            return False

        return True

    def mergeNonMasterTranslForCategory(self, srcTransl, cat,
                                        logger=dummyLogger):
        """Merge a non-master Translation into 'self' for category 'cat'.

        See mergeNonMasterTransl()'s docstring for more info.

        """
        if cat not in srcTransl:
            return  # nothing to merge in this category
        elif cat not in self:
            raise BadAPIUse(
                "cowardly refusing to create category {!r} in the destination "
                "translation for an XLIFF-to-XLIFF merge operation "
                "(new categories should be first added to the master "
                "translation, then merged into each XLIFF translation file)"
                .format(cat))

        if srcTransl.targetLanguage != self.targetLanguage:
            raise BadAPIUse(
                "cowardly refusing to merge two XLIFF files with different "
                "target languages")

        thisCatTranslations = self.translations[cat]
        idsSet = { str(tid) for tid in thisCatTranslations.keys() }

        for tid, srcTu in srcTransl.translations[cat].items():
            if str(tid) not in idsSet:
                logger.warning(
                    "translatable string '{id}' not found in the "
                    "destination translation during an XLIFF-to-XLIFF merge "
                    "operation. The string will be ignored, because new "
                    "translatable strings must be brought by the default "
                    "translation.".format(id=tid))
                continue
            # If some parameters don't match (sourceText, isPlural...), the
            # translation in 'srcTu' is probably outdated, so don't use it.
            elif not self._mergeNonMasterTranslForCategory_CheckMatchingParams(
                    cat, tid, srcTu, logger):
                continue
            else:
                translUnit = thisCatTranslations[tid]
                translUnit.targetTexts = srcTu.targetTexts[:]  # copy
                translUnit.approved = srcTu.approved
                translUnit.translatorComments = srcTu.translatorComments[:]

    def mergeNonMasterTransl(self, srcTransl, logger=dummyLogger):
        """Merge the non-master Translation 'srcTransl' into 'self'.

        Contrary to mergeMasterTranslation(), this method doesn't add
        new translatable strings to 'self', doesn't mark strings as
        obsolete or vanished, nor does it add or remove categories in
        'self'. It only updates strings in 'self' from 'srcTransl' when
        they:
          - already exist in 'self';
          - have the same target language, source text, plural status
            and number of plural forms in 'self' and in 'srcTransl'.

        Expected use case: suppose that a translator is working on a
        translation file, and meanwhile the official XLIFF file (for
        instance) for this translation is updated in the project
        repository (new translatable strings added, obsolete strings
        marked or removed, etc.). This method can then be used to merge
        the translator work into the project file for all strings for
        which it makes sense (source text unchanged, same plural status,
        etc.).

        """
        for cat in srcTransl:
            self.mergeNonMasterTranslForCategory(srcTransl, cat, logger=logger)

    def nbPluralForms(self):
        return nbPluralFormsForLanguage(self.targetLanguage)
@@ -1,7 +1,7 @@
#!/bin/bash

THIS_RELEASE="2017.3"
NEXT_RELEASE="2017.4"
THIS_RELEASE="2018.2"
NEXT_RELEASE="2018.3"
SUBMODULES="simgear flightgear fgdata getstart"

#:<< 'COMMENT_END'
Submodule simgear updated: 629e68428f...489573329e