first commit
195
scripts/python/FGFSDemo.py
Executable file
@@ -0,0 +1,195 @@
|
||||
#! /usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
from tkinter import tix as Tix
|
||||
from FlightGear import FlightGear
|
||||
|
||||
import os
|
||||
import socket
|
||||
import sys
|
||||
|
||||
PROGNAME = os.path.basename(sys.argv[0])
|
||||
|
||||
class PropertyField:
|
||||
def __init__(self, parent, prop, label):
|
||||
self.prop = prop
|
||||
self.field = Tix.LabelEntry( parent, label=label,
|
||||
options='''
|
||||
label.width 30
|
||||
label.anchor e
|
||||
entry.width 30
|
||||
''' )
|
||||
self.field.pack( side=Tix.TOP, padx=20, pady=2 )
|
||||
|
||||
def update_field(self,fgfs):
|
||||
val = fgfs[self.prop]
|
||||
self.field.entry.delete(0,'end')
|
||||
self.field.entry.insert(0, val)
|
||||
|
||||
class PropertyPage(Tix.Frame):
|
||||
def __init__(self,parent,fgfs):
|
||||
Tix.Frame.__init__(self,parent)
|
||||
self.fgfs = fgfs
|
||||
self.pack( side=Tix.TOP, padx=2, pady=2, fill=Tix.BOTH, expand=1 )
|
||||
self.fields = []
|
||||
|
||||
def addField(self, prop, label):
|
||||
f = PropertyField(self, prop, label)
|
||||
self.fields.append(f)
|
||||
|
||||
def update_fields(self):
|
||||
for f in self.fields:
|
||||
f.update_field(self.fgfs)
|
||||
Tix.Frame.update(self)
|
||||
|
||||
class FGFSDemo(Tix.Frame):
|
||||
def __init__(self,fgfs,root=None):
|
||||
Tix.Frame.__init__(self,root)
|
||||
z = root.winfo_toplevel()
|
||||
z.wm_protocol("WM_DELETE_WINDOW", lambda self=self: self.quitcmd())
|
||||
self.fgfs = fgfs
|
||||
self.pack()
|
||||
self.pages = {}
|
||||
self.after_id = None
|
||||
self.createWidgets()
|
||||
self.update()
|
||||
|
||||
def createWidgets(self):
|
||||
self.nb = Tix.NoteBook(self)
|
||||
self.nb.add( 'sim', label='Simulation',
|
||||
raisecmd= lambda self=self: self.update_page() )
|
||||
self.nb.add( 'view', label='View',
|
||||
raisecmd= lambda self=self: self.update_page() )
|
||||
self.nb.add( 'loc', label='Location',
|
||||
raisecmd= lambda self=self: self.update_page() )
|
||||
self.nb.add( 'weather', label='Weather',
|
||||
raisecmd= lambda self=self: self.update_page() )
|
||||
self.nb.add( 'clouds', label='Clouds',
|
||||
raisecmd= lambda self=self: self.update_page() )
|
||||
self.nb.add( 'velocities', label='Velocities',
|
||||
raisecmd= lambda self=self: self.update_page() )
|
||||
|
||||
page = PropertyPage( self.nb.sim, self.fgfs )
|
||||
self.pages['sim'] = page
|
||||
page.addField( '/sim/aircraft', 'Aircraft:')
|
||||
page.addField( '/sim/presets/airport-id', 'Airport ID:')
|
||||
page.addField( '/sim/time/gmt', 'Current time (GMT):')
|
||||
page.addField( '/sim/presets/trim', 'Trim on ground (true/false):')
|
||||
page.addField( '/sim/sound/enabled', 'Sound enabled (true/false):')
|
||||
page.addField( '/sim/startup/browser-app', 'Web browser:')
|
||||
|
||||
page = PropertyPage( self.nb.view, self.fgfs )
|
||||
self.pages['view'] = page
|
||||
page.addField( '/sim/view-mode', 'View mode:')
|
||||
page.addField( "/sim/current-view/field-of-view", "Field of view (deg):" )
|
||||
page.addField( "/sim/current-view/pitch-offset-deg", "View pitch offset (deg):" )
|
||||
page.addField( "/sim/current-view/heading-offset-deg", "View heading offset (deg):" )
|
||||
|
||||
page = PropertyPage( self.nb.loc, self.fgfs )
|
||||
self.pages['loc'] = page
|
||||
page.addField( "/position/altitude-ft", "Altitude (ft):" )
|
||||
page.addField( "/position/longitude-deg", "Longitude (deg):" )
|
||||
page.addField( "/position/latitude-deg", "Latitude (deg):" )
|
||||
page.addField( "/orientation/roll-deg", "Roll (deg):" )
|
||||
page.addField( "/orientation/pitch-deg", "Pitch (deg):" )
|
||||
page.addField( "/orientation/heading-deg", "Heading (deg):" )
|
||||
|
||||
page = PropertyPage( self.nb.weather, self.fgfs )
|
||||
self.pages['weather'] = page
|
||||
page.addField("/environment/wind-from-heading-deg",
|
||||
"Wind direction (deg FROM):")
|
||||
page.addField("/environment/metar/base-wind-speed-kt",
|
||||
"Wind speed (kt):")
|
||||
page.addField("/environment/metar/gust-wind-speed-kt",
|
||||
"Maximum gust (kt):")
|
||||
page.addField("/environment/wind-from-down-fps",
|
||||
"Updraft (fps):")
|
||||
page.addField("/environment/temperature-degc", "Temperature (degC):")
|
||||
page.addField("/environment/dewpoint-degc", "Dewpoint (degC):")
|
||||
page.addField("/environment/pressure-sea-level-inhg",
|
||||
"Altimeter setting (inHG):")
|
||||
|
||||
page = PropertyPage( self.nb.clouds, self.fgfs )
|
||||
self.pages['clouds'] = page
|
||||
page.addField("/environment/clouds/layer[0]/layer-type",
|
||||
"Layer 0 type:")
|
||||
page.addField("/environment/clouds/layer[0]/elevation-ft",
|
||||
"Layer 0 height (ft):")
|
||||
page.addField("/environment/clouds/layer[0]/thickness-ft",
|
||||
"Layer 0 thickness (ft):")
|
||||
page.addField("/environment/clouds/layer[1]/layer-type",
|
||||
"Layer 1 type:")
|
||||
page.addField("/environment/clouds/layer[1]/elevation-ft",
|
||||
"Layer 1 height (ft):")
|
||||
page.addField("/environment/clouds/layer[1]/thickness-ft",
|
||||
"Layer 1 thickness (ft):")
|
||||
page.addField("/environment/clouds/layer[2]/layer-type",
|
||||
"Layer 2 type:")
|
||||
page.addField("/environment/clouds/layer[2]/elevation-ft",
|
||||
"Layer 2 height (ft):")
|
||||
page.addField("/environment/clouds/layer[2]/thickness-ft",
|
||||
"Layer 2 thickness (ft):")
|
||||
page.addField("/environment/clouds/layer[3]/layer-type",
|
||||
"Layer 3 type:")
|
||||
page.addField("/environment/clouds/layer[3]/elevation-ft",
|
||||
"Layer 3 height (ft):")
|
||||
page.addField("/environment/clouds/layer[3]/thickness-ft",
|
||||
"Layer 3 thickness (ft):")
|
||||
page.addField("/environment/clouds/layer[4]/layer-type",
|
||||
"Layer 4 type:")
|
||||
page.addField("/environment/clouds/layer[4]/elevation-ft",
|
||||
"Layer 4 height (ft):")
|
||||
page.addField("/environment/clouds/layer[4]/thickness-ft",
|
||||
"Layer 4 thickness (ft):")
|
||||
|
||||
page = PropertyPage( self.nb.velocities, self.fgfs )
|
||||
self.pages['velocities'] = page
|
||||
page.addField("/velocities/airspeed-kt", "Airspeed (kt):")
|
||||
page.addField("/velocities/speed-down-fps", "Descent speed (fps):")
|
||||
|
||||
self.nb.pack( expand=1, fill=Tix.BOTH, padx=5, pady=5, side=Tix.TOP )
|
||||
|
||||
self.QUIT = Tix.Button(self)
|
||||
self.QUIT['text'] = 'Quit'
|
||||
self.QUIT['command'] = self.quitcmd
|
||||
self.QUIT.pack(side="bottom")
|
||||
|
||||
def quitcmd(self):
|
||||
if self.after_id:
|
||||
self.after_cancel(self.after_id)
|
||||
#self.quit()
|
||||
self.destroy()
|
||||
|
||||
def update_page(self):
|
||||
page = self.pages[ self.nb.raised() ]
|
||||
page.update_fields()
|
||||
self.update()
|
||||
self.after_id = self.after( 1000, lambda self=self: self.update_page() )
|
||||
|
||||
def main():
|
||||
if len(sys.argv) != 3:
|
||||
print('Usage: {} host port'.format(PROGNAME))
|
||||
sys.exit(1)
|
||||
|
||||
host = sys.argv[1]
|
||||
try:
|
||||
port = int( sys.argv[2] )
|
||||
except ValueError:
|
||||
print('Error: expected a number for the port argument, not {!r}'
|
||||
.format(sys.argv[2]))
|
||||
sys.exit(1)
|
||||
|
||||
fgfs = None
|
||||
try:
|
||||
fgfs = FlightGear( host, port )
|
||||
except socket.error as msg:
|
||||
print('Error connecting to flightgear:', msg.strerror)
|
||||
sys.exit(1)
|
||||
|
||||
root = Tix.Tk()
|
||||
app = FGFSDemo( fgfs, root )
|
||||
app.mainloop()
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
|
||||
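For a quick feel of what FGFSDemo.py does behind its Tix GUI, here is a minimal headless sketch of the same one-second polling loop (the host, port and property list are placeholders; it assumes fgfs is running with its telnet property server on that port and that FlightGear.py is importable):

import time

from FlightGear import FlightGear

fgfs = FlightGear('localhost', 5500)   # placeholder host/port
props = ['/sim/aircraft', '/position/altitude-ft', '/velocities/airspeed-kt']

for _ in range(3):                     # a few polling cycles instead of a GUI event loop
    for prop in props:
        print(prop, '=', fgfs[prop])   # same dictionary-style read as PropertyField.update_field()
    time.sleep(1)                      # FGFSDemo schedules this with after(1000, ...)

fgfs.quit()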
211
scripts/python/FlightGear.py
Normal file
@@ -0,0 +1,211 @@
|
||||
from telnetlib import Telnet
|
||||
import sys
|
||||
import socket
|
||||
import re
|
||||
import time
|
||||
|
||||
__all__ = ["FlightGear"]
|
||||
|
||||
CRLF = '\r\n'
|
||||
|
||||
class FGTelnet(Telnet):
|
||||
def __init__(self,host,port):
|
||||
Telnet.__init__(self,host,port)
|
||||
self.prompt = [re.compile('/[^>]*> '.encode('utf-8'))]
|
||||
self.timeout = 5
|
||||
#Telnet.set_debuglevel(self,2)
|
||||
|
||||
def help(self):
|
||||
return
|
||||
|
||||
def ls(self,dir=None):
|
||||
"""
|
||||
Returns a list of properties.
|
||||
"""
|
||||
if dir is None:
|
||||
self._putcmd('ls')
|
||||
else:
|
||||
self._putcmd('ls %s' % dir )
|
||||
return self._getresp()
|
||||
|
||||
def ls2(self, dir_):
|
||||
self._putcmd(f'ls2 {dir_}')
|
||||
return self._getresp()
|
||||
|
||||
def dump(self):
|
||||
"""Dump current state as XML."""
|
||||
self._putcmd('dump')
|
||||
return self._getresp()
|
||||
|
||||
def cd(self, dir):
|
||||
"""Change directory."""
|
||||
self._putcmd('cd ' + dir)
|
||||
self._getresp()
|
||||
return
|
||||
|
||||
def pwd(self):
|
||||
"""Display current path."""
|
||||
self._putcmd('pwd')
|
||||
return self._getresp()
|
||||
|
||||
def get(self,var):
|
||||
"""Retrieve the value of a parameter."""
|
||||
self._putcmd('get %s' % var )
|
||||
return self._getresp()
|
||||
|
||||
def set(self,var,value):
|
||||
"""Set variable to a new value"""
|
||||
self._putcmd('set %s %s' % (var,value))
|
||||
self._getresp() # Discard response
|
||||
|
||||
def quit(self):
|
||||
"""Terminate connection"""
|
||||
self._putcmd('quit')
|
||||
self.close()
|
||||
return
|
||||
|
||||
# Internal: send one command to FlightGear
|
||||
def _putcmd(self,cmd):
|
||||
cmd = cmd + CRLF
|
||||
Telnet.write(self, cmd.encode('utf-8'))
|
||||
return
|
||||
|
||||
def _getresp(self):
|
||||
# Telnet.expect() can return short result, so we call it in a loop.
|
||||
response = b''
|
||||
while 1:
|
||||
_i, _match, data = Telnet.expect(self, self.prompt, self.timeout)
|
||||
response += data
|
||||
if _i == 0:
|
||||
break # We have the prompt that marks the end of the data.
|
||||
assert _i == -1, f'_i={_i}'
|
||||
# Remove the terminating prompt.
|
||||
# Everything preceding it is the response.
|
||||
return response.decode('utf-8').split('\n')[:-1]
|
||||
|
||||
class LsItem:
|
||||
def __init__(self, num_children, name, index, type_, value_text):
|
||||
self.num_children = num_children
|
||||
self.name = name
|
||||
self.index = index
|
||||
self.type_ = type_
|
||||
self.value_text = value_text
|
||||
# Convert to correct type; type_ is originally from
|
||||
# flightgear/src/Network/props.cxx:getValueTypeString().
|
||||
#
|
||||
if type_ in ('unknown', 'unspecified', 'none'):
|
||||
self.value = value_text
|
||||
elif type_ == 'bool':
|
||||
self.value = (value_text == 'true')
|
||||
elif type_ in ('int', 'long'):
|
||||
self.value = int(value_text)
|
||||
elif type_ in ('float', 'double'):
|
||||
self.value = float(value_text)
|
||||
elif type_ == 'string':
|
||||
self.value = value_text
|
||||
else:
|
||||
assert 0, f'Unrecognised type: {type_}'
|
||||
|
||||
def __str__(self):
|
||||
return f'num_children={self.num_children} name={self.name}[{self.index}] type={self.type_}: {self.value!r}'
|
||||
|
||||
class FlightGear:
|
||||
"""FlightGear interface class.
|
||||
|
||||
An instance of this class represents a connection to a FlightGear telnet
|
||||
server.
|
||||
|
||||
Properties are accessed using a dictionary style interface:
|
||||
For example:
|
||||
|
||||
# Connect to flightgear telnet server.
|
||||
fg = FlightGear('myhost', 5500)
|
||||
# parking brake on
|
||||
fg['/controls/gear/brake-parking'] = 1
|
||||
# Get current heading
|
||||
heading = fg['/orientation/heading-deg']
|
||||
|
||||
Other methods wrap non-property commands, for example view_next(), view_prev() and quit().
|
||||
"""
|
||||
|
||||
def __init__( self, host = 'localhost', port = 5500 ):
|
||||
try:
|
||||
self.telnet = FGTelnet(host,port)
|
||||
except socket.error as msg:
|
||||
self.telnet = None
|
||||
raise msg
|
||||
|
||||
def __del__(self):
|
||||
# Ensure telnet connection is closed cleanly.
|
||||
self.quit()
|
||||
|
||||
def __getitem__(self,key):
|
||||
"""Get a FlightGear property value.
|
||||
Where possible the value is converted to the equivalent Python type.
|
||||
"""
|
||||
s = self.telnet.get(key)[0]
|
||||
match = re.compile( r'[^=]*=\s*\'([^\']*)\'\s*([^\r]*)\r').match( s )
|
||||
if not match:
|
||||
return None
|
||||
value,type = match.groups()
|
||||
#value = match.group(1)
|
||||
#type = match.group(2)
|
||||
if value == '':
|
||||
return None
|
||||
|
||||
if type == '(double)':
|
||||
return float(value)
|
||||
elif type == '(int)':
|
||||
return int(value)
|
||||
elif type == '(bool)':
|
||||
if value == 'true':
|
||||
return 1
|
||||
else:
|
||||
return 0
|
||||
else:
|
||||
return value
|
||||
|
||||
def __setitem__(self, key, value):
|
||||
"""Set a FlightGear property value."""
|
||||
if value is True:
|
||||
# Flightgear props doesn't treat string 'True' as true - see
|
||||
# SGPropertyNode::setStringValue().
|
||||
value = 'true'
|
||||
self.telnet.set( key, value )
|
||||
|
||||
def ls(self, dir_):
|
||||
'''
|
||||
Returns list of LsItem's.
|
||||
'''
|
||||
lines = self.telnet.ls2(dir_)
|
||||
ret = []
|
||||
for line in lines:
|
||||
if line.endswith('\r'):
|
||||
line = line[:-1]
|
||||
#print(f'line={line!r}')
|
||||
try:
|
||||
num_children, name, index, type_, value = line.split(' ', 4)
|
||||
except Exception as e:
|
||||
print(f'*** dir_={dir_!r} len(lines)={len(lines)}. failed to read items from line={line!r}. lines is: {lines!r}')
|
||||
raise
|
||||
index = int(index)
|
||||
num_children = int(num_children)
|
||||
item = LsItem(num_children, name, index, type_, value)
|
||||
#print(f'item={item}')
|
||||
ret.append( item)
|
||||
return ret
|
||||
|
||||
def quit(self):
|
||||
"""Close the telnet connection to FlightGear."""
|
||||
if self.telnet:
|
||||
self.telnet.quit()
|
||||
self.telnet = None
|
||||
|
||||
def view_next(self):
|
||||
"""Move to next view."""
|
||||
self.telnet.set( "/command/view/next", "true")
|
||||
|
||||
def view_prev(self):
|
||||
"""Move to next view."""
|
||||
self.telnet.set( "/command/view/prev", "true")
|
||||
|
||||
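As a quick orientation, a short usage sketch of the FlightGear class defined above (the host and port are placeholders; a running fgfs with its telnet property server enabled is assumed, and the property values are illustrative):

from FlightGear import FlightGear

fg = FlightGear('localhost', 5500)

fg['/controls/gear/brake-parking'] = True    # __setitem__ sends 'set ...'; True becomes 'true'
heading = fg['/orientation/heading-deg']     # __getitem__ converts '(double)' values to float

for item in fg.ls('/position'):              # LsItem objects parsed from the 'ls2' output
    print(item)

fg.view_next()                               # wrapper around the /command/view/next property
fg.quit()                                    # closes the telnet connection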
26
scripts/python/TerraSync/terrasync.py
Executable file
@@ -0,0 +1,26 @@
#! /usr/bin/env python3
# -*- coding: utf-8 -*-

# terrasync.py --- Synchronize TerraScenery data to your local disk
# Copyright (C) 2018 Florent Rougon
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.

# terrasync.py development was started by Torsten Dreyer in 2016. This file is
# just the normal entry point for users.

import terrasync.main

terrasync.main.main()
0
scripts/python/TerraSync/terrasync/__init__.py
Normal file
105
scripts/python/TerraSync/terrasync/dirindex.py
Normal file
@@ -0,0 +1,105 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# dirindex.py --- Class used to parse .dirindex files
|
||||
#
|
||||
# Copyright (C) 2016 Torsten Dreyer
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License as
|
||||
# published by the Free Software Foundation; either version 2 of the
|
||||
# License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
# General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
|
||||
"""Parser for .dirindex files."""
|
||||
|
||||
from .exceptions import InvalidDirIndexFile
|
||||
from .virtual_path import VirtualPath
|
||||
|
||||
|
||||
class DirIndex:
|
||||
"""Parser for .dirindex files."""
|
||||
|
||||
def __init__(self, dirIndexFile):
|
||||
self.directories = []
|
||||
self.files = []
|
||||
self.tarballs = []
|
||||
self.version = 0
|
||||
self.path = None # will be a VirtualPath instance when set
|
||||
|
||||
# readFrom() stores the raw contents of the .dirindex file in this
|
||||
# attribute. This is useful for troubleshooting.
|
||||
self._rawContents = None
|
||||
|
||||
with open(dirIndexFile, "r", encoding="ascii") as f:
|
||||
self.readFrom(f)
|
||||
|
||||
self._sanityCheck()
|
||||
|
||||
@classmethod
|
||||
def checkForBackslashOrLeadingSlash(cls, line, path):
|
||||
if '\\' in path or path.startswith('/'):
|
||||
raise InvalidDirIndexFile(
|
||||
r"invalid '\' or leading '/' in path field from line {!r}"
|
||||
.format(line))
|
||||
|
||||
@classmethod
|
||||
def checkForSlashBackslashOrDoubleColon(cls, line, name):
|
||||
if '/' in name or '\\' in name:
|
||||
raise InvalidDirIndexFile(
|
||||
r"invalid '\' or '/' in name field from line {!r}"
|
||||
.format(line))
|
||||
|
||||
if name == "..":
|
||||
raise InvalidDirIndexFile(
|
||||
r"invalid name field equal to '..' in line {!r}".format(line))
|
||||
|
||||
def readFrom(self, readable):
|
||||
self._rawContents = readable.read()
|
||||
|
||||
for line in self._rawContents.split('\n'):
|
||||
line = line.strip()
|
||||
if line.startswith('#'):
|
||||
continue
|
||||
|
||||
tokens = line.split(':')
|
||||
if len(tokens) == 0:
|
||||
continue
|
||||
elif tokens[0] == "version":
|
||||
self.version = int(tokens[1])
|
||||
elif tokens[0] == "path":
|
||||
self.checkForBackslashOrLeadingSlash(line, tokens[1])
|
||||
# This is relative to the repository root
|
||||
self.path = VirtualPath(tokens[1])
|
||||
|
||||
if ".." in self.path.parts:
|
||||
raise InvalidDirIndexFile(
|
||||
"'..' component found in 'path' entry {!r}"
|
||||
.format(self.path))
|
||||
elif tokens[0] == "d":
|
||||
self.checkForSlashBackslashOrDoubleColon(line, tokens[1])
|
||||
self.directories.append({'name': tokens[1], 'hash': tokens[2]})
|
||||
elif tokens[0] == "f":
|
||||
self.checkForSlashBackslashOrDoubleColon(line, tokens[1])
|
||||
self.files.append({'name': tokens[1],
|
||||
'hash': tokens[2], 'size': int(tokens[3])})
|
||||
elif tokens[0] == "t":
|
||||
self.checkForSlashBackslashOrDoubleColon(line, tokens[1])
|
||||
self.tarballs.append({'name': tokens[1], 'hash': tokens[2],
|
||||
'size': int(tokens[3])})
|
||||
|
||||
def _sanityCheck(self):
|
||||
if self.path is None:
|
||||
assert self._rawContents is not None
|
||||
|
||||
firstLines = self._rawContents.split('\n')[:5]
|
||||
raise InvalidDirIndexFile(
|
||||
"no 'path' field found; the first lines of this .dirindex file "
|
||||
"follow:\n\n" + '\n'.join(firstLines))
|
||||
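To make the expected .dirindex syntax concrete, here is a small self-contained sketch that feeds DirIndex a made-up file (the entry names and hashes are invented, and it assumes the terrasync package is importable):

import tempfile

from terrasync.dirindex import DirIndex

sample = ("version:1\n"
          "path:Models/Residential\n"
          "d:houses:0123456789abcdef0123456789abcdef01234567\n"
          "f:house.ac:fedcba9876543210fedcba9876543210fedcba98:4242\n")

with tempfile.NamedTemporaryFile("w", suffix=".dirindex", delete=False,
                                 encoding="ascii") as f:
    f.write(sample)

d = DirIndex(f.name)
print(d.version)       # -> 1
print(d.path)          # -> /Models/Residential (a VirtualPath)
print(d.directories)   # -> [{'name': 'houses', 'hash': '0123...4567'}]
print(d.files)         # -> [{'name': 'house.ac', 'hash': 'fedc...ba98', 'size': 4242}]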
79
scripts/python/TerraSync/terrasync/exceptions.py
Normal file
@@ -0,0 +1,79 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# exceptions.py --- Custom exception classes for terrasync.py
|
||||
#
|
||||
# Copyright (C) 2018 Florent Rougon
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License as
|
||||
# published by the Free Software Foundation; either version 2 of the
|
||||
# License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
# General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
|
||||
# Generic exception class for terrasync.py, to be subclassed for each specific
|
||||
# kind of exception.
|
||||
class TerraSyncPyException(Exception):
|
||||
def __init__(self, message=None, *, mayCapitalizeMsg=True):
|
||||
"""Initialize a TerraSyncPyException instance.
|
||||
|
||||
Except in cases where 'message' starts with a proper noun or
|
||||
something like that, its first character should be given in
|
||||
lower case. Automated treatments of this exception may print the
|
||||
message with its first character changed to upper case, unless
|
||||
'mayCapitalizeMsg' is False. In other words, if the case of the
|
||||
first character of 'message' must not be changed under any
|
||||
circumstances, set 'mayCapitalizeMsg' to False.
|
||||
|
||||
"""
|
||||
self.message = message
|
||||
self.mayCapitalizeMsg = mayCapitalizeMsg
|
||||
|
||||
def __str__(self):
|
||||
return self.completeMessage()
|
||||
|
||||
def __repr__(self):
|
||||
return "{}.{}({!r})".format(__name__, type(self).__name__, self.message)
|
||||
|
||||
# Typically overridden by subclasses with a custom constructor
|
||||
def detail(self):
|
||||
return self.message
|
||||
|
||||
def completeMessage(self):
|
||||
if self.message:
|
||||
return "{shortDesc}: {detail}".format(
|
||||
shortDesc=self.ExceptionShortDescription,
|
||||
detail=self.detail())
|
||||
else:
|
||||
return self.ExceptionShortDescription
|
||||
|
||||
ExceptionShortDescription = "terrasync.py generic exception"
|
||||
|
||||
|
||||
class UserError(TerraSyncPyException):
|
||||
"""Exception raised when the program is used in an incorrect way."""
|
||||
ExceptionShortDescription = "User error"
|
||||
|
||||
class NetworkError(TerraSyncPyException):
|
||||
"""Exception raised when getting a network error even after retrying."""
|
||||
ExceptionShortDescription = "Network error"
|
||||
|
||||
class UnsupportedURLScheme(TerraSyncPyException):
|
||||
"""Exception raised when asked to handle an unsupported URL scheme."""
|
||||
ExceptionShortDescription = "Unsupported URL scheme"
|
||||
|
||||
class RepoDataError(TerraSyncPyException):
|
||||
"""
|
||||
Exception raised when getting invalid data from the TerraSync repository."""
|
||||
ExceptionShortDescription = "Invalid data from the TerraSync repository"
|
||||
|
||||
class InvalidDirIndexFile(RepoDataError):
|
||||
"""Exception raised when getting invalid data from a .dirindex file."""
|
||||
ExceptionShortDescription = "Invalid .dirindex file"
|
||||
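A small illustration of how this exception hierarchy formats its messages (the error text is invented for the example):

from terrasync.exceptions import UserError, NetworkError

try:
    raise UserError("option --only-subdir expects a relative path")
except UserError as exc:
    print(exc)         # -> User error: option --only-subdir expects a relative path

print(NetworkError())  # -> Network error   (no detail message, so only the short description)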
743
scripts/python/TerraSync/terrasync/main.py
Executable file
@@ -0,0 +1,743 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# main.py --- Main module for terrasync.py
|
||||
#
|
||||
# Copyright (C) 2016 Torsten Dreyer
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License as
|
||||
# published by the Free Software Foundation; either version 2 of the
|
||||
# License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
# General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
|
||||
import argparse
|
||||
import enum
|
||||
import hashlib
|
||||
import os
|
||||
import re
|
||||
import shutil
|
||||
import ssl
|
||||
import sys
|
||||
import time
|
||||
import urllib
|
||||
|
||||
from urllib.parse import urlparse, urljoin
|
||||
from http.client import HTTPConnection, HTTPSConnection, HTTPException
|
||||
from os import listdir
|
||||
from os.path import isfile, isdir, join
|
||||
from base64 import b64encode
|
||||
|
||||
from . import dirindex
|
||||
from .exceptions import UserError, NetworkError, RepoDataError, \
|
||||
InvalidDirIndexFile, UnsupportedURLScheme
|
||||
from .virtual_path import VirtualPath
|
||||
|
||||
|
||||
PROGNAME = os.path.basename(sys.argv[0])
|
||||
|
||||
class ExitStatus(enum.Enum):
|
||||
SUCCESS = 0
|
||||
# The program exit status is 1 when an exception isn't caught.
|
||||
ERROR = 1
|
||||
CHECK_MODE_FOUND_MISMATCH = 2
|
||||
|
||||
|
||||
# *****************************************************************************
|
||||
# * Utility functions *
|
||||
# *****************************************************************************
|
||||
|
||||
# If a path matches this regexp, we really don't want to delete it recursively
|
||||
# (“cre” stands for “compiled regexp”).
|
||||
_removeDirectoryTree_dangerous_cre = re.compile(
|
||||
r"""^(/ (home (/ [^/]*)? )? /* | # for Unix-like systems
|
||||
[a-zA-Z]: [\/]* # for Windows
|
||||
)$""", re.VERBOSE)
|
||||
|
||||
def removeDirectoryTree(base, whatToRemove):
|
||||
"""Recursively remove directory 'whatToRemove', with safety checks.
|
||||
|
||||
This function ensures that 'whatToRemove' does not resolve to a
|
||||
directory such as /, /home, /home/foobar, C:\, d:\, etc. It is also
|
||||
an error if 'whatToRemove' does not literally start with the value
|
||||
of 'base' (IOW, this function refuses to erase anything that is not
|
||||
under 'base').
|
||||
|
||||
'whatToRemove' is *not* interpreted relatively to 'base' (this would
|
||||
be doable, just a different API).
|
||||
|
||||
"""
|
||||
assert os.path.isdir(base), "Not a directory: {!r}".format(base)
|
||||
assert (base and
|
||||
whatToRemove.startswith(base) and
|
||||
whatToRemove[len(base):].startswith(os.sep)), \
|
||||
"Unexpected base path for removeDirectoryTree(): {!r}".format(base)
|
||||
absPath = os.path.abspath(whatToRemove)
|
||||
|
||||
if not os.path.isfile(join(absPath, ".dirindex")):
|
||||
raise UserError("refusing to recursively delete '{}' because "
|
||||
"it does not contain a .dirindex file".format(absPath))
|
||||
elif _removeDirectoryTree_dangerous_cre.match(absPath):
|
||||
raise UserError("in order to protect your data, refusing to "
|
||||
"recursively delete '{}'".format(absPath))
|
||||
else:
|
||||
shutil.rmtree(absPath)
|
||||
|
||||
|
||||
def computeHash(fileLike):
|
||||
hash = hashlib.sha1()
|
||||
|
||||
for chunk in iter(lambda: fileLike.read(4096), b""):
|
||||
hash.update(chunk)
|
||||
|
||||
return hash.hexdigest()
|
||||
|
||||
|
||||
def hashForFile(fname):
|
||||
with open(fname, "rb") as f:
|
||||
return computeHash(f)
|
||||
|
||||
|
||||
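Because .dirindex entries and the 'check' mode rely on these SHA-1 helpers, here is a tiny sketch of what computeHash() produces (the sample bytes are arbitrary):

import hashlib
import io

from terrasync.main import computeHash

data = b"version:1\npath:Airports\n"
# computeHash() reads the file-like object in 4096-byte chunks and returns the
# same hex digest as hashing the whole buffer at once.
print(computeHash(io.BytesIO(data)) == hashlib.sha1(data).hexdigest())   # -> True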
# *****************************************************************************
|
||||
# * Network-related classes *
|
||||
# *****************************************************************************
|
||||
|
||||
class HTTPGetCallback:
|
||||
def __init__(self, src, callback):
|
||||
"""Initialize an HTTPGetCallback instance.
|
||||
|
||||
src -- a VirtualPath instance (corresponding to the path on
|
||||
the server for which a GET request is to be issued)
|
||||
callback -- a function taking two parameters: the URL (string)
|
||||
and an http.client.HTTPResponse instance. When
|
||||
invoked, the callback return value will be returned
|
||||
by HTTPGetter.get().
|
||||
|
||||
"""
|
||||
if callback is not None:
|
||||
self.callback = callback
|
||||
self.src = src
|
||||
|
||||
class HTTPGetter:
|
||||
def __init__(self, baseUrl, maxPending=10, auth=""):
|
||||
self.baseUrl = baseUrl
|
||||
self.parsedBaseUrl = urlparse(baseUrl)
|
||||
self.maxPending = maxPending
|
||||
self.requests = []
|
||||
self.pendingRequests = []
|
||||
|
||||
if self.parsedBaseUrl.scheme == "http":
|
||||
self.httpConnection = HTTPConnection(self.parsedBaseUrl.netloc)
|
||||
elif self.parsedBaseUrl.scheme == "https":
|
||||
context = ssl.create_default_context()
|
||||
self.httpConnection = HTTPSConnection(self.parsedBaseUrl.netloc,
|
||||
context=context)
|
||||
else:
|
||||
raise UnsupportedURLScheme(self.parsedBaseUrl.scheme)
|
||||
|
||||
self.httpRequestHeaders = {'Host': self.parsedBaseUrl.netloc, 'Content-Length': 0, 'Connection': 'Keep-Alive', 'User-Agent': 'FlightGear terrasync.py'}
|
||||
if auth and not auth.isspace():
|
||||
self.httpRequestHeaders['Authorization'] = 'Basic %s' % b64encode(auth.encode("utf-8")).decode("ascii")
|
||||
|
||||
def assemblePath(self, httpGetCallback):
|
||||
"""Return the path-on-server for the file to download.
|
||||
|
||||
Example: '/scenery/Airports/N/E/4/.dirindex'
|
||||
|
||||
"""
|
||||
assert not self.parsedBaseUrl.path.endswith('/'), \
|
||||
repr(self.parsedBaseUrl)
|
||||
return self.parsedBaseUrl.path + str(httpGetCallback.src)
|
||||
|
||||
def assembleUrl(self, httpGetCallback):
|
||||
"""Return the URL of the file to download."""
|
||||
baseUrl = self.parsedBaseUrl.geturl()
|
||||
assert not baseUrl.endswith('/'), repr(baseUrl)
|
||||
|
||||
return urljoin(baseUrl + '/', httpGetCallback.src.asRelative())
|
||||
|
||||
def doGet(self, httpGetCallback):
|
||||
time.sleep(1.25) # throttle the rate
|
||||
|
||||
pathOnServer = self.assemblePath(httpGetCallback)
|
||||
self.httpConnection.request("GET", pathOnServer, None,
|
||||
self.httpRequestHeaders)
|
||||
httpResponse = self.httpConnection.getresponse()
|
||||
|
||||
# 'httpResponse' is an http.client.HTTPResponse instance
|
||||
return httpGetCallback.callback(self.assembleUrl(httpGetCallback),
|
||||
httpResponse)
|
||||
|
||||
def get(self, httpGetCallback):
|
||||
nbRetries = nbRetriesLeft = 5
|
||||
|
||||
while True:
|
||||
try:
|
||||
return self.doGet(httpGetCallback)
|
||||
except HTTPException as exc:
|
||||
if nbRetriesLeft == 0:
|
||||
raise NetworkError(
|
||||
"after {nbRetries} retries for URL {url}: {errMsg}"
|
||||
.format(nbRetries=nbRetries,
|
||||
url=self.assembleUrl(httpGetCallback),
|
||||
errMsg=exc)) from exc
|
||||
|
||||
# Try to reconnect
|
||||
self.httpConnection.close()
|
||||
time.sleep(1)
|
||||
self.httpConnection.connect()
|
||||
nbRetriesLeft -= 1
|
||||
|
||||
|
||||
class HTTPDownloadRequest(HTTPGetCallback):
|
||||
def __init__(self, src, dst, callback=None):
|
||||
"""Initialize an HTTPDownloadRequest instance.
|
||||
|
||||
src -- a VirtualPath instance (corresponding to the path
|
||||
on the server for which a GET request is to be
|
||||
issued)
|
||||
dst -- file path (or whatever open() accepts) where the
|
||||
downloaded data is to be stored
|
||||
callback -- a function that will be called if the download is
|
||||
successful, or None if no such callback is desired.
|
||||
The function must take one parameter: when invoked,
|
||||
it will be passed this HTTPDownloadRequest
|
||||
instance. Its return value is ignored.
|
||||
|
||||
"""
|
||||
HTTPGetCallback.__init__(self, src, None)
|
||||
self.dst = dst
|
||||
self.mycallback = callback
|
||||
|
||||
# 'httpResponse' is an http.client.HTTPResponse instance
|
||||
def callback(self, url, httpResponse):
|
||||
# I suspect this doesn't handle HTTP redirects and things like that. As
|
||||
# mentioned at <https://docs.python.org/3/library/http.client.html>,
|
||||
# http.client is a low-level interface that should normally not be used
|
||||
# directly!
|
||||
if httpResponse.status != 200:
|
||||
raise NetworkError("HTTP callback got status {status} for URL {url}"
|
||||
.format(status=httpResponse.status, url=url))
|
||||
|
||||
try:
|
||||
with open(self.dst, 'wb') as f:
|
||||
f.write(httpResponse.read())
|
||||
except HTTPException as exc:
|
||||
raise NetworkError("for URL {url}: {error}"
|
||||
.format(url=url, error=exc)) from exc
|
||||
|
||||
if self.mycallback is not None:
|
||||
self.mycallback(self)
|
||||
|
||||
|
||||
class HTTPSocketRequest(HTTPGetCallback):
|
||||
"""HTTPGetCallback class whose callback returns a file-like object.
|
||||
|
||||
The file-like object returned by the callback, and thus by
|
||||
HTTPGetter.get(), is a socket or similar. This allows one to read
|
||||
the data obtained from the network without necessarily storing it
|
||||
to a file.
|
||||
|
||||
"""
|
||||
def __init__(self, src):
|
||||
"""Initialize an HTTPSocketRequest object.
|
||||
|
||||
src -- VirtualPath instance for the resource on the server
|
||||
(presumably a file)
|
||||
|
||||
"""
|
||||
HTTPGetCallback.__init__(self, src, None)
|
||||
|
||||
def callback(self, url, httpResponse):
|
||||
# Same comment as for HTTPDownloadRequest.callback()
|
||||
if httpResponse.status != 200:
|
||||
raise NetworkError("HTTP callback got status {status} for URL {url}"
|
||||
.format(status=httpResponse.status, url=url))
|
||||
|
||||
return httpResponse
|
||||
|
||||
#################################################################################################################################
|
||||
|
||||
class Coordinate:
|
||||
def __init__(self, lat, lon):
|
||||
self.lat = lat
|
||||
self.lon = lon
|
||||
|
||||
class DownloadBoundaries:
|
||||
def __init__(self, top, left, bottom, right):
|
||||
if top < bottom:
|
||||
raise ValueError("top cannot be less than bottom")
|
||||
if right < left:
|
||||
# right may be less than left when wrapping across the antimeridian
|
||||
if not (left >= 0 and right < 0):
|
||||
raise ValueError("right cannot be less than left")
|
||||
|
||||
if top > 90 or bottom < -90:
|
||||
raise ValueError("top and bottom must be a valid latitude")
|
||||
if left < -180 or right >= 180:
|
||||
raise ValueError("left and right must be a valid longitude")
|
||||
self.top = top
|
||||
self.left = left
|
||||
self.bottom = bottom
|
||||
self.right = right
|
||||
|
||||
def is_coordinate_inside_boundaries(self, coordinate, isOuterBucket):
|
||||
bigTileBottom = coordinate.lat
|
||||
bigTileTop = bigTileBottom + (10 if isOuterBucket else 1)
|
||||
bigTileLeft = coordinate.lon
|
||||
bigTileRight = bigTileLeft + (10 if isOuterBucket else 1)
|
||||
|
||||
# if the two regions do not overlap then we are done
|
||||
if bigTileTop <= self.bottom or bigTileBottom > self.top:
|
||||
return False
|
||||
if bigTileRight <= self.left or bigTileLeft > self.right:
|
||||
# check for spanning across the antimeridian
|
||||
if self.left >= 0 and self.right < 0:
|
||||
# determine which side we are on and check for region overlap
|
||||
if bigTileLeft >= 0:
|
||||
if bigTileRight <= self.left:
|
||||
return False
|
||||
elif bigTileLeft > self.right:
|
||||
return False
|
||||
else:
|
||||
return False
|
||||
|
||||
# at least a partial overlap exists, so more processing will be needed
|
||||
return True
|
||||
|
||||
|
||||
def parse_terrasync_coordinate(coordinate):
|
||||
matches = re.match(r"(w|e)(\d{3})(n|s)(\d{2})", coordinate)
|
||||
if not matches:
|
||||
return None
|
||||
|
||||
lon = int(matches.group(2))
|
||||
if matches.group(1) == "w":
|
||||
lon *= -1
|
||||
lat = int(matches.group(4))
|
||||
if matches.group(3) == "s":
|
||||
lat *= -1
|
||||
|
||||
return Coordinate(lat, lon)
|
||||
|
||||
|
||||
class Report:
|
||||
"""Gather and format data about the state of a TerraSync mirror."""
|
||||
|
||||
def __init__(self, targetDir):
|
||||
self.targetDir = targetDir
|
||||
|
||||
self.dirsWithMissingIndex = set()
|
||||
self.dirsWithMismatchingDirIndexHash = set()
|
||||
self.missingFiles = set()
|
||||
self.filesWithMismatchingHash = set()
|
||||
self.dirsSkippedDueToBoundaries = set()
|
||||
|
||||
self.orphanFiles = set()
|
||||
self.orphanDirs = set()
|
||||
|
||||
def addMissingDirIndex(self, directoryVirtualPath):
|
||||
self.dirsWithMissingIndex.add(directoryVirtualPath)
|
||||
|
||||
def addDirIndexWithMismatchingHash(self, directoryVirtualPath):
|
||||
self.dirsWithMismatchingDirIndexHash.add(directoryVirtualPath)
|
||||
|
||||
def addMissingFile(self, virtualPath):
|
||||
self.missingFiles.add(virtualPath)
|
||||
|
||||
def addFileWithMismatchingHash(self, virtualPath):
|
||||
self.filesWithMismatchingHash.add(virtualPath)
|
||||
|
||||
def addSkippedDueToBoundaries(self, virtualPath):
|
||||
self.dirsSkippedDueToBoundaries.add(virtualPath)
|
||||
|
||||
def addOrphanFile(self, virtualPath):
|
||||
self.orphanFiles.add(virtualPath)
|
||||
|
||||
def addOrphanDir(self, virtualPath):
|
||||
self.orphanDirs.add(virtualPath)
|
||||
|
||||
def summaryString(self):
|
||||
reportElements = [
|
||||
("Directories with missing index", self.dirsWithMissingIndex),
|
||||
("Directories whose .dirindex file had a mismatching hash",
|
||||
self.dirsWithMismatchingDirIndexHash),
|
||||
("Missing files", self.missingFiles),
|
||||
("Files with a mismatching hash", self.filesWithMismatchingHash),
|
||||
("Directories skipped because of the specified boundaries",
|
||||
self.dirsSkippedDueToBoundaries),
|
||||
("Orphan files", self.orphanFiles),
|
||||
("Orphan directories", self.orphanDirs)]
|
||||
|
||||
l = []
|
||||
for heading, setOfFilesOrDirs in reportElements:
|
||||
if setOfFilesOrDirs:
|
||||
l.append(heading + ":\n")
|
||||
l.extend( (" " + str(f) for f in sorted(setOfFilesOrDirs)) )
|
||||
l.append('') # ensure a blank line follows the list
|
||||
else:
|
||||
l.append(heading + ": none")
|
||||
|
||||
return '\n'.join(l)
|
||||
|
||||
def printReport(self):
|
||||
title = "{prg} report".format(prg=PROGNAME)
|
||||
print("\n" + title + '\n' + len(title)*"=", end="\n\n")
|
||||
print(self.summaryString())
|
||||
|
||||
|
||||
@enum.unique
|
||||
class FailedCheckReason(enum.Enum):
|
||||
"""Reasons that can cause 'check' mode to report a mismatch.
|
||||
|
||||
Note that network errors and things like that do *not* belong here.
|
||||
|
||||
"""
|
||||
|
||||
missingDirIndexFile, mismatchingHashForDirIndexFile, \
|
||||
missingNormalFile, mismatchingHashForNormalFile, \
|
||||
orphanFile, orphanDirectory = range(6)
|
||||
|
||||
# 'path': VirtualPath instance for a file or directory
|
||||
def explain(self, path):
|
||||
if self is FailedCheckReason.missingDirIndexFile:
|
||||
res = ".dirindex file '{}' is missing locally".format(path)
|
||||
elif self is FailedCheckReason.mismatchingHashForDirIndexFile:
|
||||
res = ".dirindex file '{}' doesn't have the hash it " \
|
||||
"should have according to the server".format(path)
|
||||
elif self is FailedCheckReason.missingNormalFile:
|
||||
res = "file '{}' is present on the server but missing locally" \
|
||||
.format(path)
|
||||
elif self is FailedCheckReason.mismatchingHashForNormalFile:
|
||||
res = "file '{}' doesn't have the hash given in the " \
|
||||
".dirindex file of its containing directory".format(path)
|
||||
elif self is FailedCheckReason.orphanFile:
|
||||
res = "file '{}' was found locally but is not present on the " \
|
||||
"server".format(path)
|
||||
elif self is FailedCheckReason.orphanDirectory:
|
||||
res = "directory '{}' was found locally but is not present " \
|
||||
"on the server".format(path)
|
||||
else:
|
||||
assert False, "Unhandled enum value: {!r}".format(self)
|
||||
|
||||
return res
|
||||
|
||||
|
||||
class TerraSync:
|
||||
|
||||
@enum.unique
|
||||
class Mode(enum.Enum):
|
||||
"""Main modes of operation for the TerraSync class."""
|
||||
|
||||
# Using lower case for the member names, because this way
|
||||
# enumMember.name is exactly the mode string passed to --mode on the
|
||||
# command line (can be useful for messages destined to users).
|
||||
check, sync = range(2)
|
||||
|
||||
def __init__(self, mode, doReport, url, target, quick, removeOrphan,
|
||||
downloadBoundaries, auth):
|
||||
self.mode = self.Mode[mode]
|
||||
self.doReport = doReport
|
||||
self.setUrl(url).setTarget(target)
|
||||
self.auth = auth
|
||||
self.quick = quick
|
||||
self.removeOrphan = removeOrphan
|
||||
self.httpGetter = None
|
||||
self.downloadBoundaries = downloadBoundaries
|
||||
# Status of the local repository (as compared to what the server says),
|
||||
# before any update we might do to it.
|
||||
self.report = Report(self.target)
|
||||
|
||||
def inSyncMode(self):
|
||||
return self.mode == self.Mode.sync
|
||||
|
||||
def setUrl(self, url):
|
||||
self.url = url.rstrip('/').strip()
|
||||
return self
|
||||
|
||||
def setTarget(self, target):
|
||||
# Using os.path.abspath() here is safer in case the process later uses
|
||||
# os.chdir(), which would change the meaning of the "." directory.
|
||||
self.target = os.path.abspath(target)
|
||||
return self
|
||||
|
||||
def start(self, virtualSubdir=VirtualPath('/')):
|
||||
"""Start the 'sync' or 'check' process.
|
||||
|
||||
The 'virtualSubdir' argument must be a VirtualPath instance and
|
||||
allows one to start the 'sync' or 'check' process in a chosen
|
||||
subdirectory of the TerraSync repository, instead of at its
|
||||
root.
|
||||
|
||||
"""
|
||||
# Remove the leading '/' from 'virtualSubdir' and convert to native
|
||||
# separators ('/' or '\' depending on the platform).
|
||||
localSubdir = os.path.normpath(virtualSubdir.asRelative())
|
||||
if localSubdir == ".": # just ugly, but it wouldn't hurt
|
||||
localSubdir = ""
|
||||
|
||||
assert not os.path.isabs(localSubdir), repr(localSubdir)
|
||||
self.httpGetter = HTTPGetter(baseUrl=self.url,auth=self.auth)
|
||||
|
||||
# Get the hash of the .dirindex file for 'virtualSubdir'
|
||||
try:
|
||||
request = HTTPSocketRequest(virtualSubdir / ".dirindex")
|
||||
with self.httpGetter.get(request) as fileLike:
|
||||
dirIndexHash = computeHash(fileLike)
|
||||
except HTTPException as exc:
|
||||
raise NetworkError("for the root .dirindex file: {errMsg}"
|
||||
.format(errMsg=exc)) from exc
|
||||
|
||||
# Process the chosen part of the repository (recursive)
|
||||
self.processDirectoryEntry(virtualSubdir, localSubdir, dirIndexHash)
|
||||
|
||||
return self.report
|
||||
|
||||
def processFileEntry(self, virtualPath, localPath, fileHash):
|
||||
"""Process a file entry from a .dirindex file."""
|
||||
localFullPath = join(self.target, localPath)
|
||||
failedCheckReason = None
|
||||
|
||||
if not os.path.isfile(localFullPath):
|
||||
self.report.addMissingFile(virtualPath)
|
||||
failedCheckReason = FailedCheckReason.missingNormalFile
|
||||
elif hashForFile(localFullPath) != fileHash:
|
||||
self.report.addFileWithMismatchingHash(virtualPath)
|
||||
failedCheckReason = FailedCheckReason.mismatchingHashForNormalFile
|
||||
else:
|
||||
# The file exists and has the hash mentioned in the .dirindex file
|
||||
return
|
||||
|
||||
assert failedCheckReason is not None
|
||||
|
||||
if self.inSyncMode():
|
||||
if os.path.isdir(localFullPath):
|
||||
# 'localFullPath' is a directory (locally), but on the server
|
||||
# it is a file -> remove the dir so that we can store the file.
|
||||
removeDirectoryTree(self.target, localFullPath)
|
||||
|
||||
print("Downloading '{}'".format(virtualPath))
|
||||
request = HTTPDownloadRequest(virtualPath, localFullPath)
|
||||
self.httpGetter.get(request)
|
||||
else:
|
||||
self.abortCheckMode(failedCheckReason, virtualPath)
|
||||
|
||||
def processDirectoryEntry(self, virtualPath, localPath, dirIndexHash):
|
||||
"""Process a directory entry from a .dirindex file."""
|
||||
print("Processing '{}'...".format(virtualPath))
|
||||
isOuterBucket = len(virtualPath.parts) <= 3
|
||||
|
||||
coord = parse_terrasync_coordinate(virtualPath.name)
|
||||
|
||||
if (coord and
|
||||
not self.downloadBoundaries.is_coordinate_inside_boundaries(coord, isOuterBucket)):
|
||||
self.report.addSkippedDueToBoundaries(virtualPath)
|
||||
return
|
||||
|
||||
localFullPath = join(self.target, localPath)
|
||||
localDirIndex = join(localFullPath, ".dirindex")
|
||||
failedCheckReason = None
|
||||
|
||||
if not os.path.isfile(localDirIndex):
|
||||
failedCheckReason = FailedCheckReason.missingDirIndexFile
|
||||
self.report.addMissingDirIndex(virtualPath)
|
||||
elif hashForFile(localDirIndex) != dirIndexHash:
|
||||
failedCheckReason = FailedCheckReason.mismatchingHashForDirIndexFile
|
||||
self.report.addDirIndexWithMismatchingHash(virtualPath)
|
||||
|
||||
if failedCheckReason is None:
|
||||
if not self.quick:
|
||||
self.handleDirindexFile(localDirIndex)
|
||||
elif self.inSyncMode():
|
||||
if os.path.isfile(localFullPath):
|
||||
os.unlink(localFullPath) # file on server became a directory
|
||||
if not os.path.exists(localFullPath):
|
||||
os.makedirs(localFullPath)
|
||||
|
||||
request = HTTPDownloadRequest(virtualPath / ".dirindex",
|
||||
localDirIndex,
|
||||
self.handleDirindexRequest)
|
||||
self.httpGetter.get(request)
|
||||
else:
|
||||
self.abortCheckMode(failedCheckReason, virtualPath / ".dirindex")
|
||||
|
||||
def handleDirindexRequest(self, dirindexRequest):
|
||||
self.handleDirindexFile(dirindexRequest.dst)
|
||||
|
||||
def handleDirindexFile(self, dirindexFile):
|
||||
dirIndex = dirindex.DirIndex(dirindexFile)
|
||||
virtualBase = dirIndex.path # VirtualPath instance
|
||||
relativeBase = virtualBase.asRelative() # string, doesn't start with '/'
|
||||
serverFiles = []
|
||||
serverDirs = []
|
||||
|
||||
for file in dirIndex.files:
|
||||
f = file['name']
|
||||
self.processFileEntry(virtualBase / f,
|
||||
join(relativeBase, f),
|
||||
file['hash'])
|
||||
serverFiles.append(f)
|
||||
|
||||
for subdir in dirIndex.directories:
|
||||
d = subdir['name']
|
||||
self.processDirectoryEntry(virtualBase / d,
|
||||
join(relativeBase, d),
|
||||
subdir['hash'])
|
||||
serverDirs.append(d)
|
||||
|
||||
for tarball in dirIndex.tarballs:
|
||||
# Tarballs are handled the same as normal files.
|
||||
f = tarball['name']
|
||||
self.processFileEntry(virtualBase / f,
|
||||
join(relativeBase, f),
|
||||
tarball['hash'])
|
||||
serverFiles.append(f)
|
||||
|
||||
localFullPath = join(self.target, relativeBase)
|
||||
localFiles = [ f for f in listdir(localFullPath)
|
||||
if isfile(join(localFullPath, f)) ]
|
||||
|
||||
for f in localFiles:
|
||||
if f != ".dirindex" and f not in serverFiles:
|
||||
virtualPath = virtualBase / f
|
||||
self.report.addOrphanFile(virtualPath)
|
||||
|
||||
if self.inSyncMode():
|
||||
if self.removeOrphan:
|
||||
os.remove(join(self.target, virtualPath.asRelative()))
|
||||
else:
|
||||
self.abortCheckMode(FailedCheckReason.orphanFile,
|
||||
virtualPath)
|
||||
|
||||
localDirs = [ f for f in listdir(localFullPath)
|
||||
if isdir(join(localFullPath, f)) ]
|
||||
|
||||
for d in localDirs:
|
||||
if d not in serverDirs:
|
||||
virtualPath = virtualBase / d
|
||||
self.report.addOrphanDir(virtualPath)
|
||||
|
||||
if self.inSyncMode():
|
||||
if self.removeOrphan:
|
||||
removeDirectoryTree(self.target,
|
||||
join(self.target,
|
||||
virtualPath.asRelative()))
|
||||
else:
|
||||
self.abortCheckMode(FailedCheckReason.orphanDirectory,
|
||||
virtualPath)
|
||||
|
||||
# 'reason' is a member of the FailedCheckReason enum
|
||||
def abortCheckMode(self, reason, fileOrDirVirtualPath):
|
||||
assert self.mode == self.Mode.check, repr(self.mode)
|
||||
|
||||
print("{prg}: exiting from 'check' mode because {explanation}."
|
||||
.format(prg=PROGNAME,
|
||||
explanation=reason.explain(fileOrDirVirtualPath)))
|
||||
|
||||
if self.doReport:
|
||||
self.report.printReport()
|
||||
|
||||
sys.exit(ExitStatus.CHECK_MODE_FOUND_MISMATCH.value)
|
||||
|
||||
#################################################################################################################################
|
||||
|
||||
def parseCommandLine():
|
||||
parser = argparse.ArgumentParser()
|
||||
|
||||
parser.add_argument("-u", "--url", dest="url", metavar="URL",
|
||||
default="http://flightgear.sourceforge.net/scenery",
|
||||
help="server URL [default: %(default)s]")
|
||||
|
||||
parser.add_argument("-a", "--auth", dest="auth", metavar="user:password",
|
||||
default="", help="""\
|
||||
authentication credentials for basic auth [default: empty, no authentication]""")
|
||||
|
||||
parser.add_argument("-t", "--target", dest="target", metavar="DIR",
|
||||
default=".", help="""\
|
||||
directory where to store the files [default: the current directory]""")
|
||||
|
||||
parser.add_argument("--only-subdir", dest="onlySubdir", metavar="SUBDIR",
|
||||
default="", help="""\
|
||||
restrict processing to this subdirectory of the TerraSync repository. Use
|
||||
a path relative to the repository root, for instance 'Models/Residential'
|
||||
[default: process the whole repository]""")
|
||||
|
||||
parser.add_argument("-q", "--quick", dest="quick", action="store_true",
|
||||
default=False, help="enable quick mode")
|
||||
|
||||
parser.add_argument("-r", "--remove-orphan", dest="removeOrphan",
|
||||
action="store_true",
|
||||
default=False, help="remove old scenery files")
|
||||
|
||||
parser.add_argument("--mode", default="sync", choices=("check", "sync"),
|
||||
help="""\
|
||||
main mode of operation (default: '%(default)s'). In 'sync' mode, content
|
||||
is downloaded from the server to the target directory. On the other hand,
|
||||
in 'check' mode, {progname} compares the contents of the target directory
|
||||
with the remote repository without writing or deleting anything on
|
||||
disk.""".format(progname=PROGNAME))
|
||||
|
||||
parser.add_argument("--report", dest="report", action="store_true",
|
||||
default=False,
|
||||
help="""\
|
||||
before normal exit, print a report of what was found""")
|
||||
|
||||
parser.add_argument("--top", dest="top", type=int, default=90, help="""\
|
||||
maximum latitude to include in download [default: %(default)d]""")
|
||||
|
||||
parser.add_argument("--bottom", dest="bottom", type=int, default=-90,
|
||||
help="""\
|
||||
minimum latitude to include in download [default: %(default)d]""")
|
||||
|
||||
parser.add_argument("--left", dest="left", type=int, default=-180, help="""\
|
||||
minimum longitude to include in download [default: %(default)d]""")
|
||||
parser.add_argument("--right", dest="right", type=int, default=179,
|
||||
help="""\
|
||||
maximum longitude to include in download [default: %(default)d]""")
|
||||
|
||||
args = parser.parse_args()
|
||||
|
||||
# Perform consistency checks on the arguments
|
||||
if args.mode == "check" and args.removeOrphan:
|
||||
print("{prg}: 'check' mode is read-only and thus doesn't make sense "
|
||||
"with\noption --remove-orphan (-r)".format(prg=PROGNAME),
|
||||
file=sys.stderr)
|
||||
sys.exit(ExitStatus.ERROR.value)
|
||||
|
||||
# Replace backslashes with forward slashes, remove leading and trailing
|
||||
# slashes, collapse consecutive slashes. Yes, this implies that we tolerate
|
||||
# leading slashes for --only-subdir (which makes sense because virtual
|
||||
# paths are printed like that by this program, therefore it is natural for
|
||||
# users to copy & paste such paths in order to use them for --only-subdir).
|
||||
args.virtualSubdir = VirtualPath(args.onlySubdir.replace('\\', '/'))
|
||||
|
||||
# Be nice to our user in case the path starts with '\', 'C:\', etc.
|
||||
if os.path.isabs(args.virtualSubdir.asRelative()):
|
||||
print("{prg}: option --only-subdir expects a *relative* path, but got "
|
||||
"'{subdir}'".format(prg=PROGNAME, subdir=args.onlySubdir),
|
||||
file=sys.stderr)
|
||||
sys.exit(ExitStatus.ERROR.value)
|
||||
|
||||
return args
|
||||
|
||||
|
||||
def main():
|
||||
args = parseCommandLine()
|
||||
terraSync = TerraSync(args.mode, args.report, args.url, args.target,
|
||||
args.quick, args.removeOrphan,
|
||||
DownloadBoundaries(args.top, args.left, args.bottom,
|
||||
args.right),args.auth)
|
||||
report = terraSync.start(args.virtualSubdir)
|
||||
|
||||
if args.report:
|
||||
report.printReport()
|
||||
|
||||
sys.exit(ExitStatus.SUCCESS.value)
|
||||
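To show how the tile-selection helpers above fit together, here is a short sketch using parse_terrasync_coordinate() and DownloadBoundaries (the coordinates and boundaries are arbitrary):

from terrasync.main import DownloadBoundaries, parse_terrasync_coordinate

coord = parse_terrasync_coordinate("w123n37")    # a 1x1-degree tile directory name
print(coord.lat, coord.lon)                      # -> 37 -123

bounds = DownloadBoundaries(top=38, left=-124, bottom=36, right=-120)
print(bounds.is_coordinate_inside_boundaries(coord, False))   # -> True (inside the box)

big = parse_terrasync_coordinate("w130n30")      # a 10x10-degree outer bucket
print(bounds.is_coordinate_inside_boundaries(big, True))      # -> True (partial overlap)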
506
scripts/python/TerraSync/terrasync/virtual_path.py
Normal file
@@ -0,0 +1,506 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# virtual_path.py --- Classes used to manipulate slash-separated virtual paths
|
||||
#
|
||||
# Copyright (C) 2018 Florent Rougon
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License as
|
||||
# published by the Free Software Foundation; either version 2 of the
|
||||
# License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
# General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
|
||||
"""Module containing the VirtualPath and MutableVirtualPath classes."""
|
||||
|
||||
import pathlib
|
||||
|
||||
|
||||
class VirtualPath:
|
||||
"""Class used to represent virtual paths using the slash separator.
|
||||
|
||||
This class always uses the slash ('/') as the separator between
|
||||
components. For terrasync.py, the root path '/' corresponds to the
|
||||
repository root, regardless of where it is stored (hard drive,
|
||||
remote server, etc.).
|
||||
|
||||
Note: because of this, the class is not supposed to be used directly
|
||||
for filesystem accesses, since some root directory or
|
||||
protocol://server/root-dir prefix would have to be prepended
|
||||
to provide reasonably useful functionality. This is why the
|
||||
paths managed by this class are said to be virtual. This also
|
||||
implies that even in Python 3.6 or later, this class should
|
||||
*not* inherit from os.PathLike.
|
||||
|
||||
Whenever a given feature exists in pathlib.PurePath, this class
|
||||
replicates the corresponding pathlib.PurePath API, but using
|
||||
mixedCaseStyle instead of underscore_style (the latter being used
|
||||
for every method of pathlib.PurePath). Of course, types are adapted:
|
||||
for instance, methods of this class often return a VirtualPath
|
||||
instance, whereas the corresponding pathlib.PurePath methods would
|
||||
return a pathlib.PurePath instance.
|
||||
|
||||
"""
|
||||
def __init__(self, p):
|
||||
# Once this function exits, self._path *must not be changed* anymore
|
||||
# (doing so would violate the contract for a hashable object: the
|
||||
# hash must not change once the object has been constructed).
|
||||
self._path = self.normalizeStringPath(p)
|
||||
# This check could of course be skipped if it is found to really affect
|
||||
# performance.
|
||||
self._check()
|
||||
|
||||
def __str__(self):
|
||||
"""Return a string representation of the path in self.
|
||||
|
||||
The return value:
|
||||
- always starts with a '/';
|
||||
- never ends with a '/' except if it is exactly '/' (i.e.,
|
||||
the root virtual path).
|
||||
|
||||
"""
|
||||
return self._path
|
||||
|
||||
def asPosix(self):
|
||||
"""Return a string representation of the path in self.
|
||||
|
||||
This method returns str(self), it is only present for
|
||||
compatibility with pathlib.PurePath.
|
||||
|
||||
"""
|
||||
return str(self)
|
||||
|
||||
def __repr__(self):
|
||||
return "{}.{}({!r})".format(__name__, type(self).__name__, self._path)
|
||||
|
||||
def __lt__(self, other):
|
||||
# Allow sorting with instances of VirtualPath, or of any subclass. Note
|
||||
# that the == operator (__eq__()) and therefore also != are stricter
|
||||
# with respect to typing.
|
||||
if isinstance(other, VirtualPath):
|
||||
return self._path < other._path
|
||||
else:
|
||||
return NotImplemented
|
||||
|
||||
def __le__(self, other):
|
||||
if isinstance(other, VirtualPath):
|
||||
return self._path <= other._path
|
||||
else:
|
||||
return NotImplemented
|
||||
|
||||
def __eq__(self, other):
|
||||
# The types must be the same, therefore a VirtualPath never compares
|
||||
# equal to a MutableVirtualPath with the == operator. For such
|
||||
# comparisons, use the samePath() method. If __eq__() (and thus
|
||||
# necessarily __hash__()) were more lax about typing, adding
|
||||
# VirtualPath instances and instances of hashable subclasses of
|
||||
# VirtualPath with the same _path to a set or frozenset would lead to
|
||||
# unintuitive behavior, since they would all be considered equal.
|
||||
return type(self) == type(other) and self._path == other._path
|
||||
|
||||
# Intentionally not implemented: Python 3 provides a suitable default implementation.
|
||||
# def __ne__(self, other):
|
||||
|
||||
def __gt__(self, other):
|
||||
if isinstance(other, VirtualPath):
|
||||
return self._path > other._path
|
||||
else:
|
||||
return NotImplemented
|
||||
|
||||
def __ge__(self, other):
|
||||
if isinstance(other, VirtualPath):
|
||||
return self._path >= other._path
|
||||
else:
|
||||
return NotImplemented
|
||||
|
||||
def __hash__(self):
|
||||
# Be strict about typing, as for __eq__().
|
||||
return hash((type(self), self._path))
|
||||
|
||||
def samePath(self, other):
|
||||
"""Compare the path with another instance, possibly of a subclass.
|
||||
|
||||
other -- instance of VirtualPath, or of a subclass of
|
||||
VirtualPath
|
||||
|
||||
"""
|
||||
if isinstance(other, VirtualPath):
|
||||
return self._path == other._path
|
||||
else:
|
||||
raise TypeError("{obj!r} is of type {klass}, which is neither "
|
||||
"VirtualPath nor a subclass thereof"
|
||||
.format(obj=other, klass=type(other).__name__))
|
||||
|
||||
def _check(self):
|
||||
"""Run consistency checks on self."""
|
||||
assert (self._path.startswith('/') and not self._path.startswith('//')
|
||||
and (self._path == '/' or not self._path.endswith('/'))), \
|
||||
repr(self._path)
|
||||
|
||||
@classmethod
|
||||
def normalizeStringPath(cls, path):
|
||||
"""Normalize a string representing a virtual path.
|
||||
|
||||
path -- input path (string)
|
||||
|
||||
Return a string that always starts with a slash, never contains
|
||||
consecutive slashes and only ends with a slash if it's the root
|
||||
virtual path ('/').
|
||||
|
||||
If 'path' doesn't start with a slash ('/'), it is considered
|
||||
relative to the root. This implies that if 'path' is the empty
|
||||
string, the return value is '/'.
|
||||
|
||||
"""
|
||||
if not path.startswith('/'):
|
||||
# / is the “virtual root” of the TerraSync repository
|
||||
path = '/' + path
|
||||
elif path.startswith('//') and not path.startswith('///'):
|
||||
# Nasty special case. As allowed (but not mandated!) by POSIX[1],
|
||||
# in pathlib.PurePosixPath('//some/path'), no collapsing happens[2].
|
||||
# This is only the case for exactly *two* *leading* slashes.
|
||||
# [1] http://pubs.opengroup.org/onlinepubs/009695399/basedefs/xbd_chap04.html#tag_04_11
|
||||
# [2] https://www.python.org/dev/peps/pep-0428/#construction
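# Illustrative addition (not in the original comments): with exactly two
# leading slashes, pathlib preserves them, e.g.
#     pathlib.PurePosixPath('//some/path').as_posix()  -> '//some/path'
# whereas three or more leading slashes collapse to one:
#     pathlib.PurePosixPath('///some/path').as_posix() -> '/some/path'
# Dropping one slash here therefore makes the final as_posix() call below
# return a path rooted at a single '/'.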
|
||||
path = path[1:]
|
||||
|
||||
return pathlib.PurePosixPath(path).as_posix()
|
||||
|
||||
def __truediv__(self, s):
|
||||
"""Path concatenation with the '/' operator.
|
||||
|
||||
's' must be a string representing a relative path using the '/'
|
||||
separator, for instance "dir/subdir/other-subdir".
|
||||
|
||||
Return a new instance of type(self).
|
||||
|
||||
"""
|
||||
assert not (s.startswith('/') or s.endswith('/')), repr(s)
|
||||
|
||||
if self._path == '/':
|
||||
return type(self)(self._path + s)
|
||||
else:
|
||||
return type(self)(self._path + '/' + s)
|
||||
|
||||
def joinpath(self, *args):
|
||||
"""Combine 'self' with each given string argument in turn.
|
||||
|
||||
Each argument should be of the form "foo", "foo/bar",
|
||||
"foo/bar/baz", etc. Return the corresponding instance of
|
||||
type(self).
|
||||
|
||||
>>> p = VirtualPath("/foo").joinpath("bar", "baz", "quux/zoot")
|
||||
>>> str(p)
|
||||
'/foo/bar/baz/quux/zoot'
|
||||
|
||||
"""
|
||||
return self / '/'.join(args)
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
"""Return a string representing the final path component.
|
||||
|
||||
>>> p = VirtualPath("/foo/bar/baz")
|
||||
>>> p.name
|
||||
'baz'
|
||||
|
||||
"""
|
||||
pos = self._path.rfind('/')
|
||||
assert pos != -1, (pos, self._path)
|
||||
|
||||
return self._path[pos+1:]
|
||||
|
||||
@property
|
||||
def parts(self):
|
||||
"""Return a tuple containing the path’s components.
|
||||
|
||||
>>> p = VirtualPath('/usr/bin/python3')
|
||||
>>> p.parts
|
||||
('/', 'usr', 'bin', 'python3')
|
||||
|
||||
"""
|
||||
if self._path == "/":
|
||||
return ('/',)
|
||||
else:
|
||||
# Skip the leading slash before splitting
|
||||
return ('/',) + tuple(self._path[1:].split('/'))
|
||||
|
||||
def generateParents(self):
|
||||
"""Generator function for the parents of the path.
|
||||
|
||||
See the 'parents' property for details.
|
||||
|
||||
"""
|
||||
if self._path == '/':
|
||||
return
|
||||
|
||||
assert self._path.startswith('/'), repr(self._path)
|
||||
prevPos = len(self._path)
|
||||
|
||||
while True:
|
||||
pos = self._path.rfind('/', 0, prevPos)
|
||||
|
||||
if pos > 0:
|
||||
yield type(self)(self._path[:pos])
|
||||
prevPos = pos
|
||||
else:
|
||||
assert pos == 0, pos
|
||||
break
|
||||
|
||||
yield type(self)('/')
|
||||
|
||||
@property
|
||||
def parents(self):
|
||||
"""The path ancestors.
|
||||
|
||||
Return an immutable sequence providing access to the logical
|
||||
ancestors of the path.
|
||||
|
||||
>>> p = VirtualPath('/foo/bar/baz')
|
||||
>>> len(p.parents)
|
||||
3
|
||||
>>> p.parents[0]
|
||||
terrasync.virtual_path.VirtualPath('/foo/bar')
|
||||
>>> p.parents[1]
|
||||
terrasync.virtual_path.VirtualPath('/foo')
|
||||
>>> p.parents[2]
|
||||
terrasync.virtual_path.VirtualPath('/')
|
||||
|
||||
"""
|
||||
return tuple(self.generateParents())
|
||||
|
||||
@property
|
||||
def parent(self):
|
||||
"""The logical parent of the path.
|
||||
|
||||
>>> p = VirtualPath('/foo/bar/baz')
|
||||
>>> p.parent
|
||||
terrasync.virtual_path.VirtualPath('/foo/bar')
|
||||
>>> q = VirtualPath('/')
|
||||
>>> q.parent
|
||||
terrasync.virtual_path.VirtualPath('/')
|
||||
|
||||
"""
|
||||
pos = self._path.rfind('/')
|
||||
assert pos >= 0, pos
|
||||
|
||||
if pos == 0:
|
||||
return type(self)('/')
|
||||
else:
|
||||
return type(self)(self._path[:pos])
|
||||
|
||||
@property
|
||||
def suffix(self):
|
||||
"""The extension of the final component, if any.
|
||||
|
||||
>>> VirtualPath('/my/library/setup.py').suffix
|
||||
'.py'
|
||||
>>> VirtualPath('/my/library.tar.gz').suffix
|
||||
'.gz'
|
||||
>>> VirtualPath('/my/library').suffix
|
||||
''
|
||||
|
||||
"""
|
||||
name = self.name
|
||||
pos = name.rfind('.')
|
||||
return name[pos:] if pos != -1 else ''
|
||||
|
||||
@property
|
||||
def suffixes(self):
|
||||
"""A list of the path’s extensions.
|
||||
|
||||
>>> VirtualPath('/my/library/setup.py').suffixes
|
||||
['.py']
|
||||
>>> VirtualPath('/my/library.tar.gz').suffixes
|
||||
['.tar', '.gz']
|
||||
>>> VirtualPath('/my/library').suffixes
|
||||
[]
|
||||
|
||||
"""
|
||||
name = self.name
|
||||
prevPos = len(name)
|
||||
l = []
|
||||
|
||||
while True:
|
||||
pos = name.rfind('.', 0, prevPos)
|
||||
if pos == -1:
|
||||
break
|
||||
else:
|
||||
l.insert(0, name[pos:prevPos])
|
||||
prevPos = pos
|
||||
|
||||
return l
|
||||
|
||||
@property
|
||||
def stem(self):
|
||||
"""The final path component, without its suffix.
|
||||
|
||||
>>> VirtualPath('/my/library.tar.gz').stem
|
||||
'library.tar'
|
||||
>>> VirtualPath('/my/library.tar').stem
|
||||
'library'
|
||||
>>> VirtualPath('/my/library').stem
|
||||
'library'
|
||||
>>> VirtualPath('/').stem
|
||||
''
|
||||
|
||||
"""
|
||||
name = self.name
|
||||
pos = name.rfind('.')
|
||||
|
||||
return name if pos == -1 else name[:pos]
|
||||
|
||||
def asRelative(self):
|
||||
"""Return the virtual path without its leading '/'.
|
||||
|
||||
>>> p = VirtualPath('/usr/bin/python3')
|
||||
>>> p.asRelative()
|
||||
'usr/bin/python3'
|
||||
|
||||
>>> VirtualPath('').asRelative()
|
||||
''
|
||||
>>> VirtualPath('/').asRelative()
|
||||
''
|
||||
|
||||
"""
|
||||
assert self._path.startswith('/'), repr(self._path)
|
||||
return self._path[1:]
|
||||
|
||||
def relativeTo(self, other):
|
||||
"""Return the portion of this path that follows 'other'.
|
||||
|
||||
The return value is a string. If the operation is impossible,
|
||||
ValueError is raised.
|
||||
|
||||
>>> VirtualPath('/etc/passwd').relativeTo('/')
|
||||
'etc/passwd'
|
||||
>>> VirtualPath('/etc/passwd').relativeTo('/etc')
|
||||
'passwd'
|
||||
|
||||
"""
|
||||
normedOther = self.normalizeStringPath(other)
|
||||
|
||||
if normedOther == '/':
|
||||
return self._path[1:]
|
||||
elif self._path.startswith(normedOther):
|
||||
rest = self._path[len(normedOther):]
|
||||
|
||||
if rest.startswith('/'):
|
||||
return rest[1:]
|
||||
|
||||
raise ValueError("{!r} does not start with '{}'".format(self, other))
|
||||
|
||||
def withName(self, newName):
|
||||
"""Return a new VirtualPath instance with the 'name' part changed.
|
||||
|
||||
If the original path is '/' (which doesn’t have a name in the
|
||||
sense of the 'name' property), ValueError is raised.
|
||||
|
||||
>>> p = VirtualPath('/foobar/downloads/pathlib.tar.gz')
|
||||
>>> p.withName('setup.py')
|
||||
terrasync.virtual_path.VirtualPath('/foobar/downloads/setup.py')
|
||||
|
||||
"""
|
||||
if self._path == '/':
|
||||
raise ValueError("{!r} has an empty name".format(self))
|
||||
else:
|
||||
pos = self._path.rfind('/')
|
||||
assert pos != -1, (pos, self._path)
|
||||
|
||||
if newName.startswith('/'):
|
||||
raise ValueError("{!r} starts with a '/'".format(newName))
|
||||
elif newName.endswith('/'):
|
||||
raise ValueError("{!r} ends with a '/'".format(newName))
|
||||
else:
|
||||
return VirtualPath(self._path[:pos]) / newName
|
||||
|
||||
|
||||
def withSuffix(self, newSuffix):
|
||||
"""Return a new VirtualPath instance with the suffix changed.
|
||||
|
||||
If the original path doesn’t have a suffix, the new suffix is
|
||||
appended:
|
||||
|
||||
>>> p = VirtualPath('/foobar/downloads/pathlib.tar.gz')
|
||||
>>> p.withSuffix('.bz2')
|
||||
terrasync.virtual_path.VirtualPath('/foobar/downloads/pathlib.tar.bz2')
|
||||
>>> p = VirtualPath('/foobar/README')
|
||||
>>> p.withSuffix('.txt')
|
||||
terrasync.virtual_path.VirtualPath('/foobar/README.txt')
|
||||
|
||||
If 'self' is the root virtual path ('/') or 'newSuffix' doesn't
|
||||
start with '.', ValueError is raised.
|
||||
|
||||
"""
|
||||
if not newSuffix.startswith('.'):
|
||||
raise ValueError("new suffix {!r} doesn't start with '.'"
|
||||
.format(newSuffix))
|
||||
|
||||
name = self.name
|
||||
if not name:
|
||||
raise ValueError("{!r} has an empty 'name' part".format(self))
|
||||
|
||||
pos = name.rfind('.')
|
||||
|
||||
if pos == -1:
|
||||
return self.withName(name + newSuffix) # append suffix
|
||||
else:
|
||||
return self.withName(name[:pos] + newSuffix) # replace suffix
|
||||
|
||||
|
||||
class MutableVirtualPath(VirtualPath):
|
||||
|
||||
"""Mutable subclass of VirtualPath.
|
||||
|
||||
Contrary to VirtualPath objects, instances of this class can be
|
||||
modified in-place with the /= operator, in order to append path
|
||||
components. The price to pay for this advantage is that they can't
|
||||
be used as dictionary keys or as elements of a set or frozenset,
|
||||
because they are not hashable.
|
||||
|
||||
"""
|
||||
|
||||
__hash__ = None # ensure the type is not hashable
|
||||
|
||||
def _normalize(self):
|
||||
self._path = self.normalizeStringPath(self._path)
|
||||
|
||||
def __itruediv__(self, s):
|
||||
"""Path concatenation with the '/=' operator.
|
||||
|
||||
's' must be a string representing a relative path using the '/'
|
||||
separator, for instance "dir/subdir/other-subdir".
|
||||
|
||||
"""
|
||||
# This check could of course be skipped if it is found to really affect
|
||||
# performance.
|
||||
self._check()
|
||||
assert not (s.startswith('/') or s.endswith('/')), repr(s)
|
||||
|
||||
if self._path == '/':
|
||||
self._path += s
|
||||
else:
|
||||
self._path += '/' + s
|
||||
|
||||
# Collapse multiple slashes, remove trailing '/' except if the whole
|
||||
# path is '/', etc.
|
||||
self._normalize()
|
||||
|
||||
return self
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
# The doctest setup below works, but for full test coverage, use the
|
||||
# unittest framework (it is set up to automatically run all doctests from
|
||||
# this module!).
|
||||
#
|
||||
# Hint: 'python3 -m unittest discover' from the TerraSync directory
|
||||
# should do the trick.
|
||||
import doctest
|
||||
doctest.testmod()
|
||||
0
scripts/python/TerraSync/tests/__init__.py
Normal file
@@ -0,0 +1,3 @@
|
||||
version:1
|
||||
path:some/path
|
||||
d:some\illegal directory name with a backslash:378b3dd58ce3058f2992b70aa5ecf8947a4d7f9e
|
||||
@@ -0,0 +1,3 @@
|
||||
version:1
|
||||
path:some/path
|
||||
f:some\illegal file name with a backslash:4cbf3d1746a1249bff7809e4b079dd80cfce594c:123
|
||||
@@ -0,0 +1,3 @@
|
||||
version:1
|
||||
path:some/path
|
||||
t:some\illegal tarball name with a backslash.tgz:b63a067d82824f158d6bde66f9e76654274277fe:1234567
|
||||
@@ -0,0 +1,3 @@
|
||||
version:1
|
||||
path:some/path
|
||||
d:..:378b3dd58ce3058f2992b70aa5ecf8947a4d7f9e
|
||||
@@ -0,0 +1,2 @@
|
||||
version:1
|
||||
path:some/path/with/a/../component
|
||||
@@ -0,0 +1,2 @@
|
||||
version:1
|
||||
path:some/path/non-ASCII chars like é, ê, €, Œ, Ÿ, etc./foo/bar
|
||||
@@ -0,0 +1,3 @@
|
||||
version:1
|
||||
path:some/path
|
||||
f:..:4cbf3d1746a1249bff7809e4b079dd80cfce594c:123
|
||||
@@ -0,0 +1,2 @@
|
||||
version:1
|
||||
path:some/path/that/contains \ a/backslash
|
||||
@@ -0,0 +1,2 @@
|
||||
version:1
|
||||
path:/some/path/that/starts/with/a/slash
|
||||
@@ -0,0 +1,3 @@
|
||||
version:1
|
||||
path:some/path
|
||||
d:some/illegal directory name with a slash:378b3dd58ce3058f2992b70aa5ecf8947a4d7f9e
|
||||
@@ -0,0 +1,3 @@
|
||||
version:1
|
||||
path:some/path
|
||||
f:some/illegal file name with a slash:4cbf3d1746a1249bff7809e4b079dd80cfce594c:123
|
||||
@@ -0,0 +1,3 @@
|
||||
version:1
|
||||
path:some/path
|
||||
t:some/illegal tarball name with a slash.tgz:b63a067d82824f158d6bde66f9e76654274277fe:1234567
|
||||
@@ -0,0 +1,3 @@
|
||||
version:1
|
||||
path:some/path
|
||||
t:..:b63a067d82824f158d6bde66f9e76654274277fe:1234567
|
||||
@@ -0,0 +1,16 @@
|
||||
# Comment line
|
||||
version:1
|
||||
path:some/path
|
||||
time:20200926-10:38Z
|
||||
d:Airports:8a93b5d8a2b04d2fb8de4ef58ad02f9e8819d314
|
||||
d:Models:bee221c9d2621dc9b69cd9e0ad7dd0605f6ea928
|
||||
d:Objects:10ae32c986470fa55b56b8eefbc6ed565cce0642
|
||||
# Other comment line
|
||||
d:Terrain:e934024dc0f959f9a433e47c646d256630052c2e
|
||||
d:Buildings:19060725efc2a301fa6844991e2922d42d8de5e2
|
||||
d:Pylons:378b3dd58ce3058f2992b70aa5ecf8947a4d7f9e
|
||||
d:Roads:89f8f10406041948368c76c0a2e794d45ac536b7
|
||||
f:some file:4cbf3d1746a1249bff7809e4b079dd80cfce594c:123
|
||||
f:other file:62726252f7183eef31001c1c565e149f3c4527b9:4567
|
||||
f:third file:303adcc1747d8dc438096307189881e987e9bb61:89012
|
||||
t:Airports_archive.tgz:b63a067d82824f158d6bde66f9e76654274277fe:1234567
|
||||
95
scripts/python/TerraSync/tests/test_dirindex.py
Normal file
@@ -0,0 +1,95 @@
|
||||
#! /usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# test_dirindex.py --- Test module for terrasync.dirindex
|
||||
# Copyright (C) 2020 Florent Rougon
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License as
|
||||
# published by the Free Software Foundation; either version 2 of the
|
||||
# License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
# General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
|
||||
# In order to exercise all tests, run the following command from the parent
|
||||
# directory (you may omit the 'discover' argument):
|
||||
#
|
||||
# python3 -m unittest discover
|
||||
|
||||
"""Test module for terrasync.dirindex"""
|
||||
|
||||
import os
|
||||
import unittest
|
||||
from terrasync.dirindex import DirIndex
|
||||
from terrasync.exceptions import InvalidDirIndexFile
|
||||
from terrasync.virtual_path import VirtualPath
|
||||
|
||||
|
||||
baseDir = os.path.dirname(__file__)
|
||||
|
||||
def testData(*args):
|
||||
return os.path.join(baseDir, "data", "dirindex", *args)
|
||||
|
||||
|
||||
directories_in_sample_dirindex_1 = [
|
||||
{'name': 'Airports', 'hash': '8a93b5d8a2b04d2fb8de4ef58ad02f9e8819d314'},
|
||||
{'name': 'Models', 'hash': 'bee221c9d2621dc9b69cd9e0ad7dd0605f6ea928'},
|
||||
{'name': 'Objects', 'hash': '10ae32c986470fa55b56b8eefbc6ed565cce0642'},
|
||||
{'name': 'Terrain', 'hash': 'e934024dc0f959f9a433e47c646d256630052c2e'},
|
||||
{'name': 'Buildings', 'hash': '19060725efc2a301fa6844991e2922d42d8de5e2'},
|
||||
{'name': 'Pylons', 'hash': '378b3dd58ce3058f2992b70aa5ecf8947a4d7f9e'},
|
||||
{'name': 'Roads', 'hash': '89f8f10406041948368c76c0a2e794d45ac536b7'}]
|
||||
|
||||
files_in_sample_dirindex_1 = [
|
||||
{'name': 'some file',
|
||||
'hash': '4cbf3d1746a1249bff7809e4b079dd80cfce594c',
|
||||
'size': 123},
|
||||
{'name': 'other file',
|
||||
'hash': '62726252f7183eef31001c1c565e149f3c4527b9',
|
||||
'size': 4567},
|
||||
{'name': 'third file',
|
||||
'hash': '303adcc1747d8dc438096307189881e987e9bb61',
|
||||
'size': 89012}]
|
||||
|
||||
tarballs_in_sample_dirindex_1 = [
|
||||
{'name': 'Airports_archive.tgz',
|
||||
'hash': 'b63a067d82824f158d6bde66f9e76654274277fe',
|
||||
'size': 1234567}]
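# Note (added for clarity): each entry above corresponds to one line of
# tests/data/dirindex/good/sample_dirindex_1. For instance, the line
#     d:Airports:8a93b5d8a2b04d2fb8de4ef58ad02f9e8819d314
# is expected to be parsed into
#     {'name': 'Airports', 'hash': '8a93b5d8a2b04d2fb8de4ef58ad02f9e8819d314'}
# while 'f:' (file) and 't:' (tarball) lines carry an additional trailing
# size field.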
|
||||
|
||||
|
||||
class TestDirIndex(unittest.TestCase):
|
||||
"""Unit tests for the DirIndex class."""
|
||||
|
||||
def test_constructor(self):
|
||||
d = DirIndex(testData("good", "sample_dirindex_1"))
|
||||
self.assertEqual(d.version, 1)
|
||||
self.assertEqual(d.path, VirtualPath("some/path"))
|
||||
self.assertEqual(d.directories, directories_in_sample_dirindex_1)
|
||||
self.assertEqual(d.files, files_in_sample_dirindex_1)
|
||||
self.assertEqual(d.tarballs, tarballs_in_sample_dirindex_1)
|
||||
|
||||
stems = ("path_starts_with_slash",
|
||||
"path_contains_a_backslash",
|
||||
"dotdot_in_path",
|
||||
"slash_in_directory_name",
|
||||
"slash_in_file_name",
|
||||
"slash_in_tarball_name",
|
||||
"backslash_in_directory_name",
|
||||
"backslash_in_file_name",
|
||||
"backslash_in_tarball_name",
|
||||
"directory_name_is_double_colon",
|
||||
"file_name_is_double_colon",
|
||||
"tarball_name_is_double_colon",)
|
||||
for stem in stems:
|
||||
with self.assertRaises(InvalidDirIndexFile):
|
||||
DirIndex(testData("bad", "bad_dirindex_" + stem))
|
||||
|
||||
with self.assertRaises(UnicodeDecodeError):
|
||||
d = DirIndex(testData("bad", "bad_dirindex_encoding"))
|
||||
357
scripts/python/TerraSync/tests/test_virtual_path.py
Normal file
@@ -0,0 +1,357 @@
|
||||
#! /usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# test_virtual_path.py --- Test module for terrasync.virtual_path
|
||||
# Copyright (C) 2018 Florent Rougon
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License as
|
||||
# published by the Free Software Foundation; either version 2 of the
|
||||
# License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
# General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
|
||||
# In order to exercise all tests, run the following command from the parent
|
||||
# directory (you may omit the 'discover' argument):
|
||||
#
|
||||
# python3 -m unittest discover
|
||||
|
||||
import collections
|
||||
import unittest
|
||||
|
||||
from terrasync.virtual_path import VirtualPath, MutableVirtualPath
|
||||
|
||||
# Hook doctest-based tests into the unittest test discovery mechanism
|
||||
import doctest
|
||||
import terrasync.virtual_path
|
||||
|
||||
def load_tests(loader, tests, ignore):
|
||||
# Tell unittest to run doctests from terrasync.virtual_path
|
||||
tests.addTests(doctest.DocTestSuite(terrasync.virtual_path))
|
||||
return tests
|
||||
|
||||
|
||||
class VirtualPathCommonTests:
|
||||
"""Common tests to run for both VirtualPath and MutableVirtualPath.
|
||||
|
||||
The tests inside this class must exercise the class (VirtualPath or
|
||||
MutableVirtualPath) stored in the 'cls' class attribute. They must
|
||||
work for both VirtualPath and MutableVirtualPath, otherwise they
|
||||
don't belong here!
|
||||
|
||||
"""
|
||||
|
||||
def test_normalizeStringPath(self):
|
||||
self.assertEqual(self.cls.normalizeStringPath("/"), "/")
|
||||
self.assertEqual(self.cls.normalizeStringPath(""), "/")
|
||||
self.assertEqual(
|
||||
self.cls.normalizeStringPath("/abc/Def ijk//l Mn///op/q/rst/"),
|
||||
"/abc/Def ijk/l Mn/op/q/rst")
|
||||
self.assertEqual(self.cls.normalizeStringPath("abc/def"), "/abc/def")
|
||||
self.assertEqual(self.cls.normalizeStringPath("/abc/def"), "/abc/def")
|
||||
self.assertEqual(self.cls.normalizeStringPath("//abc/def"),
|
||||
"/abc/def")
|
||||
self.assertEqual(self.cls.normalizeStringPath("///abc/def"),
|
||||
"/abc/def")
|
||||
self.assertEqual(self.cls.normalizeStringPath("/abc//def"),
|
||||
"/abc/def")
|
||||
|
||||
# Unless the implementation of VirtualPath.__init__() has changed
|
||||
# meanwhile, the following function must be essentially the same as
|
||||
# test_normalizeStringPath().
|
||||
def test_constructor_and_str(self):
|
||||
p = self.cls("/")
|
||||
self.assertEqual(str(p), "/")
|
||||
|
||||
p = self.cls("")
|
||||
self.assertEqual(str(p), "/")
|
||||
|
||||
p = self.cls("/abc/Def ijk//l Mn///op/q/rst/")
|
||||
self.assertEqual(str(p), "/abc/Def ijk/l Mn/op/q/rst")
|
||||
|
||||
p = self.cls("abc/def")
|
||||
self.assertEqual(str(p), "/abc/def")
|
||||
|
||||
p = self.cls("/abc/def")
|
||||
self.assertEqual(str(p), "/abc/def")
|
||||
|
||||
p = self.cls("//abc/def")
|
||||
self.assertEqual(str(p), "/abc/def")
|
||||
|
||||
p = self.cls("///abc/def")
|
||||
self.assertEqual(str(p), "/abc/def")
|
||||
|
||||
p = self.cls("/abc//def")
|
||||
self.assertEqual(str(p), "/abc/def")
|
||||
|
||||
def test_asPosix(self):
|
||||
self.assertEqual(self.cls("").asPosix(), "/")
|
||||
self.assertEqual(self.cls("/").asPosix(), "/")
|
||||
self.assertEqual(self.cls("/abc//def").asPosix(), "/abc/def")
|
||||
self.assertEqual(self.cls("/abc//def/").asPosix(), "/abc/def")
|
||||
self.assertEqual(self.cls("//abc//def//").asPosix(), "/abc/def")
|
||||
self.assertEqual(self.cls("////abc//def//").asPosix(), "/abc/def")
|
||||
|
||||
def test_samePath(self):
|
||||
self.assertTrue(self.cls("").samePath(self.cls("")))
|
||||
self.assertTrue(self.cls("").samePath(self.cls("/")))
|
||||
self.assertTrue(self.cls("/").samePath(self.cls("")))
|
||||
self.assertTrue(self.cls("/").samePath(self.cls("/")))
|
||||
|
||||
self.assertTrue(
|
||||
self.cls("/abc/def").samePath(self.cls("/abc/def")))
|
||||
self.assertTrue(
|
||||
self.cls("/abc//def").samePath(self.cls("/abc/def")))
|
||||
self.assertTrue(
|
||||
self.cls("/abc/def/").samePath(self.cls("/abc/def")))
|
||||
|
||||
def test_comparisons(self):
|
||||
self.assertEqual(self.cls("/abc/def"), self.cls("/abc/def"))
|
||||
self.assertEqual(self.cls("/abc//def"), self.cls("/abc/def"))
|
||||
self.assertEqual(self.cls("/abc/def/"), self.cls("/abc/def"))
|
||||
|
||||
self.assertNotEqual(self.cls("/abc/dEf"), self.cls("/abc/def"))
|
||||
self.assertNotEqual(self.cls("/abc/def "), self.cls("/abc/def"))
|
||||
|
||||
self.assertLessEqual(self.cls("/foo/bar"), self.cls("/foo/bar"))
|
||||
self.assertLessEqual(self.cls("/foo/bar"), self.cls("/foo/bbr"))
|
||||
self.assertLess(self.cls("/foo/bar"), self.cls("/foo/bbr"))
|
||||
|
||||
self.assertGreaterEqual(self.cls("/foo/bar"), self.cls("/foo/bar"))
|
||||
self.assertGreaterEqual(self.cls("/foo/bbr"), self.cls("/foo/bar"))
|
||||
self.assertGreater(self.cls("/foo/bbr"), self.cls("/foo/bar"))
|
||||
|
||||
def test_truedivOperators(self):
|
||||
"""
|
||||
Test operators used to add paths components to a VirtualPath instance."""
|
||||
p = self.cls("/foo/bar/baz/quux/zoot")
|
||||
self.assertEqual(p, self.cls("/") / "foo" / "bar" / "baz/quux/zoot")
|
||||
self.assertEqual(p, self.cls("/foo") / "bar" / "baz/quux/zoot")
|
||||
self.assertEqual(p, self.cls("/foo/bar") / "baz/quux/zoot")
|
||||
|
||||
def test_joinpath(self):
|
||||
p = self.cls("/foo/bar/baz/quux/zoot")
|
||||
self.assertEqual(
|
||||
p,
|
||||
self.cls("/foo").joinpath("bar", "baz", "quux/zoot"))
|
||||
|
||||
def test_nameAttribute(self):
|
||||
self.assertEqual(self.cls("/").name, "")
|
||||
|
||||
p = self.cls("/foo/bar/baz/quux/zoot")
|
||||
self.assertEqual(p.name, "zoot")
|
||||
|
||||
def test_partsAttribute(self):
|
||||
self.assertEqual(self.cls("/").parts, ("/",))
|
||||
|
||||
p = self.cls("/foo/bar/baz/quux/zoot")
|
||||
self.assertEqual(p.parts, ("/", "foo", "bar", "baz", "quux", "zoot"))
|
||||
|
||||
def test_parentsAttribute(self):
|
||||
def pathify(*args):
|
||||
return tuple( (self.cls(s) for s in args) )
|
||||
|
||||
p = self.cls("/")
|
||||
self.assertEqual(tuple(p.parents), pathify()) # empty tuple
|
||||
|
||||
p = self.cls("/foo")
|
||||
self.assertEqual(tuple(p.parents), pathify("/"))
|
||||
|
||||
p = self.cls("/foo/bar")
|
||||
self.assertEqual(tuple(p.parents), pathify("/foo", "/"))
|
||||
|
||||
p = self.cls("/foo/bar/baz")
|
||||
self.assertEqual(tuple(p.parents), pathify("/foo/bar", "/foo", "/"))
|
||||
|
||||
def test_parentAttribute(self):
|
||||
def pathify(s):
|
||||
return self.cls(s)
|
||||
|
||||
p = self.cls("/")
|
||||
self.assertEqual(p.parent, pathify("/"))
|
||||
|
||||
p = self.cls("/foo")
|
||||
self.assertEqual(p.parent, pathify("/"))
|
||||
|
||||
p = self.cls("/foo/bar")
|
||||
self.assertEqual(p.parent, pathify("/foo"))
|
||||
|
||||
p = self.cls("/foo/bar/baz")
|
||||
self.assertEqual(p.parent, pathify("/foo/bar"))
|
||||
|
||||
def test_suffixAttribute(self):
|
||||
p = self.cls("/")
|
||||
self.assertEqual(p.suffix, '')
|
||||
|
||||
p = self.cls("/foo/bar/baz.py")
|
||||
self.assertEqual(p.suffix, '.py')
|
||||
|
||||
p = self.cls("/foo/bar/baz.py.bla")
|
||||
self.assertEqual(p.suffix, '.bla')
|
||||
|
||||
p = self.cls("/foo/bar/baz")
|
||||
self.assertEqual(p.suffix, '')
|
||||
|
||||
def test_suffixesAttribute(self):
|
||||
p = self.cls("/")
|
||||
self.assertEqual(p.suffixes, [])
|
||||
|
||||
p = self.cls("/foo/bar/baz.py")
|
||||
self.assertEqual(p.suffixes, ['.py'])
|
||||
|
||||
p = self.cls("/foo/bar/baz.py.bla")
|
||||
self.assertEqual(p.suffixes, ['.py', '.bla'])
|
||||
|
||||
p = self.cls("/foo/bar/baz")
|
||||
self.assertEqual(p.suffixes, [])
|
||||
|
||||
def test_stemAttribute(self):
|
||||
p = self.cls("/")
|
||||
self.assertEqual(p.stem, '')
|
||||
|
||||
p = self.cls("/foo/bar/baz.py")
|
||||
self.assertEqual(p.stem, 'baz')
|
||||
|
||||
p = self.cls("/foo/bar/baz.py.bla")
|
||||
self.assertEqual(p.stem, 'baz.py')
|
||||
|
||||
def test_asRelative(self):
|
||||
self.assertEqual(self.cls("/").asRelative(), "")
|
||||
self.assertEqual(self.cls("/foo/bar/baz/quux/zoot").asRelative(),
|
||||
"foo/bar/baz/quux/zoot")
|
||||
|
||||
def test_relativeTo(self):
|
||||
self.assertEqual(self.cls("").relativeTo(""), "")
|
||||
self.assertEqual(self.cls("").relativeTo("/"), "")
|
||||
self.assertEqual(self.cls("/").relativeTo("/"), "")
|
||||
self.assertEqual(self.cls("/").relativeTo(""), "")
|
||||
|
||||
p = self.cls("/foo/bar/baz/quux/zoot")
|
||||
|
||||
self.assertEqual(p.relativeTo(""), "foo/bar/baz/quux/zoot")
|
||||
self.assertEqual(p.relativeTo("/"), "foo/bar/baz/quux/zoot")
|
||||
|
||||
self.assertEqual(p.relativeTo("foo"), "bar/baz/quux/zoot")
|
||||
self.assertEqual(p.relativeTo("foo/"), "bar/baz/quux/zoot")
|
||||
self.assertEqual(p.relativeTo("/foo"), "bar/baz/quux/zoot")
|
||||
self.assertEqual(p.relativeTo("/foo/"), "bar/baz/quux/zoot")
|
||||
|
||||
self.assertEqual(p.relativeTo("foo/bar/baz"), "quux/zoot")
|
||||
self.assertEqual(p.relativeTo("foo/bar/baz/"), "quux/zoot")
|
||||
self.assertEqual(p.relativeTo("/foo/bar/baz"), "quux/zoot")
|
||||
self.assertEqual(p.relativeTo("/foo/bar/baz/"), "quux/zoot")
|
||||
|
||||
with self.assertRaises(ValueError):
|
||||
p.relativeTo("/foo/ba")
|
||||
|
||||
with self.assertRaises(ValueError):
|
||||
p.relativeTo("/foo/balloon")
|
||||
|
||||
def test_withName(self):
|
||||
p = self.cls("/foo/bar/baz/quux/zoot")
|
||||
|
||||
self.assertEqual(p.withName(""),
|
||||
VirtualPath("/foo/bar/baz/quux"))
|
||||
self.assertEqual(p.withName("pouet"),
|
||||
VirtualPath("/foo/bar/baz/quux/pouet"))
|
||||
self.assertEqual(p.withName("pouet/zdong"),
|
||||
VirtualPath("/foo/bar/baz/quux/pouet/zdong"))
|
||||
|
||||
# The self.cls object has no 'name' (referring to the 'name' property)
|
||||
with self.assertRaises(ValueError):
|
||||
self.cls("").withName("foobar")
|
||||
|
||||
with self.assertRaises(ValueError):
|
||||
self.cls("/").withName("foobar")
|
||||
|
||||
def test_withSuffix(self):
|
||||
p = self.cls("/foo/bar/baz.tar.gz")
|
||||
self.assertEqual(p.withSuffix(".bz2"),
|
||||
VirtualPath("/foo/bar/baz.tar.bz2"))
|
||||
p = self.cls("/foo/bar/baz")
|
||||
self.assertEqual(p.withSuffix(".tar.xz"),
|
||||
VirtualPath("/foo/bar/baz.tar.xz"))
|
||||
|
||||
# The self.cls object has no 'name' (referring to the 'name' property)
|
||||
with self.assertRaises(ValueError):
|
||||
self.cls("/foo/bar/baz.tar.gz").withSuffix("no-leading-dot")
|
||||
|
||||
with self.assertRaises(ValueError):
|
||||
# The root virtual path ('/') can't be used for this
|
||||
self.cls("/").withSuffix(".foobar")
|
||||
|
||||
|
||||
class TestVirtualPath(unittest.TestCase, VirtualPathCommonTests):
|
||||
"""Tests for the VirtualPath class.
|
||||
|
||||
These are the tests using the common infrastructure from
|
||||
VirtualPathCommonTests.
|
||||
|
||||
"""
|
||||
|
||||
cls = VirtualPath
|
||||
|
||||
class TestVirtualPathSpecific(unittest.TestCase):
|
||||
"""Tests specific to the VirtualPath class."""
|
||||
|
||||
def test_isHashableType(self):
|
||||
p = VirtualPath("/foo")
|
||||
self.assertTrue(isinstance(p, collections.abc.Hashable))
|
||||
|
||||
def test_insideSet(self):
|
||||
l1 = [ VirtualPath("/foo/bar"),
|
||||
VirtualPath("/foo/baz") ]
|
||||
l2 = l1 + [ VirtualPath("/foo/bar") ] # l2 has a duplicate element
|
||||
|
||||
# Sets allow one to ignore duplicate elements when comparing
|
||||
self.assertEqual(set(l1), set(l2))
|
||||
self.assertEqual(frozenset(l1), frozenset(l2))
|
||||
|
||||
|
||||
class TestMutableVirtualPath(unittest.TestCase, VirtualPathCommonTests):
|
||||
"""Tests for the MutableVirtualPath class.
|
||||
|
||||
These are the tests using the common infrastructure from
|
||||
VirtualPathCommonTests.
|
||||
|
||||
"""
|
||||
|
||||
cls = MutableVirtualPath
|
||||
|
||||
class TestMutableVirtualPathSpecific(unittest.TestCase):
|
||||
"""Tests specific to the MutableVirtualPath class."""
|
||||
|
||||
def test_mixedComparisons(self):
|
||||
self.assertTrue(
|
||||
VirtualPath("/abc/def").samePath(MutableVirtualPath("/abc/def")))
|
||||
self.assertTrue(
|
||||
VirtualPath("/abc//def").samePath(MutableVirtualPath("/abc/def")))
|
||||
self.assertTrue(
|
||||
VirtualPath("/abc/def/").samePath(MutableVirtualPath("/abc/def")))
|
||||
|
||||
self.assertTrue(
|
||||
MutableVirtualPath("/abc/def").samePath(VirtualPath("/abc/def")))
|
||||
self.assertTrue(
|
||||
MutableVirtualPath("/abc//def").samePath(VirtualPath("/abc/def")))
|
||||
self.assertTrue(
|
||||
MutableVirtualPath("/abc/def/").samePath(VirtualPath("/abc/def")))
|
||||
|
||||
def test_inPlacePathConcatenation(self):
|
||||
p = VirtualPath("/foo/bar/baz/quux/zoot")
|
||||
|
||||
q = MutableVirtualPath("/foo")
|
||||
q /= "bar"
|
||||
q /= "baz/quux/zoot"
|
||||
|
||||
self.assertTrue(p.samePath(q))
|
||||
|
||||
def test_isNotHashableType(self):
|
||||
p = MutableVirtualPath("/foo")
|
||||
self.assertFalse(isinstance(p, collections.abc.Hashable))
|
||||
49
scripts/python/demo.py
Executable file
@@ -0,0 +1,49 @@
|
||||
#! /usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
from FlightGear import FlightGear
|
||||
import time
|
||||
|
||||
def main():
|
||||
fg = FlightGear('localhost', 5500)
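# Note (assumption, not stated in the original script): this uses the
# props/telnet interface, so FlightGear must have been started with a
# telnet server on this port, e.g. "fgfs --telnet=5500".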
|
||||
|
||||
# Wait five seconds for simulator to settle down
|
||||
while 1:
|
||||
if fg['/sim/time/elapsed-sec'] > 5:
|
||||
break
|
||||
time.sleep(1.0)
|
||||
print(fg['/sim/time/elapsed-sec'])
|
||||
|
||||
|
||||
# parking brake on
|
||||
fg['/controls/parking-brake'] = 1
|
||||
|
||||
# heading = fg['/orientation/heading-deg']
|
||||
|
||||
# Switch to external view for 'walk around'.
|
||||
fg.view_next()
|
||||
|
||||
fg['/sim/current-view/goal-heading-offset-deg'] = 180.0
|
||||
#fg.wait_for_prop_eq('/sim/current-view/heading-offset-deg', 180.0)
|
||||
|
||||
fg['/sim/current-view/goal-heading-offset-deg'] = 90.0
|
||||
#fg.wait_for_prop_eq('/sim/current-view/heading-offset-deg', 90.0)
|
||||
|
||||
fg['/sim/current-view/goal-heading-offset-deg'] = 0.0
|
||||
#fg.wait_for_prop_eq('/sim/current-view/heading-offset-deg', 0.0)
|
||||
|
||||
time.sleep(2.0)
|
||||
|
||||
# Switch back to cockpit view
|
||||
fg.view_prev()
|
||||
|
||||
time.sleep(2.0)
|
||||
|
||||
# Flaps to take off position
|
||||
fg['/controls/flaps'] = 0.34
|
||||
#fg.wait_for_prop_eq('/surface-positions/flap-pos-norm', 0.34)
|
||||
|
||||
fg.quit()
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
301
scripts/python/nasal_api_doc.py
Executable file
@@ -0,0 +1,301 @@
|
||||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Copyright (C) 2012 Adrian Musceac
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 2 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
|
||||
import os, sys, glob
|
||||
import io
|
||||
import re, string
|
||||
|
||||
"""Script which generates an API documentation file for Nasal libraries
|
||||
located inside $FGROOT/Nasal/
|
||||
Usage: nasal_api_doc.py parse [path to $FGROOT/Nasal/]
|
||||
Or configure the local path below, and omit the path in the console.
|
||||
The API doc in HTML format is generated in the current working directory"""
|
||||
|
||||
########### Local $FGROOT/Nasal/ path ##########
|
||||
NASAL_PATH="../fgfs/fgdata/Nasal/"
|
||||
|
||||
|
||||
def get_files(nasal_dir):
|
||||
if nasal_dir[-1]!='/':
|
||||
nasal_dir+='/'
|
||||
try:
|
||||
os.stat(nasal_dir)
|
||||
except:
|
||||
print("The path does not exist")
|
||||
sys.exit()
|
||||
fgroot_dir = nasal_dir.rstrip('/').replace('Nasal','')
|
||||
|
||||
try:
|
||||
f_version = open(fgroot_dir+'version','rb')
|
||||
version = f_version.read(256).rstrip('\n')
|
||||
finally:
|
||||
f_version.close()
|
||||
|
||||
top_level = []
|
||||
modules = []
|
||||
top_namespaces = []
|
||||
files_list = os.listdir(nasal_dir)
|
||||
for f in files_list:
|
||||
if f.find(".nas")!=-1:
|
||||
top_level.append(f)
|
||||
continue
|
||||
if os.path.isdir(nasal_dir + f):
|
||||
modules.append(f)
|
||||
top_level.sort()
|
||||
modules.sort()
|
||||
if len(top_level) ==0:
|
||||
print("This does not look like the correct $FGROOT/Nasal path")
|
||||
sys.exit()
|
||||
if len(modules)==0:
|
||||
print("Warning: could not find any submodules")
|
||||
for f in top_level:
|
||||
namespace=f.replace(".nas","")
|
||||
functions=parse_file(nasal_dir + f)
|
||||
top_namespaces.append([namespace,functions])
|
||||
for m in modules:
|
||||
files=glob.glob(nasal_dir+m+"/*.nas")
|
||||
for f in files:
|
||||
functions=parse_file(f)
|
||||
top_namespaces.append([m,functions])
|
||||
|
||||
output_text(top_namespaces,modules,version)
|
||||
|
||||
|
||||
def output_text(top_namespaces,modules,version):
|
||||
fw=open('./nasal_api_doc.html','wb')
|
||||
buf='<html><head>\
|
||||
<title>Nasal API</title>\
|
||||
<style>\n\
|
||||
a.main_module_link {margin-left:30px;display:block;float:left;}\
|
||||
div.container {background-color:#eee;clear:left;margin-top:20px;}\
|
||||
h2.namespace_title {padding-left:20px;color:#fff;background-color:#8888AC}\
|
||||
h4.class_function {padding-left:20px;background-color:#eee;color:#000033}\
|
||||
h4.class_definition {padding-left:20px;background-color:#eee;color:#000033}\
|
||||
h4.function {padding-left:20px;background-color:#eee;color:#000033}\
|
||||
hr {margin-left:30px;margin-right:30px;}\
|
||||
div.comments {padding-left:40px;display:inline;font-size:12px;}\
|
||||
</style>\n\
|
||||
</head><body style="width:1024px;">'
|
||||
|
||||
buf+='<h1 style="padding-left:20px;display:block;color:#fff;background-color:#555588;">\
|
||||
Nasal $FGROOT Library<br/><span style="font-size:12px;">Flightgear version: '+version+'\
|
||||
<br/>This file is generated automatically by scripts/python/nasal_api_doc.py\
|
||||
</span></h1>\
|
||||
<br/><a href="http://plausible.org/nasal">Nasal documentation</a> \
|
||||
<a href="http://wiki.flightgear.org/Nasal_scripting_language">Flightgear Nasal documentation</a>\n<div style="float:right;"> '
|
||||
buf+='<h2 style="font-size:14px;height:450px;width:250px;overflow:scroll;display:block;position:fixed;top:20px;right:20px;background-color:#8888AC;border:1px solid black;">\n'
|
||||
done=[]
|
||||
for namespace in top_namespaces:
|
||||
color='0000cc'
|
||||
if namespace[0] in modules:
|
||||
color='cc0000'
|
||||
if namespace[0] not in done:
|
||||
buf+='<a class="main_module_link" style="color:'+color+'" href="#'+namespace[0]+'">'+namespace[0]+'</a> <br/>\n'
|
||||
done.append(namespace[0])
|
||||
buf+='</h2></div>\n'
|
||||
done2=[]
|
||||
for namespace in top_namespaces:
|
||||
if namespace[0] not in done2:
|
||||
buf+='<div class="container" style="">\n'
|
||||
buf += '<h2 class="namespace_title"><a name="'+namespace[0]+'">'+namespace[0]+'</a></h2>\n'
|
||||
done2.append(namespace[0])
|
||||
for functions in namespace[1]:
|
||||
class_func=functions[0].split('.')
|
||||
if len(class_func)>1:
|
||||
f_name=''
|
||||
if class_func[1].find('_')==0:
|
||||
f_name='<font color="#0000cc">'+class_func[1]+'</font>'
|
||||
else:
|
||||
f_name=class_func[1]
|
||||
if class_func[1]!='':
|
||||
buf+= '<div><h4 class="class_function"><b>'\
|
||||
+namespace[0]+'</b>'+ "." + '<b><i>'+class_func[0]+'</i></b>'+'<b>.'+f_name+'</b>'+' ( <font color="#cc0000">'+ functions[1]+ '</font> )' +'</h4>\n'
|
||||
else:
|
||||
buf+= '<div><h4 class="class_definition"><b>'\
|
||||
+namespace[0]+'</b>'+ "." + '<b><i><u><font color="#000000">'+class_func[0]+'</font></u></i></b>' +'</h4>\n'
|
||||
else:
|
||||
if functions[0].find('_')==0:
|
||||
f_name='<font color="#0000cc">'+functions[0]+'</font>'
|
||||
else:
|
||||
f_name=functions[0]
|
||||
buf+= '<div><h4 class="function"><b>'\
|
||||
+namespace[0]+'</b>'+ "." + '<b>'+f_name+'</b>'+ ' ( <font color="#cc0000">'+ functions[1]+ '</font> )' +'</h4>\n'
|
||||
for comment in functions[2]:
|
||||
if comment.find('=====')!=-1:
|
||||
buf+='<hr/>'
|
||||
else:
|
||||
tempComment = comment.replace('#','').replace('<','&lt;').replace('>','&gt;')  # escape HTML special characters
|
||||
if tempComment.strip()!="":
|
||||
buf+= '<div class="comments">'+tempComment+'</div><br/>\n'
|
||||
buf+='</div>\n'
|
||||
if namespace[0] not in done2:
|
||||
buf+='</div>\n'
|
||||
buf+='</body></html>'
|
||||
fw.write(buf)
|
||||
fw.close()
|
||||
|
||||
def parse_file(filename):
|
||||
with open(filename,'rb') as fr:
|
||||
content = fr.readlines()
|
||||
|
||||
i=0
|
||||
retval=[]
|
||||
classname=""
|
||||
for line in content:
|
||||
match=re.search(r'^var\s+([A-Za-z0-9_-]+)\s*=\s*func\s*\(?([A-Za-z0-9_\s,=.\n-]*)\)?',line)
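# Added note: this pattern matches top-level Nasal definitions such as
#     var update_position = func(lat, lon) {
# capturing the function name and its parameter list (the example name is
# made up for illustration).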
|
||||
if match is not None:
|
||||
func_name=match.group(1)
|
||||
comments=[]
|
||||
param=match.group(2)
|
||||
if(line.find(')')==-1 and line.find('(')!=-1):
|
||||
k=i+1
|
||||
while(content[k].find(')')==-1):
|
||||
param+=content[k].rstrip('\n')
|
||||
k+=1
|
||||
param+=content[k].split(')')[0]
|
||||
j=i-1
|
||||
count=0
|
||||
while ( j>i-35 and j>-1):
|
||||
if count>3:
|
||||
break
|
||||
if len(content[j])<2:
|
||||
j-=1
|
||||
count+=1
|
||||
continue
|
||||
if re.search(r'^\s*#',content[j]) is not None:
|
||||
comments.append(content[j].rstrip('\n'))
|
||||
j-=1
|
||||
else:
|
||||
break
|
||||
if(len(comments)>1):
|
||||
comments.reverse()
|
||||
retval.append((func_name, param,comments))
|
||||
i+=1
|
||||
continue
|
||||
|
||||
match3=re.search(r'^var\s*([A-Za-z0-9_-]+)\s*=\s*{\s*(\n|})',line)
|
||||
if match3 is not None:
|
||||
classname=match3.group(1)
|
||||
|
||||
comments=[]
|
||||
|
||||
j=i-1
|
||||
count=0
|
||||
while ( j>i-35 and j>-1):
|
||||
if count>3:
|
||||
break
|
||||
if len(content[j])<2:
|
||||
j-=1
|
||||
count+=1
|
||||
continue
|
||||
if re.search(r'^\s*#',content[j]) is not None:
|
||||
comments.append(content[j].rstrip('\n'))
|
||||
j-=1
|
||||
else:
|
||||
break
|
||||
if(len(comments)>1):
|
||||
comments.reverse()
|
||||
retval.append((classname+'.', '',comments))
|
||||
i+=1
|
||||
continue
|
||||
|
||||
match2=re.search(r'^\s*([A-Za-z0-9_-]+)\s*:\s*func\s*\(?([A-Za-z0-9_\s,=.\n-]*)\)?',line)
|
||||
if match2 is not None:
|
||||
func_name=match2.group(1)
|
||||
comments=[]
|
||||
param=match2.group(2)
|
||||
if(line.find(')')==-1 and line.find('(')!=-1):
|
||||
k=i+1
|
||||
while(content[k].find(')')==-1):
|
||||
param+=content[k].rstrip('\n')
|
||||
k+=1
|
||||
param+=content[k].split(')')[0]
|
||||
j=i-1
|
||||
count=0
|
||||
while ( j>i-35 and j>-1):
|
||||
if count>3:
|
||||
break
|
||||
if len(content[j])<2:
|
||||
j-=1
|
||||
count+=1
|
||||
continue
|
||||
if re.search(r'^\s*#',content[j]) is not None:
|
||||
comments.append(content[j].rstrip('\n'))
|
||||
j-=1
|
||||
else:
|
||||
break
|
||||
if(len(comments)>1):
|
||||
comments.reverse()
|
||||
if classname =='':
|
||||
continue
|
||||
retval.append((classname+'.'+func_name, param,comments))
|
||||
i+=1
|
||||
continue
|
||||
|
||||
match4=re.search(r'^([A-Za-z0-9_-]+)\.([A-Za-z0-9_-]+)\s*=\s*func\s*\(?([A-Za-z0-9_\s,=\n.-]*)\)?',line)
|
||||
if match4 is not None:
|
||||
classname=match4.group(1)
|
||||
func_name=match4.group(2)
|
||||
comments=[]
|
||||
param=match4.group(3)
|
||||
if(line.find(')')==-1 and line.find('(')!=-1):
|
||||
k=i+1
|
||||
while(content[k].find(')')==-1):
|
||||
param+=content[k].rstrip('\n')
|
||||
k+=1
|
||||
param+=content[k].split(')')[0]
|
||||
j=i-1
|
||||
count=0
|
||||
while ( j>i-35 and j>-1):
|
||||
if count>3:
|
||||
break
|
||||
if len(content[j])<2:
|
||||
j-=1
|
||||
count+=1
|
||||
continue
|
||||
if re.search(r'^\s*#',content[j]) is not None:
|
||||
comments.append(content[j].rstrip('\n'))
|
||||
j-=1
|
||||
else:
|
||||
break
|
||||
if(len(comments)>1):
|
||||
comments.reverse()
|
||||
retval.append((classname+'.'+func_name, param,comments))
|
||||
i+=1
|
||||
continue
|
||||
|
||||
i+=1
|
||||
return retval
|
||||
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
if len(sys.argv) <2:
|
||||
print('Usage: nasal_api_doc.py parse [path to $FGROOT/Nasal/]')
|
||||
sys.exit()
|
||||
else:
|
||||
if sys.argv[1]=='parse':
|
||||
if len(sys.argv) <3:
|
||||
nasal_path=NASAL_PATH
|
||||
else:
|
||||
nasal_path=sys.argv[2]
|
||||
get_files(nasal_path)
|
||||
else:
|
||||
print('Usage: nasal_api_doc.py parse [path to $FGROOT/Nasal/]')
|
||||
sys.exit()
|
||||
102
scripts/python/performance_replay.py
Executable file
@@ -0,0 +1,102 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
'''
|
||||
Replays a Flightgear recording and shows framerate statistics.
|
||||
|
||||
Usage:
|
||||
-f <fgfs>
|
||||
Name of FlightGear executable/script, e.g.: -f run_fgfs.sh
|
||||
-i <tape>
|
||||
Name of recording, for use with --load-tape.
|
||||
'''
|
||||
|
||||
import math
|
||||
import sys
|
||||
import time
|
||||
|
||||
import recordreplay
|
||||
|
||||
|
||||
def average_stddev(items):
|
||||
'''
|
||||
Returns (average, stddev).
|
||||
'''
|
||||
total = 0
|
||||
total_sq = 0
|
||||
for item in items:
|
||||
total += item
|
||||
total_sq += item*item
|
||||
n = len(items)
|
||||
average = total / n
|
||||
variance = total_sq / n - (total / n)**2
|
||||
stddev = math.sqrt(variance)
|
||||
return average, stddev
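# Illustrative example (added, not part of the original script):
#   average_stddev([1.0, 2.0, 3.0]) returns (2.0, 0.816...), i.e. the mean
#   and the population standard deviation of the given frame times.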
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
fgfs = None
|
||||
tape = None
|
||||
args = iter(sys.argv[1:])
|
||||
while 1:
|
||||
try:
|
||||
arg = next(args)
|
||||
except StopIteration:
|
||||
break
|
||||
if arg in ('-h', '--help'):
|
||||
print(__doc__)
|
||||
elif arg == '-f':
|
||||
fgfs = next(args)
|
||||
elif arg == '-i':
|
||||
tape = next(args)
|
||||
else:
|
||||
raise Exception(f'Unrecognised arg: {arg}')
|
||||
|
||||
if not fgfs:
|
||||
raise Exception(f'Specify fgfs executable/run-script with -f')
|
||||
if not tape:
|
||||
raise Exception(f'Specify tape to replay with -i')
|
||||
|
||||
fg = recordreplay.Fg( None,
|
||||
f'{fgfs}'
|
||||
f' --load-tape={tape}'
|
||||
f' --timeofday=noon'
|
||||
f' --prop:bool:/sim/replay/log-frame-times=true'
|
||||
f' --prop:bool:/sim/replay/replay-main-view=true'
|
||||
f' --prop:bool:/sim/replay/replay-main-window-size=true'
|
||||
f' --prop:bool:/sim/replay/looped=false'
|
||||
)
|
||||
|
||||
fg.waitfor('/sim/fdm-initialized', 1, timeout=45)
|
||||
fg.waitfor('/sim/replay/replay-state', 1)
|
||||
fg.waitfor('/sim/replay/replay-state-eof', 1, timeout=600)
|
||||
|
||||
print(f'Reading frame-time statistics...')
|
||||
t = time.time()
|
||||
items = fg.fg.ls('/sim/replay/log-frame-times')
|
||||
t = time.time() - t
|
||||
print(f'fg.fg.ls took t={t}')
|
||||
|
||||
fg.close()
|
||||
|
||||
dts = []
|
||||
for item in items:
|
||||
if item.name == 'dt':
|
||||
dts.append(float(item.value))
|
||||
|
||||
def statistics_text(dts):
|
||||
dt_average, dt_stddev = average_stddev(dts)
|
||||
t_total = sum(dts)
|
||||
return f'n={len(dts)} dt_average={dt_average} dt_stddev={dt_stddev} t_total={t_total} fps={len(dts)/t_total}'
|
||||
|
||||
print(f'-' * 40)
|
||||
print(f'')
|
||||
print(f'Overall frame time (dt) statistics:')
|
||||
print(f' {statistics_text(dts)}')
|
||||
|
||||
print(f'Ignoring first 4 frames:')
|
||||
print(f' {statistics_text(dts[4:])}')
|
||||
|
||||
print(f'')
|
||||
print(f'Raw frame times:')
|
||||
dts_text = ' '.join(map(lambda dt: f'{dt:.4f}', dts))
|
||||
print(f'dts: {dts_text}')
|
||||
847
scripts/python/recordreplay.py
Executable file
@@ -0,0 +1,847 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
'''
|
||||
Test script for record/replay. Only tested on Unix.
|
||||
|
||||
E.g.:
|
||||
|
||||
./flightgear/scripts/python/recordreplay.py -f run_fgfs.sh
|
||||
|
||||
Args:
|
||||
--all
|
||||
Run all tests (this is the default).
|
||||
--continuous BOOLS
|
||||
--extra-properties BOOLS
|
||||
--it-max <it>
|
||||
Set max iteration (exclusive) to run; useful when fixing tests and
|
||||
retrying.
|
||||
--it-min <it>
|
||||
Set min iteration (inclusive) to run; useful when fixing tests and
|
||||
retrying.
|
||||
--main-view BOOLS
|
||||
--multiplayer
|
||||
-f <fgfs>
|
||||
The fgfs executable to use. Default assumes the Walk build system.
|
||||
--f-old <fgfs-old>
|
||||
A second fgfs executable. If specified we run all tests twice, first
|
||||
using <fgfs-old> to create the recording and <fgfs> to replay it,
|
||||
second the other way round.
|
||||
--test-motion
|
||||
Checks that speed of aircraft on replay is not affected by frame rate.
|
||||
|
||||
We deliberately change frame rate while recording UFO moving at
|
||||
constant speed.
|
||||
|
||||
--test-motion-mp
|
||||
Checks that speed of MP on replay is not affected by frame rate.
|
||||
|
||||
We deliberately change frame rate while recording UFO moving at
|
||||
constant speed.
|
||||
|
||||
BOOLS is a comma-separated list of 0 or 1, with 1 activating the particular
feature. So for example '--continuous 0' tests normal recording/replay,
'--continuous 1' tests continuous recording/replay, and '--continuous 0,1'
tests both.
|
||||
|
||||
We test all combinations of continuous, extra-properties, main-view and
|
||||
multiplayer recordings. For each test we check that we can create a
|
||||
recording, and replay it in a new fgfs instance. When replaying we check
|
||||
a small number of basic things such as the recording length, and whether
|
||||
extra-properties are replayed.
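
An illustrative invocation (assuming, as in the example above, that
run_fgfs.sh wraps the fgfs executable):

./flightgear/scripts/python/recordreplay.py -f run_fgfs.sh --continuous 0,1 --main-view 0,1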
|
||||
'''
|
||||
|
||||
import os
|
||||
import signal
|
||||
import subprocess
|
||||
import sys
|
||||
import time
|
||||
|
||||
try:
|
||||
import resource
|
||||
except Exception:
|
||||
# We don't mind if 'resource' module is not available, e.g. on Windows.
|
||||
resource = None
|
||||
|
||||
import FlightGear
|
||||
|
||||
def log(text):
|
||||
print(text, file=sys.stderr)
|
||||
sys.stderr.flush()
|
||||
|
||||
g_cleanup = []
|
||||
g_tapedir = './recordreplay.py.tapes'
|
||||
|
||||
|
||||
def remove(path):
|
||||
'''
|
||||
Removes file, ignoring any error.
|
||||
'''
|
||||
log(f'Removing: {path}')
|
||||
try:
|
||||
os.remove(path)
|
||||
except Exception as e:
|
||||
log(f'Failed to remove {path}: {e}')
|
||||
|
||||
|
||||
def readlink(path):
|
||||
'''
|
||||
Returns absolute path destination of link.
|
||||
'''
|
||||
ret = os.readlink(path)
|
||||
if not os.path.isabs(ret):
|
||||
ret = os.path.join(os.path.dirname(path), ret)
|
||||
return ret
|
||||
|
||||
|
||||
class Fg:
|
||||
'''
|
||||
Runs flightgear, with support for setting/getting properties etc.
|
||||
|
||||
self.fg is a FlightGear.FlightGear instance, which uses telnet to
|
||||
communicate with Flightgear.
|
||||
'''
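# Typical use, as exercised by the tests below (illustrative):
#   fg = Fg('harrier-gr3', f'{fgfs} --airport=egtk')
#   fg.waitfor('/sim/fdm-initialized', 1, timeout=45)
#   fg.fg['/sim/replay/record-continuous'] = 1
#   fg.close()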
|
||||
def __init__(self, aircraft, args, env=None, telnet_port=None, telnet_hz=None, out=None, screensaver_suspend=True):
|
||||
'''
|
||||
aircraft:
|
||||
Specified as: --aircraft={aircraft}. This is separate from <args>
|
||||
because we need to know the name of recording files.
|
||||
args:
|
||||
Miscellaneous args: either space-separated name=value strings or a
|
||||
dict.
|
||||
env:
|
||||
Environment to set. If DISPLAY is not in <env> we add 'DISPLAY=:0'.
|
||||
telnet_port:
Port of FlightGear's built-in telnet server; defaults to 5500.
telnet_hz:
If not None, polling rate passed via the long form of the --telnet
option.
|
||||
'''
|
||||
self.child = None
|
||||
self.aircraft = aircraft
|
||||
if aircraft:
|
||||
args += f' --aircraft={aircraft}'
|
||||
|
||||
if telnet_port is None:
|
||||
telnet_port = 5500
|
||||
if telnet_hz is None:
|
||||
args += f' --telnet={telnet_port}'
|
||||
else:
|
||||
args += f' --telnet=_,_,{telnet_hz},_,{telnet_port},_'
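# Descriptive note (added): the long form of --telnet passes the polling
# rate and the port in the 3rd and 5th comma-separated fields; the other
# fields are left as '_' placeholders.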
|
||||
args += f' --prop:/sim/replay/tape-directory={g_tapedir}'
|
||||
args += f' --prop:bool:/sim/startup/screensaver-suspend={"true" if screensaver_suspend else "false"}'
|
||||
|
||||
args2 = args.split()
|
||||
|
||||
environ = os.environ.copy()
|
||||
if isinstance(env, str):
|
||||
for nv in env.split():
|
||||
n, v = nv.split('=', 1)
|
||||
environ[n] = v
|
||||
if 'DISPLAY' not in environ:
|
||||
environ['DISPLAY'] = ':0'
|
||||
|
||||
# Run flightgear in new process, telling it to open telnet server.
|
||||
#
|
||||
# We run not in a shell, otherwise self.child.terminate() doesn't
|
||||
# work - it would kill the shell but leave fgfs running (there are
|
||||
# workarounds for this, such as prefixing the command with 'exec').
|
||||
#
|
||||
log(f'Command is: {args}')
|
||||
log(f'Running: {args2}')
|
||||
if resource:
|
||||
def preexec():
|
||||
try:
|
||||
resource.setrlimit(resource.RLIMIT_CORE, (resource.RLIM_INFINITY, resource.RLIM_INFINITY))
|
||||
except Exception as e:
|
||||
log(f'*** preexec failed with e={e}')
|
||||
raise
|
||||
else:
|
||||
preexec = None
|
||||
if out:
|
||||
out = open(out, 'w')
|
||||
self.child = subprocess.Popen(
|
||||
args2,
|
||||
env=environ,
|
||||
preexec_fn=preexec,
|
||||
stdout=out,
|
||||
stderr=subprocess.STDOUT,
|
||||
)
|
||||
|
||||
# Connect to flightgear's telnet server.
|
||||
timeout = 15
|
||||
t0 = time.time()
|
||||
while 1:
|
||||
time.sleep(1)
|
||||
dt = time.time() - t0
|
||||
if dt > timeout:
|
||||
text = f'Timeout trying to connect. timeout={timeout}'
|
||||
log(text)
|
||||
raise Exception(text)
|
||||
try:
|
||||
log('Connecting... ')
|
||||
self.fg = FlightGear.FlightGear('localhost', telnet_port)
|
||||
log(f'Connected. timeout={timeout} dt={dt:.1f}')
|
||||
return
|
||||
except Exception as e:
|
||||
log(f'Failed to connect timeout={timeout} dt={dt:.1f}: {e}')
|
||||
|
||||
def waitfor(self, name, value, timeout=30):
|
||||
'''
|
||||
Waits for specified property to be <value>. Returns time taken.
|
||||
'''
|
||||
t0 = time.time()
|
||||
while 1:
|
||||
time.sleep(1)
|
||||
dt = time.time() - t0
|
||||
try:
|
||||
v = self.fg[name]
|
||||
log(f'Waiting for {name}={value} current value: {v}. timeout={timeout} dt={dt:.1f}')
|
||||
except Exception as e:
|
||||
log(f'Failed to get value of property {name}: {e}. timeout={timeout} dt={dt:.1f}')
|
||||
v = None
|
||||
if v == value:
|
||||
return dt
|
||||
if dt > timeout:
|
||||
raise Exception(f'Timeout waiting for {name}={value}; current value: {v}. timeout={timeout}')
|
||||
|
||||
def run_command(self, command):
|
||||
self.fg.telnet._putcmd(command)
|
||||
ret = self.fg.telnet._getresp()
|
||||
log(f'command={command!r} ret: {ret}')
|
||||
return ret
|
||||
|
||||
def close(self):
|
||||
assert self.child
|
||||
log(f'close(): stopping flightgear pid={self.child.pid}')
|
||||
if 1:
|
||||
# Kill any child processes so that things work if fgfs is being run
|
||||
# by download_and_compile.sh's run_fgfs.sh script.
|
||||
#
|
||||
# This is Unix-only.
|
||||
try:
|
||||
child_pids = subprocess.check_output(f'pgrep -P {self.child.pid}', shell=True)
|
||||
except Exception:
|
||||
# We get here if self.child has no child processes.
|
||||
child_pids = b''
|
||||
child_pids = child_pids.decode('utf-8')
|
||||
child_pids = child_pids.split()
|
||||
for child_pid in child_pids:
|
||||
#log(f'*** close() child_pid={child_pid}')
|
||||
child_pid = int(child_pid)
|
||||
#log(f'*** close() killing child_pid={child_pid}')
|
||||
os.kill(child_pid, signal.SIGTERM)
|
||||
self.child.terminate()
|
||||
self.child.wait()
|
||||
self.child = None
|
||||
#log(f'*** close() returning.')
|
||||
|
||||
def __del__(self):
|
||||
if self.child:
|
||||
log('*** Fg.__del__() calling self.close()')
|
||||
self.close()
|
||||
|
||||
def make_recording(
|
||||
fg,
|
||||
continuous=0, # 2 means continuous with compression.
|
||||
extra_properties=0,
|
||||
main_view=0,
|
||||
length=5,
|
||||
):
|
||||
'''
|
||||
Makes a recording, and returns its path.
|
||||
|
||||
We check that the recording file is newly created.
|
||||
'''
|
||||
t = time.time()
|
||||
fg.fg['/sim/replay/record-signals'] = True # Just in case they are disabled by user.
|
||||
if continuous:
|
||||
assert not fg.fg['/sim/replay/record-continuous']
|
||||
if continuous == 2:
|
||||
fg.fg['/sim/replay/record-continuous-compression'] = 1
|
||||
fg.fg['/sim/replay/record-continuous'] = 1
|
||||
t0 = time.time()
|
||||
while 1:
|
||||
if time.time() > t0 + length:
|
||||
break
|
||||
time.sleep(1)
|
||||
fg.run_command('run view-step step=1')
|
||||
fg.fg['/sim/replay/record-continuous'] = 0
|
||||
path = f'{g_tapedir}/{fg.aircraft}-continuous.fgtape'
|
||||
time.sleep(1)
|
||||
else:
|
||||
# Normal recording will have effectively already started, so we sleep
|
||||
# for the remaining time. This is a little inaccurate though because it
|
||||
# looks like normal recording starts a little after t=0, e.g. at t=0.5.
|
||||
#
|
||||
# Also, it looks like /sim/time/elapsed-sec doesn't quite track real
|
||||
# time, so we sometimes need to sleep a little longer.
|
||||
#
|
||||
while 1:
|
||||
# Telnet interface seems very slow even if we set telnet_hz to
|
||||
# 100 (for example). We want to make recording have near to the
|
||||
# specified length, so we are cautious about overrunning.
|
||||
#
|
||||
#log(f'a: time.time()-t={time.time()-t}')
|
||||
t_record_begin = fg.fg['sim/replay/record-normal-begin']
|
||||
#log(f'b: time.time()-t={time.time()-t}')
|
||||
t_record_end = fg.fg['sim/replay/record-normal-end']
|
||||
#log(f'c: time.time()-t={time.time()-t}')
|
||||
t_delta = t_record_end - t_record_begin
|
||||
log(f't_record_begin={t_record_begin} t_record_end={t_record_end} t_delta={t_delta}')
|
||||
if t_delta >= length:
|
||||
break
|
||||
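            # Sleep most of the remaining time (keeping a 1s margin so we do
            # not overshoot the requested length), but at least 0.2s before
            # re-polling record-normal-begin/end.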
ts = max(length - t_delta - 1, 0.2)
|
||||
log(f'd: ts={ts}')
|
||||
time.sleep(ts)
|
||||
        log(f'/sim/time/elapsed-sec={fg.fg["/sim/time/elapsed-sec"]}')
|
||||
log(f'/sim/replay/start-time={fg.fg["/sim/replay/start-time"]}')
|
||||
log(f'/sim/replay/end-time={fg.fg["/sim/replay/end-time"]}')
|
||||
fg.fg.telnet._putcmd('run save-tape tape-data/starttime= tape-data/stoptime=')
|
||||
response = fg.fg.telnet._getresp()
|
||||
log(f'response: {response!r}')
|
||||
path = f'{g_tapedir}/{fg.aircraft}.fgtape'
|
||||
|
||||
# Check recording is new.
|
||||
os.system(f'ls -lL {path}')
|
||||
s = os.stat(path, follow_symlinks=True)
|
||||
assert s.st_mtime > t
|
||||
path2 = readlink(path)
|
||||
log(f'path={path} path2={path2}')
|
||||
return path
|
||||
|
||||
|
||||
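# Illustrative sketch (not executed): making a short Continuous recording with
# compression via the helper above, given a connected Fg instance <fg> such as
# the one in the earlier sketch. The returned path points at the
# <aircraft>-continuous.fgtape file in g_tapedir.
if 0:
    tape = make_recording(fg, continuous=2, length=5)
    log(f'Recording written to: {tape}')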
def test_record_replay(
|
||||
fgfs_save,
|
||||
fgfs_load,
|
||||
multiplayer,
|
||||
continuous,
|
||||
extra_properties,
|
||||
main_view,
|
||||
length,
|
||||
):
|
||||
if not fgfs_load:
|
||||
fgfs_load = fgfs_save
|
||||
log(f'=== save: {fgfs_save}')
|
||||
log(f'=== load: {fgfs_load}')
|
||||
log(f'=== --multiplayer {multiplayer} --continuous {continuous} --extra-properties {extra_properties} --main-view {main_view}')
|
||||
log(f'===')
|
||||
|
||||
aircraft = 'harrier-gr3'
|
||||
args = f'--state=vto --airport=egtk'
|
||||
args += f' --prop:bool:/sim/replay/record-extra-properties={extra_properties}'
|
||||
args += f' --prop:bool:/sim/replay/record-main-view={main_view}'
|
||||
args += f' --prop:bool:/sim/replay/record-main-window=0'
|
||||
#args += f' --prop:bool:/sim/time/simple-time/enabled=0'
|
||||
|
||||
# Start Flightgear.
|
||||
fg = Fg(aircraft, f'{fgfs_save} {args}',
|
||||
#env='SG_LOG_DELTAS=flightgear/src/Network/props.cxx=4',
|
||||
telnet_hz=100,
|
||||
)
|
||||
fg.waitfor('/sim/fdm-initialized', 1, timeout=45)
|
||||
|
||||
assert fg.fg['sim/replay/record-extra-properties'] == extra_properties
|
||||
assert fg.fg['sim/replay/record-main-view'] == main_view
|
||||
log(f'sim/replay/record-extra-properties={fg.fg["sim/replay/record-extra-properties"]}')
|
||||
|
||||
# Save recording:
|
||||
path = make_recording(fg,
|
||||
continuous=continuous,
|
||||
extra_properties=extra_properties,
|
||||
main_view=main_view,
|
||||
length=length,
|
||||
)
|
||||
|
||||
g_cleanup.append(lambda: remove(path))
|
||||
fg.close()
|
||||
|
||||
# Load recording into new Flightgear.
|
||||
path = f'{g_tapedir}/{aircraft}-continuous.fgtape' if continuous else f'{g_tapedir}/{aircraft}.fgtape'
|
||||
fg = Fg(aircraft, f'{fgfs_load} {args} --load-tape={path}')
|
||||
fg.waitfor('/sim/fdm-initialized', 1, timeout=45)
|
||||
fg.waitfor('/sim/replay/replay-state', 1)
|
||||
|
||||
t0 = time.time()
|
||||
|
||||
# Check replay time is ok.
|
||||
rtime_begin = fg.fg['/sim/replay/start-time']
|
||||
rtime_end = fg.fg['/sim/replay/end-time']
|
||||
rtime = rtime_end - rtime_begin
|
||||
log(f'rtime={rtime_begin}..{rtime_end}, recording length: {rtime}, length={length}')
|
||||
assert rtime > length-1 and rtime <= length+2, \
|
||||
f'length={length} rtime_begin={rtime_begin} rtime_end={rtime_end} rtime={rtime}'
|
||||
|
||||
num_frames_extra_properties = fg.fg['/sim/replay/continuous-stats-num-frames-extra-properties']
|
||||
log(f'num_frames_extra_properties={num_frames_extra_properties}')
|
||||
if continuous:
|
||||
if main_view:
|
||||
assert num_frames_extra_properties > 1, f'num_frames_extra_properties={num_frames_extra_properties}'
|
||||
else:
|
||||
assert num_frames_extra_properties == 0
|
||||
else:
|
||||
assert num_frames_extra_properties in (0, None), \
|
||||
f'num_frames_extra_properties={num_frames_extra_properties}'
|
||||
|
||||
fg.run_command('run dialog-show dialog-name=replay')
|
||||
|
||||
while 1:
|
||||
t = time.time()
|
||||
if t < t0 + length - 1:
|
||||
pass
|
||||
# Disabled because it seems that Flightgear starts replaying before
|
||||
# we see replay-state set to 1 because scenery loading blocks
|
||||
# things.
|
||||
#
|
||||
            #assert not fg.fg['/sim/replay/replay-state-eof'], f'Replay has finished too early; length={length} t-t0={t-t0}'
|
||||
if t > t0 + length + 1:
|
||||
            assert fg.fg['/sim/replay/replay-state-eof'], f'Replay has not finished on time; length={length} t-t0={t-t0}'
|
||||
break
|
||||
e = fg.fg['sim/replay/replay-error']
|
||||
assert not e, f'Replay failed: e={e}'
|
||||
time.sleep(1)
|
||||
|
||||
fg.close()
|
||||
|
||||
remove(path)
|
||||
|
||||
log('Test passed')
|
||||
|
||||
|
||||
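# Illustrative sketch (not executed): a single record/replay round trip using
# one fgfs wrapper for both saving and loading; passing None for fgfs_load
# falls back to fgfs_save, as handled at the top of the function above.
if 0:
    test_record_replay(
            './build-walk/fgfs.exe-run.sh',
            None,
            multiplayer=0,
            continuous=1,
            extra_properties=0,
            main_view=0,
            length=10,
            )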
def test_motion(fgfs, multiplayer=False):
|
||||
'''
|
||||
Records UFO moving with constant velocity with varying framerates, then
|
||||
replays with varying framerates and checks that replayed UFO moves with
|
||||
expected constant speed.
|
||||
|
||||
If <multiplayer> is true we also record MP UFO running in second Flightgear
|
||||
instance and check that it too moves at constant speed when replaying.
|
||||
'''
|
||||
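    # The check performed later by examine_values() amounts to verifying that
    # every logged speed sample stays within a small tolerance of the fixed
    # speed we command below; a minimal sketch of that idea with made-up
    # sample values:
    if 0:
        samples = [100.0, 100.02, 99.98]    # hypothetical logged speeds
        assert all(abs(s - 100.0) <= 0.1 for s in samples)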
log('')
|
||||
log('='*80)
|
||||
log('== Record')
|
||||
|
||||
aircraft = 'ufo'
|
||||
fgfs += ' --prop:bool:/sim/time/simple-time/enabled=true'
|
||||
if multiplayer:
|
||||
fg = Fg( aircraft, f'{fgfs} --prop:/sim/replay/log-raw-speed-multiplayer=cgdae-t')
|
||||
else:
|
||||
fg = Fg( aircraft, f'{fgfs}')
|
||||
path = f'{g_tapedir}/{fg.aircraft}-continuous.fgtape'
|
||||
|
||||
fg.waitfor('/sim/fdm-initialized', 1, timeout=45)
|
||||
|
||||
fg.fg['/controls/engines/engine[0]/throttle'] = 0
|
||||
|
||||
# Throttle/speed for ufo is set in fgdata/Aircraft/ufo/ufo.nas.
|
||||
#
|
||||
speed_max = 2000 # default for ufo; current=7.
|
||||
fixed_speed = 100
|
||||
throttle = fixed_speed / speed_max
|
||||
|
||||
if multiplayer:
|
||||
fg.fg['/sim/replay/record-multiplayer'] = True
|
||||
fg2 = Fg( aircraft, f'{fgfs} --callsign=cgdae-t --multiplay=in,4,,5033 --read-only', telnet_port=5501)
|
||||
fg2.waitfor('/sim/fdm-initialized', 1, timeout=45)
|
||||
fg.fg['/controls/engines/engine[0]/throttle'] = throttle
|
||||
fg2.fg['/controls/engines/engine[0]/throttle'] = throttle
|
||||
time.sleep(1)
|
||||
fgt = fg.fg['/controls/engines/engine[0]/throttle']
|
||||
fg2t = fg2.fg['/controls/engines/engine[0]/throttle']
|
||||
log(f'fgt={fgt} fg2t={fg2t}')
|
||||
else:
|
||||
fg.fg['/controls/engines/engine[0]/throttle'] = throttle
|
||||
|
||||
# Run UFO with constant speed, varying the framerate so we check whether
|
||||
# recorded speeds are affected.
|
||||
#
|
||||
fg.fg['/sim/frame-rate-throttle-hz'] = 5
|
||||
if multiplayer:
|
||||
fg2.fg['/sim/frame-rate-throttle-hz'] = 5
|
||||
|
||||
# Delay to let frame rate settle.
|
||||
time.sleep(10)
|
||||
|
||||
# Start recording.
|
||||
fg.fg['/sim/replay/record-continuous'] = 1
|
||||
time.sleep(5)
|
||||
|
||||
# Change frame rate.
|
||||
fg.fg['/sim/frame-rate-throttle-hz'] = 2
|
||||
time.sleep(5)
|
||||
|
||||
# Change frame rate.
|
||||
fg.fg['/sim/frame-rate-throttle-hz'] = 5
|
||||
if multiplayer:
|
||||
fg2.fg['/sim/frame-rate-throttle-hz'] = 2
|
||||
time.sleep(5)
|
||||
|
||||
# Stop recording.
|
||||
fg.fg['/sim/replay/record-continuous'] = 0
|
||||
|
||||
fg.close()
|
||||
if multiplayer:
|
||||
fg2.close()
|
||||
time.sleep(2)
|
||||
|
||||
path2 = readlink( path)
|
||||
log(f'*** path={path} path2={path2}')
|
||||
g_cleanup.append(lambda: remove(path2))
|
||||
|
||||
log('')
|
||||
log('='*80)
|
||||
log('== Replay')
|
||||
|
||||
if multiplayer:
|
||||
fg = Fg( aircraft, f'{fgfs} --load-tape={path}'
|
||||
f' --prop:/sim/replay/log-raw-speed-multiplayer=cgdae-t'
|
||||
f' --prop:/sim/replay/log-raw-speed=true'
|
||||
)
|
||||
else:
|
||||
fg = Fg( aircraft,
|
||||
f'{fgfs} --load-tape={path} --prop:/sim/replay/log-raw-speed=true',
|
||||
#env='SG_LOG_DELTAS=flightgear/src/Aircraft/flightrecorder.cxx:replay=3',
|
||||
)
|
||||
fg.waitfor('/sim/fdm-initialized', 1, timeout=45)
|
||||
fg.fg['/sim/frame-rate-throttle-hz'] = 10
|
||||
fg.waitfor('/sim/replay/replay-state', 1)
|
||||
|
||||
time.sleep(3)
|
||||
fg.fg['/sim/frame-rate-throttle-hz'] = 2
|
||||
time.sleep(5)
|
||||
fg.fg['/sim/frame-rate-throttle-hz'] = 5
|
||||
time.sleep(3)
|
||||
fg.fg['/sim/frame-rate-throttle-hz'] = 7
|
||||
|
||||
fg.waitfor('/sim/replay/replay-state-eof', 1)
|
||||
|
||||
errors = []
|
||||
def examine_values(infix=''):
|
||||
'''
|
||||
Looks at /sim/replay/log-raw-speed{infix}-values/value[], which will
|
||||
contain measured speed of user/MP UFO. We check that the values are all
|
||||
as expected - constant speed.
|
||||
'''
|
||||
log(f'== Looking at /sim/replay/log-raw-speed{infix}-values/value[]')
|
||||
items0 = fg.fg.ls( f'/sim/replay/log-raw-speed{infix}-values')
|
||||
log(f'{infix} len(items0)={len(items0)}')
|
||||
assert items0, f'Failed to read items in /sim/replay/log-raw-speed{infix}-values/'
|
||||
items = []
|
||||
descriptions = []
|
||||
for item in items0:
|
||||
if item.name == 'value':
|
||||
#log(f'have read item: {item}')
|
||||
items.append(item)
|
||||
elif item.name == 'description':
|
||||
descriptions.append(item)
|
||||
num_errors = 0
|
||||
for i in range(len(items)-1): # Ignore last item because replay at end interpolates.
|
||||
item = items[i]
|
||||
description = ''
|
||||
if i < len(descriptions):
|
||||
description = descriptions[i].value
|
||||
speed = float(item.value)
|
||||
prefix = ' '
|
||||
if abs(speed - fixed_speed) > 0.1:
|
||||
num_errors += 1
|
||||
prefix = '*'
|
||||
log( f' {infix} {prefix} speed={speed:12.4} details: {item}: {description}')
|
||||
if num_errors != 0:
|
||||
log( f'*** Replay showed uneven speed')
|
||||
errors.append('1')
|
||||
|
||||
def show_values(paths):
|
||||
if isinstance(paths, str):
|
||||
paths = paths,
|
||||
log(f'Values in {paths}:')
|
||||
line2values = dict()
|
||||
for i, path in enumerate(paths):
|
||||
line = 0
|
||||
for item in fg.fg.ls(path):
|
||||
if item.name == 'value':
|
||||
line2values.setdefault(line, []).append(item.value)
|
||||
line += 1
|
||||
for line in sorted(line2values.keys()):
|
||||
t = ''
|
||||
for value in line2values[line]:
|
||||
t += f' {value}'
|
||||
log(f' {t}')
|
||||
|
||||
if multiplayer:
|
||||
examine_values()
|
||||
examine_values('-multiplayer')
|
||||
examine_values('-multiplayer-post')
|
||||
|
||||
if 0:
|
||||
show_values('/sim/replay/log-raw-speed-multiplayer-post-relative-distance')
|
||||
show_values('/sim/replay/log-raw-speed-multiplayer-post-relative-bearing')
|
||||
show_values('/sim/replay/log-raw-speed-multiplayer-post-absolute-distance')
|
||||
show_values('/sim/replay/log-raw-speed-multiplayer-post-user-absolute-distance')
|
||||
|
||||
def get_values(path):
|
||||
'''
|
||||
Returns <path>/value[] as a list.
|
||||
'''
|
||||
ret = []
|
||||
for item in fg.fg.ls(path):
|
||||
if item.name == 'value':
|
||||
ret.append(item.value)
|
||||
return ret
|
||||
|
||||
# Check that distance between user and mp is constant.
|
||||
#
|
||||
# The two paths below contain values[] that are the distances of the
|
||||
# mp and user aircraft from their starting points. Both are moving at
|
||||
# the same speed in the same direction, so the differences between each
|
||||
# pair of values should be constant.
|
||||
#
|
||||
distances_mp = get_values('/sim/replay/log-raw-speed-multiplayer-post-absolute-distance')
|
||||
distances_user = get_values('/sim/replay/log-raw-speed-multiplayer-post-user-absolute-distance')
|
||||
log(f'len(distances_user)={len(distances_user)} len(distances_mp)={len(distances_mp)}')
|
||||
assert len(distances_user) == len(distances_mp)
|
||||
assert len(distances_user) > 20
|
||||
for i in range(len(distances_user)):
|
||||
distance_mp = distances_mp[i]
|
||||
distance_user = distances_user[i]
|
||||
delta = distance_mp - distance_user
|
||||
if i == 0:
|
||||
delta_original = delta
|
||||
prefix = ' '
|
||||
if abs(delta - delta_original) > 0.01:
|
||||
#log('replay shows varying differences between user and mp aircraft')
|
||||
errors.append('1')
|
||||
prefix = '*'
|
||||
log(f' {prefix} user={distance_user} mp={distance_mp} delta={delta}')
|
||||
else:
|
||||
examine_values()
|
||||
|
||||
fg.close()
|
||||
if errors:
|
||||
raise Exception('Failure')
|
||||
|
||||
log('test_motion() passed')
|
||||
|
||||
|
||||
def test_carrier(fgfs):
|
||||
'''
|
||||
Checks that mp carrier motion is even.
|
||||
'''
|
||||
# We require simple-time. Can probably also work by setting the default
|
||||
# timing system's lag parameters but haven't figured this out yet.
|
||||
#
|
||||
simple_time = 'true'
|
||||
fg = Fg( 'harrier-gr3',
|
||||
f'{fgfs} --prop:int:/sim/mp-carriers/latch-always=1 --prop:bool:/sim/time/simple-time/enabled={simple_time} --callsign=cgdae3 --airport=ksfo',
|
||||
telnet_port=5500,
|
||||
telnet_hz=100,
|
||||
#out='out-rr-carrier-1',
|
||||
)
|
||||
fg.waitfor('/sim/fdm-initialized', 1, timeout=45)
|
||||
|
||||
fg_carrier = Fg('Nimitz',
|
||||
f'{fgfs} --prop:int:/sim/mp-carriers/latch-always=1 --prop:bool:/sim/time/simple-time/enabled={simple_time} --callsign=cgdae4 --multiplay=in,1,,5033 --read-only',
|
||||
telnet_port=5501,
|
||||
#out='out-rr-carrier-2',
|
||||
)
|
||||
fg_carrier.waitfor('/sim/fdm-initialized', 1, timeout=45)
|
||||
|
||||
fg.fg['/sim/replay/log-raw-speed-multiplayer'] = 'cgdae4'
|
||||
fg.fg['/sim/log-multiplayer-callsign'] = 'cgdae4'
|
||||
|
||||
def get_items(path, leafname, out=None):
|
||||
'''
|
||||
Finds list of tuples from properties <path>/<leafname>[]/*. Appends new
|
||||
items to <out> and returns new items.
|
||||
|
||||
Runs rather slowly because telnet commands appear to be throttled.
|
||||
'''
|
||||
if out is None:
|
||||
out = []
|
||||
out_len_original = len(out)
|
||||
items = fg.fg.ls(path)
|
||||
i = 0
|
||||
for item_i, item in enumerate(items):
|
||||
if item.name == leafname:
|
||||
if i == len(out):
|
||||
#print(f'len(items)={len(items)} item_i={item_i}: looking at {path}/{leafname}[{item.index}]')
|
||||
class Item:
|
||||
pass
|
||||
item2 = Item()
|
||||
item2.i = i
|
||||
for j in fg.fg.ls(f'{path}/{leafname}[{item.index}]'):
|
||||
setattr( item2, j.name, j)
|
||||
out.append(item2)
|
||||
i += 1
|
||||
return out[out_len_original:]
|
||||
|
||||
t0 = time.time()
|
||||
mps = []
|
||||
mppackets = []
|
||||
while 1:
|
||||
time.sleep(1)
|
||||
t = time.time() - t0
|
||||
log(f'test_carrier(): t={t:.1f}')
|
||||
if t > 60:
|
||||
print(f'finished, t={t}')
|
||||
break
|
||||
mps_new = get_items( '/sim/log-multiplayer', 'mp', mps)
|
||||
mppackets_new = get_items( '/sim/log-multiplayer', 'mppacket', mppackets)
|
||||
for mp in mps_new:
|
||||
log(f'test_carrier(): mp: i={mp.i}:'
|
||||
f' speed={mp.speed.value:20}'
|
||||
f' distance={mp.distance.value:20}'
|
||||
f' t={mp.t.value:20}'
|
||||
f' dt={mp.dt.value:20}={mp.dt.value*120:20}/120'
|
||||
f' ubody={mp.ubody.value:20}'
|
||||
f' vbody={mp.vbody.value:20}'
|
||||
f' wbody={mp.wbody.value:20}'
|
||||
)
|
||||
for mppacket in mppackets_new:
|
||||
log(f'test_carrier(): mppacket: i={mppacket.i}:'
|
||||
f' speed={mppacket.speed.value:20}'
|
||||
f' distance={mppacket.distance.value:20}'
|
||||
f' t={mppacket.t.value:20}'
|
||||
f' linear_vel={mppacket.linear_vel.value:20}'
|
||||
f' dt={mppacket.dt.value:20}={mppacket.dt.value*120:20}/120'
|
||||
)
|
||||
|
||||
# Check speed of multiplayer carrier is constant:
|
||||
knots2si = 1852.0/3600
|
||||
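    # 1 knot = 1852m/3600s ~= 0.514 m/s, so 10 knots ~= 5.14 m/s; 10 knots is
    # the speed the test expects the mp carrier to report throughout.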
speed_expected = 10 * knots2si
|
||||
num_incorrect = 0
|
||||
for mp in mps[2:]: # First two items have bogus values.
|
||||
delta = mp.speed.value - speed_expected
|
||||
if abs(delta) > 0.001:
|
||||
num_incorrect += 1
|
||||
print(f' * speed={mp.speed.value:20}')
|
||||
assert num_incorrect == 0, f'num_incorrect={num_incorrect}'
|
||||
fg.close()
|
||||
fg_carrier.close()
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
||||
fgfs = f'./build-walk/fgfs.exe-run.sh'
|
||||
fgfs_old = None
|
||||
|
||||
do_test = 'all'
|
||||
continuous_s = [0, 1, 2] # 2 is continuous with compression.
|
||||
extra_properties_s = [0, 1]
|
||||
main_view_s = [0, 1]
|
||||
multiplayer_s = [0, 1]
|
||||
fgfs_reverse_s = [0]
|
||||
it_min = None
|
||||
it_max = None
|
||||
|
||||
if len(sys.argv) == 1:
|
||||
        do_test = 'all'
|
||||
|
||||
args = iter(sys.argv[1:])
|
||||
while 1:
|
||||
try:
|
||||
arg = next(args)
|
||||
except StopIteration:
|
||||
break
|
||||
if arg == '--all':
|
||||
            do_test = 'all'
|
||||
elif arg == '--carrier':
|
||||
do_test = 'carrier'
|
||||
elif arg == '--continuous':
|
||||
continuous_s = [int(x) for x in next(args).split(',')]
|
||||
log(f'continuous_s={continuous_s}')
|
||||
elif arg == '--tape-dir':
|
||||
g_tapedir = next(args)
|
||||
elif arg == '--extra-properties':
|
||||
extra_properties_s = [int(x) for x in next(args).split(',')]
|
||||
elif arg == '--it-max':
|
||||
it_max = int(next(args))
|
||||
elif arg == '--it-min':
|
||||
it_min = int(next(args))
|
||||
elif arg == '--main-view':
|
||||
main_view_s = [int(x) for x in next(args).split(',')]
|
||||
elif arg == '--multiplayer':
|
||||
multiplayer_s = [int(x) for x in next(args).split(',')]
|
||||
elif arg == '-f':
|
||||
fgfs = next(args)
|
||||
elif arg == '--f-old':
|
||||
fgfs_old = next(args)
|
||||
            fgfs_reverse_s = [0, 1]
|
||||
elif arg == '--test-motion':
|
||||
do_test = 'motion'
|
||||
elif arg == '--test-motion-mp':
|
||||
do_test = 'motion-mp'
|
||||
else:
|
||||
raise Exception(f'Unrecognised arg: {arg!r}')
|
||||
|
||||
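    # Example invocations (the -f wrapper path is just the default assumed
    # above; adjust to taste):
    #
    #   recordreplay.py --all -f ./build-walk/fgfs.exe-run.sh
    #   recordreplay.py --continuous 0,2 --multiplayer 0 --it-max 4
    #   recordreplay.py --test-motion-mp -f ./build-walk/fgfs.exe-run.sh
    #   recordreplay.py --carrier --tape-dir /tmp/fgtapes
    #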
g_tapedir = os.path.abspath(g_tapedir)
|
||||
os.makedirs( g_tapedir, exist_ok=True)
|
||||
if 0:
|
||||
pass
|
||||
elif do_test == 'carrier':
|
||||
test_carrier(fgfs)
|
||||
elif do_test == 'motion':
|
||||
test_motion( fgfs)
|
||||
elif do_test == 'motion-mp':
|
||||
test_motion( fgfs, True)
|
||||
elif do_test == 'all':
|
||||
try:
|
||||
if fgfs_old:
|
||||
for fgfs1, fgfs2 in [(fgfs, fgfs_old), (fgfs_old, fgfs)]:
|
||||
for multiplayer in 0, 1:
|
||||
test_record_replay(
|
||||
fgfs1,
|
||||
fgfs2,
|
||||
multiplayer,
|
||||
continuous=0,
|
||||
extra_properties=0,
|
||||
main_view=0,
|
||||
length=10,
|
||||
)
|
||||
else:
|
||||
log(f'continuous_s={continuous_s}')
|
||||
its_max = len(multiplayer_s) * len(continuous_s) * len(extra_properties_s) * len(main_view_s) * len(fgfs_reverse_s)
|
||||
it = 0
|
||||
for multiplayer in multiplayer_s:
|
||||
for continuous in continuous_s:
|
||||
for extra_properties in extra_properties_s:
|
||||
for main_view in main_view_s:
|
||||
for fgfs_reverse in fgfs_reverse_s:
|
||||
if fgfs_reverse:
|
||||
fgfs_save = fgfs_old
|
||||
fgfs_load = fgfs
|
||||
else:
|
||||
fgfs_save = fgfs
|
||||
fgfs_load = fgfs_old
|
||||
|
||||
ok = True
|
||||
if it_min is not None:
|
||||
if it < it_min:
|
||||
ok = False
|
||||
if it_max is not None:
|
||||
if it >= it_max:
|
||||
ok = False
|
||||
log('')
|
||||
log(f'===')
|
||||
log(f'=== {it}/{its_max}')
|
||||
if ok:
|
||||
test_record_replay(
|
||||
fgfs_save,
|
||||
fgfs_load,
|
||||
multiplayer=multiplayer,
|
||||
continuous=continuous,
|
||||
extra_properties=extra_properties,
|
||||
main_view=main_view,
|
||||
length=10
|
||||
)
|
||||
it += 1
|
||||
finally:
|
||||
pass
|
||||
else:
|
||||
assert 0, f'do_test={do_test}'
|
||||
|
||||
# If everything passed, cleanup. Otherwise leave recordings in place, as
|
||||
# they can be useful for debugging.
|
||||
#
|
||||
for f in g_cleanup:
|
||||
try:
|
||||
f()
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
log(f'{__file__}: Returning 0')
|
||||
165
scripts/python/video.py
Executable file
@@ -0,0 +1,165 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
'''
|
||||
Test script for video encoding.
|
||||
|
||||
Example usage:
|
||||
scripts/python/video.py -f dac/run_fgfs.sh
|
||||
|
||||
'''
|
||||
|
||||
import recordreplay
|
||||
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
|
||||
def main():
|
||||
fgfs = f'./build-walk/fgfs.exe-run.sh'
|
||||
|
||||
args = iter(sys.argv[1:])
|
||||
while 1:
|
||||
try:
|
||||
arg = next(args)
|
||||
except StopIteration:
|
||||
break
|
||||
if arg == '-f':
|
||||
fgfs = next(args)
|
||||
else:
|
||||
raise Exception(f'Unrecognised arg: {arg}')
|
||||
|
||||
fg = recordreplay.Fg(
|
||||
'harrier-gr3',
|
||||
f'{fgfs}'
|
||||
+ f' --state=vto --airport=egtk'
|
||||
+ f' --prop:/sim/replay/record-main-view=1'
|
||||
+ f' --prop:bool:/sim/replay/record-main-window=0'
|
||||
,
|
||||
)
|
||||
|
||||
fg.waitfor('/sim/fdm-initialized', 1, timeout=45)
|
||||
fg.fg['/sim/current-view/view-number-raw'] = 1 # helicopter
|
||||
|
||||
# Rotation speed.
|
||||
fg.fg['/controls/auto-hover/rotation-speed-target'] = 4
|
||||
fg.fg['/controls/auto-hover/rotation-mode'] = 'speed'
|
||||
|
||||
# These will have been set by --state=vto
|
||||
# Sideways speed.
|
||||
fg.fg['/controls/auto-hover/x-speed-target'] = '0'
|
||||
fg.fg['/controls/auto-hover/x-mode'] = 'speed'
|
||||
|
||||
# Vertical speed.
|
||||
fg.fg['/controls/auto-hover/y-speed-target'] = '0'
|
||||
fg.fg['/controls/auto-hover/y-mode'] = 'speed'
|
||||
|
||||
# Forwards speed.
|
||||
fg.fg['/controls/auto-hover/z-speed-target'] = '0'
|
||||
fg.fg['/controls/auto-hover/z-mode'] = 'speed'
|
||||
|
||||
results = []
|
||||
|
||||
def make_recording(codec, container, quality, speed, fixed_dt=None):
|
||||
'''
|
||||
Create recording using specified codec etc.
|
||||
'''
|
||||
fg.fg['/sim/video/container'] = container
|
||||
fdm_time_begin = fg.fg['/sim/time/simple-time/fdm']
|
||||
if fixed_dt:
|
||||
fg.fg['/sim/time/simple-time/enabled'] = True
|
||||
fg.fg['/sim/time/fixed-dt'] = fixed_dt
|
||||
name = f'video-test-c={codec}-q={quality}-s={speed}.{container}'
|
||||
frames_start = fg.fg['/sim/frame-number']
|
||||
fg.run_command( f'run video-start name={name} quality={quality} speed={speed} codec={codec}')
|
||||
dt = 10
|
||||
time.sleep(dt)
|
||||
frames = fg.fg['/sim/frame-number'] - frames_start
|
||||
fdm_time_end = fg.fg['/sim/time/simple-time/fdm']
|
||||
fg.run_command( f'run video-stop')
|
||||
fdm_time = fdm_time_end - fdm_time_begin
|
||||
        error = fg.fg['/sim/video/error']
        e = f'error e={error:3}' if error else 'success'
        e = f'{e:12}'
        if not error:
            # Encoder reported success; the output file should exist.
            if not os.path.isfile(name):
                e = 'error no output file'
        result = f'Video encoding result: {e}: codec={codec:12} container={container:6} quality={quality:6} speed={speed:6} frames={frames:6} frame_rate={frames/dt:6} fdm_time={fdm_time:6} size={os.path.getsize(name):9}: {name}'
|
||||
results.append( result)
|
||||
print( result)
|
||||
|
||||
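    # Illustrative sketch (not executed): a single parameterised encode via the
    # make_recording() helper above; the final branch below loops over several
    # codec/container/quality/speed combinations in the same way.
    if 0:
        make_recording('libx264', 'mkv', quality='0.4', speed='0.9')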
if 1:
|
||||
# Create Continuous recording, replay and create video, check video is
|
||||
# new.
|
||||
|
||||
# Create Continuous recording.
|
||||
video_suffix = 'mkv'
|
||||
tstart = time.time()
|
||||
fg.fg['/sim/video/container'] = video_suffix
|
||||
fg.fg['/sim/video/codec'] = 'libx265'
|
||||
fg.fg['/sim/video/quality'] = 0.75
|
||||
fg.fg['/sim/video/speed'] = 1.0
|
||||
fg.fg['/sim/replay/record-continuous-compression'] = 1
|
||||
fg.fg['/sim/replay/record-continuous'] = 1
|
||||
fg.fg['/sim/replay/record-main-view'] = 1
|
||||
endtime = tstart + 10
|
||||
while 1:
|
||||
if time.time() > endtime:
|
||||
break
|
||||
time.sleep(1)
|
||||
fg.run_command('run view-step step=1')
|
||||
fg.fg['/sim/replay/record-continuous'] = 0
|
||||
|
||||
# Replay to create video.
|
||||
tstart = time.time()
|
||||
fg.fg['sim/replay/replay-main-view'] = 1
|
||||
fg.fg['sim/replay/replay-windows-position'] = 0
|
||||
fg.fg['sim/replay/replay-windows-size'] = 0
|
||||
fg.fg['/sim/video/container'] = 'mkv'
|
||||
fg.fg['/sim/video/codec'] = 'libx265'
|
||||
fg.fg['/sim/video/quality'] = 0.75
|
||||
fg.fg['/sim/video/speed'] = 1.0
|
||||
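        # fixed-dt=0.04 presumably pins the replay to 0.04s steps while
        # encoding, i.e. roughly 25 video frames per second.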
fg.run_command( f'run load-tape tape={fg.aircraft}-continuous create-video=1 fixed-dt=0.04')
|
||||
fg.waitfor('/sim/replay/replay-state', 1) # Wait for replay to start.
|
||||
fg.waitfor('/sim/replay/replay-state-eof', 1) # Wait for replay eof.
|
||||
|
||||
# Check video looks ok.
|
||||
video_path = f'fgvideo-harrier-gr3.{video_suffix}'
|
||||
video_path2 = recordreplay.readlink( video_path)
|
||||
print(f'*** video_path={video_path} video_path2={video_path2}')
|
||||
t = os.path.getmtime(video_path2)
|
||||
assert t > tstart, f'Video file too old: {video_path2}'
|
||||
|
||||
elif 1:
|
||||
make_recording('libtheora', 'ogv', quality=1, speed=1, fixed_dt=0.02)
|
||||
make_recording('libx265', 'mkv', quality=1, speed=1, fixed_dt=0.02)
|
||||
|
||||
else:
|
||||
for codec in (
|
||||
'libtheora',
|
||||
'libx265',
|
||||
'mpeg2video',
|
||||
'libx264',
|
||||
'libvpx',
|
||||
):
|
||||
fg.fg['/sim/video/codec'] = codec
|
||||
for container in (
|
||||
'mpeg',
|
||||
'ogv',
|
||||
'mkv',
|
||||
):
|
||||
# High quality can semi-freeze Flightgear.
|
||||
for quality, speed in [
|
||||
('0.4', '0.5'),
|
||||
('0.4', '0.9'),
|
||||
('0.1', '0.5'),
|
||||
('0.1', '0.9'),
|
||||
]:
|
||||
# This ordering should give increasing frame rate.
|
||||
make_recording(codec, container, quality, speed)
|
||||
|
||||
print('Results:')
|
||||
for result in results:
|
||||
print( f' {result}')
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()