Added documentation
- Using sphinx to documente the project - Autodocumenting Python using numpy-style docstr and sphinx autodoc with napoleon
This commit is contained in:
parent
f0ce32250a
commit
c22049f499
2
.gitignore
vendored
2
.gitignore
vendored
|
|
@ -1,6 +1,7 @@
|
|||
# ---> Python
|
||||
# Byte-compiled / optimized / DLL files
|
||||
__pycache__/
|
||||
*/__pycache__/
|
||||
*.py[cod]
|
||||
*$py.class
|
||||
|
||||
|
|
@ -10,6 +11,7 @@ __pycache__/
|
|||
# Distribution / packaging
|
||||
.Python
|
||||
build/
|
||||
docs/build
|
||||
develop-eggs/
|
||||
dist/
|
||||
downloads/
|
||||
|
|
|
|||
|
|
@ -1,3 +1,6 @@
|
|||
# transport-accessibility
|
||||
For polish public transport
|
||||
### Tool for cities to easily publish their public transport in a standardized form
|
||||
Public transport is often provided by many, smaller companies, operating mostly in their respective regions. A problem deriving from this is that, on a interregional, national and international, few machine-readable data is available. This impacts customers, as it makes it more difficult to find public transport routes, more so while travelling in regions they are less familiar with.
|
||||
|
||||
We designed a tool to provide city officials or volunteers to map the public transport in their jurisdiction using an easy-to-use graphical utility.
|
||||
|
||||
|
|
|
|||
1
TODO.md
1
TODO.md
|
|
@ -1,6 +1,7 @@
|
|||
## General
|
||||
- Decide for a license
|
||||
- Decide how to license produced data as the work of inserting the data and the data itself is not ours. Make the users agree on a FLOSS or CC license for the GTFS files produced from their data? What if someone doesn't own the data they upload? What if it's not free - How can we produce data in that case? ToS?
|
||||
- Add documentation. Sphinx?
|
||||
|
||||
## Frontend
|
||||
- Add TODOs
|
||||
|
|
|
|||
8
bin/docutils
Executable file
8
bin/docutils
Executable file
|
|
@ -0,0 +1,8 @@
|
|||
#!/home/johannes/code/transport-accessibility/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from docutils.__main__ import main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(main())
|
||||
8
bin/pybabel
Executable file
8
bin/pybabel
Executable file
|
|
@ -0,0 +1,8 @@
|
|||
#!/home/johannes/code/transport-accessibility/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from babel.messages.frontend import main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(main())
|
||||
8
bin/pygmentize
Executable file
8
bin/pygmentize
Executable file
|
|
@ -0,0 +1,8 @@
|
|||
#!/home/johannes/code/transport-accessibility/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from pygments.cmdline import main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(main())
|
||||
23
bin/rst2html.py
Executable file
23
bin/rst2html.py
Executable file
|
|
@ -0,0 +1,23 @@
|
|||
#!/home/johannes/code/transport-accessibility/bin/python
|
||||
|
||||
# $Id: rst2html.py 9115 2022-07-28 17:06:24Z milde $
|
||||
# Author: David Goodger <goodger@python.org>
|
||||
# Copyright: This module has been placed in the public domain.
|
||||
|
||||
"""
|
||||
A minimal front end to the Docutils Publisher, producing HTML.
|
||||
"""
|
||||
|
||||
try:
|
||||
import locale
|
||||
locale.setlocale(locale.LC_ALL, '')
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
from docutils.core import publish_cmdline, default_description
|
||||
|
||||
|
||||
description = ('Generates (X)HTML documents from standalone reStructuredText '
|
||||
'sources. ' + default_description)
|
||||
|
||||
publish_cmdline(writer_name='html', description=description)
|
||||
26
bin/rst2html4.py
Executable file
26
bin/rst2html4.py
Executable file
|
|
@ -0,0 +1,26 @@
|
|||
#!/home/johannes/code/transport-accessibility/bin/python
|
||||
|
||||
# $Id: rst2html4.py 9115 2022-07-28 17:06:24Z milde $
|
||||
# Author: David Goodger <goodger@python.org>
|
||||
# Copyright: This module has been placed in the public domain.
|
||||
|
||||
"""
|
||||
A minimal front end to the Docutils Publisher, producing (X)HTML.
|
||||
|
||||
The output conforms to XHTML 1.0 transitional
|
||||
and almost to HTML 4.01 transitional (except for closing empty tags).
|
||||
"""
|
||||
|
||||
try:
|
||||
import locale
|
||||
locale.setlocale(locale.LC_ALL, '')
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
from docutils.core import publish_cmdline, default_description
|
||||
|
||||
|
||||
description = ('Generates (X)HTML documents from standalone reStructuredText '
|
||||
'sources. ' + default_description)
|
||||
|
||||
publish_cmdline(writer_name='html4', description=description)
|
||||
33
bin/rst2html5.py
Executable file
33
bin/rst2html5.py
Executable file
|
|
@ -0,0 +1,33 @@
|
|||
#!/home/johannes/code/transport-accessibility/bin/python
|
||||
# :Copyright: © 2015 Günter Milde.
|
||||
# :License: Released under the terms of the `2-Clause BSD license`_, in short:
|
||||
#
|
||||
# Copying and distribution of this file, with or without modification,
|
||||
# are permitted in any medium without royalty provided the copyright
|
||||
# notice and this notice are preserved.
|
||||
# This file is offered as-is, without any warranty.
|
||||
#
|
||||
# .. _2-Clause BSD license: https://opensource.org/licenses/BSD-2-Clause
|
||||
#
|
||||
# Revision: $Revision: 9021 $
|
||||
# Date: $Date: 2022-03-04 16:54:22 +0100 (Fr, 04. Mär 2022) $
|
||||
|
||||
"""
|
||||
A minimal front end to the Docutils Publisher, producing HTML 5 documents.
|
||||
|
||||
The output is also valid XML.
|
||||
"""
|
||||
|
||||
try:
|
||||
import locale # module missing in Jython
|
||||
locale.setlocale(locale.LC_ALL, '')
|
||||
except locale.Error:
|
||||
pass
|
||||
|
||||
from docutils.core import publish_cmdline, default_description
|
||||
|
||||
description = ('Generates HTML5 documents from standalone '
|
||||
'reStructuredText sources.\n'
|
||||
+ default_description)
|
||||
|
||||
publish_cmdline(writer_name='html5', description=description)
|
||||
26
bin/rst2latex.py
Executable file
26
bin/rst2latex.py
Executable file
|
|
@ -0,0 +1,26 @@
|
|||
#!/home/johannes/code/transport-accessibility/bin/python
|
||||
|
||||
# $Id: rst2latex.py 9115 2022-07-28 17:06:24Z milde $
|
||||
# Author: David Goodger <goodger@python.org>
|
||||
# Copyright: This module has been placed in the public domain.
|
||||
|
||||
"""
|
||||
A minimal front end to the Docutils Publisher, producing LaTeX.
|
||||
"""
|
||||
|
||||
try:
|
||||
import locale
|
||||
locale.setlocale(locale.LC_ALL, '')
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
from docutils.core import publish_cmdline
|
||||
|
||||
description = ('Generates LaTeX documents from standalone reStructuredText '
|
||||
'sources. '
|
||||
'Reads from <source> (default is stdin) and writes to '
|
||||
'<destination> (default is stdout). See '
|
||||
'<https://docutils.sourceforge.io/docs/user/latex.html> for '
|
||||
'the full reference.')
|
||||
|
||||
publish_cmdline(writer_name='latex', description=description)
|
||||
27
bin/rst2man.py
Executable file
27
bin/rst2man.py
Executable file
|
|
@ -0,0 +1,27 @@
|
|||
#!/home/johannes/code/transport-accessibility/bin/python
|
||||
|
||||
# Author:
|
||||
# Contact: grubert@users.sf.net
|
||||
# Copyright: This module has been placed in the public domain.
|
||||
|
||||
"""
|
||||
man.py
|
||||
======
|
||||
|
||||
This module provides a simple command line interface that uses the
|
||||
man page writer to output from ReStructuredText source.
|
||||
"""
|
||||
|
||||
import locale
|
||||
try:
|
||||
locale.setlocale(locale.LC_ALL, '')
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
from docutils.core import publish_cmdline, default_description
|
||||
from docutils.writers import manpage
|
||||
|
||||
description = ("Generates plain unix manual documents. "
|
||||
+ default_description)
|
||||
|
||||
publish_cmdline(writer=manpage.Writer(), description=description)
|
||||
28
bin/rst2odt.py
Executable file
28
bin/rst2odt.py
Executable file
|
|
@ -0,0 +1,28 @@
|
|||
#!/home/johannes/code/transport-accessibility/bin/python
|
||||
|
||||
# $Id: rst2odt.py 9115 2022-07-28 17:06:24Z milde $
|
||||
# Author: Dave Kuhlman <dkuhlman@rexx.com>
|
||||
# Copyright: This module has been placed in the public domain.
|
||||
|
||||
"""
|
||||
A front end to the Docutils Publisher, producing OpenOffice documents.
|
||||
"""
|
||||
|
||||
try:
|
||||
import locale
|
||||
locale.setlocale(locale.LC_ALL, '')
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
from docutils.core import publish_cmdline_to_binary, default_description
|
||||
from docutils.writers.odf_odt import Writer, Reader
|
||||
|
||||
|
||||
description = ('Generates OpenDocument/OpenOffice/ODF documents from '
|
||||
'standalone reStructuredText sources. ' + default_description)
|
||||
|
||||
|
||||
writer = Writer()
|
||||
reader = Reader()
|
||||
output = publish_cmdline_to_binary(reader=reader, writer=writer,
|
||||
description=description)
|
||||
20
bin/rst2odt_prepstyles.py
Executable file
20
bin/rst2odt_prepstyles.py
Executable file
|
|
@ -0,0 +1,20 @@
|
|||
#!/home/johannes/code/transport-accessibility/bin/python
|
||||
|
||||
# Copyright: This module has been placed in the public domain.
|
||||
|
||||
"""
|
||||
Adapt a word-processor-generated styles.odt for odtwriter use:
|
||||
|
||||
Drop page size specifications from styles.xml in STYLE_FILE.odt.
|
||||
See https://docutils.sourceforge.io/docs/user/odt.html#page-size
|
||||
|
||||
Provisional backwards compatibility stub (to be removed in Docutils >= 0.21).
|
||||
|
||||
The actual code moved to the "docutils" library package and can be started
|
||||
with ``python -m docutils.writers.odf_odt.prepstyles``.
|
||||
"""
|
||||
|
||||
from docutils.writers.odf_odt import prepstyles
|
||||
|
||||
if __name__ == '__main__':
|
||||
prepstyles.main()
|
||||
23
bin/rst2pseudoxml.py
Executable file
23
bin/rst2pseudoxml.py
Executable file
|
|
@ -0,0 +1,23 @@
|
|||
#!/home/johannes/code/transport-accessibility/bin/python
|
||||
|
||||
# $Id: rst2pseudoxml.py 9115 2022-07-28 17:06:24Z milde $
|
||||
# Author: David Goodger <goodger@python.org>
|
||||
# Copyright: This module has been placed in the public domain.
|
||||
|
||||
"""
|
||||
A minimal front end to the Docutils Publisher, producing pseudo-XML.
|
||||
"""
|
||||
|
||||
try:
|
||||
import locale
|
||||
locale.setlocale(locale.LC_ALL, '')
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
from docutils.core import publish_cmdline, default_description
|
||||
|
||||
|
||||
description = ('Generates pseudo-XML from standalone reStructuredText '
|
||||
'sources (for testing purposes). ' + default_description)
|
||||
|
||||
publish_cmdline(description=description)
|
||||
24
bin/rst2s5.py
Executable file
24
bin/rst2s5.py
Executable file
|
|
@ -0,0 +1,24 @@
|
|||
#!/home/johannes/code/transport-accessibility/bin/python
|
||||
|
||||
# $Id: rst2s5.py 9115 2022-07-28 17:06:24Z milde $
|
||||
# Author: Chris Liechti <cliechti@gmx.net>
|
||||
# Copyright: This module has been placed in the public domain.
|
||||
|
||||
"""
|
||||
A minimal front end to the Docutils Publisher, producing HTML slides using
|
||||
the S5 template system.
|
||||
"""
|
||||
|
||||
try:
|
||||
import locale
|
||||
locale.setlocale(locale.LC_ALL, '')
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
from docutils.core import publish_cmdline, default_description
|
||||
|
||||
|
||||
description = ('Generates S5 (X)HTML slideshow documents from standalone '
|
||||
'reStructuredText sources. ' + default_description)
|
||||
|
||||
publish_cmdline(writer_name='s5', description=description)
|
||||
27
bin/rst2xetex.py
Executable file
27
bin/rst2xetex.py
Executable file
|
|
@ -0,0 +1,27 @@
|
|||
#!/home/johannes/code/transport-accessibility/bin/python
|
||||
|
||||
# $Id: rst2xetex.py 9115 2022-07-28 17:06:24Z milde $
|
||||
# Author: Guenter Milde
|
||||
# Copyright: This module has been placed in the public domain.
|
||||
|
||||
"""
|
||||
A minimal front end to the Docutils Publisher, producing Lua/XeLaTeX code.
|
||||
"""
|
||||
|
||||
try:
|
||||
import locale
|
||||
locale.setlocale(locale.LC_ALL, '')
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
from docutils.core import publish_cmdline
|
||||
|
||||
description = ('Generates LaTeX documents from standalone reStructuredText '
|
||||
'sources for compilation with the Unicode-aware TeX variants '
|
||||
'XeLaTeX or LuaLaTeX. '
|
||||
'Reads from <source> (default is stdin) and writes to '
|
||||
'<destination> (default is stdout). See '
|
||||
'<https://docutils.sourceforge.io/docs/user/latex.html> for '
|
||||
'the full reference.')
|
||||
|
||||
publish_cmdline(writer_name='xetex', description=description)
|
||||
23
bin/rst2xml.py
Executable file
23
bin/rst2xml.py
Executable file
|
|
@ -0,0 +1,23 @@
|
|||
#!/home/johannes/code/transport-accessibility/bin/python
|
||||
|
||||
# $Id: rst2xml.py 9115 2022-07-28 17:06:24Z milde $
|
||||
# Author: David Goodger <goodger@python.org>
|
||||
# Copyright: This module has been placed in the public domain.
|
||||
|
||||
"""
|
||||
A minimal front end to the Docutils Publisher, producing Docutils XML.
|
||||
"""
|
||||
|
||||
try:
|
||||
import locale
|
||||
locale.setlocale(locale.LC_ALL, '')
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
from docutils.core import publish_cmdline, default_description
|
||||
|
||||
|
||||
description = ('Generates Docutils-native XML from standalone '
|
||||
'reStructuredText sources. ' + default_description)
|
||||
|
||||
publish_cmdline(writer_name='xml', description=description)
|
||||
25
bin/rstpep2html.py
Executable file
25
bin/rstpep2html.py
Executable file
|
|
@ -0,0 +1,25 @@
|
|||
#!/home/johannes/code/transport-accessibility/bin/python
|
||||
|
||||
# $Id: rstpep2html.py 9115 2022-07-28 17:06:24Z milde $
|
||||
# Author: David Goodger <goodger@python.org>
|
||||
# Copyright: This module has been placed in the public domain.
|
||||
|
||||
"""
|
||||
A minimal front end to the Docutils Publisher, producing HTML from PEP
|
||||
(Python Enhancement Proposal) documents.
|
||||
"""
|
||||
|
||||
try:
|
||||
import locale
|
||||
locale.setlocale(locale.LC_ALL, '')
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
from docutils.core import publish_cmdline, default_description
|
||||
|
||||
|
||||
description = ('Generates (X)HTML from reStructuredText-format PEP files. '
|
||||
+ default_description)
|
||||
|
||||
publish_cmdline(reader_name='pep', writer_name='pep_html',
|
||||
description=description)
|
||||
8
bin/sphinx-apidoc
Executable file
8
bin/sphinx-apidoc
Executable file
|
|
@ -0,0 +1,8 @@
|
|||
#!/home/johannes/code/transport-accessibility/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from sphinx.ext.apidoc import main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(main())
|
||||
8
bin/sphinx-autobuild
Executable file
8
bin/sphinx-autobuild
Executable file
|
|
@ -0,0 +1,8 @@
|
|||
#!/home/johannes/code/transport-accessibility/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from sphinx_autobuild.__main__ import main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(main())
|
||||
8
bin/sphinx-autogen
Executable file
8
bin/sphinx-autogen
Executable file
|
|
@ -0,0 +1,8 @@
|
|||
#!/home/johannes/code/transport-accessibility/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from sphinx.ext.autosummary.generate import main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(main())
|
||||
8
bin/sphinx-build
Executable file
8
bin/sphinx-build
Executable file
|
|
@ -0,0 +1,8 @@
|
|||
#!/home/johannes/code/transport-accessibility/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from sphinx.cmd.build import main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(main())
|
||||
8
bin/sphinx-quickstart
Executable file
8
bin/sphinx-quickstart
Executable file
|
|
@ -0,0 +1,8 @@
|
|||
#!/home/johannes/code/transport-accessibility/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from sphinx.cmd.quickstart import main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(main())
|
||||
8
bin/uvicorn
Executable file
8
bin/uvicorn
Executable file
|
|
@ -0,0 +1,8 @@
|
|||
#!/home/johannes/code/transport-accessibility/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from uvicorn.main import main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(main())
|
||||
8
bin/watchfiles
Executable file
8
bin/watchfiles
Executable file
|
|
@ -0,0 +1,8 @@
|
|||
#!/home/johannes/code/transport-accessibility/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from watchfiles.cli import cli
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(cli())
|
||||
20
docs/Makefile
Normal file
20
docs/Makefile
Normal file
|
|
@ -0,0 +1,20 @@
|
|||
# Minimal makefile for Sphinx documentation
|
||||
#
|
||||
|
||||
# You can set these variables from the command line, and also
|
||||
# from the environment for the first two.
|
||||
SPHINXOPTS ?=
|
||||
SPHINXBUILD ?= sphinx-build
|
||||
SOURCEDIR = source
|
||||
BUILDDIR = build
|
||||
|
||||
# Put it first so that "make" without argument is like "make help".
|
||||
help:
|
||||
@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
|
||||
|
||||
.PHONY: help Makefile
|
||||
|
||||
# Catch-all target: route all unknown targets to Sphinx using the new
|
||||
# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
|
||||
%: Makefile
|
||||
@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
|
||||
35
docs/make.bat
Normal file
35
docs/make.bat
Normal file
|
|
@ -0,0 +1,35 @@
|
|||
@ECHO OFF
|
||||
|
||||
pushd %~dp0
|
||||
|
||||
REM Command file for Sphinx documentation
|
||||
|
||||
if "%SPHINXBUILD%" == "" (
|
||||
set SPHINXBUILD=sphinx-build
|
||||
)
|
||||
set SOURCEDIR=source
|
||||
set BUILDDIR=build
|
||||
|
||||
%SPHINXBUILD% >NUL 2>NUL
|
||||
if errorlevel 9009 (
|
||||
echo.
|
||||
echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
|
||||
echo.installed, then set the SPHINXBUILD environment variable to point
|
||||
echo.to the full path of the 'sphinx-build' executable. Alternatively you
|
||||
echo.may add the Sphinx directory to PATH.
|
||||
echo.
|
||||
echo.If you don't have Sphinx installed, grab it from
|
||||
echo.https://www.sphinx-doc.org/
|
||||
exit /b 1
|
||||
)
|
||||
|
||||
if "%1" == "" goto help
|
||||
|
||||
%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
|
||||
goto end
|
||||
|
||||
:help
|
||||
%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
|
||||
|
||||
:end
|
||||
popd
|
||||
42
docs/source/conf.py
Normal file
42
docs/source/conf.py
Normal file
|
|
@ -0,0 +1,42 @@
|
|||
# Configuration file for the Sphinx documentation builder.
|
||||
#
|
||||
# For the full list of built-in configuration values, see the documentation:
|
||||
# https://www.sphinx-doc.org/en/master/usage/configuration.html
|
||||
|
||||
# -- Project information -----------------------------------------------------
|
||||
# https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information
|
||||
import os
|
||||
import sys
|
||||
import django
|
||||
|
||||
sys.path.insert(0, os.path.abspath('../../transport_accessibility'))
|
||||
os.environ['DJANGO_SETTINGS_MODULE'] = 'transport_accessibility.settings'
|
||||
django.setup()
|
||||
|
||||
project = 'Transport Accessibility'
|
||||
copyright = '2024, Jan Kiljanski and Johannes Randerath'
|
||||
author = 'Jan Kiljanski and Johannes Randerath'
|
||||
release = '0.1 Beta 1'
|
||||
|
||||
# -- General configuration ---------------------------------------------------
|
||||
# https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration
|
||||
|
||||
extensions = [
|
||||
'sphinx.ext.autodoc',
|
||||
'sphinx.ext.napoleon',
|
||||
'sphinx.ext.autosummary',
|
||||
]
|
||||
|
||||
templates_path = ['_templates']
|
||||
exclude_patterns = ['**/migrations/*', 'manage.py', 'settings.py', 'admin.py', ]
|
||||
|
||||
|
||||
|
||||
# -- Options for HTML output -------------------------------------------------
|
||||
# https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output
|
||||
|
||||
# html_theme = 'alabaster'
|
||||
html_theme = 'sphinx_rtd_theme'
|
||||
html_static_path = ['_static']
|
||||
|
||||
autosummary_generate = True
|
||||
20
docs/source/index.rst
Normal file
20
docs/source/index.rst
Normal file
|
|
@ -0,0 +1,20 @@
|
|||
.. Transport Accessibility documentation master file, created by
|
||||
sphinx-quickstart on Mon Jun 3 13:49:21 2024.
|
||||
You can adapt this file completely to your liking, but it should at least
|
||||
contain the root `toctree` directive.
|
||||
|
||||
Welcome to Transport Accessibility's documentation!
|
||||
===================================================
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 2
|
||||
:caption: Contents:
|
||||
|
||||
modules
|
||||
|
||||
Indices and tables
|
||||
==================
|
||||
|
||||
* :ref:`genindex`
|
||||
* :ref:`modindex`
|
||||
* :ref:`search`
|
||||
7
docs/source/manage.rst
Normal file
7
docs/source/manage.rst
Normal file
|
|
@ -0,0 +1,7 @@
|
|||
manage module
|
||||
=============
|
||||
|
||||
.. automodule:: manage
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
9
docs/source/modules.rst
Normal file
9
docs/source/modules.rst
Normal file
|
|
@ -0,0 +1,9 @@
|
|||
transport_accessibility
|
||||
=======================
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 4
|
||||
|
||||
manage
|
||||
pt_map
|
||||
transport_accessibility
|
||||
84
docs/source/pt_map.rst
Normal file
84
docs/source/pt_map.rst
Normal file
|
|
@ -0,0 +1,84 @@
|
|||
pt\_map package
|
||||
===============
|
||||
|
||||
Subpackages
|
||||
-----------
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 4
|
||||
|
||||
|
||||
Submodules
|
||||
----------
|
||||
|
||||
pt\_map.admin module
|
||||
--------------------
|
||||
|
||||
.. automodule:: pt_map.admin
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
pt\_map.apps module
|
||||
-------------------
|
||||
|
||||
.. automodule:: pt_map.apps
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
pt\_map.bridge module
|
||||
---------------------
|
||||
|
||||
.. automodule:: pt_map.bridge
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
pt\_map.gtfs module
|
||||
-------------------
|
||||
|
||||
.. automodule:: pt_map.gtfs
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
pt\_map.models module
|
||||
---------------------
|
||||
|
||||
.. automodule:: pt_map.models
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
pt\_map.tests module
|
||||
--------------------
|
||||
|
||||
.. automodule:: pt_map.tests
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
pt\_map.urls module
|
||||
-------------------
|
||||
|
||||
.. automodule:: pt_map.urls
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
pt\_map.views module
|
||||
--------------------
|
||||
|
||||
.. automodule:: pt_map.views
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
Module contents
|
||||
---------------
|
||||
|
||||
.. automodule:: pt_map
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
45
docs/source/transport_accessibility.rst
Normal file
45
docs/source/transport_accessibility.rst
Normal file
|
|
@ -0,0 +1,45 @@
|
|||
transport\_accessibility package
|
||||
================================
|
||||
|
||||
Submodules
|
||||
----------
|
||||
|
||||
transport\_accessibility.asgi module
|
||||
------------------------------------
|
||||
|
||||
.. automodule:: transport_accessibility.asgi
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
transport\_accessibility.settings module
|
||||
----------------------------------------
|
||||
|
||||
.. automodule:: transport_accessibility.settings
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
transport\_accessibility.urls module
|
||||
------------------------------------
|
||||
|
||||
.. automodule:: transport_accessibility.urls
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
transport\_accessibility.wsgi module
|
||||
------------------------------------
|
||||
|
||||
.. automodule:: transport_accessibility.wsgi
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
||||
Module contents
|
||||
---------------
|
||||
|
||||
.. automodule:: transport_accessibility
|
||||
:members:
|
||||
:undoc-members:
|
||||
:show-inheritance:
|
||||
|
|
@ -1,18 +1,86 @@
|
|||
alabaster==0.7.16
|
||||
anyio==4.4.0
|
||||
asgiref==3.8.1
|
||||
astroid==3.2.2
|
||||
autocommand==2.2.2
|
||||
Babel==2.15.0
|
||||
backports.csv==1.0.7
|
||||
beautifulsoup4==4.12.3
|
||||
certifi==2024.6.2
|
||||
cffi==1.16.0
|
||||
charset-normalizer==3.3.2
|
||||
cheroot==10.0.1
|
||||
CherryPy==18.9.0
|
||||
click==8.1.7
|
||||
colorama==0.4.6
|
||||
cryptography==42.0.7
|
||||
dill==0.3.8
|
||||
Django==5.0.6
|
||||
docutils==0.20.1
|
||||
feedparser==6.0.11
|
||||
future==1.0.0
|
||||
h11==0.14.0
|
||||
idna==3.7
|
||||
imagesize==1.4.1
|
||||
inflect==7.2.1
|
||||
isort==5.13.2
|
||||
jaraco.collections==5.0.1
|
||||
jaraco.context==5.3.0
|
||||
jaraco.functools==4.0.1
|
||||
jaraco.text==3.12.0
|
||||
Jinja2==3.1.4
|
||||
joblib==1.4.2
|
||||
lxml==5.2.2
|
||||
MarkupSafe==2.0.1
|
||||
mccabe==0.7.0
|
||||
more-itertools==10.2.0
|
||||
mysqlclient==2.2.4
|
||||
nltk==3.8.1
|
||||
numpy==1.26.4
|
||||
packaging==24.0
|
||||
pandas==2.2.2
|
||||
parsimonious==0.10.0
|
||||
Pattern==3.6
|
||||
pdfminer.six==20231228
|
||||
platformdirs==4.2.2
|
||||
portend==3.2.0
|
||||
pycparser==2.22
|
||||
Pygments==2.18.0
|
||||
pylint==3.2.2
|
||||
python-dateutil==2.9.0.post0
|
||||
python-docx==1.1.2
|
||||
pytz==2024.1
|
||||
regex==2024.5.15
|
||||
requests==2.32.3
|
||||
scipy==1.13.1
|
||||
setuptools==70.0.0
|
||||
sgmllib3k==1.0.0
|
||||
six==1.16.0
|
||||
sniffio==1.3.1
|
||||
snowballstemmer==2.2.0
|
||||
soupsieve==2.5
|
||||
Sphinx==7.3.7
|
||||
sphinx-autobuild==2024.4.16
|
||||
sphinx-js==3.2.2
|
||||
sphinx-rtd-theme==2.0.0
|
||||
sphinxcontrib-applehelp==1.0.8
|
||||
sphinxcontrib-devhelp==1.0.6
|
||||
sphinxcontrib-htmlhelp==2.0.5
|
||||
sphinxcontrib-jquery==4.1
|
||||
sphinxcontrib-jsmath==1.0.1
|
||||
sphinxcontrib-qthelp==1.0.7
|
||||
sphinxcontrib-serializinghtml==1.1.10
|
||||
sqlparse==0.5.0
|
||||
starlette==0.37.2
|
||||
tempora==5.5.1
|
||||
tomlkit==0.12.5
|
||||
tqdm==4.66.4
|
||||
typeguard==4.3.0
|
||||
typing_extensions==4.12.1
|
||||
tzdata==2024.1
|
||||
urllib3==2.2.1
|
||||
uvicorn==0.30.1
|
||||
watchfiles==0.22.0
|
||||
websockets==12.0
|
||||
wheel==0.43.0
|
||||
zc.lockfile==3.0.post1
|
||||
|
|
|
|||
|
|
@ -1,3 +1,37 @@
|
|||
"""
|
||||
Bridge
|
||||
======
|
||||
Bridge between Django with its models and database and gtfs.GTFS as intermediate object for File IO.
|
||||
|
||||
Contents
|
||||
--------
|
||||
Constants
|
||||
---------
|
||||
gtfs_schema : dir{str,list[str]}
|
||||
Maps GTFS file names (without filename extension) to fields described by the GTFS Reference
|
||||
reversed_file_mapping : dict(str,str)
|
||||
Map CamelCased filenames to '_'-separated
|
||||
|
||||
Functions
|
||||
---------
|
||||
to_camel_case(s):
|
||||
Converts '_'-separated str to CamelCase with capital first letter
|
||||
|
||||
standardize_time(time_str):
|
||||
Converts str in unicode time format to %H:%M:%S format with normalized 24 hour time
|
||||
|
||||
is_NaN(v):
|
||||
Checks if given variable is either a str expressing NaN or NaN as object
|
||||
|
||||
stdz(v):
|
||||
Standardize date and time formats
|
||||
|
||||
gtfs_to_db(g):
|
||||
Write an existing gtfs.GTFS object to the database using the GTFS compliant models
|
||||
|
||||
db_to_gtfs(q, folder_path):
|
||||
Convert list of query sets to gtfs.GTFS object and write to specified folder if validation for GTFS compliance passes.
|
||||
"""
|
||||
import pt_map.gtfs
|
||||
import pt_map.models
|
||||
import pandas as pd
|
||||
|
|
@ -250,9 +284,34 @@ gtfs_schema = {
|
|||
}
|
||||
|
||||
def to_camel_case(s: str):
|
||||
"""
|
||||
Convert '_'-separated str to CamelCase with the first letter capitalized.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
s : str
|
||||
'_'-separated string
|
||||
|
||||
Returns
|
||||
-------
|
||||
str
|
||||
CamelCased str, first letter capitalized
|
||||
"""
|
||||
return ''.join(word.capitalize() for word in s.split('_'))
|
||||
|
||||
def standardize_time(time_str: str):
|
||||
"""
|
||||
Convert time str to standardized %H:%M:%S format.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
time_str: str
|
||||
str encoding time
|
||||
|
||||
Returns
|
||||
-------
|
||||
str in format '%H:%M:%S'
|
||||
"""
|
||||
date_str = f"Jan 19, 1999 {time_str}"
|
||||
ntuple=email.utils.parsedate(date_str)
|
||||
timestamp=time.mktime(ntuple)
|
||||
|
|
@ -261,9 +320,36 @@ def standardize_time(time_str: str):
|
|||
|
||||
|
||||
def is_NaN(v):
|
||||
"""
|
||||
Returns
|
||||
-------
|
||||
True
|
||||
If v is either a str representing NaN or NaN as an object
|
||||
False
|
||||
Otherwise
|
||||
"""
|
||||
return (isinstance(v, str) and v.lower() == "nan") or (isinstance(v, numbers.Number) and math.isnan(v))
|
||||
|
||||
def stdz(v, m: django.db.models.Model, f: str):
|
||||
"""
|
||||
Convert Time and Date str to a format our db can easily work with.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
v : object
|
||||
object to be standardized
|
||||
m : django.db.models.Model
|
||||
model to be written to
|
||||
f : str
|
||||
field name in question
|
||||
|
||||
Returns
|
||||
-------
|
||||
Converted str
|
||||
If m.f is a DateField or a TimeField
|
||||
Unchanged str
|
||||
Otherwise
|
||||
"""
|
||||
if m._meta.get_field(f).get_internal_type() == 'DateField':
|
||||
return str(v)
|
||||
if m._meta.get_field(f).get_internal_type() == 'TimeField':
|
||||
|
|
@ -272,6 +358,14 @@ def stdz(v, m: django.db.models.Model, f: str):
|
|||
|
||||
|
||||
def gtfs_to_db(g: pt_map.gtfs.GTFS):
|
||||
"""
|
||||
Given a gtfs.GTFS object, write GTFS-compliantly to db by creating the correct models
|
||||
|
||||
Parameters
|
||||
----------
|
||||
g : gtfs.GTFS
|
||||
GTFS object to be saved to db
|
||||
"""
|
||||
for k,v in gtfs_schema.items():
|
||||
name = to_camel_case(singularize(k))
|
||||
m = getattr(pt_map.models, name)
|
||||
|
|
@ -319,7 +413,22 @@ reversed_file_mapping = {
|
|||
}
|
||||
|
||||
|
||||
def db_to_gtfs(q: list[django.db.models.query.QuerySet], folder_path: str):
|
||||
def db_to_gtfs(q: list[django.db.models.query.QuerySet], folder_path: str = ""):
|
||||
"""
|
||||
Convert given list of query sets to gtfs.GTFS object
|
||||
|
||||
Parameters
|
||||
----------
|
||||
q : list[django.db.models.query.QuerySet]
|
||||
List of QuerySets containing the retrieved data to be Converted
|
||||
folder_path : str
|
||||
path to be set as the results folder_path instance variable
|
||||
|
||||
Returns
|
||||
-------
|
||||
gtfs.GTFS
|
||||
object containing the queried data
|
||||
"""
|
||||
dfs = {reversed_file_mapping[m.model.__name__]: (pd.DataFrame(list(m.values())) if m else pd.DataFrame()) for m in q}
|
||||
g = pt_map.gtfs.GTFS(folder_path, dfs)
|
||||
g.validate()
|
||||
|
|
|
|||
|
|
@ -1,8 +1,57 @@
|
|||
import pandas as pd
|
||||
import os
|
||||
|
||||
|
||||
|
||||
class GTFS:
|
||||
def __init__(self, folder_path: str, dfs: list[list] = None):
|
||||
"""
|
||||
DataFrame based representation of the GTFS standard, able to read folder of GTFS files, validate a GTFS object for accordance with the standard and write its data to a GTFS folder.
|
||||
|
||||
Attributes
|
||||
----------
|
||||
folder_path : str
|
||||
Path to folder where the data is read from and/or to be written to
|
||||
agency, stops, routes, trips, stop_times, calendar, calendar_dates, fare_attributes, fare_rules, timeframes, fare_media, fare_products, fare_leg_rules, fare_transfer_rules, areas, stop_areas, networks, route_networks, shapes, frequencies, transfers, pathways, levels, location_groups, location_group_stops, locations_geojson, booking_rules, translations, feed_info, attributions : GTFSFile
|
||||
Objects representing the data in the corresponding .txt/.geojson files in the GTFS Reference.
|
||||
errors: list[str]
|
||||
Human readable messages explaining why a validation failed if it did.
|
||||
|
||||
Methods
|
||||
-------
|
||||
get_files():
|
||||
Return all member objects of type GTFSFile
|
||||
get_fields(name):
|
||||
Return all fields present in a given instance of a GTFSFile
|
||||
export(path, dirname):
|
||||
Save all GTFS data represented by the current instance as a folder of files corresponding to the GTFS Reference.
|
||||
validate():
|
||||
For all GTFSFile member objects, validate if they individually comply with GTFS.
|
||||
validate_required_fields(df, required_fields):
|
||||
Check if a DataFrame contains all required fields according to the GTFS reference for the file it represents.
|
||||
validate_optional_fields(df, optional_fields):
|
||||
Check if a DataFrame does not contain any unexpected fields, not compliant with the GTFS reference for the file it represents.
|
||||
validate_lat_lon(df):
|
||||
Check if a Stop is correctly mapped using geographical coordinates.
|
||||
"""
|
||||
|
||||
|
||||
def __init__(self, folder_path: str = "", dfs: dict[str, pd.DataFrame] = None):
|
||||
"""
|
||||
Parameters
|
||||
----------
|
||||
folder_path : str
|
||||
Path of the folder to read GTFS data from or potentially write it to when export() is called. Defaults to an empty str.
|
||||
dfs : dict[str : pd.DataFrame]
|
||||
DataFrames containing the data to be represented by this object as values, corresponding GTFSFile.file_names as keys.
|
||||
|
||||
Raises
|
||||
------
|
||||
TypeError
|
||||
If neither folder_path nor dfs is provided
|
||||
If folder_path is not a valid str or dfs is not a dict of DataFrames
|
||||
ValueError
|
||||
If folder_path is not a well formatted path
|
||||
"""
|
||||
self.folder_path = folder_path
|
||||
self.agency = self.Agency(self.folder_path, dfs)
|
||||
self.stops = self.Stops(self.folder_path, dfs)
|
||||
|
|
@ -37,14 +86,50 @@ class GTFS:
|
|||
self.errors = []
|
||||
|
||||
class GTFSFile:
|
||||
"""
|
||||
All given fields and their corresponding values are stored as a DataFrame.
|
||||
|
||||
Attributes
|
||||
----------
|
||||
file_name : str
|
||||
Extension-less name of the corresponding .txt file from the GTFS Reference
|
||||
folder_path : str
|
||||
Folder to read data from or potentially write it to
|
||||
data : pd.DataFrame
|
||||
All csv data from the corresponding .txt file represented as a Pandas DataFrame
|
||||
|
||||
Methods
|
||||
-------
|
||||
load_data(dfs):
|
||||
Load data from list of DataFrames if given else read it from the corresponding .txt file in csv format.
|
||||
"""
|
||||
def __init__(self, folder_path, file_name, dfs):
    """
    Parameters
    ----------
    folder_path : str
        Folder to read the GTFS file from or potentially write it to.
    file_name : str
        Name of the GTFS .txt file without the .txt extension.
    dfs : dict[str, pd.DataFrame] or None
        If given, the data attribute is taken from this dict;
        otherwise the data is read from the corresponding csv file.
    """
    self.file_name = file_name
    # NOTE(review): path built by naive string concatenation — assumes
    # folder_path has no trailing slash; consider os.path.join. TODO confirm.
    self.file_path = f"{folder_path}/{file_name}.txt"
    self.data = self.load_data(dfs)
|
||||
|
||||
def load_data(self, dfs):
|
||||
"""
|
||||
Fill the data attribute with GTFS data either with a given DataFrame or from the corresponding csv
|
||||
|
||||
Parameters
|
||||
----------
|
||||
dfs : dict[str, pd.DataFrame]
|
||||
Dict of dataframes mapped to the corresponding file names. If given, the corresponding DataFrame is returned if the key exists else an empty DataFrame
|
||||
"""
|
||||
if dfs:
|
||||
return dfs[self.file_name]
|
||||
return dfs[self.file_name] if self.file_name in dfs.keys() else pd.DataFrame()
|
||||
else:
|
||||
try:
|
||||
return pd.read_csv(self.file_path)
|
||||
|
|
@ -52,106 +137,184 @@ class GTFS:
|
|||
return pd.DataFrame()
|
||||
|
||||
class Agency(GTFSFile):
    """Represents agency.txt from the GTFS reference; loading handled by GTFSFile."""

    def __init__(self, folder_path, dfs):
        super().__init__(folder_path, 'agency', dfs)
|
||||
|
||||
class Stops(GTFSFile):
    """Represents stops.txt from the GTFS reference; loading handled by GTFSFile."""

    def __init__(self, folder_path, dfs):
        super().__init__(folder_path, 'stops', dfs)
|
||||
|
||||
class Routes(GTFSFile):
    """Represents routes.txt from the GTFS reference; loading handled by GTFSFile."""

    def __init__(self, folder_path, dfs):
        super().__init__(folder_path, 'routes', dfs)
|
||||
|
||||
class Trips(GTFSFile):
    """Represents trips.txt from the GTFS reference; loading handled by GTFSFile."""

    def __init__(self, folder_path, dfs):
        super().__init__(folder_path, 'trips', dfs)
|
||||
|
||||
class StopTimes(GTFSFile):
    """Represents stop_times.txt from the GTFS reference; loading handled by GTFSFile."""

    def __init__(self, folder_path, dfs):
        super().__init__(folder_path, 'stop_times', dfs)
|
||||
|
||||
class Calendar(GTFSFile):
    """Represents calendar.txt from the GTFS reference; loading handled by GTFSFile."""

    def __init__(self, folder_path, dfs):
        super().__init__(folder_path, 'calendar', dfs)
|
||||
|
||||
class CalendarDates(GTFSFile):
    """Represents calendar_dates.txt from the GTFS reference; loading handled by GTFSFile."""

    def __init__(self, folder_path, dfs):
        super().__init__(folder_path, 'calendar_dates', dfs)
|
||||
|
||||
class FareAttributes(GTFSFile):
    """Represents fare_attributes.txt from the GTFS reference; loading handled by GTFSFile."""

    def __init__(self, folder_path, dfs):
        super().__init__(folder_path, 'fare_attributes', dfs)
|
||||
|
||||
class FareRules(GTFSFile):
    """Represents fare_rules.txt from the GTFS reference; loading handled by GTFSFile."""

    def __init__(self, folder_path, dfs):
        super().__init__(folder_path, 'fare_rules', dfs)
|
||||
|
||||
class Timeframes(GTFSFile):
    """Represents timeframes.txt from the GTFS reference; loading handled by GTFSFile."""

    def __init__(self, folder_path, dfs):
        super().__init__(folder_path, 'timeframes', dfs)
|
||||
|
||||
class FareMedia(GTFSFile):
    """Represents fare_media.txt from the GTFS reference; loading handled by GTFSFile."""

    def __init__(self, folder_path, dfs):
        super().__init__(folder_path, 'fare_media', dfs)
|
||||
|
||||
class FareProducts(GTFSFile):
    """Represents fare_products.txt from the GTFS reference; loading handled by GTFSFile."""

    def __init__(self, folder_path, dfs):
        super().__init__(folder_path, 'fare_products', dfs)
|
||||
|
||||
class FareLegRules(GTFSFile):
    """Represents fare_leg_rules.txt from the GTFS reference; loading handled by GTFSFile."""

    def __init__(self, folder_path, dfs):
        super().__init__(folder_path, 'fare_leg_rules', dfs)
|
||||
|
||||
class FareTransferRules(GTFSFile):
    """Represents fare_transfer_rules.txt from the GTFS reference; loading handled by GTFSFile."""

    def __init__(self, folder_path, dfs):
        super().__init__(folder_path, 'fare_transfer_rules', dfs)
|
||||
|
||||
class Areas(GTFSFile):
    """Represents areas.txt from the GTFS reference; loading handled by GTFSFile."""

    def __init__(self, folder_path, dfs):
        super().__init__(folder_path, 'areas', dfs)
|
||||
|
||||
class StopAreas(GTFSFile):
    """Represents stop_areas.txt from the GTFS reference; loading handled by GTFSFile."""

    def __init__(self, folder_path, dfs):
        super().__init__(folder_path, 'stop_areas', dfs)
|
||||
|
||||
class Networks(GTFSFile):
    """Represents networks.txt from the GTFS reference; loading handled by GTFSFile."""

    def __init__(self, folder_path, dfs):
        super().__init__(folder_path, 'networks', dfs)
|
||||
|
||||
class RouteNetworks(GTFSFile):
    """Represents route_networks.txt from the GTFS reference; loading handled by GTFSFile."""

    def __init__(self, folder_path, dfs):
        super().__init__(folder_path, 'route_networks', dfs)
|
||||
|
||||
class Shapes(GTFSFile):
    """Represents shapes.txt from the GTFS reference; loading handled by GTFSFile."""

    def __init__(self, folder_path, dfs):
        super().__init__(folder_path, 'shapes', dfs)
|
||||
|
||||
class Frequencies(GTFSFile):
    """Represents frequencies.txt from the GTFS reference; loading handled by GTFSFile."""

    def __init__(self, folder_path, dfs):
        super().__init__(folder_path, 'frequencies', dfs)
|
||||
|
||||
class Transfers(GTFSFile):
    """Represents transfers.txt from the GTFS reference; loading handled by GTFSFile."""

    def __init__(self, folder_path, dfs):
        super().__init__(folder_path, 'transfers', dfs)
|
||||
|
||||
class Pathways(GTFSFile):
    """Represents pathways.txt from the GTFS reference; loading handled by GTFSFile."""

    def __init__(self, folder_path, dfs):
        super().__init__(folder_path, 'pathways', dfs)
|
||||
|
||||
class Levels(GTFSFile):
    """Represents levels.txt from the GTFS reference; loading handled by GTFSFile."""

    def __init__(self, folder_path, dfs):
        super().__init__(folder_path, 'levels', dfs)
|
||||
|
||||
class LocationGroups(GTFSFile):
    """Represents location_groups.txt from the GTFS reference; loading handled by GTFSFile."""

    def __init__(self, folder_path, dfs):
        super().__init__(folder_path, 'location_groups', dfs)
|
||||
|
||||
class LocationGroupStops(GTFSFile):
    """Represents location_group_stops.txt from the GTFS reference; loading handled by GTFSFile."""

    def __init__(self, folder_path, dfs):
        super().__init__(folder_path, 'location_group_stops', dfs)
|
||||
|
||||
class LocationsGeojson(GTFSFile):
|
||||
"""
|
||||
Represents locations.geojson from the GTFS reference
|
||||
"""
|
||||
def __init__(self, folder_path, dfs):
|
||||
self.file_path = f"{folder_path}/locations.geojson"
|
||||
if os.path.exists(self.file_path):
|
||||
|
|
@ -166,31 +329,77 @@ class GTFS:
|
|||
return pd.DataFrame()
|
||||
|
||||
class BookingRules(GTFSFile):
    """Represents booking_rules.txt from the GTFS reference; loading handled by GTFSFile."""

    def __init__(self, folder_path, dfs):
        super().__init__(folder_path, 'booking_rules', dfs)
|
||||
|
||||
class Translations(GTFSFile):
    """Represents translations.txt from the GTFS reference; loading handled by GTFSFile."""

    def __init__(self, folder_path, dfs):
        super().__init__(folder_path, 'translations', dfs)
|
||||
|
||||
class FeedInfo(GTFSFile):
    """Represents feed_info.txt from the GTFS reference; loading handled by GTFSFile."""

    def __init__(self, folder_path, dfs):
        super().__init__(folder_path, 'feed_info', dfs)
|
||||
|
||||
class Attributions(GTFSFile):
    """Represents attributions.txt from the GTFS reference; loading handled by GTFSFile."""

    def __init__(self, folder_path, dfs):
        super().__init__(folder_path, 'attributions', dfs)
|
||||
|
||||
def get_files(self):
    """
    List the attribute names of all GTFSFile members.

    Returns
    -------
    list[str]
        Names of every attribute on this instance whose value is a
        GTFSFile instance. Note: this returns the attribute *names*,
        not the GTFSFile objects themselves.
    """
    return [attr for attr in dir(self) if isinstance(getattr(self, attr), self.GTFSFile)]
|
||||
|
||||
def get_fields(self, name):
    """
    Given the name of a file specified in the GTFS specification, return all
    fields present in its data.

    Parameters
    ----------
    name : str
        Name of a file as specified by GTFS (omitting the .txt/.geojson
        extension), matching one of this object's GTFSFile attributes.

    Returns
    -------
    list[str] or None
        List of all column names present in the data of the specified file,
        or None if no attribute with the given name exists.
    """
    # Bug fix: getattr without a default raises AttributeError for unknown
    # names, making the None-guard below unreachable; the 3-argument form
    # lets the guard work as intended.
    file = getattr(self, name, None)
    if not file:
        return None
    return list(file.data.columns)
|
||||
|
||||
def export(self, path, dirname):
|
||||
def export(self, path = None, dirname = ""):
|
||||
"""
|
||||
Save this object's data to files as specified by GTFS.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
path : str
|
||||
parent directory where to save the files, defaults to the objects folder_path property
|
||||
dirname : str
|
||||
If specified, subdirectory to create or use inside path. Default behaviour is to save directly to path.
|
||||
"""
|
||||
if not path:
|
||||
path = self.folder_path
|
||||
else:
|
||||
path = f"{os.path.normpath(path)}/{dirname}"
|
||||
if not os.path.exists(path):
|
||||
os.mkdir(path)
|
||||
|
|
@ -204,6 +413,15 @@ class GTFS:
|
|||
df.to_csv(fpath, index=False)
|
||||
|
||||
def validate(self):
|
||||
"""
|
||||
Check this object's data for compliance with the GTFS reference. Resets self.errors and stores human readable error messages to it.
|
||||
|
||||
Returns
|
||||
-------
|
||||
list[str]
|
||||
List of human readable error messages, also saved to self.errors, if any, else None.
|
||||
"""
|
||||
self.error = []
|
||||
if not self.agency.data.empty:
|
||||
self.validate_agency()
|
||||
if not self.stops.data.empty:
|
||||
|
|
@ -271,12 +489,18 @@ class GTFS:
|
|||
return self.errors
|
||||
|
||||
def validate_agency(self):
    """Check Agency data for GTFS-reference compliance; failures are appended to self.errors."""
    required_fields = ["agency_name", "agency_url", "agency_timezone"]
    optional_fields = ["agency_id", "agency_lang", "agency_phone", "agency_fare_url", "agency_email"]
    self.validate_required_fields(self.agency.data, required_fields, "agency.txt")
    self.validate_optional_fields(self.agency.data, optional_fields, "agency.txt")
|
||||
|
||||
def validate_stops(self):
|
||||
"""
|
||||
Check Stops object for compliance with the GTFS reference.
|
||||
"""
|
||||
required_fields = ["stop_id", "stop_name"]
|
||||
optional_fields = ["stop_code", "stop_desc", "stop_lat", "stop_lon", "zone_id", "stop_url",
|
||||
"location_type", "parent_station", "stop_timezone", "wheelchair_boarding",
|
||||
|
|
@ -286,6 +510,9 @@ class GTFS:
|
|||
self.validate_lat_lon(self.stops.data)
|
||||
|
||||
def validate_routes(self):
|
||||
"""
|
||||
Check Routes object for compliance with the GTFS reference.
|
||||
"""
|
||||
required_fields = ["route_id", "route_short_name", "route_long_name", "route_type"]
|
||||
optional_fields = ["agency_id", "route_desc", "route_url", "route_color", "route_text_color",
|
||||
"route_sort_order", "continuous_pickup", "continuous_drop_off"]
|
||||
|
|
@ -293,6 +520,9 @@ class GTFS:
|
|||
self.validate_optional_fields(self.routes.data, optional_fields, "routes.txt")
|
||||
|
||||
def validate_trips(self):
|
||||
"""
|
||||
Check Trips object for compliance with the GTFS reference.
|
||||
"""
|
||||
required_fields = ["route_id", "service_id", "trip_id"]
|
||||
optional_fields = ["trip_headsign", "trip_short_name", "direction_id", "block_id", "shape_id",
|
||||
"wheelchair_accessible", "bikes_allowed"]
|
||||
|
|
@ -300,6 +530,9 @@ class GTFS:
|
|||
self.validate_optional_fields(self.trips.data, optional_fields, "trips.txt")
|
||||
|
||||
def validate_stop_times(self):
|
||||
"""
|
||||
Check StopTimes object for compliance with the GTFS reference.
|
||||
"""
|
||||
required_fields = ["trip_id", "arrival_time", "departure_time", "stop_id", "stop_sequence"]
|
||||
optional_fields = ["stop_headsign", "pickup_type", "drop_off_type", "shape_dist_traveled",
|
||||
"timepoint"]
|
||||
|
|
@ -307,164 +540,276 @@ class GTFS:
|
|||
self.validate_optional_fields(self.stop_times.data, optional_fields, "stop_times.txt")
|
||||
|
||||
def validate_calendar(self):
    """Check Calendar data for GTFS-reference compliance; failures are appended to self.errors."""
    required_fields = ["service_id", "monday", "tuesday", "wednesday", "thursday", "friday",
                       "saturday", "sunday", "start_date", "end_date"]
    # Every calendar.txt field is required, so no optional-field check here.
    self.validate_required_fields(self.calendar.data, required_fields, "calendar.txt")
|
||||
|
||||
def validate_calendar_dates(self):
    """Check CalendarDates data for GTFS-reference compliance; failures are appended to self.errors."""
    required_fields = ["service_id", "date", "exception_type"]
    self.validate_required_fields(self.calendar_dates.data, required_fields, "calendar_dates.txt")
|
||||
|
||||
def validate_fare_attributes(self):
    """Check FareAttributes data for GTFS-reference compliance; failures are appended to self.errors."""
    required_fields = ["fare_id", "price", "currency_type", "payment_method", "transfers"]
    optional_fields = ["agency_id", "transfer_duration"]
    self.validate_required_fields(self.fare_attributes.data, required_fields, "fare_attributes.txt")
    self.validate_optional_fields(self.fare_attributes.data, optional_fields, "fare_attributes.txt")
|
||||
|
||||
def validate_fare_rules(self):
    """Check FareRules data for GTFS-reference compliance; failures are appended to self.errors."""
    required_fields = ["fare_id"]
    optional_fields = ["route_id", "origin_id", "destination_id", "contains_id"]
    self.validate_required_fields(self.fare_rules.data, required_fields, "fare_rules.txt")
    self.validate_optional_fields(self.fare_rules.data, optional_fields, "fare_rules.txt")
|
||||
|
||||
def validate_timeframes(self):
    """Check Timeframes data for compliance; failures are appended to self.errors."""
    # NOTE(review): field names here differ from the published GTFS
    # timeframes.txt schema — verify against the reference version targeted.
    required_fields = ["timeframe_id", "start_time", "end_time"]
    optional_fields = ["timeframe_name", "timeframe_desc"]
    self.validate_required_fields(self.timeframes.data, required_fields, "timeframes.txt")
    self.validate_optional_fields(self.timeframes.data, optional_fields, "timeframes.txt")
|
||||
|
||||
def validate_fare_media(self):
    """Check FareMedia data for compliance; failures are appended to self.errors."""
    required_fields = ["media_id", "media_name", "media_type"]
    optional_fields = ["media_desc"]
    self.validate_required_fields(self.fare_media.data, required_fields, "fare_media.txt")
    self.validate_optional_fields(self.fare_media.data, optional_fields, "fare_media.txt")
|
||||
|
||||
def validate_fare_products(self):
    """Check FareProducts data for compliance; failures are appended to self.errors."""
    required_fields = ["product_id", "product_name", "product_type", "product_price", "currency"]
    optional_fields = ["product_desc"]
    self.validate_required_fields(self.fare_products.data, required_fields, "fare_products.txt")
    self.validate_optional_fields(self.fare_products.data, optional_fields, "fare_products.txt")
|
||||
|
||||
def validate_fare_leg_rules(self):
    """Check FareLegRules data for compliance; failures are appended to self.errors."""
    required_fields = ["leg_id", "from_stop_id", "to_stop_id"]
    optional_fields = ["leg_desc"]
    self.validate_required_fields(self.fare_leg_rules.data, required_fields, "fare_leg_rules.txt")
    self.validate_optional_fields(self.fare_leg_rules.data, optional_fields, "fare_leg_rules.txt")
|
||||
|
||||
def validate_fare_transfer_rules(self):
    """Check FareTransferRules data for compliance; failures are appended to self.errors."""
    required_fields = ["from_leg_id", "to_leg_id", "transfer_type"]
    optional_fields = ["transfer_time"]
    self.validate_required_fields(self.fare_transfer_rules.data, required_fields, "fare_transfer_rules.txt")
    self.validate_optional_fields(self.fare_transfer_rules.data, optional_fields, "fare_transfer_rules.txt")
|
||||
|
||||
def validate_areas(self):
    """Check Areas data for compliance; failures are appended to self.errors."""
    required_fields = ["area_id", "area_name"]
    optional_fields = ["area_desc"]
    self.validate_required_fields(self.areas.data, required_fields, "areas.txt")
    self.validate_optional_fields(self.areas.data, optional_fields, "areas.txt")
|
||||
|
||||
def validate_stop_areas(self):
    """Check StopAreas data for compliance; failures are appended to self.errors."""
    required_fields = ["stop_id", "area_id"]
    optional_fields = []
    self.validate_required_fields(self.stop_areas.data, required_fields, "stop_areas.txt")
    self.validate_optional_fields(self.stop_areas.data, optional_fields, "stop_areas.txt")
|
||||
|
||||
def validate_networks(self):
    """Check Networks data for compliance; failures are appended to self.errors."""
    required_fields = ["network_id", "network_name"]
    optional_fields = ["network_desc"]
    self.validate_required_fields(self.networks.data, required_fields, "networks.txt")
    self.validate_optional_fields(self.networks.data, optional_fields, "networks.txt")
|
||||
|
||||
def validate_route_networks(self):
    """Check RouteNetworks data for compliance; failures are appended to self.errors."""
    required_fields = ["route_id", "network_id"]
    optional_fields = []
    self.validate_required_fields(self.route_networks.data, required_fields, "route_networks.txt")
    self.validate_optional_fields(self.route_networks.data, optional_fields, "route_networks.txt")
|
||||
|
||||
def validate_shapes(self):
    """Check Shapes data for compliance; failures are appended to self.errors."""
    required_fields = ["shape_id", "shape_pt_lat", "shape_pt_lon", "shape_pt_sequence"]
    optional_fields = ["shape_dist_traveled"]
    self.validate_required_fields(self.shapes.data, required_fields, "shapes.txt")
    self.validate_optional_fields(self.shapes.data, optional_fields, "shapes.txt")
|
||||
|
||||
def validate_frequencies(self):
    """Check Frequencies data for compliance; failures are appended to self.errors."""
    required_fields = ["trip_id", "start_time", "end_time", "headway_secs"]
    optional_fields = ["exact_times"]
    self.validate_required_fields(self.frequencies.data, required_fields, "frequencies.txt")
    self.validate_optional_fields(self.frequencies.data, optional_fields, "frequencies.txt")
|
||||
|
||||
def validate_transfers(self):
    """Check Transfers data for compliance; failures are appended to self.errors."""
    required_fields = ["from_stop_id", "to_stop_id", "transfer_type"]
    optional_fields = ["min_transfer_time"]
    self.validate_required_fields(self.transfers.data, required_fields, "transfers.txt")
    self.validate_optional_fields(self.transfers.data, optional_fields, "transfers.txt")
|
||||
|
||||
def validate_pathways(self):
    """Check Pathways data for compliance; failures are appended to self.errors."""
    required_fields = ["pathway_id", "from_stop_id", "to_stop_id", "pathway_mode", "is_bidirectional"]
    optional_fields = ["length", "traversal_time", "stair_count", "max_slope", "min_width", "signposted_as", "reversed_signposted_as"]
    self.validate_required_fields(self.pathways.data, required_fields, "pathways.txt")
    self.validate_optional_fields(self.pathways.data, optional_fields, "pathways.txt")
|
||||
|
||||
def validate_levels(self):
    """Check Levels data for compliance; failures are appended to self.errors."""
    required_fields = ["level_id", "level_index"]
    optional_fields = ["level_name"]
    self.validate_required_fields(self.levels.data, required_fields, "levels.txt")
    self.validate_optional_fields(self.levels.data, optional_fields, "levels.txt")
|
||||
|
||||
def validate_location_groups(self):
    """Check LocationGroups data for compliance; failures are appended to self.errors."""
    required_fields = ["location_group_id", "location_group_name"]
    optional_fields = ["location_group_desc"]
    self.validate_required_fields(self.location_groups.data, required_fields, "location_groups.txt")
    self.validate_optional_fields(self.location_groups.data, optional_fields, "location_groups.txt")
|
||||
|
||||
def validate_location_group_stops(self):
    """Check LocationGroupStops data for compliance; failures are appended to self.errors."""
    required_fields = ["location_group_id", "stop_id"]
    optional_fields = []
    self.validate_required_fields(self.location_group_stops.data, required_fields, "location_group_stops.txt")
    self.validate_optional_fields(self.location_group_stops.data, optional_fields, "location_group_stops.txt")
|
||||
|
||||
def validate_locations_geojson(self):
    """Check LocationsGeojson data for compliance; failures are appended to self.errors."""
    required_fields = ["type", "features"]
    optional_fields = []
    self.validate_required_fields(self.locations_geojson.data, required_fields, "locations.geojson")
    self.validate_optional_fields(self.locations_geojson.data, optional_fields, "locations.geojson")
|
||||
|
||||
def validate_booking_rules(self):
    """Check BookingRules data for compliance; failures are appended to self.errors."""
    required_fields = ["booking_rule_id"]
    optional_fields = ["booking_rule_name", "booking_rule_desc"]
    self.validate_required_fields(self.booking_rules.data, required_fields, "booking_rules.txt")
    self.validate_optional_fields(self.booking_rules.data, optional_fields, "booking_rules.txt")
|
||||
|
||||
def validate_translations(self):
    """Check Translations data for compliance; failures are appended to self.errors."""
    required_fields = ["table_name", "field_name", "language", "translation"]
    optional_fields = ["record_id", "record_sub_id", "field_value"]
    self.validate_required_fields(self.translations.data, required_fields, "translations.txt")
    self.validate_optional_fields(self.translations.data, optional_fields, "translations.txt")
|
||||
|
||||
def validate_feed_info(self):
    """Check FeedInfo data for compliance; failures are appended to self.errors."""
    required_fields = ["feed_publisher_name", "feed_publisher_url", "feed_lang"]
    optional_fields = ["feed_start_date", "feed_end_date", "feed_version"]
    self.validate_required_fields(self.feed_info.data, required_fields, "feed_info.txt")
    self.validate_optional_fields(self.feed_info.data, optional_fields, "feed_info.txt")
|
||||
|
||||
def validate_attributions(self):
    """Check Attributions data for compliance; failures are appended to self.errors."""
    required_fields = ["attribution_id"]
    optional_fields = ["agency_id", "route_id", "trip_id", "organization_name", "is_producer", "is_operator", "is_authority", "attribution_url", "attribution_email", "attribution_phone"]
    self.validate_required_fields(self.attributions.data, required_fields, "attributions.txt")
    self.validate_optional_fields(self.attributions.data, optional_fields, "attributions.txt")
|
||||
|
||||
def validate_required_fields(self, df, required_fields, file_name):
    """
    Verify that a DataFrame contains every field the GTFS reference requires
    for the file it represents.

    Parameters
    ----------
    df : pd.DataFrame
        DataFrame to be checked.
    required_fields : list[str]
        Field names that must be present in df.
    file_name : str
        Name printed in the error message if validation fails.
    """
    # set.difference accepts any iterable, so df.columns needs no conversion.
    missing_fields = set(required_fields).difference(df.columns)
    if not missing_fields:
        return
    self.errors.append(f"{file_name} is missing required fields: {missing_fields}")
|
||||
|
||||
def validate_optional_fields(self, df, optional_fields, file_name, required_fields=()):
    """
    Report columns of a DataFrame that are not allowed by the GTFS reference
    for the file it represents.

    Bug fix: the previous expression was
    ``set(df.columns) - set(optional_fields) - set(df.columns)``, which always
    evaluates to the empty set, so the check silently passed everything.

    Parameters
    ----------
    df : pd.DataFrame
        DataFrame to be checked.
    optional_fields : list[str]
        Optional field names allowed in df.
    file_name : str
        Name printed in the error message if validation fails.
    required_fields : iterable[str], optional
        Required field names that are also allowed in df. Callers that check
        required fields separately (via validate_required_fields) should pass
        the same list here so required columns are not reported as unexpected.
    """
    allowed = set(optional_fields) | set(required_fields)
    unexpected_fields = set(df.columns) - allowed
    if unexpected_fields:
        self.errors.append(f"{file_name} has unexpected fields: {unexpected_fields}")
|
||||
|
||||
def validate_lat_lon(self, df):
|
||||
if 'stop_lat' in df.columns and 'stop_lon' in df.columns:
|
||||
if df[['stop_lat', 'stop_lon']].isnull().any().any():
|
||||
self.errors.append(f"stops.txt has missing lat/lon values.")
|
||||
def validate_lat_lon(self, df, file_name="stops.txt", prefix="stop"):
    """
    Report missing coordinate values in a DataFrame.

    When both {prefix}_lat and {prefix}_lon columns are present in df, an
    error is appended to self.errors if any value in either column is null.
    DataFrames lacking one or both coordinate columns are not checked.

    Parameters
    ----------
    df : pd.DataFrame
        DataFrame to be checked.
    file_name : str
        Name to be printed in the error message if validation fails.
    prefix : str
        Prefix used for the coordinate fields; expands to {prefix}_lat and
        {prefix}_lon.
    """
    if f"{prefix}_lat" in df.columns and f"{prefix}_lon" in df.columns:
        if df[[f"{prefix}_lat", f"{prefix}_lon"]].isnull().any().any():
            self.errors.append(f"{file_name} has missing lat/lon values.")
|
||||
|
||||
|
|
|
|||
|
|
@ -1,6 +1,21 @@
|
|||
"""
|
||||
Models
|
||||
======
|
||||
Django database models representing the files of the GTFS Reference with all their fields
|
||||
|
||||
Attributes
|
||||
----------
|
||||
Classes
|
||||
-------
|
||||
Agency, Stop, Route, Trip, StopTime, Calendar, CalendarDate, FareAttribute, FareRule, Shape, Frequency, Transfer, Pathway, Level, FeedInfo, LocationsGeojson, BookingRule, Translation, Attribution, LocationGroup, LocationGroupStop, RouteNetwork, Network, StopArea, Area, FareMedium, FareProduct, FareLegRule, FareTransferRule, Timeframe
|
||||
Different files as described in the GTFS Reference
|
||||
"""
|
||||
from django.db import models
|
||||
|
||||
class Agency(models.Model):
|
||||
"""
|
||||
Represents agency.txt from the GTFS Reference.
|
||||
"""
|
||||
agency_id = models.CharField(max_length=255, primary_key=True)
|
||||
agency_name = models.CharField(max_length=255)
|
||||
agency_url = models.URLField()
|
||||
|
|
@ -11,6 +26,9 @@ class Agency(models.Model):
|
|||
agency_email = models.EmailField(blank=True, null=True)
|
||||
|
||||
class Stop(models.Model):
|
||||
"""
|
||||
Represents stop.txt from the GTFS Reference.
|
||||
"""
|
||||
stop_id = models.CharField(max_length=255, primary_key=True)
|
||||
stop_code = models.CharField(max_length=50, blank=True, null=True)
|
||||
stop_name = models.CharField(max_length=255)
|
||||
|
|
@ -27,6 +45,9 @@ class Stop(models.Model):
|
|||
platform_code = models.CharField(max_length=50, blank=True, null=True)
|
||||
|
||||
class Route(models.Model):
|
||||
"""
|
||||
Represents routes.txt from the GTFS Reference.
|
||||
"""
|
||||
route_id = models.CharField(max_length=255, primary_key=True)
|
||||
agency = models.ForeignKey(Agency, on_delete=models.CASCADE, blank=True, null=True)
|
||||
route_short_name = models.CharField(max_length=50)
|
||||
|
|
@ -41,6 +62,9 @@ class Route(models.Model):
|
|||
continuous_drop_off = models.IntegerField(blank=True, null=True)
|
||||
|
||||
class Trip(models.Model):
|
||||
"""
|
||||
Represents trips.txt from the GTFS Reference.
|
||||
"""
|
||||
trip_id = models.CharField(max_length=255, primary_key=True)
|
||||
route = models.ForeignKey(Route, on_delete=models.CASCADE)
|
||||
service_id = models.CharField(max_length=255)
|
||||
|
|
@ -53,6 +77,9 @@ class Trip(models.Model):
|
|||
bikes_allowed = models.IntegerField(blank=True, null=True)
|
||||
|
||||
class StopTime(models.Model):
|
||||
"""
|
||||
Represents stop_times.txt from the GTFS Reference.
|
||||
"""
|
||||
trip = models.ForeignKey(Trip, on_delete=models.CASCADE)
|
||||
arrival_time = models.TimeField(blank=True, null=True)
|
||||
departure_time = models.TimeField(blank=True, null=True)
|
||||
|
|
@ -68,6 +95,9 @@ class StopTime(models.Model):
|
|||
unique_together = (('trip', 'stop_sequence'),)
|
||||
|
||||
class Calendar(models.Model):
|
||||
"""
|
||||
Represents calendar.txt from the GTFS Reference.
|
||||
"""
|
||||
service_id = models.CharField(max_length=255, primary_key=True)
|
||||
monday = models.BooleanField()
|
||||
tuesday = models.BooleanField()
|
||||
|
|
@ -80,6 +110,9 @@ class Calendar(models.Model):
|
|||
end_date = models.DateField()
|
||||
|
||||
class CalendarDate(models.Model):
|
||||
"""
|
||||
Represents calendar_dates.txt from the GTFS Reference.
|
||||
"""
|
||||
service_id = models.CharField(max_length=255)
|
||||
date = models.DateField()
|
||||
exception_type = models.IntegerField()
|
||||
|
|
@ -88,6 +121,9 @@ class CalendarDate(models.Model):
|
|||
unique_together = (('service_id', 'date'),)
|
||||
|
||||
class FareAttribute(models.Model):
|
||||
"""
|
||||
Represents fare_attributes.txt from the GTFS Reference.
|
||||
"""
|
||||
fare_id = models.CharField(max_length=255, primary_key=True)
|
||||
price = models.FloatField()
|
||||
currency_type = models.CharField(max_length=3)
|
||||
|
|
@ -97,6 +133,9 @@ class FareAttribute(models.Model):
|
|||
transfer_duration = models.IntegerField(blank=True, null=True)
|
||||
|
||||
class FareRule(models.Model):
|
||||
"""
|
||||
Represents fare_rules.txt from the GTFS Reference.
|
||||
"""
|
||||
fare = models.ForeignKey(FareAttribute, on_delete=models.CASCADE)
|
||||
route = models.ForeignKey(Route, on_delete=models.CASCADE, blank=True, null=True)
|
||||
origin_id = models.CharField(max_length=255, blank=True, null=True)
|
||||
|
|
@ -104,6 +143,9 @@ class FareRule(models.Model):
|
|||
contains_id = models.CharField(max_length=255, blank=True, null=True)
|
||||
|
||||
class Shape(models.Model):
|
||||
"""
|
||||
Represents shapes.txt from the GTFS Reference.
|
||||
"""
|
||||
shape_id = models.CharField(max_length=255)
|
||||
shape_pt_lat = models.FloatField()
|
||||
shape_pt_lon = models.FloatField()
|
||||
|
|
@ -114,6 +156,9 @@ class Shape(models.Model):
|
|||
unique_together = (('shape_id', 'shape_pt_sequence'),)
|
||||
|
||||
class Frequency(models.Model):
|
||||
"""
|
||||
Represents frequencies.txt from the GTFS Reference.
|
||||
"""
|
||||
trip = models.ForeignKey(Trip, on_delete=models.CASCADE)
|
||||
start_time = models.TimeField()
|
||||
end_time = models.TimeField()
|
||||
|
|
@ -121,6 +166,9 @@ class Frequency(models.Model):
|
|||
exact_times = models.IntegerField(blank=True, null=True)
|
||||
|
||||
class Transfer(models.Model):
|
||||
"""
|
||||
Represents transfers.txt from the GTFS Reference.
|
||||
"""
|
||||
from_stop = models.ForeignKey(Stop, on_delete=models.CASCADE, related_name='transfers_from')
|
||||
to_stop = models.ForeignKey(Stop, on_delete=models.CASCADE, related_name='transfers_to')
|
||||
transfer_type = models.IntegerField()
|
||||
|
|
@ -130,6 +178,9 @@ class Transfer(models.Model):
|
|||
unique_together = (('from_stop', 'to_stop'),)
|
||||
|
||||
class Pathway(models.Model):
|
||||
"""
|
||||
Represents pathways.txt from the GTFS Reference.
|
||||
"""
|
||||
pathway_id = models.CharField(max_length=255, primary_key=True)
|
||||
from_stop = models.ForeignKey(Stop, on_delete=models.CASCADE, related_name='pathways_from')
|
||||
to_stop = models.ForeignKey(Stop, on_delete=models.CASCADE, related_name='pathways_to')
|
||||
|
|
@ -144,11 +195,17 @@ class Pathway(models.Model):
|
|||
reversed_signposted_as = models.CharField(max_length=255, blank=True, null=True)
|
||||
|
||||
class Level(models.Model):
|
||||
"""
|
||||
Represents level.txt from the GTFS Reference.
|
||||
"""
|
||||
level_id = models.CharField(max_length=255, primary_key=True)
|
||||
level_index = models.FloatField()
|
||||
level_name = models.CharField(max_length=255, blank=True, null=True)
|
||||
|
||||
class FeedInfo(models.Model):
|
||||
"""
|
||||
Represents feed_info.txt from the GTFS Reference.
|
||||
"""
|
||||
feed_publisher_name = models.CharField(max_length=255)
|
||||
feed_publisher_url = models.URLField()
|
||||
feed_lang = models.CharField(max_length=255)
|
||||
|
|
@ -161,6 +218,9 @@ class FeedInfo(models.Model):
|
|||
feed_id = models.BigAutoField(primary_key=True)
|
||||
|
||||
class LocationsGeojson(models.Model):
|
||||
"""
|
||||
Represents locations.geojson from the GTFS Reference.
|
||||
"""
|
||||
location_id = models.CharField(max_length=255, primary_key=True)
|
||||
location_name = models.CharField(max_length=255)
|
||||
location_lat = models.FloatField()
|
||||
|
|
@ -170,6 +230,9 @@ class LocationsGeojson(models.Model):
|
|||
wheelchair_boarding = models.BooleanField(blank=True, null=True)
|
||||
|
||||
class BookingRule(models.Model):
|
||||
"""
|
||||
Represents booking_rules.txt from the GTFS Reference.
|
||||
"""
|
||||
booking_rule_id = models.CharField(max_length=255, primary_key=True)
|
||||
trip = models.ForeignKey(Trip, on_delete=models.CASCADE)
|
||||
start_time = models.TimeField(blank=True, null=True)
|
||||
|
|
@ -179,12 +242,18 @@ class BookingRule(models.Model):
|
|||
booking_rule_instructions = models.TextField(blank=True, null=True)
|
||||
|
||||
class Translation(models.Model):
    """
    Represents translations.txt from the GTFS Reference.

    Stores a translated value for a field of another GTFS table.
    """
    table_name = models.CharField(max_length=255)  # GTFS table the translated field belongs to
    field_name = models.CharField(max_length=255)  # field within that table being translated
    # Language of the translation. Widened from max_length=2: GTFS requires
    # IETF BCP 47 language tags, which include region/script subtags such as
    # "pt-BR" or "zh-Hant" that do not fit in two characters. Widening a
    # CharField is backward-compatible with existing data.
    language = models.CharField(max_length=35)
    translation = models.TextField()  # the translated text itself
    # NOTE(review): translations.txt also defines record_id, record_sub_id and
    # field_value to identify which record is translated — confirm whether
    # those columns are needed for this project's use of translations.
|
||||
|
||||
class Attribution(models.Model):
|
||||
"""
|
||||
Represents attributions.txt from the GTFS Reference.
|
||||
"""
|
||||
attribution_id = models.CharField(max_length=255, primary_key=True)
|
||||
agency = models.ForeignKey(Agency, on_delete=models.CASCADE)
|
||||
attribution_name = models.CharField(max_length=255)
|
||||
|
|
@ -193,53 +262,86 @@ class Attribution(models.Model):
|
|||
attribution_phone = models.CharField(max_length=50, blank=True, null=True)
|
||||
|
||||
class LocationGroup(models.Model):
    """
    Represents location_groups.txt from the GTFS Reference.

    A named group of stops/locations.
    """
    location_group_id = models.CharField(max_length=255, primary_key=True)  # unique group identifier
    location_group_name = models.CharField(max_length=255)  # rider-facing group name
    # NOTE(review): location_group_type is not a column of the GTFS
    # location_groups.txt file — confirm whether it is a project extension.
    location_group_type = models.CharField(max_length=255)
|
||||
|
||||
class LocationGroupStop(models.Model):
    """
    Represents location_group_stops.txt from the GTFS Reference.

    Join table assigning a Stop to a LocationGroup.
    """
    location_group = models.ForeignKey(LocationGroup, on_delete=models.CASCADE)
    stop = models.ForeignKey(Stop, on_delete=models.CASCADE)
    # NOTE(review): no uniqueness constraint on (location_group, stop) —
    # sibling models in this file use Meta.unique_together for such pairs;
    # confirm whether duplicate assignments should be allowed.
|
||||
|
||||
class RouteNetwork(models.Model):
    """
    Represents route_networks.txt from the GTFS Reference.
    """
    route_network_id = models.CharField(max_length=255, primary_key=True)
    route_network_name = models.CharField(max_length=255)
    # NOTE(review): the GTFS route_networks.txt file defines network_id and
    # route_id columns (a route→network mapping); these fields don't match
    # the spec — confirm the intended mapping.
|
||||
|
||||
class Network(models.Model):
    """
    Represents networks.txt from the GTFS Reference.

    A named network grouping routes for fare purposes.
    """
    network_id = models.CharField(max_length=255, primary_key=True)  # unique network identifier
    network_name = models.CharField(max_length=255)  # rider-facing network name
|
||||
|
||||
class StopArea(models.Model):
    """
    Represents stop_areas.txt from the GTFS Reference.
    """
    stop_area_id = models.CharField(max_length=255, primary_key=True)
    stop_area_name = models.CharField(max_length=255)
    stop_area_description = models.TextField(blank=True, null=True)
    # NOTE(review): the GTFS stop_areas.txt file defines area_id and stop_id
    # columns (an area↔stop assignment); these fields don't match the spec —
    # confirm the intended mapping.
|
||||
|
||||
class Area(models.Model):
    """
    Represents areas.txt from the GTFS Reference.

    A named area used for grouping locations (e.g. for fares).
    """
    area_id = models.CharField(max_length=255, primary_key=True)  # unique area identifier
    area_name = models.CharField(max_length=255)  # rider-facing area name
    # Free-text description; not a GTFS areas.txt column — presumably a
    # project extension. TODO confirm.
    area_description = models.TextField(blank=True, null=True)
|
||||
|
||||
class FareMedium(models.Model):
    """
    Represents fare_media.txt from the GTFS Reference.

    A medium used to hold/validate a fare (e.g. card, app, cash).
    """
    fare_media_id = models.CharField(max_length=255, primary_key=True)
    fare_media_name = models.CharField(max_length=255)
    # NOTE(review): fare_media.txt defines a required fare_media_type column,
    # not a description — confirm the intended mapping.
    fare_media_description = models.TextField(blank=True, null=True)
|
||||
|
||||
class FareProduct(models.Model):
    """
    Represents fare_products.txt from the GTFS Reference.

    A purchasable fare product.
    """
    fare_product_id = models.CharField(max_length=255, primary_key=True)
    fare_product_name = models.CharField(max_length=255)
    fare_product_description = models.TextField(blank=True, null=True)
    # NOTE(review): fare_products.txt also defines amount, currency and
    # fare_media_id columns — confirm whether those are needed here.
|
||||
|
||||
class FareLegRule(models.Model):
    """
    Represents fare_leg_rules.txt from the GTFS Reference.
    """
    fare_leg_rule_id = models.CharField(max_length=255, primary_key=True)
    fare_leg_rule_name = models.CharField(max_length=255)
    fare_leg_rule_description = models.TextField(blank=True, null=True)
    # NOTE(review): fare_leg_rules.txt defines network_id, from_area_id,
    # to_area_id and fare_product_id columns (matching fare legs to fare
    # products); these fields don't match the spec — confirm the mapping.
|
||||
|
||||
class FareTransferRule(models.Model):
    """
    Represents fare_transfer_rules.txt from the GTFS Reference.
    """
    fare_transfer_rule_id = models.CharField(max_length=255, primary_key=True)
    fare_transfer_rule_name = models.CharField(max_length=255)
    fare_transfer_rule_description = models.TextField(blank=True, null=True)
    # NOTE(review): fare_transfer_rules.txt defines from_leg_group_id,
    # to_leg_group_id, transfer_count, duration_limit, fare_transfer_type
    # and fare_product_id columns; these fields don't match the spec —
    # confirm the intended mapping.
|
||||
|
||||
class Timeframe(models.Model):
    """
    Represents timeframes.txt from the GTFS Reference.
    """
    # NOTE(review): field name "time_frame_id" is inconsistent with the class
    # name (Timeframe) and with GTFS's "timeframe_group_id" — confirm naming.
    time_frame_id = models.CharField(max_length=255, primary_key=True)
    # NOTE(review): GTFS timeframes.txt uses start_time/end_time (times of
    # day) plus service_id, not dates — confirm the intended semantics.
    start_date = models.DateField()
    end_date = models.DateField()
|
||||
|
|
|
|||
|
|
@ -1,7 +1,10 @@
|
|||
"""
|
||||
Redirecting HTTP requests destined for the app pt_map to the correct views
|
||||
"""
|
||||
|
||||
from django.urls import path
|
||||
from . import views
|
||||
|
||||
urlpatterns = [
|
||||
path("", views.index, name="index"),
|
||||
path("testing/", views.testing, name="testing"),
|
||||
]
|
||||
]
|
||||
|
|
|
|||
|
|
@ -1,3 +1,13 @@
|
|||
"""
|
||||
Views
|
||||
=====
|
||||
Views reacting to Http Requests by interfacing between backend and frontend.
|
||||
|
||||
Functions
|
||||
---------
|
||||
index(request)
|
||||
Home page
|
||||
"""
|
||||
from django.shortcuts import render
|
||||
from django.http import HttpResponse
|
||||
from .models import Agency, Stop, Route, Trip, StopTime, Calendar, CalendarDate, FareAttribute, FareRule, Shape, Frequency, Transfer, FeedInfo
|
||||
|
|
|
|||
|
|
@ -1,18 +1,5 @@
|
|||
"""
|
||||
URL configuration for transport_accessibility project.
|
||||
|
||||
The `urlpatterns` list routes URLs to views. For more information please see:
|
||||
https://docs.djangoproject.com/en/5.0/topics/http/urls/
|
||||
Examples:
|
||||
Function views
|
||||
1. Add an import: from my_app import views
|
||||
2. Add a URL to urlpatterns: path('', views.home, name='home')
|
||||
Class-based views
|
||||
1. Add an import: from other_app.views import Home
|
||||
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
|
||||
Including another URLconf
|
||||
1. Import the include() function: from django.urls import include, path
|
||||
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
|
||||
"""
|
||||
from django.contrib import admin
|
||||
from django.urls import path, include
|
||||
|
|
|
|||
Loading…
Reference in New Issue
Block a user