Reorganizing code, adding stops display, connecting with the backend
This commit is contained in:
parent 2d33ac8e67
commit d757fec4f0
BIN
Scripts/calc-prorate.exe
Normal file
Binary file not shown.
BIN
Scripts/cheroot.exe
Normal file
Binary file not shown.
BIN
Scripts/cherryd.exe
Normal file
Binary file not shown.
BIN
Scripts/docutils.exe
Normal file
Binary file not shown.
473
Scripts/dumppdf.py
Normal file
@@ -0,0 +1,473 @@
#!E:\Studia\Ogólnonaukowe\transport_exclusion\transport-accessibility\Scripts\python.exe
|
||||
"""Extract pdf structure in XML format"""
|
||||
import logging
|
||||
import os.path
|
||||
import re
|
||||
import sys
|
||||
from typing import Any, Container, Dict, Iterable, List, Optional, TextIO, Union, cast
|
||||
from argparse import ArgumentParser
|
||||
|
||||
import pdfminer
|
||||
from pdfminer.pdfdocument import PDFDocument, PDFNoOutlines, PDFXRefFallback
|
||||
from pdfminer.pdfpage import PDFPage
|
||||
from pdfminer.pdfparser import PDFParser
|
||||
from pdfminer.pdftypes import PDFObjectNotFound, PDFValueError
|
||||
from pdfminer.pdftypes import PDFStream, PDFObjRef, resolve1, stream_value
|
||||
from pdfminer.psparser import PSKeyword, PSLiteral, LIT
|
||||
from pdfminer.utils import isnumber
|
||||
|
||||
logging.basicConfig()
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
ESC_PAT = re.compile(r'[\000-\037&<>()"\042\047\134\177-\377]')
|
||||
|
||||
|
||||
def escape(s: Union[str, bytes]) -> str:
|
||||
if isinstance(s, bytes):
|
||||
us = str(s, "latin-1")
|
||||
else:
|
||||
us = s
|
||||
return ESC_PAT.sub(lambda m: "&#%d;" % ord(m.group(0)), us)
|
||||
|
||||
|
||||
def dumpxml(out: TextIO, obj: object, codec: Optional[str] = None) -> None:
|
||||
if obj is None:
|
||||
out.write("<null />")
|
||||
return
|
||||
|
||||
if isinstance(obj, dict):
|
||||
out.write('<dict size="%d">\n' % len(obj))
|
||||
for (k, v) in obj.items():
|
||||
out.write("<key>%s</key>\n" % k)
|
||||
out.write("<value>")
|
||||
dumpxml(out, v)
|
||||
out.write("</value>\n")
|
||||
out.write("</dict>")
|
||||
return
|
||||
|
||||
if isinstance(obj, list):
|
||||
out.write('<list size="%d">\n' % len(obj))
|
||||
for v in obj:
|
||||
dumpxml(out, v)
|
||||
out.write("\n")
|
||||
out.write("</list>")
|
||||
return
|
||||
|
||||
if isinstance(obj, (str, bytes)):
|
||||
out.write('<string size="%d">%s</string>' % (len(obj), escape(obj)))
|
||||
return
|
||||
|
||||
if isinstance(obj, PDFStream):
|
||||
if codec == "raw":
|
||||
# Bug: writing bytes to text I/O. This will raise TypeError.
|
||||
out.write(obj.get_rawdata()) # type: ignore [arg-type]
|
||||
elif codec == "binary":
|
||||
# Bug: writing bytes to text I/O. This will raise TypeError.
|
||||
out.write(obj.get_data()) # type: ignore [arg-type]
|
||||
else:
|
||||
out.write("<stream>\n<props>\n")
|
||||
dumpxml(out, obj.attrs)
|
||||
out.write("\n</props>\n")
|
||||
if codec == "text":
|
||||
data = obj.get_data()
|
||||
out.write('<data size="%d">%s</data>\n' % (len(data), escape(data)))
|
||||
out.write("</stream>")
|
||||
return
|
||||
|
||||
if isinstance(obj, PDFObjRef):
|
||||
out.write('<ref id="%d" />' % obj.objid)
|
||||
return
|
||||
|
||||
if isinstance(obj, PSKeyword):
|
||||
# Likely bug: obj.name is bytes, not str
|
||||
out.write("<keyword>%s</keyword>" % obj.name) # type: ignore [str-bytes-safe]
|
||||
return
|
||||
|
||||
if isinstance(obj, PSLiteral):
|
||||
# Likely bug: obj.name may be bytes, not str
|
||||
out.write("<literal>%s</literal>" % obj.name) # type: ignore [str-bytes-safe]
|
||||
return
|
||||
|
||||
if isnumber(obj):
|
||||
out.write("<number>%s</number>" % obj)
|
||||
return
|
||||
|
||||
raise TypeError(obj)
|
||||
|
||||
|
||||
def dumptrailers(
|
||||
out: TextIO, doc: PDFDocument, show_fallback_xref: bool = False
|
||||
) -> None:
|
||||
for xref in doc.xrefs:
|
||||
if not isinstance(xref, PDFXRefFallback) or show_fallback_xref:
|
||||
out.write("<trailer>\n")
|
||||
dumpxml(out, xref.get_trailer())
|
||||
out.write("\n</trailer>\n\n")
|
||||
no_xrefs = all(isinstance(xref, PDFXRefFallback) for xref in doc.xrefs)
|
||||
if no_xrefs and not show_fallback_xref:
|
||||
msg = (
|
||||
"This PDF does not have an xref. Use --show-fallback-xref if "
|
||||
"you want to display the content of a fallback xref that "
|
||||
"contains all objects."
|
||||
)
|
||||
logger.warning(msg)
|
||||
return
|
||||
|
||||
|
||||
def dumpallobjs(
|
||||
out: TextIO,
|
||||
doc: PDFDocument,
|
||||
codec: Optional[str] = None,
|
||||
show_fallback_xref: bool = False,
|
||||
) -> None:
|
||||
visited = set()
|
||||
out.write("<pdf>")
|
||||
for xref in doc.xrefs:
|
||||
for objid in xref.get_objids():
|
||||
if objid in visited:
|
||||
continue
|
||||
visited.add(objid)
|
||||
try:
|
||||
obj = doc.getobj(objid)
|
||||
if obj is None:
|
||||
continue
|
||||
out.write('<object id="%d">\n' % objid)
|
||||
dumpxml(out, obj, codec=codec)
|
||||
out.write("\n</object>\n\n")
|
||||
except PDFObjectNotFound as e:
|
||||
print("not found: %r" % e)
|
||||
dumptrailers(out, doc, show_fallback_xref)
|
||||
out.write("</pdf>")
|
||||
return
|
||||
|
||||
|
||||
def dumpoutline(
|
||||
outfp: TextIO,
|
||||
fname: str,
|
||||
objids: Any,
|
||||
pagenos: Container[int],
|
||||
password: str = "",
|
||||
dumpall: bool = False,
|
||||
codec: Optional[str] = None,
|
||||
extractdir: Optional[str] = None,
|
||||
) -> None:
|
||||
fp = open(fname, "rb")
|
||||
parser = PDFParser(fp)
|
||||
doc = PDFDocument(parser, password)
|
||||
pages = {
|
||||
page.pageid: pageno
|
||||
for (pageno, page) in enumerate(PDFPage.create_pages(doc), 1)
|
||||
}
|
||||
|
||||
def resolve_dest(dest: object) -> Any:
|
||||
if isinstance(dest, (str, bytes)):
|
||||
dest = resolve1(doc.get_dest(dest))
|
||||
elif isinstance(dest, PSLiteral):
|
||||
dest = resolve1(doc.get_dest(dest.name))
|
||||
if isinstance(dest, dict):
|
||||
dest = dest["D"]
|
||||
if isinstance(dest, PDFObjRef):
|
||||
dest = dest.resolve()
|
||||
return dest
|
||||
|
||||
try:
|
||||
outlines = doc.get_outlines()
|
||||
outfp.write("<outlines>\n")
|
||||
for (level, title, dest, a, se) in outlines:
|
||||
pageno = None
|
||||
if dest:
|
||||
dest = resolve_dest(dest)
|
||||
pageno = pages[dest[0].objid]
|
||||
elif a:
|
||||
action = a
|
||||
if isinstance(action, dict):
|
||||
subtype = action.get("S")
|
||||
if subtype and repr(subtype) == "/'GoTo'" and action.get("D"):
|
||||
dest = resolve_dest(action["D"])
|
||||
pageno = pages[dest[0].objid]
|
||||
s = escape(title)
|
||||
outfp.write('<outline level="{!r}" title="{}">\n'.format(level, s))
|
||||
if dest is not None:
|
||||
outfp.write("<dest>")
|
||||
dumpxml(outfp, dest)
|
||||
outfp.write("</dest>\n")
|
||||
if pageno is not None:
|
||||
outfp.write("<pageno>%r</pageno>\n" % pageno)
|
||||
outfp.write("</outline>\n")
|
||||
outfp.write("</outlines>\n")
|
||||
except PDFNoOutlines:
|
||||
pass
|
||||
parser.close()
|
||||
fp.close()
|
||||
return
|
||||
|
||||
|
||||
LITERAL_FILESPEC = LIT("Filespec")
|
||||
LITERAL_EMBEDDEDFILE = LIT("EmbeddedFile")
|
||||
|
||||
|
||||
def extractembedded(fname: str, password: str, extractdir: str) -> None:
|
||||
def extract1(objid: int, obj: Dict[str, Any]) -> None:
|
||||
filename = os.path.basename(obj.get("UF") or cast(bytes, obj.get("F")).decode())
|
||||
fileref = obj["EF"].get("UF") or obj["EF"].get("F")
|
||||
fileobj = doc.getobj(fileref.objid)
|
||||
if not isinstance(fileobj, PDFStream):
|
||||
error_msg = (
|
||||
"unable to process PDF: reference for %r is not a "
|
||||
"PDFStream" % filename
|
||||
)
|
||||
raise PDFValueError(error_msg)
|
||||
if fileobj.get("Type") is not LITERAL_EMBEDDEDFILE:
|
||||
raise PDFValueError(
|
||||
"unable to process PDF: reference for %r "
|
||||
"is not an EmbeddedFile" % (filename)
|
||||
)
|
||||
path = os.path.join(extractdir, "%.6d-%s" % (objid, filename))
|
||||
if os.path.exists(path):
|
||||
raise IOError("file exists: %r" % path)
|
||||
print("extracting: %r" % path)
|
||||
os.makedirs(os.path.dirname(path), exist_ok=True)
|
||||
out = open(path, "wb")
|
||||
out.write(fileobj.get_data())
|
||||
out.close()
|
||||
return
|
||||
|
||||
with open(fname, "rb") as fp:
|
||||
parser = PDFParser(fp)
|
||||
doc = PDFDocument(parser, password)
|
||||
extracted_objids = set()
|
||||
for xref in doc.xrefs:
|
||||
for objid in xref.get_objids():
|
||||
obj = doc.getobj(objid)
|
||||
if (
|
||||
objid not in extracted_objids
|
||||
and isinstance(obj, dict)
|
||||
and obj.get("Type") is LITERAL_FILESPEC
|
||||
):
|
||||
extracted_objids.add(objid)
|
||||
extract1(objid, obj)
|
||||
return
|
||||
|
||||
|
||||
def dumppdf(
|
||||
outfp: TextIO,
|
||||
fname: str,
|
||||
objids: Iterable[int],
|
||||
pagenos: Container[int],
|
||||
password: str = "",
|
||||
dumpall: bool = False,
|
||||
codec: Optional[str] = None,
|
||||
extractdir: Optional[str] = None,
|
||||
show_fallback_xref: bool = False,
|
||||
) -> None:
|
||||
fp = open(fname, "rb")
|
||||
parser = PDFParser(fp)
|
||||
doc = PDFDocument(parser, password)
|
||||
if objids:
|
||||
for objid in objids:
|
||||
obj = doc.getobj(objid)
|
||||
dumpxml(outfp, obj, codec=codec)
|
||||
if pagenos:
|
||||
for (pageno, page) in enumerate(PDFPage.create_pages(doc)):
|
||||
if pageno in pagenos:
|
||||
if codec:
|
||||
for obj in page.contents:
|
||||
obj = stream_value(obj)
|
||||
dumpxml(outfp, obj, codec=codec)
|
||||
else:
|
||||
dumpxml(outfp, page.attrs)
|
||||
if dumpall:
|
||||
dumpallobjs(outfp, doc, codec, show_fallback_xref)
|
||||
if (not objids) and (not pagenos) and (not dumpall):
|
||||
dumptrailers(outfp, doc, show_fallback_xref)
|
||||
fp.close()
|
||||
if codec not in ("raw", "binary"):
|
||||
outfp.write("\n")
|
||||
return
|
||||
|
||||
|
||||
def create_parser() -> ArgumentParser:
|
||||
parser = ArgumentParser(description=__doc__, add_help=True)
|
||||
parser.add_argument(
|
||||
"files",
|
||||
type=str,
|
||||
default=None,
|
||||
nargs="+",
|
||||
help="One or more paths to PDF files.",
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
"--version",
|
||||
"-v",
|
||||
action="version",
|
||||
version="pdfminer.six v{}".format(pdfminer.__version__),
|
||||
)
|
||||
parser.add_argument(
|
||||
"--debug",
|
||||
"-d",
|
||||
default=False,
|
||||
action="store_true",
|
||||
help="Use debug logging level.",
|
||||
)
|
||||
procedure_parser = parser.add_mutually_exclusive_group()
|
||||
procedure_parser.add_argument(
|
||||
"--extract-toc",
|
||||
"-T",
|
||||
default=False,
|
||||
action="store_true",
|
||||
help="Extract structure of outline",
|
||||
)
|
||||
procedure_parser.add_argument(
|
||||
"--extract-embedded", "-E", type=str, help="Extract embedded files"
|
||||
)
|
||||
|
||||
parse_params = parser.add_argument_group(
|
||||
"Parser", description="Used during PDF parsing"
|
||||
)
|
||||
parse_params.add_argument(
|
||||
"--page-numbers",
|
||||
type=int,
|
||||
default=None,
|
||||
nargs="+",
|
||||
help="A space-seperated list of page numbers to parse.",
|
||||
)
|
||||
parse_params.add_argument(
|
||||
"--pagenos",
|
||||
"-p",
|
||||
type=str,
|
||||
help="A comma-separated list of page numbers to parse. Included for "
|
||||
"legacy applications, use --page-numbers for more idiomatic "
|
||||
"argument entry.",
|
||||
)
|
||||
parse_params.add_argument(
|
||||
"--objects",
|
||||
"-i",
|
||||
type=str,
|
||||
help="Comma separated list of object numbers to extract",
|
||||
)
|
||||
parse_params.add_argument(
|
||||
"--all",
|
||||
"-a",
|
||||
default=False,
|
||||
action="store_true",
|
||||
help="If the structure of all objects should be extracted",
|
||||
)
|
||||
parse_params.add_argument(
|
||||
"--show-fallback-xref",
|
||||
action="store_true",
|
||||
help="Additionally show the fallback xref. Use this if the PDF "
|
||||
"has zero or only invalid xref's. This setting is ignored if "
|
||||
"--extract-toc or --extract-embedded is used.",
|
||||
)
|
||||
parse_params.add_argument(
|
||||
"--password",
|
||||
"-P",
|
||||
type=str,
|
||||
default="",
|
||||
help="The password to use for decrypting PDF file.",
|
||||
)
|
||||
|
||||
output_params = parser.add_argument_group(
|
||||
"Output", description="Used during output generation."
|
||||
)
|
||||
output_params.add_argument(
|
||||
"--outfile",
|
||||
"-o",
|
||||
type=str,
|
||||
default="-",
|
||||
help='Path to file where output is written. Or "-" (default) to '
|
||||
"write to stdout.",
|
||||
)
|
||||
codec_parser = output_params.add_mutually_exclusive_group()
|
||||
codec_parser.add_argument(
|
||||
"--raw-stream",
|
||||
"-r",
|
||||
default=False,
|
||||
action="store_true",
|
||||
help="Write stream objects without encoding",
|
||||
)
|
||||
codec_parser.add_argument(
|
||||
"--binary-stream",
|
||||
"-b",
|
||||
default=False,
|
||||
action="store_true",
|
||||
help="Write stream objects with binary encoding",
|
||||
)
|
||||
codec_parser.add_argument(
|
||||
"--text-stream",
|
||||
"-t",
|
||||
default=False,
|
||||
action="store_true",
|
||||
help="Write stream objects as plain text",
|
||||
)
|
||||
|
||||
return parser
|
||||
|
||||
|
||||
def main(argv: Optional[List[str]] = None) -> None:
|
||||
parser = create_parser()
|
||||
args = parser.parse_args(args=argv)
|
||||
|
||||
if args.debug:
|
||||
logging.getLogger().setLevel(logging.DEBUG)
|
||||
|
||||
if args.outfile == "-":
|
||||
outfp = sys.stdout
|
||||
else:
|
||||
outfp = open(args.outfile, "w")
|
||||
|
||||
if args.objects:
|
||||
objids = [int(x) for x in args.objects.split(",")]
|
||||
else:
|
||||
objids = []
|
||||
|
||||
if args.page_numbers:
|
||||
pagenos = {x - 1 for x in args.page_numbers}
|
||||
elif args.pagenos:
|
||||
pagenos = {int(x) - 1 for x in args.pagenos.split(",")}
|
||||
else:
|
||||
pagenos = set()
|
||||
|
||||
password = args.password
|
||||
|
||||
if args.raw_stream:
|
||||
codec: Optional[str] = "raw"
|
||||
elif args.binary_stream:
|
||||
codec = "binary"
|
||||
elif args.text_stream:
|
||||
codec = "text"
|
||||
else:
|
||||
codec = None
|
||||
|
||||
for fname in args.files:
|
||||
if args.extract_toc:
|
||||
dumpoutline(
|
||||
outfp,
|
||||
fname,
|
||||
objids,
|
||||
pagenos,
|
||||
password=password,
|
||||
dumpall=args.all,
|
||||
codec=codec,
|
||||
extractdir=None,
|
||||
)
|
||||
elif args.extract_embedded:
|
||||
extractembedded(fname, password=password, extractdir=args.extract_embedded)
|
||||
else:
|
||||
dumppdf(
|
||||
outfp,
|
||||
fname,
|
||||
objids,
|
||||
pagenos,
|
||||
password=password,
|
||||
dumpall=args.all,
|
||||
codec=codec,
|
||||
extractdir=None,
|
||||
show_fallback_xref=args.show_fallback_xref,
|
||||
)
|
||||
|
||||
outfp.close()
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
BIN
Scripts/futurize.exe
Normal file
Binary file not shown.
BIN
Scripts/nltk.exe
Normal file
Binary file not shown.
BIN
Scripts/normalizer.exe
Normal file
Binary file not shown.
BIN
Scripts/pasteurize.exe
Normal file
Binary file not shown.
317
Scripts/pdf2txt.py
Normal file
@@ -0,0 +1,317 @@
#!E:\Studia\Ogólnonaukowe\transport_exclusion\transport-accessibility\Scripts\python.exe
|
||||
"""A command line tool for extracting text and images from PDF and
|
||||
output it to plain text, html, xml or tags."""
|
||||
import argparse
|
||||
import logging
|
||||
import sys
|
||||
from typing import Any, Container, Iterable, List, Optional
|
||||
|
||||
import pdfminer.high_level
|
||||
from pdfminer.layout import LAParams
|
||||
from pdfminer.utils import AnyIO
|
||||
|
||||
logging.basicConfig()
|
||||
|
||||
OUTPUT_TYPES = ((".htm", "html"), (".html", "html"), (".xml", "xml"), (".tag", "tag"))
|
||||
|
||||
|
||||
def float_or_disabled(x: str) -> Optional[float]:
|
||||
if x.lower().strip() == "disabled":
|
||||
return None
|
||||
try:
|
||||
return float(x)
|
||||
except ValueError:
|
||||
raise argparse.ArgumentTypeError("invalid float value: {}".format(x))
|
||||
|
||||
|
||||
def extract_text(
|
||||
files: Iterable[str] = [],
|
||||
outfile: str = "-",
|
||||
laparams: Optional[LAParams] = None,
|
||||
output_type: str = "text",
|
||||
codec: str = "utf-8",
|
||||
strip_control: bool = False,
|
||||
maxpages: int = 0,
|
||||
page_numbers: Optional[Container[int]] = None,
|
||||
password: str = "",
|
||||
scale: float = 1.0,
|
||||
rotation: int = 0,
|
||||
layoutmode: str = "normal",
|
||||
output_dir: Optional[str] = None,
|
||||
debug: bool = False,
|
||||
disable_caching: bool = False,
|
||||
**kwargs: Any
|
||||
) -> AnyIO:
|
||||
if not files:
|
||||
raise ValueError("Must provide files to work upon!")
|
||||
|
||||
if output_type == "text" and outfile != "-":
|
||||
for override, alttype in OUTPUT_TYPES:
|
||||
if outfile.endswith(override):
|
||||
output_type = alttype
|
||||
|
||||
if outfile == "-":
|
||||
outfp: AnyIO = sys.stdout
|
||||
if sys.stdout.encoding is not None:
|
||||
codec = "utf-8"
|
||||
else:
|
||||
outfp = open(outfile, "wb")
|
||||
|
||||
for fname in files:
|
||||
with open(fname, "rb") as fp:
|
||||
pdfminer.high_level.extract_text_to_fp(fp, **locals())
|
||||
return outfp
|
||||
|
||||
|
||||
def create_parser() -> argparse.ArgumentParser:
|
||||
parser = argparse.ArgumentParser(description=__doc__, add_help=True)
|
||||
parser.add_argument(
|
||||
"files",
|
||||
type=str,
|
||||
default=None,
|
||||
nargs="+",
|
||||
help="One or more paths to PDF files.",
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
"--version",
|
||||
"-v",
|
||||
action="version",
|
||||
version="pdfminer.six v{}".format(pdfminer.__version__),
|
||||
)
|
||||
parser.add_argument(
|
||||
"--debug",
|
||||
"-d",
|
||||
default=False,
|
||||
action="store_true",
|
||||
help="Use debug logging level.",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--disable-caching",
|
||||
"-C",
|
||||
default=False,
|
||||
action="store_true",
|
||||
help="If caching or resources, such as fonts, should be disabled.",
|
||||
)
|
||||
|
||||
parse_params = parser.add_argument_group(
|
||||
"Parser", description="Used during PDF parsing"
|
||||
)
|
||||
parse_params.add_argument(
|
||||
"--page-numbers",
|
||||
type=int,
|
||||
default=None,
|
||||
nargs="+",
|
||||
help="A space-seperated list of page numbers to parse.",
|
||||
)
|
||||
parse_params.add_argument(
|
||||
"--pagenos",
|
||||
"-p",
|
||||
type=str,
|
||||
help="A comma-separated list of page numbers to parse. "
|
||||
"Included for legacy applications, use --page-numbers "
|
||||
"for more idiomatic argument entry.",
|
||||
)
|
||||
parse_params.add_argument(
|
||||
"--maxpages",
|
||||
"-m",
|
||||
type=int,
|
||||
default=0,
|
||||
help="The maximum number of pages to parse.",
|
||||
)
|
||||
parse_params.add_argument(
|
||||
"--password",
|
||||
"-P",
|
||||
type=str,
|
||||
default="",
|
||||
help="The password to use for decrypting PDF file.",
|
||||
)
|
||||
parse_params.add_argument(
|
||||
"--rotation",
|
||||
"-R",
|
||||
default=0,
|
||||
type=int,
|
||||
help="The number of degrees to rotate the PDF "
|
||||
"before other types of processing.",
|
||||
)
|
||||
|
||||
la_params = LAParams() # will be used for defaults
|
||||
la_param_group = parser.add_argument_group(
|
||||
"Layout analysis", description="Used during layout analysis."
|
||||
)
|
||||
la_param_group.add_argument(
|
||||
"--no-laparams",
|
||||
"-n",
|
||||
default=False,
|
||||
action="store_true",
|
||||
help="If layout analysis parameters should be ignored.",
|
||||
)
|
||||
la_param_group.add_argument(
|
||||
"--detect-vertical",
|
||||
"-V",
|
||||
default=la_params.detect_vertical,
|
||||
action="store_true",
|
||||
help="If vertical text should be considered during layout analysis",
|
||||
)
|
||||
la_param_group.add_argument(
|
||||
"--line-overlap",
|
||||
type=float,
|
||||
default=la_params.line_overlap,
|
||||
help="If two characters have more overlap than this they "
|
||||
"are considered to be on the same line. The overlap is specified "
|
||||
"relative to the minimum height of both characters.",
|
||||
)
|
||||
la_param_group.add_argument(
|
||||
"--char-margin",
|
||||
"-M",
|
||||
type=float,
|
||||
default=la_params.char_margin,
|
||||
help="If two characters are closer together than this margin they "
|
||||
"are considered to be part of the same line. The margin is "
|
||||
"specified relative to the width of the character.",
|
||||
)
|
||||
la_param_group.add_argument(
|
||||
"--word-margin",
|
||||
"-W",
|
||||
type=float,
|
||||
default=la_params.word_margin,
|
||||
help="If two characters on the same line are further apart than this "
|
||||
"margin then they are considered to be two separate words, and "
|
||||
"an intermediate space will be added for readability. The margin "
|
||||
"is specified relative to the width of the character.",
|
||||
)
|
||||
la_param_group.add_argument(
|
||||
"--line-margin",
|
||||
"-L",
|
||||
type=float,
|
||||
default=la_params.line_margin,
|
||||
help="If two lines are close together they are considered to "
|
||||
"be part of the same paragraph. The margin is specified "
|
||||
"relative to the height of a line.",
|
||||
)
|
||||
la_param_group.add_argument(
|
||||
"--boxes-flow",
|
||||
"-F",
|
||||
type=float_or_disabled,
|
||||
default=la_params.boxes_flow,
|
||||
help="Specifies how much a horizontal and vertical position of a "
|
||||
"text matters when determining the order of lines. The value "
|
||||
"should be within the range of -1.0 (only horizontal position "
|
||||
"matters) to +1.0 (only vertical position matters). You can also "
|
||||
"pass `disabled` to disable advanced layout analysis, and "
|
||||
"instead return text based on the position of the bottom left "
|
||||
"corner of the text box.",
|
||||
)
|
||||
la_param_group.add_argument(
|
||||
"--all-texts",
|
||||
"-A",
|
||||
default=la_params.all_texts,
|
||||
action="store_true",
|
||||
help="If layout analysis should be performed on text in figures.",
|
||||
)
|
||||
|
||||
output_params = parser.add_argument_group(
|
||||
"Output", description="Used during output generation."
|
||||
)
|
||||
output_params.add_argument(
|
||||
"--outfile",
|
||||
"-o",
|
||||
type=str,
|
||||
default="-",
|
||||
help="Path to file where output is written. "
|
||||
'Or "-" (default) to write to stdout.',
|
||||
)
|
||||
output_params.add_argument(
|
||||
"--output_type",
|
||||
"-t",
|
||||
type=str,
|
||||
default="text",
|
||||
help="Type of output to generate {text,html,xml,tag}.",
|
||||
)
|
||||
output_params.add_argument(
|
||||
"--codec",
|
||||
"-c",
|
||||
type=str,
|
||||
default="utf-8",
|
||||
help="Text encoding to use in output file.",
|
||||
)
|
||||
output_params.add_argument(
|
||||
"--output-dir",
|
||||
"-O",
|
||||
default=None,
|
||||
help="The output directory to put extracted images in. If not given, "
|
||||
"images are not extracted.",
|
||||
)
|
||||
output_params.add_argument(
|
||||
"--layoutmode",
|
||||
"-Y",
|
||||
default="normal",
|
||||
type=str,
|
||||
help="Type of layout to use when generating html "
|
||||
"{normal,exact,loose}. If normal,each line is"
|
||||
" positioned separately in the html. If exact"
|
||||
", each character is positioned separately in"
|
||||
" the html. If loose, same result as normal "
|
||||
"but with an additional newline after each "
|
||||
"text line. Only used when output_type is html.",
|
||||
)
|
||||
output_params.add_argument(
|
||||
"--scale",
|
||||
"-s",
|
||||
type=float,
|
||||
default=1.0,
|
||||
help="The amount of zoom to use when generating html file. "
|
||||
"Only used when output_type is html.",
|
||||
)
|
||||
output_params.add_argument(
|
||||
"--strip-control",
|
||||
"-S",
|
||||
default=False,
|
||||
action="store_true",
|
||||
help="Remove control statement from text. "
|
||||
"Only used when output_type is xml.",
|
||||
)
|
||||
|
||||
return parser
|
||||
|
||||
|
||||
def parse_args(args: Optional[List[str]]) -> argparse.Namespace:
|
||||
parsed_args = create_parser().parse_args(args=args)
|
||||
|
||||
# Propagate parsed layout parameters to LAParams object
|
||||
if parsed_args.no_laparams:
|
||||
parsed_args.laparams = None
|
||||
else:
|
||||
parsed_args.laparams = LAParams(
|
||||
line_overlap=parsed_args.line_overlap,
|
||||
char_margin=parsed_args.char_margin,
|
||||
line_margin=parsed_args.line_margin,
|
||||
word_margin=parsed_args.word_margin,
|
||||
boxes_flow=parsed_args.boxes_flow,
|
||||
detect_vertical=parsed_args.detect_vertical,
|
||||
all_texts=parsed_args.all_texts,
|
||||
)
|
||||
|
||||
if parsed_args.page_numbers:
|
||||
parsed_args.page_numbers = {x - 1 for x in parsed_args.page_numbers}
|
||||
|
||||
if parsed_args.pagenos:
|
||||
parsed_args.page_numbers = {int(x) - 1 for x in parsed_args.pagenos.split(",")}
|
||||
|
||||
if parsed_args.output_type == "text" and parsed_args.outfile != "-":
|
||||
for override, alttype in OUTPUT_TYPES:
|
||||
if parsed_args.outfile.endswith(override):
|
||||
parsed_args.output_type = alttype
|
||||
|
||||
return parsed_args
|
||||
|
||||
|
||||
def main(args: Optional[List[str]] = None) -> int:
|
||||
parsed_args = parse_args(args)
|
||||
outfp = extract_text(**vars(parsed_args))
|
||||
outfp.close()
|
||||
return 0
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
sys.exit(main())
|
||||
BIN
Scripts/pybabel.exe
Normal file
Binary file not shown.
BIN
Scripts/pygmentize.exe
Normal file
Binary file not shown.
23
Scripts/rst2html.py
Normal file
@@ -0,0 +1,23 @@
#!E:\Studia\Ogólnonaukowe\transport_exclusion\transport-accessibility\Scripts\python.exe

# $Id: rst2html.py 9115 2022-07-28 17:06:24Z milde $
# Author: David Goodger <goodger@python.org>
# Copyright: This module has been placed in the public domain.

"""
A minimal front end to the Docutils Publisher, producing HTML.
"""

try:
    import locale
    locale.setlocale(locale.LC_ALL, '')
except Exception:
    pass

from docutils.core import publish_cmdline, default_description


description = ('Generates (X)HTML documents from standalone reStructuredText '
               'sources. ' + default_description)

publish_cmdline(writer_name='html', description=description)
26
Scripts/rst2html4.py
Normal file
@@ -0,0 +1,26 @@
#!E:\Studia\Ogólnonaukowe\transport_exclusion\transport-accessibility\Scripts\python.exe

# $Id: rst2html4.py 9115 2022-07-28 17:06:24Z milde $
# Author: David Goodger <goodger@python.org>
# Copyright: This module has been placed in the public domain.

"""
A minimal front end to the Docutils Publisher, producing (X)HTML.

The output conforms to XHTML 1.0 transitional
and almost to HTML 4.01 transitional (except for closing empty tags).
"""

try:
    import locale
    locale.setlocale(locale.LC_ALL, '')
except Exception:
    pass

from docutils.core import publish_cmdline, default_description


description = ('Generates (X)HTML documents from standalone reStructuredText '
               'sources. ' + default_description)

publish_cmdline(writer_name='html4', description=description)
33
Scripts/rst2html5.py
Normal file
@@ -0,0 +1,33 @@
#!E:\Studia\Ogólnonaukowe\transport_exclusion\transport-accessibility\Scripts\python.exe
# :Copyright: © 2015 Günter Milde.
# :License: Released under the terms of the `2-Clause BSD license`_, in short:
#
#    Copying and distribution of this file, with or without modification,
#    are permitted in any medium without royalty provided the copyright
#    notice and this notice are preserved.
#    This file is offered as-is, without any warranty.
#
# .. _2-Clause BSD license: https://opensource.org/licenses/BSD-2-Clause
#
# Revision: $Revision: 9021 $
# Date: $Date: 2022-03-04 16:54:22 +0100 (Fr, 04. Mär 2022) $

"""
A minimal front end to the Docutils Publisher, producing HTML 5 documents.

The output is also valid XML.
"""

try:
    import locale  # module missing in Jython
    locale.setlocale(locale.LC_ALL, '')
except locale.Error:
    pass

from docutils.core import publish_cmdline, default_description

description = ('Generates HTML5 documents from standalone '
               'reStructuredText sources.\n'
               + default_description)

publish_cmdline(writer_name='html5', description=description)
26
Scripts/rst2latex.py
Normal file
@@ -0,0 +1,26 @@
#!E:\Studia\Ogólnonaukowe\transport_exclusion\transport-accessibility\Scripts\python.exe

# $Id: rst2latex.py 9115 2022-07-28 17:06:24Z milde $
# Author: David Goodger <goodger@python.org>
# Copyright: This module has been placed in the public domain.

"""
A minimal front end to the Docutils Publisher, producing LaTeX.
"""

try:
    import locale
    locale.setlocale(locale.LC_ALL, '')
except Exception:
    pass

from docutils.core import publish_cmdline

description = ('Generates LaTeX documents from standalone reStructuredText '
               'sources. '
               'Reads from <source> (default is stdin) and writes to '
               '<destination> (default is stdout). See '
               '<https://docutils.sourceforge.io/docs/user/latex.html> for '
               'the full reference.')

publish_cmdline(writer_name='latex', description=description)
27
Scripts/rst2man.py
Normal file
@@ -0,0 +1,27 @@
#!E:\Studia\Ogólnonaukowe\transport_exclusion\transport-accessibility\Scripts\python.exe

# Author:
# Contact: grubert@users.sf.net
# Copyright: This module has been placed in the public domain.

"""
man.py
======

This module provides a simple command line interface that uses the
man page writer to output from ReStructuredText source.
"""

import locale
try:
    locale.setlocale(locale.LC_ALL, '')
except Exception:
    pass

from docutils.core import publish_cmdline, default_description
from docutils.writers import manpage

description = ("Generates plain unix manual documents. "
               + default_description)

publish_cmdline(writer=manpage.Writer(), description=description)
28
Scripts/rst2odt.py
Normal file
@@ -0,0 +1,28 @@
#!E:\Studia\Ogólnonaukowe\transport_exclusion\transport-accessibility\Scripts\python.exe

# $Id: rst2odt.py 9115 2022-07-28 17:06:24Z milde $
# Author: Dave Kuhlman <dkuhlman@rexx.com>
# Copyright: This module has been placed in the public domain.

"""
A front end to the Docutils Publisher, producing OpenOffice documents.
"""

try:
    import locale
    locale.setlocale(locale.LC_ALL, '')
except Exception:
    pass

from docutils.core import publish_cmdline_to_binary, default_description
from docutils.writers.odf_odt import Writer, Reader


description = ('Generates OpenDocument/OpenOffice/ODF documents from '
               'standalone reStructuredText sources. ' + default_description)


writer = Writer()
reader = Reader()
output = publish_cmdline_to_binary(reader=reader, writer=writer,
                                   description=description)
20
Scripts/rst2odt_prepstyles.py
Normal file
@@ -0,0 +1,20 @@
#!E:\Studia\Ogólnonaukowe\transport_exclusion\transport-accessibility\Scripts\python.exe

# Copyright: This module has been placed in the public domain.

"""
Adapt a word-processor-generated styles.odt for odtwriter use:

Drop page size specifications from styles.xml in STYLE_FILE.odt.
See https://docutils.sourceforge.io/docs/user/odt.html#page-size

Provisional backwards compatibility stub (to be removed in Docutils >= 0.21).

The actual code moved to the "docutils" library package and can be started
with ``python -m docutils.writers.odf_odt.prepstyles``.
"""

from docutils.writers.odf_odt import prepstyles

if __name__ == '__main__':
    prepstyles.main()
23
Scripts/rst2pseudoxml.py
Normal file
@@ -0,0 +1,23 @@
#!E:\Studia\Ogólnonaukowe\transport_exclusion\transport-accessibility\Scripts\python.exe

# $Id: rst2pseudoxml.py 9115 2022-07-28 17:06:24Z milde $
# Author: David Goodger <goodger@python.org>
# Copyright: This module has been placed in the public domain.

"""
A minimal front end to the Docutils Publisher, producing pseudo-XML.
"""

try:
    import locale
    locale.setlocale(locale.LC_ALL, '')
except Exception:
    pass

from docutils.core import publish_cmdline, default_description


description = ('Generates pseudo-XML from standalone reStructuredText '
               'sources (for testing purposes). ' + default_description)

publish_cmdline(description=description)
24
Scripts/rst2s5.py
Normal file
@@ -0,0 +1,24 @@
#!E:\Studia\Ogólnonaukowe\transport_exclusion\transport-accessibility\Scripts\python.exe

# $Id: rst2s5.py 9115 2022-07-28 17:06:24Z milde $
# Author: Chris Liechti <cliechti@gmx.net>
# Copyright: This module has been placed in the public domain.

"""
A minimal front end to the Docutils Publisher, producing HTML slides using
the S5 template system.
"""

try:
    import locale
    locale.setlocale(locale.LC_ALL, '')
except Exception:
    pass

from docutils.core import publish_cmdline, default_description


description = ('Generates S5 (X)HTML slideshow documents from standalone '
               'reStructuredText sources. ' + default_description)

publish_cmdline(writer_name='s5', description=description)
27
Scripts/rst2xetex.py
Normal file
@@ -0,0 +1,27 @@
#!E:\Studia\Ogólnonaukowe\transport_exclusion\transport-accessibility\Scripts\python.exe

# $Id: rst2xetex.py 9115 2022-07-28 17:06:24Z milde $
# Author: Guenter Milde
# Copyright: This module has been placed in the public domain.

"""
A minimal front end to the Docutils Publisher, producing Lua/XeLaTeX code.
"""

try:
    import locale
    locale.setlocale(locale.LC_ALL, '')
except Exception:
    pass

from docutils.core import publish_cmdline

description = ('Generates LaTeX documents from standalone reStructuredText '
               'sources for compilation with the Unicode-aware TeX variants '
               'XeLaTeX or LuaLaTeX. '
               'Reads from <source> (default is stdin) and writes to '
               '<destination> (default is stdout). See '
               '<https://docutils.sourceforge.io/docs/user/latex.html> for '
               'the full reference.')

publish_cmdline(writer_name='xetex', description=description)
23
Scripts/rst2xml.py
Normal file
@@ -0,0 +1,23 @@
#!E:\Studia\Ogólnonaukowe\transport_exclusion\transport-accessibility\Scripts\python.exe

# $Id: rst2xml.py 9115 2022-07-28 17:06:24Z milde $
# Author: David Goodger <goodger@python.org>
# Copyright: This module has been placed in the public domain.

"""
A minimal front end to the Docutils Publisher, producing Docutils XML.
"""

try:
    import locale
    locale.setlocale(locale.LC_ALL, '')
except Exception:
    pass

from docutils.core import publish_cmdline, default_description


description = ('Generates Docutils-native XML from standalone '
               'reStructuredText sources. ' + default_description)

publish_cmdline(writer_name='xml', description=description)
25
Scripts/rstpep2html.py
Normal file
@@ -0,0 +1,25 @@
#!E:\Studia\Ogólnonaukowe\transport_exclusion\transport-accessibility\Scripts\python.exe

# $Id: rstpep2html.py 9115 2022-07-28 17:06:24Z milde $
# Author: David Goodger <goodger@python.org>
# Copyright: This module has been placed in the public domain.

"""
A minimal front end to the Docutils Publisher, producing HTML from PEP
(Python Enhancement Proposal) documents.
"""

try:
    import locale
    locale.setlocale(locale.LC_ALL, '')
except Exception:
    pass

from docutils.core import publish_cmdline, default_description


description = ('Generates (X)HTML from reStructuredText-format PEP files. '
               + default_description)

publish_cmdline(reader_name='pep', writer_name='pep_html',
                description=description)
BIN
Scripts/sphinx-apidoc.exe
Normal file
Binary file not shown.
BIN
Scripts/sphinx-autobuild.exe
Normal file
Binary file not shown.
BIN
Scripts/sphinx-autogen.exe
Normal file
Binary file not shown.
BIN
Scripts/sphinx-build.exe
Normal file
Binary file not shown.
BIN
Scripts/sphinx-quickstart.exe
Normal file
Binary file not shown.
BIN
Scripts/tqdm.exe
Normal file
Binary file not shown.
BIN
Scripts/uvicorn.exe
Normal file
Binary file not shown.
BIN
Scripts/watchfiles.exe
Normal file
Binary file not shown.
@@ -37,6 +37,11 @@
max-height: 60vh;
|
||||
overflow-y: auto;
|
||||
}
|
||||
.route-list {
|
||||
max-height: 400px;
|
||||
overflow-y: scroll;
|
||||
}
|
||||
</style>
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
@@ -51,6 +56,7 @@
<h5 id="currentGTFS">
|
||||
Current GTFS
|
||||
<h6 id="newShape">
|
||||
<button class="list-group-item list-group-item-action" onclick="drawNewShape()">Draw New Shape</button>
|
||||
<button class="list-group-item list-group-item-action" onclick="addNewShape()">Add New Shape</button>
|
||||
</h6>
|
||||
<h6 id="chosenShape">
|
||||
@@ -97,6 +103,11 @@
<script src="https://unpkg.com/leaflet-routing-machine/dist/leaflet-routing-machine.js"></script>
|
||||
<script src="https://unpkg.com/leaflet-control-geocoder/dist/Control.Geocoder.js"></script>
|
||||
<script src="https://unpkg.com/leaflet-draw/dist/leaflet.draw.js"></script>
|
||||
<!-- Scripts with helper functions -->
|
||||
<script src="sidebar.js"></script>
|
||||
<script src="shapes_on_map.js"></script>
|
||||
<script src="parsing_files.js"></script>
|
||||
|
||||
<script>
|
||||
// Initialize the map centered on Toruń, Poland
|
||||
var map = L.map('map').setView([53.0138, 18.5984], 13);
|
||||
@@ -106,222 +117,20 @@
maxZoom: 19,
|
||||
}).addTo(map);
|
||||
|
||||
// Assuming route is an array of objects with lon and lat properties
|
||||
// Given route coordinates
|
||||
// Given route coordinates
|
||||
const route = [
|
||||
{ lat: 53.015498, lon: 18.548179, stop: true },
|
||||
{ lat: 53.0155061509397, lon: 18.5483665019274, stop: false },
|
||||
{ lat: 53.0154795263022, lon: 18.5488848388195, stop: false },
|
||||
{ lat: 53.0153786752536, lon: 18.5508066415787, stop: false },
|
||||
{ lat: 53.0152608809305, lon: 18.5528719425201, stop: false },
|
||||
{ lat: 53.0152253812084, lon: 18.5534271597862, stop: false },
|
||||
{ lat: 53.0150269049514, lon: 18.5555809736252, stop: false },
|
||||
{ lat: 53.014975, lon: 18.55602, stop: false },
|
||||
{ lat: 53.0149397687454, lon: 18.5565304756165, stop: false },
|
||||
{ lat: 53.0148816778436, lon: 18.5571071505547, stop: false },
|
||||
{ lat: 53.0148155186657, lon: 18.5575430095196, stop: false },
|
||||
{ lat: 53.014707404669, lon: 18.558078110218, stop: false },
|
||||
{ lat: 53.0145960631056, lon: 18.5584764182568, stop: false },
|
||||
{ lat: 53.0144782666461, lon: 18.5588613152504, stop: false },
|
||||
{ lat: 53.0143330376918, lon: 18.559333384037, stop: false },
|
||||
{ lat: 53.0141047045143, lon: 18.5600320994854, stop: false },
|
||||
{ lat: 53.0140401578139, lon: 18.5602413117886, stop: false },
|
||||
{ lat: 53.013946968345, lon: 18.5605558007956, stop: false },
|
||||
{ lat: 53.01379, lon: 18.56118, stop: false },
|
||||
{ lat: 53.0137287186281, lon: 18.5616239905357, stop: false },
|
||||
{ lat: 53.0136657850481, lon: 18.5622046887875, stop: false },
|
||||
{ lat: 53.0136310908558, lon: 18.5627853870392, stop: false },
|
||||
{ lat: 53.0136302840138, lon: 18.5634854435921, stop: false },
|
||||
{ lat: 53.0136294771718, lon: 18.5643585026264, stop: false },
|
||||
{ lat: 53.0136359319076, lon: 18.5656929016113, stop: false },
|
||||
{ lat: 53.013623, lon: 18.566067, stop: false },
|
||||
{ lat: 53.0136472276929, lon: 18.5697859525681, stop: false },
|
||||
{ lat: 53.013666, lon: 18.570766, stop: false },
|
||||
{ lat: 53.0136940244864, lon: 18.5711324214935, stop: false },
|
||||
{ lat: 53.0137174228641, lon: 18.5718646645546, stop: false },
|
||||
{ lat: 53.0137279117879, lon: 18.5728490352631, stop: false },
|
||||
{ lat: 53.0137569580252, lon: 18.5748016834259, stop: false },
|
||||
{ lat: 53.0138279598562, lon: 18.5799622535706, stop: false },
|
||||
{ lat: 53.0138271530179, lon: 18.5806059837341, stop: false },
|
||||
{ lat: 53.013829, lon: 18.581098, stop: false },
|
||||
{ lat: 53.0138465171337, lon: 18.5817566514015, stop: false },
|
||||
{ lat: 53.0138731427787, lon: 18.5820704698563, stop: false },
|
||||
{ lat: 53.013915905144, lon: 18.5823293030262, stop: false },
|
||||
{ lat: 53.0139990092421, lon: 18.5826712846756, stop: false },
|
||||
{ lat: 53.0141377846607, lon: 18.5830146074295, stop: false },
|
||||
{ lat: 53.0143023781834, lon: 18.5833364725113, stop: false },
|
||||
{ lat: 53.0147412911747, lon: 18.5841679573059, stop: false },
|
||||
{ lat: 53.0149058823955, lon: 18.5845327377319, stop: false },
|
||||
{ lat: 53.0149736550686, lon: 18.5848170518875, stop: false },
|
||||
{ lat: 53.0150059277327, lon: 18.5851013660431, stop: false },
|
||||
{ lat: 53.0149768823361, lon: 18.5854822397232, stop: false },
|
||||
{ lat: 53.0149381551103, lon: 18.5858201980591, stop: false },
|
||||
{ lat: 53.0148832914808, lon: 18.5861045122147, stop: false },
|
||||
{ lat: 53.0147412911747, lon: 18.5865873098373, stop: false },
|
||||
{ lat: 53.0144766529938, lon: 18.5873705148697, stop: false },
|
||||
{ lat: 53.0143185147695, lon: 18.5879445075989, stop: false },
|
||||
{ lat: 53.0142475137456, lon: 18.588320016861, stop: false },
|
||||
{ lat: 53.0142216951625, lon: 18.5889208316803, stop: false },
|
||||
{ lat: 53.014231377133, lon: 18.5903531312943, stop: false },
|
||||
{ lat: 53.014179739932, lon: 18.591029047966, stop: false },
|
||||
{ lat: 53.014093, lon: 18.591931, stop: false },
|
||||
{ lat: 53.0140635560039, lon: 18.5925793647766, stop: false },
|
||||
{ lat: 53.0138634607279, lon: 18.5950094461441, stop: false },
|
||||
{ lat: 53.0137860042429, lon: 18.5960957407951, stop: false },
|
||||
{ lat: 53.0137884247602, lon: 18.5964578390121, stop: false },
|
||||
{ lat: 53.013811016248, lon: 18.5968963801861, stop: false },
|
||||
{ lat: 53.013823118826, lon: 18.5972410440445, stop: false },
|
||||
{ lat: 53.0138626538902, lon: 18.5976192355156, stop: false },
|
||||
{ lat: 53.0138743530349, lon: 18.5977774858475, stop: false },
|
||||
{ lat: 53.0138929102925, lon: 18.5980108380318, stop: false },
|
||||
{ lat: 53.0139522127793, lon: 18.5982844233513, stop: false },
|
||||
{ lat: 53.0140054639226, lon: 18.5985177755356, stop: false },
|
||||
{ lat: 53.0141829672589, lon: 18.5990703105927, stop: false },
|
||||
{ lat: 53.0145186079356, lon: 18.5998964309692, stop: false },
|
||||
{ lat: 53.014776791295, lon: 18.600577712059, stop: false },
|
||||
{ lat: 53.0149026551227, lon: 18.6009210348129, stop: false },
|
||||
{ lat: 53.0150091549978, lon: 18.6013180017471, stop: false },
|
||||
{ lat: 53.015039, lon: 18.601521, stop: false },
|
||||
{ lat: 53.0150914501758, lon: 18.601650595665, stop: false },
|
||||
{ lat: 53.0151769724494, lon: 18.6019402742386, stop: false },
|
||||
{ lat: 53.0153157440799, lon: 18.6025303602219, stop: false },
|
||||
{ lat: 53.0159063253302, lon: 18.6050033569336, stop: false },
|
||||
{ lat: 53.0159595740621, lon: 18.6051052808762, stop: false },
|
||||
{ lat: 53.016212908038, lon: 18.6057087779045, stop: false },
|
||||
{ lat: 53.016392015875, lon: 18.6062023043633, stop: false },
|
||||
{ lat: 53.0167034358397, lon: 18.6071813106537, stop: false },
|
||||
{ lat: 53.0168970637, lon: 18.6077579855919, stop: false },
|
||||
{ lat: 53.0172342968156, lon: 18.6090561747551, stop: false },
|
||||
{ lat: 53.0178200112259, lon: 18.6114004254341, stop: false },
|
||||
{ lat: 53.017955, lon: 18.61257, stop: false },
|
||||
{ lat: 53.0180571990971, lon: 18.6132296919823, stop: false },
|
||||
{ lat: 53.0180975370407, lon: 18.6134120821953, stop: false },
|
||||
{ lat: 53.0182330722551, lon: 18.6135971546173, stop: false },
|
||||
{ lat: 53.0183702205505, lon: 18.6136749386787, stop: false },
|
||||
{ lat: 53.0185638409312, lon: 18.6137366294861, stop: false },
|
||||
{ lat: 53.018813932637, lon: 18.61372590065, stop: false },
|
||||
{ lat: 53.0190462746003, lon: 18.6136186122894, stop: false },
|
||||
{ lat: 53.0198610750697, lon: 18.6131680011749, stop: false },
|
||||
{ lat: 53.0206968346139, lon: 18.6127656698227, stop: false },
|
||||
{ lat: 53.020924326204, lon: 18.6126691102982, stop: false },
|
||||
{ lat: 53.021166337217, lon: 18.6126852035522, stop: false },
|
||||
{ lat: 53.0213776923911, lon: 18.6128380894661, stop: false },
|
||||
{ lat: 53.021561, lon: 18.61306, stop: false },
|
||||
{ lat: 53.0225732010204, lon: 18.6140289902687, stop: false },
|
||||
{ lat: 53.0226667751966, lon: 18.6141550540924, stop: false },
|
||||
{ lat: 53.0227329223366, lon: 18.6142757534981, stop: false },
|
||||
{ lat: 53.0228281095064, lon: 18.6145010590553, stop: false },
|
||||
{ lat: 53.0231007631186, lon: 18.6154532432556, stop: false },
|
||||
{ lat: 53.0233121088159, lon: 18.6160218715668, stop: false },
|
||||
{ lat: 53.0233766416468, lon: 18.616236448288, stop: false },
|
||||
{ lat: 53.0236105723499, lon: 18.617859184742, stop: false },
|
||||
{ lat: 53.023596, lon: 18.617925, stop: false },
|
||||
{ lat: 53.0236589716473, lon: 18.6182025074959, stop: false },
|
||||
{ lat: 53.0240042183936, lon: 18.6206004023552, stop: false },
|
||||
{ lat: 53.0243446225271, lon: 18.6229392886162, stop: false },
|
||||
{ lat: 53.024355, lon: 18.623137, stop: false },
|
||||
{ lat: 53.0244172202175, lon: 18.6234569549561, stop: false },
|
||||
{ lat: 53.0245866143533, lon: 18.6245834827423, stop: false },
|
||||
{ lat: 53.0248173119636, lon: 18.6261793971062, stop: false },
|
||||
{ lat: 53.0248770027528, lon: 18.6267426609993, stop: false },
|
||||
{ lat: 53.0249044282228, lon: 18.6272844672203, stop: false },
|
||||
{ lat: 53.0248866823325, lon: 18.6278986930847, stop: false },
|
||||
{ lat: 53.0248657099071, lon: 18.6282339692116, stop: false },
|
||||
{ lat: 53.0248156986979, lon: 18.6286067962646, stop: false },
|
||||
{ lat: 53.024689863786, lon: 18.629746735096, stop: false },
|
||||
{ lat: 53.0245382160966, lon: 18.6309456825256, stop: false },
|
||||
{ lat: 53.0243494623769, lon: 18.632450401783, stop: false },
|
||||
{ lat: 53.0243284896904, lon: 18.6327347159386, stop: false },
|
||||
{ lat: 53.0243510756601, lon: 18.6329787969589, stop: false },
|
||||
{ lat: 53.0244285131806, lon: 18.6332631111145, stop: false },
|
||||
{ lat: 53.0245382160966, lon: 18.6334374547005, stop: false },
|
||||
{ lat: 53.0248818425429, lon: 18.6338827013969, stop: false },
|
||||
{ lat: 53.0249608923709, lon: 18.6340570449829, stop: false },
|
||||
{ lat: 53.0252238530014, lon: 18.6343467235565, stop: false },
|
||||
{ lat: 53.0255368224599, lon: 18.6348643898964, stop: false },
|
||||
{ lat: 53.0261401695438, lon: 18.6358863115311, stop: false },
|
||||
{ lat: 53.0273194144766, lon: 18.6379784345627, stop: false },
|
||||
{ lat: 53.0284873352723, lon: 18.6399900913239, stop: false },
|
||||
{ lat: 53.0290357954512, lon: 18.640918135643, stop: false },
|
||||
{ lat: 53.0293326062866, lon: 18.6414679884911, stop: false },
|
||||
{ lat: 53.029613, lon: 18.642062, stop: false },
|
||||
{ lat: 53.0297358786052, lon: 18.6422833800316, stop: false },
|
||||
{ lat: 53.0298875080216, lon: 18.6425060033798, stop: false },
|
||||
{ lat: 53.030047202256, lon: 18.6426830291748, stop: false },
|
||||
{ lat: 53.0304585331672, lon: 18.6433508992195, stop: false },
|
||||
{ lat: 53.0312456966792, lon: 18.6447349190712, stop: false },
|
||||
{ lat: 53.0326973949821, lon: 18.6473366618156, stop: false },
|
||||
{ lat: 53.0329312751379, lon: 18.6478275060654, stop: false },
|
||||
{ lat: 53.033373224521, lon: 18.6488091945648, stop: false },
|
||||
{ lat: 53.0337700073347, lon: 18.6497667431831, stop: false },
|
||||
{ lat: 53.034585, lon: 18.652202, stop: false },
|
||||
{ lat: 53.0348103351817, lon: 18.6528244614601, stop: false },
|
||||
{ lat: 53.0350071075487, lon: 18.6533984541893, stop: false },
|
||||
{ lat: 53.0355458076316, lon: 18.6549916863441, stop: false },
|
||||
{ lat: 53.0360490183959, lon: 18.6564803123474, stop: false },
|
||||
{ lat: 53.0363490066345, lon: 18.6574995517731, stop: false },
|
||||
{ lat: 53.036634477375, lon: 18.658752143383, stop: false },
|
||||
{ lat: 53.036779631264, lon: 18.6595916748047, stop: false },
|
||||
{ lat: 53.036913494973, lon: 18.6604714393616, stop: false },
|
||||
{ lat: 53.0369796202662, lon: 18.6611768603325, stop: false },
|
||||
{ lat: 53.037042, lon: 18.662501, stop: false },
|
||||
{ lat: 53.037087678454, lon: 18.6630356311798, stop: false },
|
||||
{ lat: 53.0370844528404, lon: 18.6644732952118, stop: false },
|
||||
{ lat: 53.0370650991536, lon: 18.6653906106949, stop: false },
|
||||
{ lat: 53.0370538094989, lon: 18.6655354499817, stop: false },
|
||||
{ lat: 53.0369957483712, lon: 18.6657473444939, stop: false },
|
||||
{ lat: 53.0368941412094, lon: 18.6659109592438, stop: false },
|
||||
{ lat: 53.0367812440823, lon: 18.6660236120224, stop: false },
|
||||
{ lat: 53.0366635081919, lon: 18.6660584807396, stop: false },
|
||||
{ lat: 53.03626836484, lon: 18.6660531163216, stop: false },
|
||||
{ lat: 53.0360667596939, lon: 18.6660772562027, stop: false },
|
||||
{ lat: 53.035969988889, lon: 18.666130900383, stop: false },
|
||||
{ lat: 53.0359119263018, lon: 18.6662194132805, stop: false },
|
||||
{ lat: 53.0358716050147, lon: 18.666318655014, stop: false },
|
||||
{ lat: 53.0358522507835, lon: 18.6664044857025, stop: false },
|
||||
{ lat: 53.0358425736646, lon: 18.666487634182, stop: false },
|
||||
{ lat: 53.0358457993711, lon: 18.6665761470795, stop: false },
|
||||
{ lat: 53.0358490250774, lon: 18.6666512489319, stop: false },
|
||||
{ lat: 53.0358828949789, lon: 18.6667665839195, stop: false },
|
||||
{ lat: 53.0359409576052, lon: 18.6668872833252, stop: false },
|
||||
{ lat: 53.0360135357781, lon: 18.6669623851776, stop: false },
|
||||
{ lat: 53.0360909523612, lon: 18.6669918894768, stop: false },
|
||||
{ lat: 53.0361586917574, lon: 18.6669918894768, stop: false },
|
||||
{ lat: 53.0362425594339, lon: 18.6669623851776, stop: false },
|
||||
{ lat: 53.0363183627703, lon: 18.6668953299522, stop: false },
|
||||
{ lat: 53.0367167313055, lon: 18.6663025617599, stop: true },
|
||||
];
|
||||
|
||||
|
||||
const stops = route.filter(point => point.stop).map(point => [point.lat, point.lon]);
|
||||
|
||||
/*if (stops.length < 2) {
|
||||
console.error('Not enough stops to calculate route');
|
||||
} else {
|
||||
const coordinates = stops.map(stop => stop.join(',')).join(';');
|
||||
|
||||
axios.get(`http://router.project-osrm.org/route/v1/driving/${coordinates}?overview=full&geometries=geojson`)
|
||||
.then(response => {
|
||||
const route = response.data.routes[0];
|
||||
const routeLine = L.geoJSON(route.geometry).addTo(map);
|
||||
map.fitBounds(routeLine.getBounds());
|
||||
|
||||
stops.forEach(stop => {
|
||||
L.marker(stop).addTo(map);
|
||||
});
|
||||
})
|
||||
.catch(error => {
|
||||
console.error('Error fetching route from OSRM', error);
|
||||
});
|
||||
}*/
|
||||
|
||||
// Holder for data imported from the server.
|
||||
let imported_data = null;
|
||||
let routesData = [];
|
||||
let tripsData = [];
|
||||
let stopsData = [];
|
||||
let stopTimesData = [];
|
||||
// Holder of currently inspected shape ID value
|
||||
let currentShapeID = null;
|
||||
// Create empty dictionary for shapes to be displayed.
|
||||
let currentShapeIDs = {};
|
||||
currentShapeIDs["routes"] = [];
|
||||
currentShapeIDs["stops"] = [];
|
||||
// Create empty dictionary for shapes to be displayed on the map.
|
||||
let shapes = {};
|
||||
shapes["routes"] = {};
|
||||
shapes["stops"] = {};
|
||||
// Total number of newly defined Shapes
|
||||
// In a mature version the indexing has to be changed.
|
||||
let numNewShapes = 0;
|
||||
@@ -330,7 +139,7 @@
|
||||
// Setting up the drawing control
|
||||
|
||||
// FeatureGroup to store editable layers
|
||||
// FeatureGroup to store layers created by drawing
|
||||
var drawnItems = new L.FeatureGroup();
|
||||
map.addLayer(drawnItems);
|
||||
|
||||
@@ -343,478 +152,13 @@
draw: false
|
||||
});
|
||||
|
||||
// Polyline drawer that will be used
|
||||
// Polyline drawer that will be used for drawing on the map
|
||||
let polylineDrawer = new L.Draw.Polyline(map, drawControl.options.polyline);
|
||||
|
||||
document.getElementById('fileInput').addEventListener('change', handleFileSelect, false);
|
||||
|
||||
function handleFileSelect(event) {
|
||||
const files = event.target.files;
|
||||
const routesFile = Array.from(files).find(file => file.name === 'routes.txt');
|
||||
const tripsFile = Array.from(files).find(file => file.name === 'trips.txt');
|
||||
|
||||
if (routesFile && tripsFile) {
|
||||
// Read and parse the routes file
|
||||
const reader = new FileReader();
|
||||
reader.onload = function(e) {
|
||||
routesData = parseCSV(e.target.result);
|
||||
displayRouteIds(routesData);
|
||||
};
|
||||
reader.readAsText(routesFile);
|
||||
|
||||
// Read and parse the trips file
|
||||
const tripsReader = new FileReader();
|
||||
tripsReader.onload = function(e) {
|
||||
tripsData = parseCSV(e.target.result);
|
||||
};
|
||||
tripsReader.readAsText(tripsFile);
|
||||
} else {
|
||||
alert('Please select a valid GTFS folder containing routes.txt and trips.txt.');
|
||||
}
|
||||
|
||||
// Handle shapes file
|
||||
let shapesFile;
|
||||
for (let file of files) {
|
||||
if (file.name === 'shapes.txt') {
|
||||
shapesFile = file;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (shapesFile) {
|
||||
// Parse shapes.txt file and draw on the map
|
||||
parseShapesFile(shapesFile);
|
||||
} else {
|
||||
alert('shapes.txt file not found in the selected folder.');
|
||||
}
|
||||
}
|
||||
|
||||
function parseCSV(data) {
|
||||
const lines = data.split('\n');
|
||||
const headers = lines[0].split(',');
|
||||
return lines.slice(1).map(line => {
|
||||
const values = line.split(',');
|
||||
return headers.reduce((object, header, index) => {
|
||||
object[header] = values[index];
|
||||
return object;
|
||||
}, {});
|
||||
});
|
||||
}
|
||||
|
||||
function displayRouteIds(routes) {
|
||||
const currentRoutes = document.getElementById('currentRoutes');
|
||||
const routeList = document.createElement('div');
|
||||
routeList.className = 'list-group';
|
||||
routes.forEach(route => {
|
||||
const routeItem = document.createElement('button');
|
||||
routeItem.className = 'list-group-item list-group-item-action';
|
||||
routeItem.textContent = route.route_id;
|
||||
routeItem.onclick = () => showTrips(route.route_id);
|
||||
routeList.appendChild(routeItem);
|
||||
});
|
||||
currentRoutes.appendChild(routeList);
|
||||
}
|
||||
|
||||
function showTrips(routeId) {
|
||||
const trips = tripsData.filter(trip => trip.route_id === routeId);
|
||||
const tripsTable = document.getElementById('tripsTable');
|
||||
tripsTable.innerHTML = '';
|
||||
|
||||
if (trips.length > 0) {
|
||||
const headers = Object.keys(trips[0]);
|
||||
const headerRow = document.createElement('tr');
|
||||
headers.forEach(header => {
|
||||
const th = document.createElement('th');
|
||||
th.textContent = header;
|
||||
headerRow.appendChild(th);
|
||||
});
|
||||
tripsTable.appendChild(headerRow);
|
||||
|
||||
trips.forEach(trip => {
|
||||
const row = document.createElement('tr');
|
||||
headers.forEach(header => {
|
||||
const cell = document.createElement('td');
|
||||
cell.textContent = trip[header];
|
||||
row.appendChild(cell);
|
||||
});
|
||||
tripsTable.appendChild(row);
|
||||
});
|
||||
} else {
|
||||
const noDataRow = document.createElement('tr');
|
||||
const noDataCell = document.createElement('td');
|
||||
noDataCell.colSpan = Object.keys(trips[0] || {}).length;
|
||||
noDataCell.textContent = 'No trips found for this route.';
|
||||
noDataRow.appendChild(noDataCell);
|
||||
tripsTable.appendChild(noDataRow);
|
||||
}
|
||||
|
||||
$('#tripsModal').modal('show');
|
||||
}
|
||||
|
||||
// Function to parse shapes.txt file and draw shapes on the map
|
||||
function parseShapesFile(file) {
|
||||
const reader = new FileReader();
|
||||
reader.onload = function(event) {
|
||||
const text = event.target.result;
|
||||
const lines = text.split('\n');
|
||||
const headers = lines[0].split(',');
|
||||
const shapeIdIndex = headers.indexOf('shape_id');
|
||||
const shapePtSequenceIndex = headers.indexOf('shape_pt_sequence');
|
||||
const shapePtLatIndex = headers.indexOf('shape_pt_lat');
|
||||
const shapePtLonIndex = headers.indexOf('shape_pt_lon');
|
||||
|
||||
for (let i = 1; i < lines.length; i++) {
|
||||
const line = lines[i].trim();
|
||||
if (line) {
|
||||
const columns = line.split(',');
|
||||
const shape_id = columns[shapeIdIndex];
|
||||
const shape_pt_sequence = parseInt(columns[shapePtSequenceIndex], 10);
|
||||
const shape_pt_lat = parseFloat(columns[shapePtLatIndex]);
|
||||
const shape_pt_lon = parseFloat(columns[shapePtLonIndex]);
|
||||
|
||||
if (!shapes[shape_id]) {
|
||||
shapes[shape_id] = [];
|
||||
}
|
||||
shapes[shape_id].push({
|
||||
sequence: shape_pt_sequence,
|
||||
lat: shape_pt_lat,
|
||||
lon: shape_pt_lon
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Sort the shape points by sequence and draw them with click listeners
|
||||
for (let shape_id in shapes) {
|
||||
if (shapes.hasOwnProperty(shape_id)) {
|
||||
shapes[shape_id].sort((a, b) => a.sequence - b.sequence);
|
||||
const latlngs = shapes[shape_id].map(point => [point.lat, point.lon]);
|
||||
|
||||
addClickableShape(latlngs, shape_id);
|
||||
}
|
||||
}
|
||||
};
|
||||
reader.readAsText(file);
|
||||
}
|
||||
|
||||
function addClickableShape (latlngs, shape_id) {
|
||||
// Create polyline and add to map
|
||||
const polyline = L.polyline(latlngs, { color: 'blue', weight: 5 }).addTo(map);
|
||||
|
||||
// Add click event listener to polyline
|
||||
polyline.on('click', function (e) {
|
||||
// Reset previous polyline to blue
|
||||
if (currentShapeID !== null) {
|
||||
shapes[currentShapeID].polyline.setStyle({ color: 'blue' });
|
||||
}
|
||||
// Set new currentShapeID value
|
||||
currentShapeID = shape_id;
|
||||
|
||||
// Highlight the clicked polyline
|
||||
polyline.setStyle({ color: 'red' }).bringToFront();
|
||||
|
||||
displayShapeOptions(shape_id, shapes);
|
||||
|
||||
console.log('Shape clicked:', shape_id);
|
||||
});
|
||||
|
||||
// Store polyline reference in shapes object for reset purpose
|
||||
shapes[shape_id].polyline = polyline;
|
||||
}
|
||||
|
||||
function addNewShape() {
|
||||
shapeStops = [];
|
||||
cancelShapeEdit();
|
||||
|
||||
// Create a container div for the buttons
|
||||
const buttonContainer = document.createElement('div');
|
||||
buttonContainer.className = 'button-container mt-3 p-3 border rounded';
|
||||
|
||||
// Create Toggle OSM router machine button
|
||||
const editButton = document.createElement('button');
|
||||
editButton.className = 'btn btn-primary btn-block my-2';
|
||||
editButton.textContent = 'Toggle OSM router machine';
|
||||
editButton.onclick = () => {
|
||||
// Open route editing tool
|
||||
shapeToOSRM(shapes[currentShapeID].polyline);
|
||||
};
|
||||
buttonContainer.appendChild(editButton);
|
||||
|
||||
// Create Save button
|
||||
const saveButton = document.createElement('button');
|
||||
saveButton.className = 'btn btn-secondary btn-block my-2';
|
||||
saveButton.textContent = 'Save Shape';
|
||||
saveButton.setAttribute('ready-to-save', 'false');
|
||||
saveButton.onclick = () => {
|
||||
if (saveButton.getAttribute('ready-to-save') === 'true') {
|
||||
// Append coordinates to stops list
|
||||
var latlngs = currentLayer.getLatLngs();
|
||||
latlngs.forEach(function(latlng) {
|
||||
shapeStops.push([latlng.lat, latlng.lng]);
|
||||
});
|
||||
|
||||
// Transform the polyline into the specified format in the shapes dictionary
|
||||
shape_id = "n_" + numNewShapes;
|
||||
numNewShapes += 1;
|
||||
shapes[shape_id] = [];
|
||||
latlngs.forEach(function(latlng, index) {
|
||||
shapes[shape_id].push({
|
||||
sequence: index + 1,
|
||||
lat: latlng.lat,
|
||||
lon: latlng.lng
|
||||
});
|
||||
});
|
||||
|
||||
// Delete the layer with the drawn polyline
|
||||
map.removeLayer(currentLayer);
|
||||
|
||||
// Create polyline and add to map
|
||||
addClickableShape (latlngs, shape_id);
|
||||
|
||||
// Log the results
|
||||
console.log('Stops:', shapeStops);
|
||||
console.log('Shapes:', shapes);
|
||||
|
||||
// Change button esthetics to default
|
||||
saveButton.style.backgroundColor = '';
|
||||
saveButton.style.color = '';
|
||||
|
||||
cancelShapeEdit();
|
||||
}
|
||||
};
|
||||
buttonContainer.appendChild(saveButton);
|
||||
|
||||
// Create Cancel button
|
||||
const cancelButton = document.createElement('button');
|
||||
cancelButton.className = 'btn btn-secondary btn-block my-2';
|
||||
cancelButton.textContent = 'Cancel';
|
||||
cancelButton.onclick = () => {
|
||||
cancelShapeEdit();
|
||||
};
|
||||
buttonContainer.appendChild(cancelButton);
|
||||
|
||||
// Append the button container to the chosenShape element
|
||||
chosenShape.appendChild(buttonContainer);
|
||||
|
||||
polylineDrawer.enable();
|
||||
|
||||
// Add created polylines to the map
|
||||
map.on(L.Draw.Event.CREATED, function (event) {
|
||||
var layer = event.layer;
|
||||
currentLayer = layer;
|
||||
drawnItems.addLayer(layer);
|
||||
});
|
||||
|
||||
// Add created shapes to the map and handle the polyline
|
||||
map.on(L.Draw.Event.CREATED, function (event) {
|
||||
var layer = event.layer;
|
||||
|
||||
// Check if the drawn shape is a polyline
|
||||
if (layer instanceof L.Polyline && !(layer instanceof L.Polygon)) {
|
||||
// Disable the drawing control
|
||||
map.removeControl(drawControl);
|
||||
|
||||
saveButton.style.backgroundColor = 'green';
|
||||
saveButton.style.color = 'white'; // Ensure the text is readable
|
||||
saveButton.setAttribute('ready-to-save', 'true');
|
||||
} else {
|
||||
// Add the drawn layer to the map if it's not a polyline
|
||||
drawnItems.addLayer(layer);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
function displayShapeOptions(shapeID, shapes) {
|
||||
const chosenShape = document.getElementById('chosenShape');
|
||||
|
||||
// Clear any existing content inside chosenShape
|
||||
while (chosenShape.firstChild) {
|
||||
chosenShape.removeChild(chosenShape.firstChild);
|
||||
}
|
||||
|
||||
// Add text content at the top
|
||||
const textContent = document.createElement('div');
|
||||
textContent.textContent = "Shape " + shapeID;
|
||||
chosenShape.appendChild(textContent);
|
||||
|
||||
// Create a container div for the buttons
|
||||
const buttonContainer = document.createElement('div');
|
||||
buttonContainer.className = 'button-container mt-3 p-3 border rounded';
|
||||
|
||||
// Create Edit button
|
||||
const editButton = document.createElement('button');
|
||||
editButton.className = 'btn btn-primary btn-block my-2';
|
||||
editButton.textContent = 'Edit Shape';
|
||||
editButton.onclick = () => {
|
||||
// Open route editing tool
|
||||
shapeToOSRM(shapes[shapeID].polyline);
|
||||
};
|
||||
buttonContainer.appendChild(editButton);
|
||||
|
||||
// Create Delete button
|
||||
const deleteButton = document.createElement('button');
|
||||
deleteButton.className = 'btn btn-danger btn-block my-2';
|
||||
deleteButton.textContent = 'Delete Shape';
|
||||
deleteButton.onclick = () => {
|
||||
|
||||
// Cancel the edit view for the current shape
|
||||
cancelShapeEdit();
|
||||
// Delete the current shape and its representation from the map
|
||||
deleteShape(shapeID);
|
||||
};
|
||||
buttonContainer.appendChild(deleteButton);
|
||||
|
||||
// Create Cancel button
|
||||
const cancelButton = document.createElement('button');
|
||||
cancelButton.className = 'btn btn-secondary btn-block my-2';
|
||||
cancelButton.textContent = 'Cancel';
|
||||
cancelButton.onclick = () => {
|
||||
cancelShapeEdit();
|
||||
};
|
||||
buttonContainer.appendChild(cancelButton);
|
||||
|
||||
// Append the button container to the chosenShape element
|
||||
chosenShape.appendChild(buttonContainer);
|
||||
}
|
||||
|
||||
function cancelShapeEdit () {
|
||||
// Define reference to the chosenShape field
|
||||
const chosenShape = document.getElementById('chosenShape');
|
||||
|
||||
// Remove any existing routing control (if needed)
|
||||
if (window.routingControl) {
|
||||
map.removeControl(window.routingControl);
|
||||
}
|
||||
|
||||
// Disable the current drawing
|
||||
polylineDrawer.disable();
|
||||
|
||||
// Clear all drawn items
|
||||
drawnItems.clearLayers();
|
||||
|
||||
// Clear any existing content inside chosenShape
|
||||
while (chosenShape.firstChild) {
|
||||
chosenShape.removeChild(chosenShape.firstChild);
|
||||
}
|
||||
|
||||
// Add text content at the top
|
||||
const textContent = document.createElement('div');
|
||||
textContent.textContent = "Chosen Shape";
|
||||
chosenShape.appendChild(textContent);
|
||||
|
||||
// Reset previous polyline to blue
|
||||
if (currentShapeID !== null) {
|
||||
shapes[currentShapeID].polyline.setStyle({ color: 'blue' });
|
||||
}
|
||||
// Set currentShapeID to null
|
||||
currentShapeID = null;
|
||||
}
|
||||
|
||||
function deleteShape(shapeID) {
|
||||
if (shapes[shapeID] && shapes[shapeID].polyline) {
|
||||
map.removeLayer(shapes[shapeID].polyline); // Remove the polyline from the map
|
||||
delete shapes[shapeID]; // Delete the shape from the shapes object
|
||||
console.log('Shape deleted:', shapeID);
|
||||
}
|
||||
}
|
||||
|
||||
// Function that takes a shape of a route and draws an OSRM path along it
|
||||
function shapeToOSRM (polyline) {
|
||||
if (!polyline) {
|
||||
console.error('Invalid polyline');
|
||||
return;
|
||||
}
|
||||
|
||||
// Get the latlngs array from the polyline
|
||||
const latlngs = polyline.getLatLngs();
|
||||
|
||||
if (latlngs.length < 2) {
|
||||
console.error('Polyline should have at least two points');
|
||||
return;
|
||||
}
|
||||
|
||||
// Define start and end points
|
||||
const startPoint = latlngs[0];
|
||||
const endPoint = latlngs[latlngs.length - 1];
|
||||
|
||||
const initialWaypoints = [
|
||||
L.latLng(startPoint.lat, startPoint.lng),
|
||||
L.latLng(endPoint.lat, endPoint.lng)
|
||||
];
|
||||
|
||||
console.log('initialWaypoints', initialWaypoints[0]);
|
||||
|
||||
const routingControl = L.Routing.control({
|
||||
waypoints: initialWaypoints,
|
||||
routeWhileDragging: false,
|
||||
createMarker: () => null // Don't create markers for initial route
|
||||
}).addTo(map);
|
||||
|
||||
console.log('Added routingControl.');
|
||||
|
||||
routingControl.on('routesfound', function (e) {
|
||||
const routes = e.routes;
|
||||
const routePolyline = routes[0].coordinates;
|
||||
|
||||
console.log('routePolyline[0]: ', routePolyline[0]);
|
||||
|
||||
// Find the furthest point from the original polyline to the generated polyline
|
||||
const furthestPoint = findFurthestPoint(latlngs, routePolyline);
|
||||
|
||||
console.log('furthestPoint: ', furthestPoint);
|
||||
|
||||
// Create new waypoints with the furthest point added
|
||||
const newWaypoints = [
|
||||
L.latLng(startPoint.lat, startPoint.lng),
|
||||
L.latLng(furthestPoint.lat, furthestPoint.lng),
|
||||
L.latLng(endPoint.lat, endPoint.lng)
|
||||
];
|
||||
|
||||
// Generate the new route with the updated waypoints
|
||||
generateNewRoute(newWaypoints);
|
||||
});
|
||||
}
|
||||
|
||||
function generateNewRoute(waypoints) {
|
||||
if (window.finalRoutingControl) {
|
||||
map.removeControl(window.finalRoutingControl);
|
||||
}
|
||||
|
||||
window.finalRoutingControl = L.Routing.control({
|
||||
waypoints: waypoints,
|
||||
routeWhileDragging: false
|
||||
}).addTo(map);
|
||||
}
|
||||
|
||||
function findFurthestPoint(originalPolyline, generatedPolyline) {
|
||||
let maxDistance = -1;
|
||||
let furthestPoint = null;
|
||||
|
||||
originalPolyline.forEach(point => {
|
||||
const latlngPoint = L.latLng(point.lat, point.lng);
|
||||
const distance = findDistanceToPolyline(latlngPoint, generatedPolyline);
|
||||
|
||||
if (distance > maxDistance) {
|
||||
maxDistance = distance;
|
||||
furthestPoint = point;
|
||||
}
|
||||
});
|
||||
|
||||
return furthestPoint;
|
||||
}
|
||||
|
||||
function findDistanceToPolyline(point, polyline) {
|
||||
let minDistance = Infinity;
|
||||
|
||||
polyline.forEach((segmentPoint, index) => {
|
||||
if (index === 0) return;
|
||||
const prevPoint = polyline[index - 1];
|
||||
console.log('Current shapefile point: ', point);
|
||||
console.log('Current generated polyline point: ', segmentPoint);
|
||||
const segmentDistance = L.GeometryUtil.distanceSegment(map, point, prevPoint, segmentPoint);
|
||||
minDistance = Math.min(minDistance, segmentDistance);
|
||||
});
|
||||
|
||||
return minDistance;
|
||||
}
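// Usage sketch (illustrative, not wired into the UI): these helpers refine an
// OSRM route toward the original GTFS shape by locating the shape point that
// strays furthest from the generated route and inserting it as an extra
// waypoint. Note that L.GeometryUtil.distanceSegment is not part of core
// Leaflet; it is assumed here to come from the Leaflet.GeometryUtil plugin,
// which has to be loaded separately.
//
// const detour = findFurthestPoint(polyline.getLatLngs(), generatedCoords);
// generateNewRoute([startLatLng, L.latLng(detour.lat, detour.lng), endLatLng]);
// (polyline, generatedCoords, startLatLng and endLatLng stand for the values
// built inside shapeToOSRM above.)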
|
||||
// Add the map click event listener
|
||||
map.on('click', onMapClick);
|
||||
|
||||
// Function to write "Hello World!" to the console when the map is clicked
|
||||
function onMapClick(event) {
|
||||
|
|
@@ -824,10 +168,8 @@
|
|||
}
|
||||
}
|
||||
|
||||
// Add the map click event listener
|
||||
map.on('click', onMapClick);
|
||||
|
||||
function importGTFS() {
|
||||
//imported_data = JSON.parse('{{ data }}');
|
||||
alert("Import existing GTFS clicked");
|
||||
// Logic to import existing GTFS would go here
|
||||
}
|
||||
|
|
|
|||
110
transport_accessibility/pt_map/templates/osrm_drawing.js
Normal file
|
|
@@ -0,0 +1,110 @@
|
|||
// This script is not imported in the main script at all
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
// Function that takes a shape of a route and draws an OSRM path along it
|
||||
function shapeToOSRM (polyline) {
|
||||
if (!polyline) {
|
||||
console.error('Invalid polyline');
|
||||
return;
|
||||
}
|
||||
|
||||
// Get the latlngs array from the polyline
|
||||
const latlngs = polyline.getLatLngs();
|
||||
|
||||
if (latlngs.length < 2) {
|
||||
console.error('Polyline should have at least two points');
|
||||
return;
|
||||
}
|
||||
|
||||
// Define start and end points
|
||||
const startPoint = latlngs[0];
|
||||
const endPoint = latlngs[latlngs.length - 1];
|
||||
|
||||
const initialWaypoints = [
|
||||
L.latLng(startPoint.lat, startPoint.lng),
|
||||
L.latLng(endPoint.lat, endPoint.lng)
|
||||
];
|
||||
|
||||
console.log('initialWaypoints', initialWaypoints[0]);
|
||||
|
||||
const routingControl = L.Routing.control({
|
||||
waypoints: initialWaypoints,
|
||||
routeWhileDragging: false,
|
||||
createMarker: () => null // Don't create markers for initial route
|
||||
}).addTo(map);
|
||||
|
||||
console.log('Added routingControl.');
|
||||
|
||||
routingControl.on('routesfound', function (e) {
|
||||
const routes = e.routes;
|
||||
const routePolyline = routes[0].coordinates;
|
||||
|
||||
console.log('routePolyline[0]: ', routePolyline[0]);
|
||||
|
||||
// Find the furthest point from the original polyline to the generated polyline
|
||||
const furthestPoint = findFurthestPoint(latlngs, routePolyline);
|
||||
|
||||
console.log('furthestPoint: ', furthestPoint);
|
||||
|
||||
// Create new waypoints with the furthest point added
|
||||
const newWaypoints = [
|
||||
L.latLng(startPoint.lat, startPoint.lng),
|
||||
L.latLng(furthestPoint.lat, furthestPoint.lng),
|
||||
L.latLng(endPoint.lat, endPoint.lng)
|
||||
];
|
||||
|
||||
// Generate the new route with the updated waypoints
|
||||
generateNewRoute(newWaypoints);
|
||||
});
|
||||
}
|
||||
|
||||
function generateNewRoute(waypoints) {
|
||||
if (window.finalRoutingControl) {
|
||||
map.removeControl(window.finalRoutingControl);
|
||||
}
|
||||
|
||||
window.finalRoutingControl = L.Routing.control({
|
||||
waypoints: waypoints,
|
||||
routeWhileDragging: false
|
||||
}).addTo(map);
|
||||
}
|
||||
|
||||
function findFurthestPoint(originalPolyline, generatedPolyline) {
|
||||
let maxDistance = -1;
|
||||
let furthestPoint = null;
|
||||
|
||||
originalPolyline.forEach(point => {
|
||||
const latlngPoint = L.latLng(point.lat, point.lng);
|
||||
const distance = findDistanceToPolyline(latlngPoint, generatedPolyline);
|
||||
|
||||
if (distance > maxDistance) {
|
||||
maxDistance = distance;
|
||||
furthestPoint = point;
|
||||
}
|
||||
});
|
||||
|
||||
return furthestPoint;
|
||||
}
|
||||
|
||||
function findDistanceToPolyline(point, polyline) {
|
||||
let minDistance = Infinity;
|
||||
|
||||
polyline.forEach((segmentPoint, index) => {
|
||||
if (index === 0) return;
|
||||
const prevPoint = polyline[index - 1];
|
||||
console.log('Current shapefile point: ', point);
|
||||
console.log('Current generated polyline point: ', segmentPoint);
|
||||
const segmentDistance = L.GeometryUtil.distanceSegment(map, point, prevPoint, segmentPoint);
|
||||
minDistance = Math.min(minDistance, segmentDistance);
|
||||
});
|
||||
|
||||
return minDistance;
|
||||
}
|
||||
59
transport_accessibility/pt_map/templates/parsing_files.js
Normal file
|
|
@@ -0,0 +1,59 @@
|
|||
function parseCSV(data) {
|
||||
const lines = data.split(/\r\n|\n|\r/); // split on \r\n, \n or \r line endings
|
||||
const headers = lines[0].split(',');
|
||||
return lines.slice(1).map(line => { // lines.slice(1) creates a new array that excludes the first line (headers)
|
||||
const values = line.split(','); // splits the line into an array of values
|
||||
return headers.reduce((object, header, index) => {
|
||||
object[header] = values[index];
|
||||
return object;
|
||||
}, {});
|
||||
});
|
||||
}
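// Usage sketch (hypothetical input, for illustration only): parseCSV keys each
// row by the header line, so
//
// const sample = 'route_id,route_short_name\nA1,Airport';
// console.log(parseCSV(sample));
// // -> [ { route_id: 'A1', route_short_name: 'Airport' } ]
//
// Quoted fields that contain commas are not handled by this simple split.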
|
||||
|
||||
/**
|
||||
* Function to parse shapes.txt file, add the shape geometries to the shapes["routes"] dictionary
|
||||
* and draw the shapes on the map.
|
||||
*/
|
||||
function parseShapesFile(file) {
|
||||
const reader = new FileReader();
|
||||
reader.onload = function(event) {
|
||||
const text = event.target.result;
|
||||
const lines = text.split('\n');
|
||||
const headers = lines[0].split(',');
|
||||
const shapeIdIndex = headers.indexOf('shape_id');
|
||||
const shapePtSequenceIndex = headers.indexOf('shape_pt_sequence');
|
||||
const shapePtLatIndex = headers.indexOf('shape_pt_lat');
|
||||
const shapePtLonIndex = headers.indexOf('shape_pt_lon');
|
||||
|
||||
for (let i = 1; i < lines.length; i++) {
|
||||
const line = lines[i].trim();
|
||||
if (line) {
|
||||
const columns = line.split(',');
|
||||
const shape_id = columns[shapeIdIndex];
|
||||
const shape_pt_sequence = parseInt(columns[shapePtSequenceIndex], 10);
|
||||
const shape_pt_lat = parseFloat(columns[shapePtLatIndex]);
|
||||
const shape_pt_lon = parseFloat(columns[shapePtLonIndex]);
|
||||
|
||||
if (!shapes["routes"][shape_id]) {
|
||||
shapes["routes"][shape_id] = [];
|
||||
}
|
||||
shapes["routes"][shape_id].push({
|
||||
sequence: shape_pt_sequence,
|
||||
lat: shape_pt_lat,
|
||||
lon: shape_pt_lon
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Sort the shape points by sequence and draw them with click listeners
|
||||
for (let shape_id in shapes["routes"]) {
|
||||
if (shapes["routes"].hasOwnProperty(shape_id)) {
|
||||
shapes["routes"][shape_id].sort((a, b) => a.sequence - b.sequence);
|
||||
const latlngs = shapes["routes"][shape_id].map(point => [point.lat, point.lon]);
|
||||
|
||||
addClickableShape(latlngs, shape_id);
|
||||
}
|
||||
}
|
||||
};
|
||||
reader.readAsText(file);
|
||||
}
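// Sketch of the result (illustrative values): after parsing, each shape_id in
// shapes["routes"] holds its points sorted by shape_pt_sequence, e.g.
//
// shapes["routes"]["123"] = [ { sequence: 1, lat: 52.2297, lon: 21.0122 }, ... ];
//
// This assumes the global `shapes` holder and addClickableShape() from the
// main script are already in scope when this file is loaded.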
|
||||
99
transport_accessibility/pt_map/templates/shapes_on_map.js
Normal file
|
|
@@ -0,0 +1,99 @@
|
|||
function addClickableShape (latlngs, shape_id) {
|
||||
// Create polyline and add to map
|
||||
const polyline = L.polyline(latlngs, { color: 'blue', weight: 5 }).addTo(map);
|
||||
|
||||
// Store polyline reference in shapes object for reset purpose
|
||||
shapes["routes"][shape_id].polyline = polyline;
|
||||
|
||||
// Add click event listener to polyline
|
||||
polyline.on('click', function (e) {
|
||||
let shape_ids = [shape_id];
|
||||
highlightShapes(shape_ids, "routes");
|
||||
|
||||
displayShapeOptions(shape_id, shapes);
|
||||
|
||||
console.log('Shape clicked:', shape_id);
|
||||
});
|
||||
}
|
||||
|
||||
function addClickablePoint(latlng, point_id) {
|
||||
// latlng = a pair of coordinates, e.g., (52.2297, 21.0122) for Warsaw.
|
||||
|
||||
// Tags version
|
||||
// Create marker and add to map
|
||||
// const marker = L.marker(latlng).addTo(map);
|
||||
|
||||
// Points version
|
||||
// Create circle marker and add to map
|
||||
const marker = L.circleMarker(latlng, { radius: 10, color: 'blue'}).addTo(map);
|
||||
|
||||
|
||||
// Store marker reference in points object for reset purpose
|
||||
shapes["stops"][point_id]["marker"] = marker;
|
||||
|
||||
// Add click event listener to marker
|
||||
marker.on('click', function (e) {
|
||||
let point_ids = [point_id];
|
||||
highlightShapes(point_ids, "stops");
|
||||
console.log('Point clicked:', point_id);
|
||||
});
|
||||
}
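// Usage sketch (illustrative id): the caller is expected to create the stop
// entry first, because the marker reference is stored on it, e.g.
//
// shapes["stops"]["s_45"] = { lat: 52.2297, lon: 21.0122 };
// addClickablePoint([52.2297, 21.0122], "s_45");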
|
||||
|
||||
function deleteShape(shapeID) {
|
||||
if (shapes["routes"][shapeID] && shapes["routes"][shapeID].polyline) {
|
||||
map.removeLayer(shapes["routes"][shapeID].polyline); // Remove the polyline from the map
|
||||
delete shapes["routes"][shapeID]; // Delete the shape from the shapes object
|
||||
console.log('Shape deleted:', shapeID);
|
||||
}
|
||||
}
|
||||
|
||||
function highlightShapes(shape_ids, type) {
|
||||
// Reset previous polylines to blue
|
||||
if (currentShapeIDs["routes"].length > 0) {
|
||||
for (const id of currentShapeIDs["routes"]) {
|
||||
if (shapes["routes"][id] && shapes["routes"][id].polyline) {
|
||||
shapes["routes"][id].polyline.setStyle({ color: 'blue' });
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (currentShapeIDs["stops"].length > 0) {
|
||||
for (const id of currentShapeIDs["stops"]) {
|
||||
if (shapes["stops"][id] && shapes["stops"][id]["marker"]) {
|
||||
shapes["stops"][id]["marker"].setStyle({ color: 'blue' });
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (type == "routes") {
|
||||
for (const id of shape_ids) {
|
||||
// Highlight the clicked polyline
|
||||
if (shapes[type][id] && shapes[type][id].polyline) {
|
||||
shapes[type][id].polyline.setStyle({ color: 'red' }).bringToFront();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (type == "stops") {
|
||||
for (const id of shape_ids) {
|
||||
// Highlight the clicked polyline
|
||||
if (shapes[type][id] && shapes[type][id]["marker"]) {
|
||||
shapes[type][id]["marker"].setStyle({ color: 'red' }).bringToFront();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Set new currentShapeIDs value
|
||||
currentShapeIDs[type] = shape_ids;
|
||||
}
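// Usage sketch (illustrative ids): highlight every shape belonging to a route,
// or a set of stops, and remember them so the next call can reset their colour
// first, e.g.
//
// highlightShapes(["123", "124"], "routes");
// highlightShapes(["s_45"], "stops");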
|
||||
|
||||
function makeShapesBlue() {
|
||||
// Reset previous polylines to blue
|
||||
if (currentShapeIDs["routes"].length > 0) {
|
||||
for (const id of currentShapeIDs["routes"]) {
|
||||
if (shapes["routes"][id] && shapes["routes"][id].polyline) {
|
||||
shapes["routes"][id].polyline.setStyle({ color: 'blue' });
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
349
transport_accessibility/pt_map/templates/sidebar.js
Normal file
|
|
@@ -0,0 +1,349 @@
|
|||
function handleFileSelect(event) {
|
||||
const files = event.target.files;
|
||||
const routesFile = Array.from(files).find(file => file.name === 'routes.txt');
|
||||
const tripsFile = Array.from(files).find(file => file.name === 'trips.txt');
|
||||
const stopsFile = Array.from(files).find(file => file.name === 'stops.txt');
|
||||
const stopTimesFile = Array.from(files).find(file => file.name === 'stop_times.txt');
|
||||
|
||||
if (routesFile && tripsFile && stopsFile && stopTimesFile) {
|
||||
// Read and parse the routes file
|
||||
const routesReader = new FileReader();
|
||||
routesReader.onload = function(e) {
|
||||
routesData = parseCSV(e.target.result);
|
||||
displayRouteIds(routesData);
|
||||
};
|
||||
routesReader.readAsText(routesFile);
|
||||
|
||||
// Read and parse the trips file
|
||||
const tripsReader = new FileReader();
|
||||
tripsReader.onload = function(e) {
|
||||
tripsData = parseCSV(e.target.result);
|
||||
};
|
||||
tripsReader.readAsText(tripsFile);
|
||||
|
||||
// Read and parse the stops file
|
||||
const stopsReader = new FileReader();
|
||||
stopsReader.onload = function(e) {
|
||||
stopsData = parseCSV(e.target.result);
|
||||
//console.log(stopsData[0]);
|
||||
for (let stop of stopsData) {
|
||||
let id = stop["stop_id"];
|
||||
// console.log("id: ", id);
|
||||
let stop_lat = stop["stop_lat"];
|
||||
let stop_lon = stop["stop_lon"];
|
||||
if (stop_lat == undefined || stop_lon == undefined) {
|
||||
continue;
|
||||
}
|
||||
else {
|
||||
//console.log("stop_lat: ", stop_lat);
|
||||
//console.log("stop_lon: ", stop_lon);
|
||||
shapes["stops"][id] = {
|
||||
lat: stop_lat,
|
||||
lon: stop_lon
|
||||
};
|
||||
//console.log(shapes["stops"][stop["stop_id"]]);
|
||||
addClickablePoint([stop_lat, stop_lon], id);
|
||||
}
|
||||
}
|
||||
};
|
||||
stopsReader.readAsText(stopsFile);
|
||||
|
||||
// Read and parse the stop_times file
|
||||
const stopTimesReader = new FileReader();
|
||||
stopTimesReader.onload = function(e) {
|
||||
stopTimesData = parseCSV(e.target.result);
|
||||
};
|
||||
stopTimesReader.readAsText(stopTimesFile);
|
||||
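// Note: FileReader.onload fires asynchronously, so stopsData has usually not
// been filled yet when the next line runs; this log will typically print 0.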
console.log(stopsData.length);
|
||||
} else {
|
||||
alert('Please select a valid GTFS folder containing routes.txt, trips.txt, stops.txt, and stop_times.txt files.');
|
||||
}
|
||||
|
||||
// Handle shapes file
|
||||
let shapesFile;
|
||||
for (let file of files) {
|
||||
if (file.name === 'shapes.txt') {
|
||||
shapesFile = file;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (shapesFile) {
|
||||
// Parse shapes.txt file and draw on the map
|
||||
parseShapesFile(shapesFile);
|
||||
} else {
|
||||
alert('shapes.txt file not found in the selected folder.');
|
||||
}
|
||||
}
|
||||
|
||||
function displayRouteIds(routes) {
|
||||
const currentRoutes = document.getElementById('currentRoutes');
|
||||
const routeList = document.createElement('div');
|
||||
routeList.className = 'list-group route-list';
|
||||
routes.forEach(route => {
|
||||
const routeItem = document.createElement('a');
|
||||
routeItem.className = 'list-group-item list-group-item-action';
|
||||
routeItem.textContent = route.route_id;
|
||||
//routeItem.onclick = () => showTrips(route.route_id);
|
||||
routeItem.href = '/?timetable=' + route.route_id;
|
||||
routeList.appendChild(routeItem);
|
||||
});
|
||||
currentRoutes.appendChild(routeList);
|
||||
}
|
||||
|
||||
function showTrips(chosenRouteId) {
|
||||
// Filter the routesData rows where route_id is equal to the targetRouteId
|
||||
const filteredTrips = tripsData.filter(route => route.route_id == chosenRouteId);
|
||||
console.log('chosenRouteId: ', chosenRouteId);
|
||||
console.log('filteredTrips[0]: ', filteredTrips[0]);
|
||||
// Map the filtered rows to their shape_id values
|
||||
const shapeIds = filteredTrips.map(trip => trip.shape_id);
|
||||
|
||||
console.log('shapeIds[0]: ', shapeIds[0]);
|
||||
|
||||
highlightShapes(shapeIds, "routes");
|
||||
|
||||
const tripsTable = document.getElementById('tripsTable');
|
||||
tripsTable.innerHTML = '';
|
||||
|
||||
if (filteredTrips.length > 0) {
|
||||
const tripIds = filteredTrips.map(trip => trip.trip_id);
|
||||
console.log('tripIds: ', tripIds);
|
||||
const headerRow = document.createElement('tr');
|
||||
tripIds.forEach(tripId => {
|
||||
const th = document.createElement('th');
|
||||
th.textContent = tripId;
|
||||
headerRow.appendChild(th);
|
||||
});
|
||||
tripsTable.appendChild(headerRow);
|
||||
|
||||
// Now filter data for table rows
|
||||
const filteredStopTimes = stopTimesData.filter(stopTime => tripIds.includes(stopTime.trip_id));
|
||||
const filteredDepartureTimes = filteredStopTimes.map(stopTime => stopTime.departure_time)
|
||||
const uniqueStopIds = [...new Set(filteredStopTimes.map(stopTime => stopTime.stop_id))];
|
||||
|
||||
// Build rowValues: for each unique stop_id, the stop name followed by its departure times in trip order
|
||||
const rowValues = uniqueStopIds.map(stopId => {
|
||||
const stop = stopsData.find(stop => stop.stop_id === stopId);
|
||||
const stop_name = stop ? stop.stop_name : 'Unknown Stop';
|
||||
const currentStopTimes = filteredStopTimes.filter(stopTime => stopTime.stop_id == stopId);
|
||||
const sortedStopTimes = currentStopTimes.sort((a, b) => {
|
||||
return tripIds.indexOf(a.trip_id) - tripIds.indexOf(b.trip_id);
|
||||
}).map(stop => stop.departure_time);
|
||||
return [stop_name].concat(sortedStopTimes);
|
||||
});
|
||||
|
||||
console.log(rowValues);
|
||||
|
||||
// Create one row per stop, with the stop name as the row header
|
||||
rowValues.forEach(rowValue => {
|
||||
const row = document.createElement('tr');
|
||||
const rowHeader = document.createElement('th');
|
||||
rowHeader.textContent = rowValue[0];
|
||||
row.appendChild(rowHeader);
|
||||
|
||||
// Add one cell per trip with that stop's departure time
|
||||
rowValue.slice(1).forEach(departureTime => {
|
||||
const cell = document.createElement('td');
cell.textContent = departureTime;
|
||||
row.appendChild(cell);
|
||||
});
|
||||
|
||||
tripsTable.appendChild(row);
|
||||
});
|
||||
} else {
|
||||
const noDataRow = document.createElement('tr');
|
||||
const noDataCell = document.createElement('td');
|
||||
noDataCell.colSpan = 1;
|
||||
noDataCell.textContent = 'No trips found for this route.';
|
||||
noDataRow.appendChild(noDataCell);
|
||||
tripsTable.appendChild(noDataRow);
|
||||
}
|
||||
|
||||
$('#tripsModal').modal('show');
|
||||
}
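// Sketch of the table this builds (illustrative): the trip_ids form the header
// row, and each subsequent row starts with the stop name in a <th> followed by
// one <td> per trip holding that stop's departure time.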
|
||||
|
||||
function addNewShape() {
|
||||
console.log("Function addNewShape() not defined.");
|
||||
}
|
||||
|
||||
function drawNewShape() {
|
||||
shapeStops = [];
|
||||
cancelShapeEdit();
|
||||
|
||||
// Create a container div for the buttons
|
||||
const buttonContainer = document.createElement('div');
|
||||
buttonContainer.className = 'button-container mt-3 p-3 border rounded';
|
||||
|
||||
// Create Toggle OSM router machine button
|
||||
const editButton = document.createElement('button');
|
||||
editButton.className = 'btn btn-primary btn-block my-2';
|
||||
editButton.textContent = 'Toggle OSM router machine';
|
||||
editButton.onclick = () => {
|
||||
// Open route editing tool
|
||||
shapeToOSRM(shapes["routes"][currentShapeIDs["routes"][0]].polyline);
|
||||
};
|
||||
buttonContainer.appendChild(editButton);
|
||||
|
||||
// Create Save button
|
||||
const saveButton = document.createElement('button');
|
||||
saveButton.className = 'btn btn-secondary btn-block my-2';
|
||||
saveButton.textContent = 'Save Shape';
|
||||
saveButton.setAttribute('ready-to-save', 'false');
|
||||
saveButton.onclick = () => {
|
||||
if (saveButton.getAttribute('ready-to-save') === 'true') {
|
||||
// Append coordinates to stops list
|
||||
var latlngs = currentLayer.getLatLngs();
|
||||
latlngs.forEach(function(latlng) {
|
||||
//stops.push([latlng.lat, latlng.lng]);
|
||||
});
|
||||
|
||||
// Transform the polyline into the specified format in the shapes dictionary
|
||||
shape_id = "n_" + numNewShapes;
|
||||
numNewShapes += 1;
|
||||
shapes["routes"][shape_id] = [];
|
||||
latlngs.forEach(function(latlng, index) {
|
||||
shapes["routes"][shape_id].push({
|
||||
sequence: index + 1,
|
||||
lat: latlng.lat,
|
||||
lon: latlng.lng
|
||||
});
|
||||
});
|
||||
|
||||
// Delete the layer with the drawn polyline
|
||||
map.removeLayer(currentLayer);
|
||||
|
||||
// Create polyline and add to map
|
||||
addClickableShape (latlngs, shape_id);
|
||||
|
||||
// Log the results
|
||||
console.log('Stops:', shapeStops);
|
||||
console.log('Shapes:', shapes["routes"]);
|
||||
|
||||
// Change button esthetics to default
|
||||
saveButton.style.backgroundColor = '';
|
||||
saveButton.style.color = '';
|
||||
|
||||
cancelShapeEdit();
|
||||
}
|
||||
};
|
||||
buttonContainer.appendChild(saveButton);
|
||||
|
||||
// Create Cancel button
|
||||
const cancelButton = document.createElement('button');
|
||||
cancelButton.className = 'btn btn-secondary btn-block my-2';
|
||||
cancelButton.textContent = 'Cancel';
|
||||
cancelButton.onclick = () => {
|
||||
cancelShapeEdit();
|
||||
};
|
||||
buttonContainer.appendChild(cancelButton);
|
||||
|
||||
// Append the button container to the chosenShape element
|
||||
chosenShape.appendChild(buttonContainer);
|
||||
|
||||
polylineDrawer.enable();
|
||||
|
||||
// Add created polylines to the map
|
||||
map.on(L.Draw.Event.CREATED, function (event) {
|
||||
var layer = event.layer;
|
||||
currentLayer = layer;
|
||||
drawnItems.addLayer(layer);
|
||||
});
|
||||
|
||||
// Add created shapes to the map and handle the polyline
|
||||
map.on(L.Draw.Event.CREATED, function (event) {
|
||||
var layer = event.layer;
|
||||
|
||||
// Check if the drawn shape is a polyline
|
||||
if (layer instanceof L.Polyline && !(layer instanceof L.Polygon)) {
|
||||
// Disable the drawing control
|
||||
map.removeControl(drawControl);
|
||||
|
||||
saveButton.style.backgroundColor = 'green';
|
||||
saveButton.style.color = 'white'; // Ensure the text is readable
|
||||
saveButton.setAttribute('ready-to-save', 'true');
|
||||
} else {
|
||||
// Add the drawn layer to the map if it's not a polyline
|
||||
drawnItems.addLayer(layer);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
function displayShapeOptions(shapeId, shapes) {
|
||||
const chosenShape = document.getElementById('chosenShape');
|
||||
|
||||
// Clear any existing content inside chosenShape
|
||||
while (chosenShape.firstChild) {
|
||||
chosenShape.removeChild(chosenShape.firstChild);
|
||||
}
|
||||
|
||||
// Add text content at the top
|
||||
const textContent = document.createElement('div');
|
||||
textContent.textContent = "Shape " + shapeId;
|
||||
chosenShape.appendChild(textContent);
|
||||
|
||||
// Create a container div for the buttons
|
||||
const buttonContainer = document.createElement('div');
|
||||
buttonContainer.className = 'button-container mt-3 p-3 border rounded';
|
||||
|
||||
// Create Edit button
|
||||
const editButton = document.createElement('button');
|
||||
editButton.className = 'btn btn-primary btn-block my-2';
|
||||
editButton.textContent = 'Edit Shape';
|
||||
editButton.onclick = () => {
|
||||
// Open route editing tool
|
||||
shapeToOSRM(shapes["routes"][shapeId].polyline);
|
||||
};
|
||||
buttonContainer.appendChild(editButton);
|
||||
|
||||
// Create Delete button
|
||||
const deleteButton = document.createElement('button');
|
||||
deleteButton.className = 'btn btn-danger btn-block my-2';
|
||||
deleteButton.textContent = 'Delete Shape';
|
||||
deleteButton.onclick = () => {
|
||||
|
||||
// Cancel the edit view for the current shape
|
||||
cancelShapeEdit();
|
||||
// Delete the current shape and its representation from the map
|
||||
deleteShape(shapeId);
|
||||
};
|
||||
buttonContainer.appendChild(deleteButton);
|
||||
|
||||
// Create Cancel button
|
||||
const cancelButton = document.createElement('button');
|
||||
cancelButton.className = 'btn btn-secondary btn-block my-2';
|
||||
cancelButton.textContent = 'Cancel';
|
||||
cancelButton.onclick = () => {
|
||||
cancelShapeEdit();
|
||||
};
|
||||
buttonContainer.appendChild(cancelButton);
|
||||
|
||||
// Append the button container to the chosenShape element
|
||||
chosenShape.appendChild(buttonContainer);
|
||||
}
|
||||
|
||||
function cancelShapeEdit () {
|
||||
// Define reference to the chosenShape field
|
||||
const chosenShape = document.getElementById('chosenShape');
|
||||
|
||||
// Remove any existing routing control (if needed)
|
||||
if (window.routingControl) {
|
||||
map.removeControl(window.routingControl);
|
||||
}
|
||||
|
||||
// Disable the current drawing
|
||||
polylineDrawer.disable();
|
||||
|
||||
// Clear all drawn items
|
||||
drawnItems.clearLayers();
|
||||
|
||||
// Clear any existing content inside chosenShape
|
||||
while (chosenShape.firstChild) {
|
||||
chosenShape.removeChild(chosenShape.firstChild);
|
||||
}
|
||||
|
||||
// Add text content at the top
|
||||
const textContent = document.createElement('div');
|
||||
textContent.textContent = "Chosen Shape";
|
||||
chosenShape.appendChild(textContent);
|
||||
|
||||
makeShapesBlue();
|
||||
}
|
||||