diff --git a/.gitignore b/.gitignore
index 0f7480f..2af2477 100644
--- a/.gitignore
+++ b/.gitignore
@@ -41,3 +41,4 @@
 docs/_build
 
 .venv
+java/target
diff --git a/Makefile b/Makefile
index 6b48579..4606436 100644
--- a/Makefile
+++ b/Makefile
@@ -55,7 +55,7 @@ flake8: ## check style using flake8 for current Python (faster than lint)
 	$(IN_VENV) flake8 --max-complexity 11 $(SOURCE_DIR) $(TEST_DIR)
 
 lint: ## check style using tox and flake8 for Python 2 and Python 3
-	$(IN_VENV) tox -e py27-lint && tox -e py34-lint
+	$(IN_VENV) tox -e py27-lint && tox -e py35-lint
 
 lint-readme: ## check README formatting for PyPI
 	$(IN_VENV) python setup.py check -r -s
diff --git a/build_schema.sh b/build_schema.sh
new file mode 100755
index 0000000..7f9fe50
--- /dev/null
+++ b/build_schema.sh
@@ -0,0 +1,31 @@
+#!/bin/bash
+
+set -x
+set -e
+
+PROJECT_DIRECTORY="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
+
+
+# Requires schema-salad-doc that recognizes --brandstyle and --brandinverse
+for schema in "v19.09";
+do
+    cd schema/"$schema";
+    python_schema_name=${schema//./_}
+    schema-salad-tool --codegen python workflow.yml > "${PROJECT_DIRECTORY}/gxformat2/schema/${python_schema_name}.py"
+
+    out="../${schema}.html"
+    schema-salad-doc \
+        --brandstyle '' \
+        --brandinverse \
+        --brand '' \
+        --only "https://galaxyproject.org/gxformat2/${schema}#WorkflowDoc" \
+        --only "https://galaxyproject.org/gxformat2/${schema}#GalaxyWorkflow" \
+        workflow.yml > "$out"
+
+    java_package="${PROJECT_DIRECTORY}/java"
+    schema-salad-tool --codegen java --codegen-target "$java_package" workflow.yml
+    cd "$java_package"
+    mvn test
+    mvn javadoc:javadoc
+    cd "${PROJECT_DIRECTORY}"
+done
diff --git a/dev-requirements.txt b/dev-requirements.txt
index 6fa2c45..f4f3b7f 100644
--- a/dev-requirements.txt
+++ b/dev-requirements.txt
@@ -1,3 +1,6 @@
+# Optional dependencies
+schema-salad
+
 # For testing
 tox
 nose
diff --git a/gxformat2/lint.py b/gxformat2/lint.py
index 8aea660..a279ea6 100644
--- a/gxformat2/lint.py
+++ b/gxformat2/lint.py
@@ -1,3 +1,4 @@
+import os
 import sys
 
 from gxformat2._yaml import ordered_load
@@ -8,36 +9,60 @@
 EXIT_CODE_FILE_PARSE_FAILED = 3
 
 
-def lint_ga(workflow_dict):
+def lint_ga(workflow_dict, path=None):
     if workflow_dict.get("format-version") != "0.1":
         return EXIT_CODE_FORMAT_ERROR
     if workflow_dict.get("a_galaxy_workflow") != "true":
         return EXIT_CODE_FORMAT_ERROR
     native_steps = workflow_dict.get("steps")
+    if not native_steps or not isinstance(native_steps, dict):
+        return EXIT_CODE_FORMAT_ERROR
+
     found_outputs = False
     found_output_without_label = False
-    for step in native_steps.values():
+    for order_index_str, step in native_steps.items():
+        if not order_index_str.isdigit():
+            return EXIT_CODE_FORMAT_ERROR
+
         for workflow_output in step.get("workflow_outputs", []):
             found_outputs = True
             if not workflow_output.get("label"):
                 found_output_without_label = True
+        step_type = step.get("type")
+        if step_type == "subworkflow":
+            subworkflow = step.get("subworkflow")
+            if subworkflow and not isinstance(subworkflow, dict):
+                return EXIT_CODE_FORMAT_ERROR
+            lint_subworkflow_ret = lint_ga(subworkflow)
+            if lint_subworkflow_ret != 0:
+                return lint_subworkflow_ret
+
     if not found_outputs:
         return EXIT_CODE_LINT_FAILED
-
+
     if found_output_without_label:
         return EXIT_CODE_LINT_FAILED
     return EXIT_CODE_SUCCESS
 
 
-def lint_format2(workflow_dict):
+def lint_format2(workflow_dict, path=None):
+    from gxformat2.schema.v19_09 import load_document
+    from schema_salad.exceptions import SchemaSaladException
+    try:
+        load_document("file://" + os.path.normpath(path))
+    except SchemaSaladException as e:
+        print(e)
+        return EXIT_CODE_FORMAT_ERROR
+
     # Lint for outputs...
     if not workflow_dict.get("outputs", None):
         return EXIT_CODE_LINT_FAILED
+
     return EXIT_CODE_SUCCESS
@@ -49,10 +74,8 @@ def main(argv):
     except Exception:
         return EXIT_CODE_FILE_PARSE_FAILED
     workflow_class = workflow_dict.get("class")
-    if workflow_class == "GalaxyWorkflow":
-        exit_code = lint_format2(workflow_dict)
-    else:
-        exit_code = lint_ga(workflow_dict)
+    lint_func = lint_format2 if workflow_class == "GalaxyWorkflow" else lint_ga
+    exit_code = lint_func(workflow_dict, path=path)
     return exit_code
 
 
diff --git a/gxformat2/schema/__init__.py b/gxformat2/schema/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/gxformat2/schema/v19_09.py b/gxformat2/schema/v19_09.py
new file mode 100644
index 0000000..e2912b2
--- /dev/null
+++ b/gxformat2/schema/v19_09.py
@@ -0,0 +1,2714 @@
+#
+# This file was autogenerated using schema-salad-tool --codegen=python
+# The code itself is released under the Apache 2.0 license and the help text is
+# subject to the license of the original schema.
+#
+import copy
+import os
+import re
+import uuid  # pylint: disable=unused-import # noqa: F401
+from typing import (
+    Any,
+    Dict,
+    List,
+    MutableMapping,
+    MutableSequence,
+    Optional,
+    Sequence,
+    Tuple,
+    Type,
+    Union,
+)
+
+from six import iteritems, string_types, text_type
+from six.moves import StringIO, urllib
+from typing_extensions import Text  # pylint: disable=unused-import
+
+from ruamel import yaml
+from ruamel.yaml.comments import CommentedMap
+from schema_salad.ref_resolver import Fetcher
+from schema_salad.sourceline import SourceLine, add_lc_filename
+from schema_salad.exceptions import SchemaSaladException, ValidationException
+
+# move to a regular typing import when Python 3.3-3.6 is no longer supported
+
+_vocab = {}  # type: Dict[Text, Text]
+_rvocab = {}  # type: Dict[Text, Text]
+
+
+class Savable(object):
+    @classmethod
+    def fromDoc(cls, _doc, baseuri, loadingOptions, docRoot=None):
+        # type: (Any, Text, LoadingOptions, Optional[Text]) -> Savable
+        pass
+
+    def save(self, top=False, base_url="", relative_uris=True):
+        # type: (bool, Text, bool) -> Dict[Text, Text]
+        pass
+
+
+class LoadingOptions(object):
+    def __init__(
+        self,
+        fetcher=None,  # type: Optional[Fetcher]
+        namespaces=None,  # type: Optional[Dict[Text, Text]]
+        fileuri=None,  # type: Optional[Text]
+        copyfrom=None,  # type: Optional[LoadingOptions]
+        original_doc=None,  # type: Optional[Any]
+    ):  # type: (...)
-> None + self.idx = {} # type: Dict[Text, Dict[Text, Any]] + self.fileuri = fileuri # type: Optional[Text] + self.namespaces = namespaces + self.original_doc = original_doc + if copyfrom is not None: + self.idx = copyfrom.idx + if fetcher is None: + fetcher = copyfrom.fetcher + if fileuri is None: + self.fileuri = copyfrom.fileuri + if namespaces is None: + self.namespaces = copyfrom.namespaces + + if fetcher is None: + import requests + from cachecontrol.wrapper import CacheControl + from cachecontrol.caches import FileCache + from schema_salad.ref_resolver import DefaultFetcher + + if "HOME" in os.environ: + session = CacheControl( + requests.Session(), + cache=FileCache( + os.path.join(os.environ["HOME"], ".cache", "salad") + ), + ) + elif "TMPDIR" in os.environ: + session = CacheControl( + requests.Session(), + cache=FileCache( + os.path.join(os.environ["TMPDIR"], ".cache", "salad") + ), + ) + else: + session = CacheControl( + requests.Session(), cache=FileCache("/tmp", ".cache", "salad") + ) + self.fetcher = DefaultFetcher({}, session) # type: Fetcher + else: + self.fetcher = fetcher + + self.vocab = _vocab + self.rvocab = _rvocab + + if namespaces is not None: + self.vocab = self.vocab.copy() + self.rvocab = self.rvocab.copy() + for k, v in iteritems(namespaces): + self.vocab[k] = v + self.rvocab[v] = k + + +def load_field(val, fieldtype, baseuri, loadingOptions): + # type: (Union[Text, Dict[Text, Text]], _Loader, Text, LoadingOptions) -> Any + if isinstance(val, MutableMapping): + if "$import" in val: + if loadingOptions.fileuri is None: + raise SchemaSaladException("Cannot load $import without fileuri") + return _document_load_by_url( + fieldtype, + loadingOptions.fetcher.urljoin(loadingOptions.fileuri, val["$import"]), + loadingOptions, + ) + elif "$include" in val: + if loadingOptions.fileuri is None: + raise SchemaSaladException("Cannot load $import without fileuri") + val = loadingOptions.fetcher.fetch_text( + loadingOptions.fetcher.urljoin(loadingOptions.fileuri, val["$include"]) + ) + return fieldtype.load(val, baseuri, loadingOptions) + + +save_type = Union[ + Dict[Text, Text], List[Union[Dict[Text, Text], List[Any], None]], None +] + + +def save( + val, # type: Optional[Union[Savable, MutableSequence[Savable]]] + top=True, # type: bool + base_url="", # type: Text + relative_uris=True, # type: bool +): # type: (...) -> save_type + + if isinstance(val, Savable): + return val.save(top=top, base_url=base_url, relative_uris=relative_uris) + if isinstance(val, MutableSequence): + return [ + save(v, top=False, base_url=base_url, relative_uris=relative_uris) + for v in val + ] + if isinstance(val, MutableMapping): + newdict = {} + for key in val: + newdict[key] = save( + val[key], top=False, base_url=base_url, relative_uris=relative_uris + ) + return newdict + return val + + +def expand_url( + url, # type: Union[str, Text] + base_url, # type: Union[str, Text] + loadingOptions, # type: LoadingOptions + scoped_id=False, # type: bool + vocab_term=False, # type: bool + scoped_ref=None, # type: Optional[int] +): + # type: (...) 
-> Text + url = Text(url) + + if url in (u"@id", u"@type"): + return url + + if vocab_term and url in loadingOptions.vocab: + return url + + if bool(loadingOptions.vocab) and u":" in url: + prefix = url.split(u":")[0] + if prefix in loadingOptions.vocab: + url = loadingOptions.vocab[prefix] + url[len(prefix) + 1 :] + + split = urllib.parse.urlsplit(url) + + if ( + (bool(split.scheme) and split.scheme in [u"http", u"https", u"file"]) + or url.startswith(u"$(") + or url.startswith(u"${") + ): + pass + elif scoped_id and not bool(split.fragment): + splitbase = urllib.parse.urlsplit(base_url) + frg = u"" + if bool(splitbase.fragment): + frg = splitbase.fragment + u"/" + split.path + else: + frg = split.path + pt = splitbase.path if splitbase.path != "" else "/" + url = urllib.parse.urlunsplit( + (splitbase.scheme, splitbase.netloc, pt, splitbase.query, frg) + ) + elif scoped_ref is not None and not bool(split.fragment): + splitbase = urllib.parse.urlsplit(base_url) + sp = splitbase.fragment.split(u"/") + n = scoped_ref + while n > 0 and len(sp) > 0: + sp.pop() + n -= 1 + sp.append(url) + url = urllib.parse.urlunsplit( + ( + splitbase.scheme, + splitbase.netloc, + splitbase.path, + splitbase.query, + u"/".join(sp), + ) + ) + else: + url = loadingOptions.fetcher.urljoin(base_url, url) + + if vocab_term: + split = urllib.parse.urlsplit(url) + if bool(split.scheme): + if url in loadingOptions.rvocab: + return loadingOptions.rvocab[url] + else: + raise ValidationException("Term '{}' not in vocabulary".format(url)) + + return url + + +class _Loader(object): + def load(self, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, Text, LoadingOptions, Optional[Text]) -> Any + pass + + +class _AnyLoader(_Loader): + def load(self, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, Text, LoadingOptions, Optional[Text]) -> Any + if doc is not None: + return doc + raise ValidationException("Expected non-null") + + +class _PrimitiveLoader(_Loader): + def __init__(self, tp): + # type: (Union[type, Tuple[Type[Text], Type[Text]]]) -> None + self.tp = tp + + def load(self, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, Text, LoadingOptions, Optional[Text]) -> Any + if not isinstance(doc, self.tp): + raise ValidationException( + "Expected a {} but got {}".format( + self.tp.__class__.__name__, doc.__class__.__name__ + ) + ) + return doc + + def __repr__(self): # type: () -> str + return str(self.tp) + + +class _ArrayLoader(_Loader): + def __init__(self, items): + # type: (_Loader) -> None + self.items = items + + def load(self, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, Text, LoadingOptions, Optional[Text]) -> Any + if not isinstance(doc, MutableSequence): + raise ValidationException("Expected a list") + r = [] # type: List[Any] + errors = [] # type: List[SchemaSaladException] + for i in range(0, len(doc)): + try: + lf = load_field( + doc[i], _UnionLoader((self, self.items)), baseuri, loadingOptions + ) + if isinstance(lf, MutableSequence): + r.extend(lf) + else: + r.append(lf) + except ValidationException as e: + errors.append(e.with_sourceline(SourceLine(doc, i, str))) + if errors: + raise ValidationException("", None, errors) + return r + + def __repr__(self): # type: () -> str + return "array<{}>".format(self.items) + + +class _EnumLoader(_Loader): + def __init__(self, symbols): + # type: (Sequence[Text]) -> None + self.symbols = symbols + + def load(self, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, Text, LoadingOptions, Optional[Text]) -> Any + 
if doc in self.symbols: + return doc + else: + raise ValidationException("Expected one of {}".format(self.symbols)) + + +class _RecordLoader(_Loader): + def __init__(self, classtype): + # type: (Type[Savable]) -> None + self.classtype = classtype + + def load(self, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, Text, LoadingOptions, Optional[Text]) -> Any + if not isinstance(doc, MutableMapping): + raise ValidationException("Expected a dict") + return self.classtype.fromDoc(doc, baseuri, loadingOptions, docRoot=docRoot) + + def __repr__(self): # type: () -> str + return str(self.classtype) + + +class _UnionLoader(_Loader): + def __init__(self, alternates): + # type: (Sequence[_Loader]) -> None + self.alternates = alternates + + def load(self, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, Text, LoadingOptions, Optional[Text]) -> Any + errors = [] + for t in self.alternates: + try: + return t.load(doc, baseuri, loadingOptions, docRoot=docRoot) + except ValidationException as e: + errors.append( + ValidationException( + u"tried {} but".format(t.__class__.__name__), None, [e] + ) + ) + raise ValidationException("", None, errors, u"-") + + def __repr__(self): # type: () -> str + return " | ".join(str(a) for a in self.alternates) + + +class _URILoader(_Loader): + def __init__(self, inner, scoped_id, vocab_term, scoped_ref): + # type: (_Loader, bool, bool, Union[int, None]) -> None + self.inner = inner + self.scoped_id = scoped_id + self.vocab_term = vocab_term + self.scoped_ref = scoped_ref + + def load(self, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, Text, LoadingOptions, Optional[Text]) -> Any + if isinstance(doc, MutableSequence): + doc = [ + expand_url( + i, + baseuri, + loadingOptions, + self.scoped_id, + self.vocab_term, + self.scoped_ref, + ) + for i in doc + ] + if isinstance(doc, string_types): + doc = expand_url( + doc, + baseuri, + loadingOptions, + self.scoped_id, + self.vocab_term, + self.scoped_ref, + ) + return self.inner.load(doc, baseuri, loadingOptions) + + +class _TypeDSLLoader(_Loader): + typeDSLregex = re.compile(r"^([^[?]+)(\[\])?(\?)?$") + + def __init__(self, inner, refScope): + # type: (_Loader, Union[int, None]) -> None + self.inner = inner + self.refScope = refScope + + def resolve( + self, + doc, # type: Text + baseuri, # type: Text + loadingOptions, # type: LoadingOptions + ): + # type: (...) 
-> Union[List[Union[Dict[Text, Text], Text]], Dict[Text, Text], Text] + m = self.typeDSLregex.match(doc) + if m: + first = expand_url( + m.group(1), baseuri, loadingOptions, False, True, self.refScope + ) + second = third = None + if bool(m.group(2)): + second = {u"type": u"array", u"items": first} + # second = CommentedMap((("type", "array"), + # ("items", first))) + # second.lc.add_kv_line_col("type", lc) + # second.lc.add_kv_line_col("items", lc) + # second.lc.filename = filename + if bool(m.group(3)): + third = [u"null", second or first] + # third = CommentedSeq([u"null", second or first]) + # third.lc.add_kv_line_col(0, lc) + # third.lc.add_kv_line_col(1, lc) + # third.lc.filename = filename + return third or second or first + return doc + + def load(self, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, Text, LoadingOptions, Optional[Text]) -> Any + if isinstance(doc, MutableSequence): + r = [] # type: List[Any] + for d in doc: + if isinstance(d, string_types): + resolved = self.resolve(d, baseuri, loadingOptions) + if isinstance(resolved, MutableSequence): + for i in resolved: + if i not in r: + r.append(i) + else: + if resolved not in r: + r.append(resolved) + else: + r.append(d) + doc = r + elif isinstance(doc, string_types): + doc = self.resolve(doc, baseuri, loadingOptions) + + return self.inner.load(doc, baseuri, loadingOptions) + + +class _IdMapLoader(_Loader): + def __init__(self, inner, mapSubject, mapPredicate): + # type: (_Loader, Text, Union[Text, None]) -> None + self.inner = inner + self.mapSubject = mapSubject + self.mapPredicate = mapPredicate + + def load(self, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, Text, LoadingOptions, Optional[Text]) -> Any + if isinstance(doc, MutableMapping): + r = [] # type: List[Any] + for k in sorted(doc.keys()): + val = doc[k] + if isinstance(val, CommentedMap): + v = copy.copy(val) + v.lc.data = val.lc.data + v.lc.filename = val.lc.filename + v[self.mapSubject] = k + r.append(v) + elif isinstance(val, MutableMapping): + v2 = copy.copy(val) + v2[self.mapSubject] = k + r.append(v2) + else: + if self.mapPredicate: + v3 = {self.mapPredicate: val} + v3[self.mapSubject] = k + r.append(v3) + else: + raise ValidationException("No mapPredicate") + doc = r + return self.inner.load(doc, baseuri, loadingOptions) + + +def _document_load(loader, doc, baseuri, loadingOptions): + # type: (_Loader, Any, Text, LoadingOptions) -> Any + if isinstance(doc, string_types): + return _document_load_by_url( + loader, loadingOptions.fetcher.urljoin(baseuri, doc), loadingOptions + ) + + if isinstance(doc, MutableMapping): + if "$namespaces" in doc: + loadingOptions = LoadingOptions( + copyfrom=loadingOptions, namespaces=doc["$namespaces"] + ) + doc = {k: v for k, v in doc.items() if k != "$namespaces"} + + if "$base" in doc: + baseuri = doc["$base"] + + if "$graph" in doc: + return loader.load(doc["$graph"], baseuri, loadingOptions) + else: + return loader.load(doc, baseuri, loadingOptions, docRoot=baseuri) + + if isinstance(doc, MutableSequence): + return loader.load(doc, baseuri, loadingOptions) + + raise ValidationException("Oops, we shouldn't be here!") + + +def _document_load_by_url(loader, url, loadingOptions): + # type: (_Loader, Text, LoadingOptions) -> Any + if url in loadingOptions.idx: + return _document_load(loader, loadingOptions.idx[url], url, loadingOptions) + + text = loadingOptions.fetcher.fetch_text(url) + if isinstance(text, bytes): + textIO = StringIO(text.decode("utf-8")) + else: + textIO = StringIO(text) + 
textIO.name = str(url) + result = yaml.round_trip_load(textIO, preserve_quotes=True) + add_lc_filename(result, url) + + loadingOptions.idx[url] = result + + loadingOptions = LoadingOptions(copyfrom=loadingOptions, fileuri=url) + + return _document_load(loader, result, url, loadingOptions) + + +def file_uri(path, split_frag=False): # type: (str, bool) -> str + if path.startswith("file://"): + return path + if split_frag: + pathsp = path.split("#", 2) + frag = "#" + urllib.parse.quote(str(pathsp[1])) if len(pathsp) == 2 else "" + urlpath = urllib.request.pathname2url(str(pathsp[0])) + else: + urlpath = urllib.request.pathname2url(path) + frag = "" + if urlpath.startswith("//"): + return "file:{}{}".format(urlpath, frag) + else: + return "file://{}{}".format(urlpath, frag) + + +def prefix_url(url, namespaces): # type: (Text, Dict[Text, Text]) -> Text + for k, v in namespaces.items(): + if url.startswith(v): + return k + ":" + url[len(v) :] + return url + + +def save_relative_uri(uri, base_url, scoped_id, ref_scope, relative_uris): + # type: (Text, Text, bool, Optional[int], bool) -> Union[Text, List[Text]] + if not relative_uris or uri == base_url: + return uri + if isinstance(uri, MutableSequence): + return [ + save_relative_uri(u, base_url, scoped_id, ref_scope, relative_uris) + for u in uri + ] + elif isinstance(uri, text_type): + urisplit = urllib.parse.urlsplit(uri) + basesplit = urllib.parse.urlsplit(base_url) + if urisplit.scheme == basesplit.scheme and urisplit.netloc == basesplit.netloc: + if urisplit.path != basesplit.path: + p = os.path.relpath(urisplit.path, os.path.dirname(basesplit.path)) + if urisplit.fragment: + p = p + "#" + urisplit.fragment + return p + + basefrag = basesplit.fragment + "/" + if ref_scope: + sp = basefrag.split("/") + i = 0 + while i < ref_scope: + sp.pop() + i += 1 + basefrag = "/".join(sp) + + if urisplit.fragment.startswith(basefrag): + return urisplit.fragment[len(basefrag) :] + else: + return urisplit.fragment + return uri + else: + return save(uri, top=False, base_url=base_url) + + +class Documented(Savable): + pass + + +class RecordField(Documented): + """ +A field of a record. + """ + def __init__( + self, + doc, # type: Any + type, # type: Any + extension_fields=None, # type: Optional[Dict[Text, Any]] + loadingOptions=None # type: Optional[LoadingOptions] + ): # type: (...) 
-> None + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = yaml.comments.CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.doc = doc + self.type = type + + @classmethod + def fromDoc(cls, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, Text, LoadingOptions, Optional[Text]) -> RecordField + + _doc = copy.copy(doc) + if hasattr(doc, 'lc'): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + errors = [] + + if name is None: + if docRoot is not None: + name = docRoot + else: + raise ValidationException("Missing name") + baseuri = name + if 'doc' in _doc: + try: + doc = load_field(_doc.get( + 'doc'), union_of_None_type_or_strtype_or_array_of_strtype, baseuri, loadingOptions) + except ValidationException as e: + errors.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, 'doc', str), + [e] + ) + ) + else: + doc = None + try: + type = load_field(_doc.get( + 'type'), typedsl_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_2, baseuri, loadingOptions) + except ValidationException as e: + errors.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, 'type', str), + [e] + ) + ) + + extension_fields = yaml.comments.CommentedMap() + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url(k, + u"", + loadingOptions, + scoped_id=False, + vocab_term=False) + extension_fields[ex] = _doc[k] + else: + errors.append( + ValidationException( + "invalid field `%s`, expected one of: `doc`, `name`, `type`" % (k), + SourceLine(_doc, k, str) + ) + ) + break + + if errors: + raise ValidationException("Trying 'RecordField'", None, errors) + loadingOptions = copy.deepcopy(loadingOptions) + loadingOptions.original_doc = _doc + return cls(doc, type, extension_fields=extension_fields, loadingOptions=loadingOptions) + + def save(self, top=False, base_url="", relative_uris=True): + # type: (bool, Text, bool) -> Dict[Text, Any] + r = yaml.comments.CommentedMap() # type: Dict[Text, Any] + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + + if self.doc is not None: + r['doc'] = save( + self.doc, + top=False, + base_url=self.name, + relative_uris=relative_uris) + + if self.type is not None: + r['type'] = save( + self.type, + top=False, + base_url=self.name, + relative_uris=relative_uris) + + if top and self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + + return r + + attrs = frozenset([u'doc', u'name', u'type']) + + +class RecordSchema(Savable): + def __init__( + self, + fields, # type: Any + type, # type: Any + extension_fields=None, # type: Optional[Dict[Text, Any]] + loadingOptions=None # type: Optional[LoadingOptions] + ): # type: (...) 
-> None + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = yaml.comments.CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.fields = fields + self.type = type + + @classmethod + def fromDoc(cls, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, Text, LoadingOptions, Optional[Text]) -> RecordSchema + + _doc = copy.copy(doc) + if hasattr(doc, 'lc'): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + errors = [] + if 'fields' in _doc: + try: + fields = load_field(_doc.get( + 'fields'), idmap_fields_union_of_None_type_or_array_of_RecordFieldLoader, baseuri, loadingOptions) + except ValidationException as e: + errors.append( + ValidationException( + "the `fields` field is not valid because:", + SourceLine(_doc, 'fields', str), + [e] + ) + ) + else: + fields = None + try: + type = load_field(_doc.get( + 'type'), typedsl_enum_d9cba076fca539106791a4f46d198c7fcfbdb779Loader_2, baseuri, loadingOptions) + except ValidationException as e: + errors.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, 'type', str), + [e] + ) + ) + + extension_fields = yaml.comments.CommentedMap() + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url(k, + u"", + loadingOptions, + scoped_id=False, + vocab_term=False) + extension_fields[ex] = _doc[k] + else: + errors.append( + ValidationException( + "invalid field `%s`, expected one of: `fields`, `type`" % (k), + SourceLine(_doc, k, str) + ) + ) + break + + if errors: + raise ValidationException("Trying 'RecordSchema'", None, errors) + loadingOptions = copy.deepcopy(loadingOptions) + loadingOptions.original_doc = _doc + return cls(fields, type, extension_fields=extension_fields, loadingOptions=loadingOptions) + + def save(self, top=False, base_url="", relative_uris=True): + # type: (bool, Text, bool) -> Dict[Text, Any] + r = yaml.comments.CommentedMap() # type: Dict[Text, Any] + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + + if self.fields is not None: + r['fields'] = save( + self.fields, + top=False, + base_url=base_url, + relative_uris=relative_uris) + + if self.type is not None: + r['type'] = save( + self.type, + top=False, + base_url=base_url, + relative_uris=relative_uris) + + if top and self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + + return r + + attrs = frozenset([u'fields', u'type']) + + +class EnumSchema(Savable): + """ +Define an enumerated type. + + """ + def __init__( + self, + symbols, # type: Any + type, # type: Any + extension_fields=None, # type: Optional[Dict[Text, Any]] + loadingOptions=None # type: Optional[LoadingOptions] + ): # type: (...) 
-> None + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = yaml.comments.CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.symbols = symbols + self.type = type + + @classmethod + def fromDoc(cls, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, Text, LoadingOptions, Optional[Text]) -> EnumSchema + + _doc = copy.copy(doc) + if hasattr(doc, 'lc'): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + errors = [] + try: + symbols = load_field(_doc.get( + 'symbols'), uri_array_of_strtype_True_False_None, baseuri, loadingOptions) + except ValidationException as e: + errors.append( + ValidationException( + "the `symbols` field is not valid because:", + SourceLine(_doc, 'symbols', str), + [e] + ) + ) + try: + type = load_field(_doc.get( + 'type'), typedsl_enum_d961d79c225752b9fadb617367615ab176b47d77Loader_2, baseuri, loadingOptions) + except ValidationException as e: + errors.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, 'type', str), + [e] + ) + ) + + extension_fields = yaml.comments.CommentedMap() + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url(k, + u"", + loadingOptions, + scoped_id=False, + vocab_term=False) + extension_fields[ex] = _doc[k] + else: + errors.append( + ValidationException( + "invalid field `%s`, expected one of: `symbols`, `type`" % (k), + SourceLine(_doc, k, str) + ) + ) + break + + if errors: + raise ValidationException("Trying 'EnumSchema'", None, errors) + loadingOptions = copy.deepcopy(loadingOptions) + loadingOptions.original_doc = _doc + return cls(symbols, type, extension_fields=extension_fields, loadingOptions=loadingOptions) + + def save(self, top=False, base_url="", relative_uris=True): + # type: (bool, Text, bool) -> Dict[Text, Any] + r = yaml.comments.CommentedMap() # type: Dict[Text, Any] + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + + if self.symbols is not None: + u = save_relative_uri( + self.symbols, + base_url, + True, + None, + relative_uris) + if u: + r['symbols'] = u + + if self.type is not None: + r['type'] = save( + self.type, + top=False, + base_url=base_url, + relative_uris=relative_uris) + + if top and self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + + return r + + attrs = frozenset([u'symbols', u'type']) + + +class ArraySchema(Savable): + def __init__( + self, + items, # type: Any + type, # type: Any + extension_fields=None, # type: Optional[Dict[Text, Any]] + loadingOptions=None # type: Optional[LoadingOptions] + ): # type: (...) 
-> None + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = yaml.comments.CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.items = items + self.type = type + + @classmethod + def fromDoc(cls, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, Text, LoadingOptions, Optional[Text]) -> ArraySchema + + _doc = copy.copy(doc) + if hasattr(doc, 'lc'): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + errors = [] + try: + items = load_field(_doc.get( + 'items'), uri_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_False_True_2, baseuri, loadingOptions) + except ValidationException as e: + errors.append( + ValidationException( + "the `items` field is not valid because:", + SourceLine(_doc, 'items', str), + [e] + ) + ) + try: + type = load_field(_doc.get( + 'type'), typedsl_enum_d062602be0b4b8fd33e69e29a841317b6ab665bcLoader_2, baseuri, loadingOptions) + except ValidationException as e: + errors.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, 'type', str), + [e] + ) + ) + + extension_fields = yaml.comments.CommentedMap() + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url(k, + u"", + loadingOptions, + scoped_id=False, + vocab_term=False) + extension_fields[ex] = _doc[k] + else: + errors.append( + ValidationException( + "invalid field `%s`, expected one of: `items`, `type`" % (k), + SourceLine(_doc, k, str) + ) + ) + break + + if errors: + raise ValidationException("Trying 'ArraySchema'", None, errors) + loadingOptions = copy.deepcopy(loadingOptions) + loadingOptions.original_doc = _doc + return cls(items, type, extension_fields=extension_fields, loadingOptions=loadingOptions) + + def save(self, top=False, base_url="", relative_uris=True): + # type: (bool, Text, bool) -> Dict[Text, Any] + r = yaml.comments.CommentedMap() # type: Dict[Text, Any] + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + + if self.items is not None: + u = save_relative_uri( + self.items, + base_url, + False, + 2, + relative_uris) + if u: + r['items'] = u + + if self.type is not None: + r['type'] = save( + self.type, + top=False, + base_url=base_url, + relative_uris=relative_uris) + + if top and self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + + return r + + attrs = frozenset([u'items', u'type']) + + +class Labeled(Savable): + pass + + +class Identified(Savable): + pass + + +class Parameter(Documented, Identified): + """ +Define an input or output parameter to a process. + + """ + pass + + +class InputParameter(Parameter): + pass + + +class OutputParameter(Parameter): + pass + + +class Process(Identified, Labeled, Documented): + """ + +The base executable type in CWL is the `Process` object defined by the +document. Note that the `Process` object is abstract and cannot be +directly executed. + + """ + pass + + +class HasStepPosition(Savable): + pass + + +class StepPosition(Savable): + """ +This field specifies the location of the step's node when rendered in the workflow editor. 
+ """ + def __init__( + self, + top, # type: Any + left, # type: Any + extension_fields=None, # type: Optional[Dict[Text, Any]] + loadingOptions=None # type: Optional[LoadingOptions] + ): # type: (...) -> None + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = yaml.comments.CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.top = top + self.left = left + + @classmethod + def fromDoc(cls, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, Text, LoadingOptions, Optional[Text]) -> StepPosition + + _doc = copy.copy(doc) + if hasattr(doc, 'lc'): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + errors = [] + try: + top = load_field(_doc.get( + 'top'), floattype, baseuri, loadingOptions) + except ValidationException as e: + errors.append( + ValidationException( + "the `top` field is not valid because:", + SourceLine(_doc, 'top', str), + [e] + ) + ) + try: + left = load_field(_doc.get( + 'left'), floattype, baseuri, loadingOptions) + except ValidationException as e: + errors.append( + ValidationException( + "the `left` field is not valid because:", + SourceLine(_doc, 'left', str), + [e] + ) + ) + + extension_fields = yaml.comments.CommentedMap() + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url(k, + u"", + loadingOptions, + scoped_id=False, + vocab_term=False) + extension_fields[ex] = _doc[k] + else: + errors.append( + ValidationException( + "invalid field `%s`, expected one of: `top`, `left`" % (k), + SourceLine(_doc, k, str) + ) + ) + break + + if errors: + raise ValidationException("Trying 'StepPosition'", None, errors) + loadingOptions = copy.deepcopy(loadingOptions) + loadingOptions.original_doc = _doc + return cls(top, left, extension_fields=extension_fields, loadingOptions=loadingOptions) + + def save(self, top=False, base_url="", relative_uris=True): + # type: (bool, Text, bool) -> Dict[Text, Any] + r = yaml.comments.CommentedMap() # type: Dict[Text, Any] + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + + if self.top is not None: + r['top'] = save( + self.top, + top=False, + base_url=base_url, + relative_uris=relative_uris) + + if self.left is not None: + r['left'] = save( + self.left, + top=False, + base_url=base_url, + relative_uris=relative_uris) + + if top and self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + + return r + + attrs = frozenset([u'top', u'left']) + + +class ReferencesTool(Savable): + pass + + +class ToolShedRepository(Savable): + def __init__( + self, + changeset_revision, # type: Any + owner, # type: Any + tool_shed, # type: Any + extension_fields=None, # type: Optional[Dict[Text, Any]] + loadingOptions=None # type: Optional[LoadingOptions] + ): # type: (...) 
-> None + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = yaml.comments.CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.changeset_revision = changeset_revision + self.owner = owner + self.tool_shed = tool_shed + + @classmethod + def fromDoc(cls, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, Text, LoadingOptions, Optional[Text]) -> ToolShedRepository + + _doc = copy.copy(doc) + if hasattr(doc, 'lc'): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + errors = [] + + if name is None: + if docRoot is not None: + name = docRoot + else: + raise ValidationException("Missing name") + baseuri = name + try: + changeset_revision = load_field(_doc.get( + 'changeset_revision'), strtype, baseuri, loadingOptions) + except ValidationException as e: + errors.append( + ValidationException( + "the `changeset_revision` field is not valid because:", + SourceLine(_doc, 'changeset_revision', str), + [e] + ) + ) + try: + owner = load_field(_doc.get( + 'owner'), strtype, baseuri, loadingOptions) + except ValidationException as e: + errors.append( + ValidationException( + "the `owner` field is not valid because:", + SourceLine(_doc, 'owner', str), + [e] + ) + ) + try: + tool_shed = load_field(_doc.get( + 'tool_shed'), strtype, baseuri, loadingOptions) + except ValidationException as e: + errors.append( + ValidationException( + "the `tool_shed` field is not valid because:", + SourceLine(_doc, 'tool_shed', str), + [e] + ) + ) + + extension_fields = yaml.comments.CommentedMap() + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url(k, + u"", + loadingOptions, + scoped_id=False, + vocab_term=False) + extension_fields[ex] = _doc[k] + else: + errors.append( + ValidationException( + "invalid field `%s`, expected one of: `changeset_revision`, `name`, `owner`, `tool_shed`" % (k), + SourceLine(_doc, k, str) + ) + ) + break + + if errors: + raise ValidationException("Trying 'ToolShedRepository'", None, errors) + loadingOptions = copy.deepcopy(loadingOptions) + loadingOptions.original_doc = _doc + return cls(changeset_revision, owner, tool_shed, extension_fields=extension_fields, loadingOptions=loadingOptions) + + def save(self, top=False, base_url="", relative_uris=True): + # type: (bool, Text, bool) -> Dict[Text, Any] + r = yaml.comments.CommentedMap() # type: Dict[Text, Any] + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + + if self.changeset_revision is not None: + r['changeset_revision'] = save( + self.changeset_revision, + top=False, + base_url=self.name, + relative_uris=relative_uris) + + if self.owner is not None: + r['owner'] = save( + self.owner, + top=False, + base_url=self.name, + relative_uris=relative_uris) + + if self.tool_shed is not None: + r['tool_shed'] = save( + self.tool_shed, + top=False, + base_url=self.name, + relative_uris=relative_uris) + + if top and self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + + return r + + attrs = frozenset([u'changeset_revision', u'name', u'owner', u'tool_shed']) + + +class WorkflowInputParameter(InputParameter): + def __init__( + self, + doc, # type: Any + id, # type: Any + default, # type: Any + type, # type: Any + extension_fields=None, # type: Optional[Dict[Text, Any]] + loadingOptions=None # type: Optional[LoadingOptions] + ): # type: (...) 
-> None + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = yaml.comments.CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.doc = doc + self.id = id + self.default = default + self.type = type + + @classmethod + def fromDoc(cls, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, Text, LoadingOptions, Optional[Text]) -> WorkflowInputParameter + + _doc = copy.copy(doc) + if hasattr(doc, 'lc'): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + errors = [] + if 'id' in _doc: + try: + id = load_field(_doc.get( + 'id'), uri_union_of_None_type_or_strtype_True_False_None, baseuri, loadingOptions) + except ValidationException as e: + errors.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, 'id', str), + [e] + ) + ) + else: + id = None + + if id is None: + if docRoot is not None: + id = docRoot + else: + id = "_:" + str(uuid.uuid4()) + baseuri = id + if 'doc' in _doc: + try: + doc = load_field(_doc.get( + 'doc'), union_of_None_type_or_strtype_or_array_of_strtype, baseuri, loadingOptions) + except ValidationException as e: + errors.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, 'doc', str), + [e] + ) + ) + else: + doc = None + if 'default' in _doc: + try: + default = load_field(_doc.get( + 'default'), union_of_None_type_or_Any_type, baseuri, loadingOptions) + except ValidationException as e: + errors.append( + ValidationException( + "the `default` field is not valid because:", + SourceLine(_doc, 'default', str), + [e] + ) + ) + else: + default = None + if 'type' in _doc: + try: + type = load_field(_doc.get( + 'type'), typedsl_union_of_None_type_or_GalaxyTypeLoader_2, baseuri, loadingOptions) + except ValidationException as e: + errors.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, 'type', str), + [e] + ) + ) + else: + type = None + + extension_fields = yaml.comments.CommentedMap() + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url(k, + u"", + loadingOptions, + scoped_id=False, + vocab_term=False) + extension_fields[ex] = _doc[k] + else: + errors.append( + ValidationException( + "invalid field `%s`, expected one of: `doc`, `id`, `default`, `type`" % (k), + SourceLine(_doc, k, str) + ) + ) + break + + if errors: + raise ValidationException("Trying 'WorkflowInputParameter'", None, errors) + loadingOptions = copy.deepcopy(loadingOptions) + loadingOptions.original_doc = _doc + return cls(doc, id, default, type, extension_fields=extension_fields, loadingOptions=loadingOptions) + + def save(self, top=False, base_url="", relative_uris=True): + # type: (bool, Text, bool) -> Dict[Text, Any] + r = yaml.comments.CommentedMap() # type: Dict[Text, Any] + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + + if self.id is not None: + u = save_relative_uri( + self.id, + base_url, + True, + None, + relative_uris) + if u: + r['id'] = u + + if self.doc is not None: + r['doc'] = save( + self.doc, + top=False, + base_url=self.id, + relative_uris=relative_uris) + + if self.default is not None: + r['default'] = save( + self.default, + top=False, + base_url=self.id, + relative_uris=relative_uris) + + if self.type is not None: + r['type'] = save( + self.type, + top=False, + base_url=self.id, + relative_uris=relative_uris) + + if top and 
self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + + return r + + attrs = frozenset([u'doc', u'id', u'default', u'type']) + + +class WorkflowOutputParameter(OutputParameter): + """ +Describe an output parameter of a workflow. The parameter must be +connected to one parameter defined in the workflow that +will provide the value of the output parameter. It is legal to +connect a WorkflowInputParameter to a WorkflowOutputParameter. + + """ + def __init__( + self, + doc, # type: Any + id, # type: Any + outputSource, # type: Any + type, # type: Any + extension_fields=None, # type: Optional[Dict[Text, Any]] + loadingOptions=None # type: Optional[LoadingOptions] + ): # type: (...) -> None + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = yaml.comments.CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.doc = doc + self.id = id + self.outputSource = outputSource + self.type = type + + @classmethod + def fromDoc(cls, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, Text, LoadingOptions, Optional[Text]) -> WorkflowOutputParameter + + _doc = copy.copy(doc) + if hasattr(doc, 'lc'): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + errors = [] + if 'id' in _doc: + try: + id = load_field(_doc.get( + 'id'), uri_union_of_None_type_or_strtype_True_False_None, baseuri, loadingOptions) + except ValidationException as e: + errors.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, 'id', str), + [e] + ) + ) + else: + id = None + + if id is None: + if docRoot is not None: + id = docRoot + else: + id = "_:" + str(uuid.uuid4()) + baseuri = id + if 'doc' in _doc: + try: + doc = load_field(_doc.get( + 'doc'), union_of_None_type_or_strtype_or_array_of_strtype, baseuri, loadingOptions) + except ValidationException as e: + errors.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, 'doc', str), + [e] + ) + ) + else: + doc = None + if 'outputSource' in _doc: + try: + outputSource = load_field(_doc.get( + 'outputSource'), union_of_None_type_or_strtype, baseuri, loadingOptions) + except ValidationException as e: + errors.append( + ValidationException( + "the `outputSource` field is not valid because:", + SourceLine(_doc, 'outputSource', str), + [e] + ) + ) + else: + outputSource = None + if 'type' in _doc: + try: + type = load_field(_doc.get( + 'type'), typedsl_union_of_None_type_or_GalaxyTypeLoader_2, baseuri, loadingOptions) + except ValidationException as e: + errors.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, 'type', str), + [e] + ) + ) + else: + type = None + + extension_fields = yaml.comments.CommentedMap() + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url(k, + u"", + loadingOptions, + scoped_id=False, + vocab_term=False) + extension_fields[ex] = _doc[k] + else: + errors.append( + ValidationException( + "invalid field `%s`, expected one of: `doc`, `id`, `outputSource`, `type`" % (k), + SourceLine(_doc, k, str) + ) + ) + break + + if errors: + raise ValidationException("Trying 'WorkflowOutputParameter'", None, errors) + loadingOptions = copy.deepcopy(loadingOptions) + loadingOptions.original_doc = _doc + return cls(doc, id, outputSource, type, extension_fields=extension_fields, loadingOptions=loadingOptions) + + def save(self, top=False, base_url="", 
relative_uris=True): + # type: (bool, Text, bool) -> Dict[Text, Any] + r = yaml.comments.CommentedMap() # type: Dict[Text, Any] + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + + if self.id is not None: + u = save_relative_uri( + self.id, + base_url, + True, + None, + relative_uris) + if u: + r['id'] = u + + if self.doc is not None: + r['doc'] = save( + self.doc, + top=False, + base_url=self.id, + relative_uris=relative_uris) + + if self.outputSource is not None: + r['outputSource'] = save( + self.outputSource, + top=False, + base_url=self.id, + relative_uris=relative_uris) + + if self.type is not None: + r['type'] = save( + self.type, + top=False, + base_url=self.id, + relative_uris=relative_uris) + + if top and self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + + return r + + attrs = frozenset([u'doc', u'id', u'outputSource', u'type']) + + +class WorkflowStep(Identified, Labeled, Documented, HasStepPosition, ReferencesTool): + """ +Workflow step. + + """ + def __init__( + self, + id, # type: Any + label, # type: Any + doc, # type: Any + position, # type: Any + tool_id, # type: Any + tool_shed_repository, # type: Any + tool_version, # type: Any + in_, # type: Any + out, # type: Any + state, # type: Any + type, # type: Any + run, # type: Any + extension_fields=None, # type: Optional[Dict[Text, Any]] + loadingOptions=None # type: Optional[LoadingOptions] + ): # type: (...) -> None + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = yaml.comments.CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.id = id + self.label = label + self.doc = doc + self.position = position + self.tool_id = tool_id + self.tool_shed_repository = tool_shed_repository + self.tool_version = tool_version + self.in_ = in_ + self.out = out + self.state = state + self.type = type + self.run = run + + @classmethod + def fromDoc(cls, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, Text, LoadingOptions, Optional[Text]) -> WorkflowStep + + _doc = copy.copy(doc) + if hasattr(doc, 'lc'): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + errors = [] + if 'id' in _doc: + try: + id = load_field(_doc.get( + 'id'), uri_union_of_None_type_or_strtype_True_False_None, baseuri, loadingOptions) + except ValidationException as e: + errors.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, 'id', str), + [e] + ) + ) + else: + id = None + + if id is None: + if docRoot is not None: + id = docRoot + else: + id = "_:" + str(uuid.uuid4()) + baseuri = id + if 'label' in _doc: + try: + label = load_field(_doc.get( + 'label'), union_of_None_type_or_strtype, baseuri, loadingOptions) + except ValidationException as e: + errors.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, 'label', str), + [e] + ) + ) + else: + label = None + if 'doc' in _doc: + try: + doc = load_field(_doc.get( + 'doc'), union_of_None_type_or_strtype_or_array_of_strtype, baseuri, loadingOptions) + except ValidationException as e: + errors.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, 'doc', str), + [e] + ) + ) + else: + doc = None + if 'position' in _doc: + try: + position = load_field(_doc.get( + 'position'), union_of_None_type_or_StepPositionLoader, baseuri, loadingOptions) + except 
ValidationException as e: + errors.append( + ValidationException( + "the `position` field is not valid because:", + SourceLine(_doc, 'position', str), + [e] + ) + ) + else: + position = None + if 'tool_id' in _doc: + try: + tool_id = load_field(_doc.get( + 'tool_id'), union_of_None_type_or_strtype, baseuri, loadingOptions) + except ValidationException as e: + errors.append( + ValidationException( + "the `tool_id` field is not valid because:", + SourceLine(_doc, 'tool_id', str), + [e] + ) + ) + else: + tool_id = None + if 'tool_shed_repository' in _doc: + try: + tool_shed_repository = load_field(_doc.get( + 'tool_shed_repository'), union_of_None_type_or_ToolShedRepositoryLoader, baseuri, loadingOptions) + except ValidationException as e: + errors.append( + ValidationException( + "the `tool_shed_repository` field is not valid because:", + SourceLine(_doc, 'tool_shed_repository', str), + [e] + ) + ) + else: + tool_shed_repository = None + if 'tool_version' in _doc: + try: + tool_version = load_field(_doc.get( + 'tool_version'), union_of_None_type_or_strtype, baseuri, loadingOptions) + except ValidationException as e: + errors.append( + ValidationException( + "the `tool_version` field is not valid because:", + SourceLine(_doc, 'tool_version', str), + [e] + ) + ) + else: + tool_version = None + if 'in' in _doc: + try: + in_ = load_field(_doc.get( + 'in'), idmap_in__union_of_None_type_or_array_of_WorkflowStepInputLoader, baseuri, loadingOptions) + except ValidationException as e: + errors.append( + ValidationException( + "the `in` field is not valid because:", + SourceLine(_doc, 'in', str), + [e] + ) + ) + else: + in_ = None + if 'out' in _doc: + try: + out = load_field(_doc.get( + 'out'), uri_union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader_or_None_type_True_False_None, baseuri, loadingOptions) + except ValidationException as e: + errors.append( + ValidationException( + "the `out` field is not valid because:", + SourceLine(_doc, 'out', str), + [e] + ) + ) + else: + out = None + if 'state' in _doc: + try: + state = load_field(_doc.get( + 'state'), union_of_None_type_or_Any_type, baseuri, loadingOptions) + except ValidationException as e: + errors.append( + ValidationException( + "the `state` field is not valid because:", + SourceLine(_doc, 'state', str), + [e] + ) + ) + else: + state = None + if 'type' in _doc: + try: + type = load_field(_doc.get( + 'type'), typedsl_union_of_None_type_or_WorkflowStepTypeLoader_2, baseuri, loadingOptions) + except ValidationException as e: + errors.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, 'type', str), + [e] + ) + ) + else: + type = None + if 'run' in _doc: + try: + run = load_field(_doc.get( + 'run'), uri_union_of_None_type_or_GalaxyWorkflowLoader_False_False_None, baseuri, loadingOptions) + except ValidationException as e: + errors.append( + ValidationException( + "the `run` field is not valid because:", + SourceLine(_doc, 'run', str), + [e] + ) + ) + else: + run = None + + extension_fields = yaml.comments.CommentedMap() + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url(k, + u"", + loadingOptions, + scoped_id=False, + vocab_term=False) + extension_fields[ex] = _doc[k] + else: + errors.append( + ValidationException( + "invalid field `%s`, expected one of: `id`, `label`, `doc`, `position`, `tool_id`, `tool_shed_repository`, `tool_version`, `in`, `out`, `state`, `type`, `run`" % (k), + SourceLine(_doc, k, str) + ) + ) + break + + if errors: + raise 
ValidationException("Trying 'WorkflowStep'", None, errors) + loadingOptions = copy.deepcopy(loadingOptions) + loadingOptions.original_doc = _doc + return cls(id, label, doc, position, tool_id, tool_shed_repository, tool_version, in_, out, state, type, run, extension_fields=extension_fields, loadingOptions=loadingOptions) + + def save(self, top=False, base_url="", relative_uris=True): + # type: (bool, Text, bool) -> Dict[Text, Any] + r = yaml.comments.CommentedMap() # type: Dict[Text, Any] + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + + if self.id is not None: + u = save_relative_uri( + self.id, + base_url, + True, + None, + relative_uris) + if u: + r['id'] = u + + if self.label is not None: + r['label'] = save( + self.label, + top=False, + base_url=self.id, + relative_uris=relative_uris) + + if self.doc is not None: + r['doc'] = save( + self.doc, + top=False, + base_url=self.id, + relative_uris=relative_uris) + + if self.position is not None: + r['position'] = save( + self.position, + top=False, + base_url=self.id, + relative_uris=relative_uris) + + if self.tool_id is not None: + r['tool_id'] = save( + self.tool_id, + top=False, + base_url=self.id, + relative_uris=relative_uris) + + if self.tool_shed_repository is not None: + r['tool_shed_repository'] = save( + self.tool_shed_repository, + top=False, + base_url=self.id, + relative_uris=relative_uris) + + if self.tool_version is not None: + r['tool_version'] = save( + self.tool_version, + top=False, + base_url=self.id, + relative_uris=relative_uris) + + if self.in_ is not None: + r['in'] = save( + self.in_, + top=False, + base_url=self.id, + relative_uris=relative_uris) + + if self.out is not None: + u = save_relative_uri( + self.out, + self.id, + True, + None, + relative_uris) + if u: + r['out'] = u + + if self.state is not None: + r['state'] = save( + self.state, + top=False, + base_url=self.id, + relative_uris=relative_uris) + + if self.type is not None: + r['type'] = save( + self.type, + top=False, + base_url=self.id, + relative_uris=relative_uris) + + if self.run is not None: + u = save_relative_uri( + self.run, + self.id, + False, + None, + relative_uris) + if u: + r['run'] = u + + if top and self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + + return r + + attrs = frozenset([u'id', u'label', u'doc', u'position', u'tool_id', u'tool_shed_repository', u'tool_version', u'in', u'out', u'state', u'type', u'run']) + + +class Sink(Savable): + pass + + +class WorkflowStepInput(Identified, Sink, Labeled): + """ +TODO: + + """ + def __init__( + self, + id, # type: Any + source, # type: Any + label, # type: Any + default, # type: Any + extension_fields=None, # type: Optional[Dict[Text, Any]] + loadingOptions=None # type: Optional[LoadingOptions] + ): # type: (...) 
-> None + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = yaml.comments.CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.id = id + self.source = source + self.label = label + self.default = default + + @classmethod + def fromDoc(cls, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, Text, LoadingOptions, Optional[Text]) -> WorkflowStepInput + + _doc = copy.copy(doc) + if hasattr(doc, 'lc'): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + errors = [] + if 'id' in _doc: + try: + id = load_field(_doc.get( + 'id'), uri_union_of_None_type_or_strtype_True_False_None, baseuri, loadingOptions) + except ValidationException as e: + errors.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, 'id', str), + [e] + ) + ) + else: + id = None + + if id is None: + if docRoot is not None: + id = docRoot + else: + id = "_:" + str(uuid.uuid4()) + baseuri = id + if 'source' in _doc: + try: + source = load_field(_doc.get( + 'source'), uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_2, baseuri, loadingOptions) + except ValidationException as e: + errors.append( + ValidationException( + "the `source` field is not valid because:", + SourceLine(_doc, 'source', str), + [e] + ) + ) + else: + source = None + if 'label' in _doc: + try: + label = load_field(_doc.get( + 'label'), union_of_None_type_or_strtype, baseuri, loadingOptions) + except ValidationException as e: + errors.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, 'label', str), + [e] + ) + ) + else: + label = None + if 'default' in _doc: + try: + default = load_field(_doc.get( + 'default'), union_of_None_type_or_Any_type, baseuri, loadingOptions) + except ValidationException as e: + errors.append( + ValidationException( + "the `default` field is not valid because:", + SourceLine(_doc, 'default', str), + [e] + ) + ) + else: + default = None + + extension_fields = yaml.comments.CommentedMap() + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url(k, + u"", + loadingOptions, + scoped_id=False, + vocab_term=False) + extension_fields[ex] = _doc[k] + else: + errors.append( + ValidationException( + "invalid field `%s`, expected one of: `id`, `source`, `label`, `default`" % (k), + SourceLine(_doc, k, str) + ) + ) + break + + if errors: + raise ValidationException("Trying 'WorkflowStepInput'", None, errors) + loadingOptions = copy.deepcopy(loadingOptions) + loadingOptions.original_doc = _doc + return cls(id, source, label, default, extension_fields=extension_fields, loadingOptions=loadingOptions) + + def save(self, top=False, base_url="", relative_uris=True): + # type: (bool, Text, bool) -> Dict[Text, Any] + r = yaml.comments.CommentedMap() # type: Dict[Text, Any] + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + + if self.id is not None: + u = save_relative_uri( + self.id, + base_url, + True, + None, + relative_uris) + if u: + r['id'] = u + + if self.source is not None: + u = save_relative_uri( + self.source, + self.id, + False, + 2, + relative_uris) + if u: + r['source'] = u + + if self.label is not None: + r['label'] = save( + self.label, + top=False, + base_url=self.id, + relative_uris=relative_uris) + + if self.default is not None: + r['default'] = save( + self.default, + top=False, + base_url=self.id, + 
relative_uris=relative_uris) + + if top and self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + + return r + + attrs = frozenset([u'id', u'source', u'label', u'default']) + + +class WorkflowStepOutput(Identified): + """ +Associate an output parameter of the underlying process with a workflow +parameter. The workflow parameter (given in the `id` field) be may be used +as a `source` to connect with input parameters of other workflow steps, or +with an output parameter of the process. + +A unique identifier for this workflow output parameter. This is +the identifier to use in the `source` field of `WorkflowStepInput` +to connect the output value to downstream parameters. + + """ + def __init__( + self, + id, # type: Any + extension_fields=None, # type: Optional[Dict[Text, Any]] + loadingOptions=None # type: Optional[LoadingOptions] + ): # type: (...) -> None + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = yaml.comments.CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.id = id + + @classmethod + def fromDoc(cls, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, Text, LoadingOptions, Optional[Text]) -> WorkflowStepOutput + + _doc = copy.copy(doc) + if hasattr(doc, 'lc'): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + errors = [] + if 'id' in _doc: + try: + id = load_field(_doc.get( + 'id'), uri_union_of_None_type_or_strtype_True_False_None, baseuri, loadingOptions) + except ValidationException as e: + errors.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, 'id', str), + [e] + ) + ) + else: + id = None + + if id is None: + if docRoot is not None: + id = docRoot + else: + id = "_:" + str(uuid.uuid4()) + baseuri = id + + extension_fields = yaml.comments.CommentedMap() + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url(k, + u"", + loadingOptions, + scoped_id=False, + vocab_term=False) + extension_fields[ex] = _doc[k] + else: + errors.append( + ValidationException( + "invalid field `%s`, expected one of: `id`" % (k), + SourceLine(_doc, k, str) + ) + ) + break + + if errors: + raise ValidationException("Trying 'WorkflowStepOutput'", None, errors) + loadingOptions = copy.deepcopy(loadingOptions) + loadingOptions.original_doc = _doc + return cls(id, extension_fields=extension_fields, loadingOptions=loadingOptions) + + def save(self, top=False, base_url="", relative_uris=True): + # type: (bool, Text, bool) -> Dict[Text, Any] + r = yaml.comments.CommentedMap() # type: Dict[Text, Any] + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + + if self.id is not None: + u = save_relative_uri( + self.id, + base_url, + True, + None, + relative_uris) + if u: + r['id'] = u + + if top and self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + + return r + + attrs = frozenset([u'id']) + + +class GalaxyWorkflow(Process): + """ +This is documentation for a workflow! + """ + def __init__( + self, + id, # type: Any + label, # type: Any + doc, # type: Any + inputs, # type: Any + outputs, # type: Any + steps, # type: Any + extension_fields=None, # type: Optional[Dict[Text, Any]] + loadingOptions=None # type: Optional[LoadingOptions] + ): # type: (...) 
-> None + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = yaml.comments.CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.id = id + self.label = label + self.doc = doc + self.inputs = inputs + self.outputs = outputs + self.class_ = "GalaxyWorkflow" + self.steps = steps + + @classmethod + def fromDoc(cls, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, Text, LoadingOptions, Optional[Text]) -> GalaxyWorkflow + + _doc = copy.copy(doc) + if hasattr(doc, 'lc'): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + errors = [] + + if _doc.get('class') != 'GalaxyWorkflow': + raise ValidationException("Not a GalaxyWorkflow") + + if 'id' in _doc: + try: + id = load_field(_doc.get( + 'id'), uri_union_of_None_type_or_strtype_True_False_None, baseuri, loadingOptions) + except ValidationException as e: + errors.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, 'id', str), + [e] + ) + ) + else: + id = None + + if id is None: + if docRoot is not None: + id = docRoot + else: + id = "_:" + str(uuid.uuid4()) + baseuri = id + if 'label' in _doc: + try: + label = load_field(_doc.get( + 'label'), union_of_None_type_or_strtype, baseuri, loadingOptions) + except ValidationException as e: + errors.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, 'label', str), + [e] + ) + ) + else: + label = None + if 'doc' in _doc: + try: + doc = load_field(_doc.get( + 'doc'), union_of_None_type_or_strtype_or_array_of_strtype, baseuri, loadingOptions) + except ValidationException as e: + errors.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, 'doc', str), + [e] + ) + ) + else: + doc = None + try: + inputs = load_field(_doc.get( + 'inputs'), idmap_inputs_array_of_WorkflowInputParameterLoader, baseuri, loadingOptions) + except ValidationException as e: + errors.append( + ValidationException( + "the `inputs` field is not valid because:", + SourceLine(_doc, 'inputs', str), + [e] + ) + ) + try: + outputs = load_field(_doc.get( + 'outputs'), idmap_outputs_array_of_WorkflowOutputParameterLoader, baseuri, loadingOptions) + except ValidationException as e: + errors.append( + ValidationException( + "the `outputs` field is not valid because:", + SourceLine(_doc, 'outputs', str), + [e] + ) + ) + try: + steps = load_field(_doc.get( + 'steps'), idmap_steps_union_of_array_of_WorkflowStepLoader, baseuri, loadingOptions) + except ValidationException as e: + errors.append( + ValidationException( + "the `steps` field is not valid because:", + SourceLine(_doc, 'steps', str), + [e] + ) + ) + + extension_fields = yaml.comments.CommentedMap() + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url(k, + u"", + loadingOptions, + scoped_id=False, + vocab_term=False) + extension_fields[ex] = _doc[k] + else: + errors.append( + ValidationException( + "invalid field `%s`, expected one of: `id`, `label`, `doc`, `inputs`, `outputs`, `name`, `class`, `steps`" % (k), + SourceLine(_doc, k, str) + ) + ) + break + + if errors: + raise ValidationException("Trying 'GalaxyWorkflow'", None, errors) + loadingOptions = copy.deepcopy(loadingOptions) + loadingOptions.original_doc = _doc + return cls(id, label, doc, inputs, outputs, steps, extension_fields=extension_fields, loadingOptions=loadingOptions) + + def save(self, top=False, base_url="", 
relative_uris=True): + # type: (bool, Text, bool) -> Dict[Text, Any] + r = yaml.comments.CommentedMap() # type: Dict[Text, Any] + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + + r['class'] = 'GalaxyWorkflow' + + if self.id is not None: + u = save_relative_uri( + self.id, + self.name, + True, + None, + relative_uris) + if u: + r['id'] = u + + if self.label is not None: + r['label'] = save( + self.label, + top=False, + base_url=self.name, + relative_uris=relative_uris) + + if self.doc is not None: + r['doc'] = save( + self.doc, + top=False, + base_url=self.name, + relative_uris=relative_uris) + + if self.inputs is not None: + r['inputs'] = save( + self.inputs, + top=False, + base_url=self.name, + relative_uris=relative_uris) + + if self.outputs is not None: + r['outputs'] = save( + self.outputs, + top=False, + base_url=self.name, + relative_uris=relative_uris) + + if self.steps is not None: + r['steps'] = save( + self.steps, + top=False, + base_url=self.name, + relative_uris=relative_uris) + + if top and self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + + return r + + attrs = frozenset([u'id', u'label', u'doc', u'inputs', u'outputs', u'name', u'class', u'steps']) + + +_vocab = { + "Any": "https://w3id.org/cwl/salad#Any", + "ArraySchema": "https://w3id.org/cwl/salad#ArraySchema", + "Documented": "https://w3id.org/cwl/salad#Documented", + "EnumSchema": "https://w3id.org/cwl/salad#EnumSchema", + "File": "https://galaxyproject.org/gxformat2/v19_09#GalaxyType/File", + "GalaxyType": "https://galaxyproject.org/gxformat2/v19_09#GalaxyType", + "GalaxyWorkflow": "https://galaxyproject.org/gxformat2/v19_09#GalaxyWorkflow", + "HasStepPosition": "https://galaxyproject.org/gxformat2/gxformat2common#HasStepPosition", + "Identified": "https://w3id.org/cwl/cwl#Identified", + "InputParameter": "https://w3id.org/cwl/cwl#InputParameter", + "Labeled": "https://w3id.org/cwl/cwl#Labeled", + "OutputParameter": "https://w3id.org/cwl/cwl#OutputParameter", + "Parameter": "https://w3id.org/cwl/cwl#Parameter", + "PrimitiveType": "https://w3id.org/cwl/salad#PrimitiveType", + "Process": "https://w3id.org/cwl/cwl#Process", + "RecordField": "https://w3id.org/cwl/salad#RecordField", + "RecordSchema": "https://w3id.org/cwl/salad#RecordSchema", + "ReferencesTool": "https://galaxyproject.org/gxformat2/gxformat2common#ReferencesTool", + "Sink": "https://galaxyproject.org/gxformat2/v19_09#Sink", + "StepPosition": "https://galaxyproject.org/gxformat2/gxformat2common#StepPosition", + "ToolShedRepository": "https://galaxyproject.org/gxformat2/gxformat2common#ToolShedRepository", + "WorkflowInputParameter": "https://galaxyproject.org/gxformat2/v19_09#WorkflowInputParameter", + "WorkflowOutputParameter": "https://galaxyproject.org/gxformat2/v19_09#WorkflowOutputParameter", + "WorkflowStep": "https://galaxyproject.org/gxformat2/v19_09#WorkflowStep", + "WorkflowStepInput": "https://galaxyproject.org/gxformat2/v19_09#WorkflowStepInput", + "WorkflowStepOutput": "https://galaxyproject.org/gxformat2/v19_09#WorkflowStepOutput", + "WorkflowStepType": "https://galaxyproject.org/gxformat2/v19_09#WorkflowStepType", + "array": "https://w3id.org/cwl/salad#array", + "boolean": "http://www.w3.org/2001/XMLSchema#boolean", + "collection": "https://galaxyproject.org/gxformat2/v19_09#GalaxyType/collection", + "data": "https://galaxyproject.org/gxformat2/v19_09#GalaxyType/data", + "double": "http://www.w3.org/2001/XMLSchema#double", + "enum": 
"https://w3id.org/cwl/salad#enum", + "float": "http://www.w3.org/2001/XMLSchema#float", + "int": "http://www.w3.org/2001/XMLSchema#int", + "long": "http://www.w3.org/2001/XMLSchema#long", + "null": "https://w3id.org/cwl/salad#null", + "pause": "https://galaxyproject.org/gxformat2/v19_09#WorkflowStepType/pause", + "record": "https://w3id.org/cwl/salad#record", + "string": "http://www.w3.org/2001/XMLSchema#string", + "subworkflow": "https://galaxyproject.org/gxformat2/v19_09#WorkflowStepType/subworkflow", + "tool": "https://galaxyproject.org/gxformat2/v19_09#WorkflowStepType/tool", +} +_rvocab = { + "https://w3id.org/cwl/salad#Any": "Any", + "https://w3id.org/cwl/salad#ArraySchema": "ArraySchema", + "https://w3id.org/cwl/salad#Documented": "Documented", + "https://w3id.org/cwl/salad#EnumSchema": "EnumSchema", + "https://galaxyproject.org/gxformat2/v19_09#GalaxyType/File": "File", + "https://galaxyproject.org/gxformat2/v19_09#GalaxyType": "GalaxyType", + "https://galaxyproject.org/gxformat2/v19_09#GalaxyWorkflow": "GalaxyWorkflow", + "https://galaxyproject.org/gxformat2/gxformat2common#HasStepPosition": "HasStepPosition", + "https://w3id.org/cwl/cwl#Identified": "Identified", + "https://w3id.org/cwl/cwl#InputParameter": "InputParameter", + "https://w3id.org/cwl/cwl#Labeled": "Labeled", + "https://w3id.org/cwl/cwl#OutputParameter": "OutputParameter", + "https://w3id.org/cwl/cwl#Parameter": "Parameter", + "https://w3id.org/cwl/salad#PrimitiveType": "PrimitiveType", + "https://w3id.org/cwl/cwl#Process": "Process", + "https://w3id.org/cwl/salad#RecordField": "RecordField", + "https://w3id.org/cwl/salad#RecordSchema": "RecordSchema", + "https://galaxyproject.org/gxformat2/gxformat2common#ReferencesTool": "ReferencesTool", + "https://galaxyproject.org/gxformat2/v19_09#Sink": "Sink", + "https://galaxyproject.org/gxformat2/gxformat2common#StepPosition": "StepPosition", + "https://galaxyproject.org/gxformat2/gxformat2common#ToolShedRepository": "ToolShedRepository", + "https://galaxyproject.org/gxformat2/v19_09#WorkflowInputParameter": "WorkflowInputParameter", + "https://galaxyproject.org/gxformat2/v19_09#WorkflowOutputParameter": "WorkflowOutputParameter", + "https://galaxyproject.org/gxformat2/v19_09#WorkflowStep": "WorkflowStep", + "https://galaxyproject.org/gxformat2/v19_09#WorkflowStepInput": "WorkflowStepInput", + "https://galaxyproject.org/gxformat2/v19_09#WorkflowStepOutput": "WorkflowStepOutput", + "https://galaxyproject.org/gxformat2/v19_09#WorkflowStepType": "WorkflowStepType", + "https://w3id.org/cwl/salad#array": "array", + "http://www.w3.org/2001/XMLSchema#boolean": "boolean", + "https://galaxyproject.org/gxformat2/v19_09#GalaxyType/collection": "collection", + "https://galaxyproject.org/gxformat2/v19_09#GalaxyType/data": "data", + "http://www.w3.org/2001/XMLSchema#double": "double", + "https://w3id.org/cwl/salad#enum": "enum", + "http://www.w3.org/2001/XMLSchema#float": "float", + "http://www.w3.org/2001/XMLSchema#int": "int", + "http://www.w3.org/2001/XMLSchema#long": "long", + "https://w3id.org/cwl/salad#null": "null", + "https://galaxyproject.org/gxformat2/v19_09#WorkflowStepType/pause": "pause", + "https://w3id.org/cwl/salad#record": "record", + "http://www.w3.org/2001/XMLSchema#string": "string", + "https://galaxyproject.org/gxformat2/v19_09#WorkflowStepType/subworkflow": "subworkflow", + "https://galaxyproject.org/gxformat2/v19_09#WorkflowStepType/tool": "tool", +} + +inttype = _PrimitiveLoader(int) +booltype = _PrimitiveLoader(bool) +strtype = _PrimitiveLoader((str, text_type)) 
+Any_type = _AnyLoader() +floattype = _PrimitiveLoader(float) +None_type = _PrimitiveLoader(type(None)) +DocumentedLoader = _RecordLoader(Documented) +PrimitiveTypeLoader = _EnumLoader(("null", "boolean", "int", "long", "float", "double", "string",)) +AnyLoader = _EnumLoader(("Any",)) +RecordFieldLoader = _RecordLoader(RecordField) +RecordSchemaLoader = _RecordLoader(RecordSchema) +EnumSchemaLoader = _RecordLoader(EnumSchema) +ArraySchemaLoader = _RecordLoader(ArraySchema) +LabeledLoader = _RecordLoader(Labeled) +IdentifiedLoader = _RecordLoader(Identified) +ParameterLoader = _RecordLoader(Parameter) +InputParameterLoader = _RecordLoader(InputParameter) +OutputParameterLoader = _RecordLoader(OutputParameter) +ProcessLoader = _RecordLoader(Process) +HasStepPositionLoader = _RecordLoader(HasStepPosition) +StepPositionLoader = _RecordLoader(StepPosition) +ReferencesToolLoader = _RecordLoader(ReferencesTool) +ToolShedRepositoryLoader = _RecordLoader(ToolShedRepository) +GalaxyTypeLoader = _EnumLoader(("File", "data", "collection",)) +WorkflowStepTypeLoader = _EnumLoader(("tool", "subworkflow", "pause",)) +WorkflowInputParameterLoader = _RecordLoader(WorkflowInputParameter) +WorkflowOutputParameterLoader = _RecordLoader(WorkflowOutputParameter) +WorkflowStepLoader = _RecordLoader(WorkflowStep) +SinkLoader = _RecordLoader(Sink) +WorkflowStepInputLoader = _RecordLoader(WorkflowStepInput) +WorkflowStepOutputLoader = _RecordLoader(WorkflowStepOutput) +GalaxyWorkflowLoader = _RecordLoader(GalaxyWorkflow) +array_of_strtype = _ArrayLoader(strtype) +union_of_None_type_or_strtype_or_array_of_strtype = _UnionLoader((None_type, strtype, array_of_strtype,)) +uri_strtype_True_False_None = _URILoader(strtype, True, False, None) +union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype = _UnionLoader((PrimitiveTypeLoader, RecordSchemaLoader, EnumSchemaLoader, ArraySchemaLoader, strtype,)) +array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype = _ArrayLoader(union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype) +union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype = _UnionLoader((PrimitiveTypeLoader, RecordSchemaLoader, EnumSchemaLoader, ArraySchemaLoader, strtype, array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype,)) +typedsl_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_2 = _TypeDSLLoader(union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype, 2) +array_of_RecordFieldLoader = _ArrayLoader(RecordFieldLoader) +union_of_None_type_or_array_of_RecordFieldLoader = _UnionLoader((None_type, array_of_RecordFieldLoader,)) +idmap_fields_union_of_None_type_or_array_of_RecordFieldLoader = _IdMapLoader(union_of_None_type_or_array_of_RecordFieldLoader, 'name', 'type') +enum_d9cba076fca539106791a4f46d198c7fcfbdb779Loader = _EnumLoader(("record",)) +typedsl_enum_d9cba076fca539106791a4f46d198c7fcfbdb779Loader_2 = 
_TypeDSLLoader(enum_d9cba076fca539106791a4f46d198c7fcfbdb779Loader, 2) +uri_array_of_strtype_True_False_None = _URILoader(array_of_strtype, True, False, None) +enum_d961d79c225752b9fadb617367615ab176b47d77Loader = _EnumLoader(("enum",)) +typedsl_enum_d961d79c225752b9fadb617367615ab176b47d77Loader_2 = _TypeDSLLoader(enum_d961d79c225752b9fadb617367615ab176b47d77Loader, 2) +uri_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_False_True_2 = _URILoader(union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype, False, True, 2) +enum_d062602be0b4b8fd33e69e29a841317b6ab665bcLoader = _EnumLoader(("array",)) +typedsl_enum_d062602be0b4b8fd33e69e29a841317b6ab665bcLoader_2 = _TypeDSLLoader(enum_d062602be0b4b8fd33e69e29a841317b6ab665bcLoader, 2) +union_of_None_type_or_strtype = _UnionLoader((None_type, strtype,)) +uri_union_of_None_type_or_strtype_True_False_None = _URILoader(union_of_None_type_or_strtype, True, False, None) +union_of_None_type_or_Any_type = _UnionLoader((None_type, Any_type,)) +union_of_WorkflowInputParameterLoader = _UnionLoader((WorkflowInputParameterLoader,)) +array_of_union_of_WorkflowInputParameterLoader = _ArrayLoader(union_of_WorkflowInputParameterLoader) +idmap_inputs_array_of_union_of_WorkflowInputParameterLoader = _IdMapLoader(array_of_union_of_WorkflowInputParameterLoader, 'id', 'type') +union_of_WorkflowOutputParameterLoader = _UnionLoader((WorkflowOutputParameterLoader,)) +array_of_union_of_WorkflowOutputParameterLoader = _ArrayLoader(union_of_WorkflowOutputParameterLoader) +idmap_outputs_array_of_union_of_WorkflowOutputParameterLoader = _IdMapLoader(array_of_union_of_WorkflowOutputParameterLoader, 'id', 'type') +union_of_None_type_or_StepPositionLoader = _UnionLoader((None_type, StepPositionLoader,)) +union_of_None_type_or_ToolShedRepositoryLoader = _UnionLoader((None_type, ToolShedRepositoryLoader,)) +union_of_None_type_or_GalaxyTypeLoader = _UnionLoader((None_type, GalaxyTypeLoader,)) +typedsl_union_of_None_type_or_GalaxyTypeLoader_2 = _TypeDSLLoader(union_of_None_type_or_GalaxyTypeLoader, 2) +array_of_WorkflowStepInputLoader = _ArrayLoader(WorkflowStepInputLoader) +union_of_None_type_or_array_of_WorkflowStepInputLoader = _UnionLoader((None_type, array_of_WorkflowStepInputLoader,)) +idmap_in__union_of_None_type_or_array_of_WorkflowStepInputLoader = _IdMapLoader(union_of_None_type_or_array_of_WorkflowStepInputLoader, 'id', 'source') +union_of_strtype_or_WorkflowStepOutputLoader = _UnionLoader((strtype, WorkflowStepOutputLoader,)) +array_of_union_of_strtype_or_WorkflowStepOutputLoader = _ArrayLoader(union_of_strtype_or_WorkflowStepOutputLoader) +union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader_or_None_type = _UnionLoader((array_of_union_of_strtype_or_WorkflowStepOutputLoader, None_type,)) +uri_union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader_or_None_type_True_False_None = _URILoader(union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader_or_None_type, True, False, None) +union_of_None_type_or_WorkflowStepTypeLoader = _UnionLoader((None_type, WorkflowStepTypeLoader,)) +typedsl_union_of_None_type_or_WorkflowStepTypeLoader_2 = _TypeDSLLoader(union_of_None_type_or_WorkflowStepTypeLoader, 2) 
+union_of_None_type_or_GalaxyWorkflowLoader = _UnionLoader((None_type, GalaxyWorkflowLoader,)) +uri_union_of_None_type_or_GalaxyWorkflowLoader_False_False_None = _URILoader(union_of_None_type_or_GalaxyWorkflowLoader, False, False, None) +uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_2 = _URILoader(union_of_None_type_or_strtype_or_array_of_strtype, False, False, 2) +array_of_WorkflowInputParameterLoader = _ArrayLoader(WorkflowInputParameterLoader) +idmap_inputs_array_of_WorkflowInputParameterLoader = _IdMapLoader(array_of_WorkflowInputParameterLoader, 'id', 'type') +array_of_WorkflowOutputParameterLoader = _ArrayLoader(WorkflowOutputParameterLoader) +idmap_outputs_array_of_WorkflowOutputParameterLoader = _IdMapLoader(array_of_WorkflowOutputParameterLoader, 'id', 'type') +uri_strtype_False_True_None = _URILoader(strtype, False, True, None) +array_of_WorkflowStepLoader = _ArrayLoader(WorkflowStepLoader) +union_of_array_of_WorkflowStepLoader = _UnionLoader((array_of_WorkflowStepLoader,)) +idmap_steps_union_of_array_of_WorkflowStepLoader = _IdMapLoader(union_of_array_of_WorkflowStepLoader, 'id', 'None') +union_of_GalaxyWorkflowLoader = _UnionLoader((GalaxyWorkflowLoader,)) +array_of_union_of_GalaxyWorkflowLoader = _ArrayLoader(union_of_GalaxyWorkflowLoader) +union_of_GalaxyWorkflowLoader_or_array_of_union_of_GalaxyWorkflowLoader = _UnionLoader((GalaxyWorkflowLoader, array_of_union_of_GalaxyWorkflowLoader,)) + + +def load_document(doc, baseuri=None, loadingOptions=None): + # type: (Any, Optional[Text], Optional[LoadingOptions]) -> Any + if baseuri is None: + baseuri = file_uri(os.getcwd()) + "/" + if loadingOptions is None: + loadingOptions = LoadingOptions() + return _document_load(union_of_GalaxyWorkflowLoader_or_array_of_union_of_GalaxyWorkflowLoader, doc, baseuri, loadingOptions) + + +def load_document_by_string(string, uri, loadingOptions=None): + # type: (Any, Text, Optional[LoadingOptions]) -> Any + result = yaml.round_trip_load(string, preserve_quotes=True) + add_lc_filename(result, uri) + + if loadingOptions is None: + loadingOptions = LoadingOptions(fileuri=uri) + loadingOptions.idx[uri] = result + + return _document_load(union_of_GalaxyWorkflowLoader_or_array_of_union_of_GalaxyWorkflowLoader, result, uri, loadingOptions) diff --git a/java/.gitignore b/java/.gitignore new file mode 100644 index 0000000..2f7896d --- /dev/null +++ b/java/.gitignore @@ -0,0 +1 @@ +target/ diff --git a/java/README.md b/java/README.md new file mode 100644 index 0000000..106be62 --- /dev/null +++ b/java/README.md @@ -0,0 +1,35 @@ +# org.galaxyproject.gxformat2.v19_09 + +This project contains Java objects and utilities auto-generated by Schema Salad for parsing documents corresponding to the https://galaxyproject.org/gxformat2/v19_09# schema. + +## License + +This project is licensed under the [Apache License, Version 2.0](https://www.apache.org/licenses/LICENSE-2.0.txt). + +## Contributing + +This project is auto-generated by [Schema Salad](https://github.com/common-workflow-language/schema_salad) +and likely should not be modified directly. Instead consider filing an issue or opening +a pull request against the Schema Salad repository. + +## Requirements + +This Java library requires Java 8+. Building and testing this project requires +[Apache Maven](https://maven.apache.org/) (``mvn``). 
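The generated Python module added earlier in this diff ends with `load_document`, which accepts a document or URI and raises `schema_salad.exceptions.SchemaSaladException` when the input does not conform to the v19.09 schema. A minimal, illustrative sketch of driving it directly is shown below; this is not part of the generated code, and the `my-workflow.gxwf.yml` path is a hypothetical placeholder.

```python
# Illustrative sketch only: validate a Format 2 workflow file against the
# generated v19.09 schema classes. "my-workflow.gxwf.yml" is a hypothetical
# placeholder path, not a file shipped with this change.
import os

from gxformat2.schema.v19_09 import load_document
from schema_salad.exceptions import SchemaSaladException


def is_valid_format2(path):
    """Return True if the file at `path` loads cleanly against the v19.09 schema."""
    try:
        # load_document accepts a URI; schema violations raise SchemaSaladException.
        load_document("file://" + os.path.abspath(path))
    except SchemaSaladException as exc:
        print(exc)
        return False
    return True


if __name__ == "__main__":
    print(is_valid_format2("my-workflow.gxwf.yml"))
```

The Java side of this change offers an analogous entry point via the `Lint` and `Format2Linter` classes added below.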
+ +## Usage + +Compile the project, test it, and build a jar + + $ mvn install + $ ls target/ # jar file in here + +Building a standalone jar with all dependencies included and use it to validate a document + + $ mvn assembly:single + $ java -jar target/-0.0.1-SNAPSHOT-jar-with-dependencies.jar ../path/to/document.yml + +Building and viewing JavaDocs + + $ mvn javadoc:javadoc + $ open target/site/apidocs/index.html diff --git a/java/pom.xml b/java/pom.xml new file mode 100644 index 0000000..8d78b6b --- /dev/null +++ b/java/pom.xml @@ -0,0 +1,125 @@ + + + 4.0.0 + + org.galaxyproject.gxformat2.v19_09 + v19_09 + jar + org.galaxyproject.gxformat2.v19_09 + Schema Salad for parsing documents corresponding to the https://galaxyproject.org/gxformat2/v19_09# schema.]]> + 0.0.1-SNAPSHOT + + + 3.7 + + + + + Apache License, Version 2.0 + https://www.apache.org/licenses/LICENSE-2.0.txt + + + + + + + + org.apache.maven.plugins + maven-deploy-plugin + 2.8.2 + + + org.apache.maven.plugins + maven-compiler-plugin + 3.8.0 + + 11 + true + + + + + + + + org.apache.maven.plugins + maven-release-plugin + 2.5.3 + + + org.apache.maven.plugins + maven-deploy-plugin + 2.8.2 + + + org.apache.maven.plugins + maven-surefire-plugin + 2.21.0 + + + org.apache.maven.plugins + maven-javadoc-plugin + 3.1.1 + + + + + org.apache.maven.plugins + maven-jar-plugin + + + src/main/resources/META-INF/MANIFEST.MF + + + + + maven-assembly-plugin + + + + org.galaxyproject.gxformat2.v19_09.utils.Validator + + + + jar-with-dependencies + + + + + com.coveo + fmt-maven-plugin + 2.9 + + + + format + + + + + + + + + + + org.apache.commons + commons-lang3 + ${commonslang.version} + + + + + + + junit + junit + 4.12 + + + org.yaml + snakeyaml + 1.24 + + + diff --git a/java/src/main/java/org/galaxyproject/gxformat2/Format2Linter.java b/java/src/main/java/org/galaxyproject/gxformat2/Format2Linter.java new file mode 100644 index 0000000..20d35b3 --- /dev/null +++ b/java/src/main/java/org/galaxyproject/gxformat2/Format2Linter.java @@ -0,0 +1,16 @@ +package org.galaxyproject.gxformat2; + +import java.util.Map; +import org.galaxyproject.gxformat2.v19_09.utils.RootLoader; +import org.galaxyproject.gxformat2.v19_09.utils.ValidationException; + +public class Format2Linter implements GalaxyWorkflowLinter { + public int lint(final Map workflow) { + try { + RootLoader.loadDocument(workflow); + } catch (ValidationException e) { + return EXIT_CODE_FORMAT_ERROR; + } + return EXIT_CODE_SUCCESS; + } +} diff --git a/java/src/main/java/org/galaxyproject/gxformat2/GalaxyWorkflowLinter.java b/java/src/main/java/org/galaxyproject/gxformat2/GalaxyWorkflowLinter.java new file mode 100644 index 0000000..b4e9d74 --- /dev/null +++ b/java/src/main/java/org/galaxyproject/gxformat2/GalaxyWorkflowLinter.java @@ -0,0 +1,12 @@ +package org.galaxyproject.gxformat2; + +import java.util.Map; + +public interface GalaxyWorkflowLinter { + public static int EXIT_CODE_SUCCESS = 0; + public static int EXIT_CODE_LINT_FAILED = 1; + public static int EXIT_CODE_FORMAT_ERROR = 2; + public static int EXIT_CODE_FILE_PARSE_FAILED = 3; + + public int lint(final Map workflow); +} diff --git a/java/src/main/java/org/galaxyproject/gxformat2/Lint.java b/java/src/main/java/org/galaxyproject/gxformat2/Lint.java new file mode 100644 index 0000000..447700b --- /dev/null +++ b/java/src/main/java/org/galaxyproject/gxformat2/Lint.java @@ -0,0 +1,31 @@ +package org.galaxyproject.gxformat2; + +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.Map; 
+import org.yaml.snakeyaml.Yaml; + +public class Lint { + public static void main(String[] args) throws Exception { + final int exitCode = lint(args); + System.exit(exitCode); + } + + public static int lint(final String[] args) throws Exception { + + final Path path = Paths.get(args[0]); + final String workflowContents = new String(Files.readAllBytes(path), "UTF8"); + final Yaml yaml = new Yaml(); + final Map object = (Map) yaml.load(workflowContents); + final String wfClass = (String) object.get("class"); + GalaxyWorkflowLinter linter; + if (wfClass != null && wfClass.equals("GalaxyWorkflow")) { + linter = new Format2Linter(); + } else { + linter = new NativeLinter(); + } + final int exitCode = linter.lint(object); + return exitCode; + } +} diff --git a/java/src/main/java/org/galaxyproject/gxformat2/NativeLinter.java b/java/src/main/java/org/galaxyproject/gxformat2/NativeLinter.java new file mode 100644 index 0000000..d6235d3 --- /dev/null +++ b/java/src/main/java/org/galaxyproject/gxformat2/NativeLinter.java @@ -0,0 +1,60 @@ +package org.galaxyproject.gxformat2; + +import java.util.List; +import java.util.Map; + +public class NativeLinter implements GalaxyWorkflowLinter { + public int lint(final Map workflow) { + if (!"0.1".equals(workflow.get("format-version"))) { + return EXIT_CODE_FORMAT_ERROR; + } + if (!"true".equals(workflow.get("a_galaxy_workflow"))) { + return EXIT_CODE_FORMAT_ERROR; + } + + Object nativeSteps = workflow.get("steps"); + if (!(nativeSteps instanceof Map)) { + return EXIT_CODE_FORMAT_ERROR; + } + boolean foundOutputs = false; + boolean foundOutputsWithoutLabel = false; + Map steps = (Map) nativeSteps; + for (Map.Entry stepEntry : steps.entrySet()) { + final String orderIndexStr = stepEntry.getKey(); + try { + final int orderIndex = Integer.parseInt(orderIndexStr); + } catch (NumberFormatException e) { + return EXIT_CODE_FORMAT_ERROR; + } + if (!(stepEntry.getValue() instanceof Map)) { + return EXIT_CODE_FORMAT_ERROR; + } + Map step = (Map) stepEntry.getValue(); + Object workflowOutputsObject = step.get("workflow_outputs"); + if (workflowOutputsObject != null) { + if (!(workflowOutputsObject instanceof List)) { + return EXIT_CODE_FORMAT_ERROR; + } + final List workflowOutputs = (List) workflowOutputsObject; + for (Object workflowOutputObject : workflowOutputs) { + foundOutputs = true; + if (!(workflowOutputObject instanceof Map)) { + return EXIT_CODE_FORMAT_ERROR; + } + final Map workflowOutput = (Map) workflowOutputObject; + final String label = workflowOutput.get("label"); + if (label == null || label.length() == 0) { + foundOutputsWithoutLabel = true; + } + } + } + } + if (!foundOutputs) { + return EXIT_CODE_LINT_FAILED; + } + if (foundOutputsWithoutLabel) { + return EXIT_CODE_LINT_FAILED; + } + return EXIT_CODE_SUCCESS; + } +} diff --git a/java/src/main/java/org/galaxyproject/gxformat2/v19_09/Any.java b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/Any.java new file mode 100644 index 0000000..5c82828 --- /dev/null +++ b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/Any.java @@ -0,0 +1,23 @@ +package org.galaxyproject.gxformat2.v19_09; + +import org.galaxyproject.gxformat2.v19_09.utils.ValidationException; + +public enum Any { + ANY("Any"); + + private static String[] symbols = new String[] {"Any"}; + private String docVal; + + private Any(final String docVal) { + this.docVal = docVal; + } + + public static Any fromDocumentVal(final String docVal) { + for (final Any val : Any.values()) { + if (val.docVal.equals(docVal)) { + return val; + } + } + 
throw new ValidationException(String.format("Expected one of %s", Any.symbols, docVal)); + } +} diff --git a/java/src/main/java/org/galaxyproject/gxformat2/v19_09/ArraySchema.java b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/ArraySchema.java new file mode 100644 index 0000000..bc5cb57 --- /dev/null +++ b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/ArraySchema.java @@ -0,0 +1,30 @@ +package org.galaxyproject.gxformat2.v19_09; + +import org.galaxyproject.gxformat2.v19_09.utils.Savable; + +/** + * Auto-generated interface for https://w3id.org/cwl/salad#ArraySchema
+ * This interface is implemented by {@link ArraySchemaImpl}
+ */ +public interface ArraySchema extends Savable { + /** + * Getter for property https://w3id.org/cwl/salad#items
+ * + *
+ * + * Defines the type of the array elements. * + * + *
+ */ + Object getItems(); + /** + * Getter for property https://w3id.org/cwl/salad#type
+ * + *
+ * + * Must be `array` * + * + *
+ */ + enum_d062602be0b4b8fd33e69e29a841317b6ab665bc getType(); +} diff --git a/java/src/main/java/org/galaxyproject/gxformat2/v19_09/ArraySchemaImpl.java b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/ArraySchemaImpl.java new file mode 100644 index 0000000..31b6708 --- /dev/null +++ b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/ArraySchemaImpl.java @@ -0,0 +1,102 @@ +package org.galaxyproject.gxformat2.v19_09; + +import org.galaxyproject.gxformat2.v19_09.utils.LoaderInstances; +import org.galaxyproject.gxformat2.v19_09.utils.LoadingOptions; +import org.galaxyproject.gxformat2.v19_09.utils.LoadingOptionsBuilder; +import org.galaxyproject.gxformat2.v19_09.utils.SavableImpl; +import org.galaxyproject.gxformat2.v19_09.utils.ValidationException; + +/** Auto-generated class implementation for https://w3id.org/cwl/salad#ArraySchema
*/ +public class ArraySchemaImpl extends SavableImpl implements ArraySchema { + private LoadingOptions loadingOptions_ = new LoadingOptionsBuilder().build(); + private java.util.Map extensionFields_ = new java.util.HashMap(); + + private Object items; + + /** + * Getter for property https://w3id.org/cwl/salad#items
+ * + *
+ * + * Defines the type of the array elements. * + * + *
+ */ + public Object getItems() { + return this.items; + } + + private enum_d062602be0b4b8fd33e69e29a841317b6ab665bc type; + + /** + * Getter for property https://w3id.org/cwl/salad#type
+ * + *
+ * + * Must be `array` * + * + *
+ */ + public enum_d062602be0b4b8fd33e69e29a841317b6ab665bc getType() { + return this.type; + } + + /** + * Used by {@link org.galaxyproject.gxformat2.v19_09.utils.RootLoader} to construct instances of + * ArraySchemaImpl. + * + * @param __doc_ Document fragment to load this record object from (presumably a {@link + * java.util.Map}). + * @param __baseUri_ Base URI to generate child document IDs against. + * @param __loadingOptions Context for loading URIs and populating objects. + * @param __docRoot_ ID at this position in the document (if available) (maybe?) + * @throws ValidationException If the document fragment is not a {@link java.util.Map} or + * validation of fields fails. + */ + public ArraySchemaImpl( + final Object __doc_, + final String __baseUri_, + LoadingOptions __loadingOptions, + final String __docRoot_) { + super(__doc_, __baseUri_, __loadingOptions, __docRoot_); + // Prefix plumbing variables with '__' to reduce likelihood of collision with + // generated names. + String __baseUri = __baseUri_; + String __docRoot = __docRoot_; + if (!(__doc_ instanceof java.util.Map)) { + throw new ValidationException("ArraySchemaImpl called on non-map"); + } + final java.util.Map __doc = (java.util.Map) __doc_; + final java.util.List __errors = + new java.util.ArrayList(); + if (__loadingOptions != null) { + this.loadingOptions_ = __loadingOptions; + } + Object items; + try { + items = + LoaderInstances + .uri_union_of_PrimitiveType_or_RecordSchema_or_EnumSchema_or_ArraySchema_or_StringInstance_or_array_of_union_of_PrimitiveType_or_RecordSchema_or_EnumSchema_or_ArraySchema_or_StringInstance_False_True_2 + .loadField(__doc.get("items"), __baseUri, __loadingOptions); + } catch (ValidationException e) { + items = null; // won't be used but prevents compiler from complaining. + final String __message = "the `items` field is not valid because:"; + __errors.add(new ValidationException(__message, e)); + } + enum_d062602be0b4b8fd33e69e29a841317b6ab665bc type; + try { + type = + LoaderInstances.typedsl_enum_d062602be0b4b8fd33e69e29a841317b6ab665bc_2.loadField( + __doc.get("type"), __baseUri, __loadingOptions); + } catch (ValidationException e) { + type = null; // won't be used but prevents compiler from complaining. + final String __message = "the `type` field is not valid because:"; + __errors.add(new ValidationException(__message, e)); + } + if (!__errors.isEmpty()) { + throw new ValidationException("Trying 'RecordField'", __errors); + } + this.items = (Object) items; + this.type = (enum_d062602be0b4b8fd33e69e29a841317b6ab665bc) type; + } +} diff --git a/java/src/main/java/org/galaxyproject/gxformat2/v19_09/Documented.java b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/Documented.java new file mode 100644 index 0000000..361ab24 --- /dev/null +++ b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/Documented.java @@ -0,0 +1,17 @@ +package org.galaxyproject.gxformat2.v19_09; + +import org.galaxyproject.gxformat2.v19_09.utils.Savable; + +/** Auto-generated interface for https://w3id.org/cwl/salad#Documented
*/ +public interface Documented extends Savable { + /** + * Getter for property https://w3id.org/cwl/salad#Documented/doc
+ * + *
+ * + * A documentation string for this object, or an array of strings which should be concatenated. * + * + *
+ */ + Object getDoc(); +} diff --git a/java/src/main/java/org/galaxyproject/gxformat2/v19_09/EnumSchema.java b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/EnumSchema.java new file mode 100644 index 0000000..cba8288 --- /dev/null +++ b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/EnumSchema.java @@ -0,0 +1,36 @@ +package org.galaxyproject.gxformat2.v19_09; + +import org.galaxyproject.gxformat2.v19_09.utils.Savable; + +/** + * Auto-generated interface for https://w3id.org/cwl/salad#EnumSchema
+ * This interface is implemented by {@link EnumSchemaImpl}
+ * + *
+ * + * Define an enumerated type. + * + *
+ */ +public interface EnumSchema extends Savable { + /** + * Getter for property https://w3id.org/cwl/salad#symbols
+ * + *
+ * + * Defines the set of valid symbols. * + * + *
+ */ + java.util.List getSymbols(); + /** + * Getter for property https://w3id.org/cwl/salad#type
+ * + *
+ * + * Must be `enum` * + * + *
+ */ + enum_d961d79c225752b9fadb617367615ab176b47d77 getType(); +} diff --git a/java/src/main/java/org/galaxyproject/gxformat2/v19_09/EnumSchemaImpl.java b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/EnumSchemaImpl.java new file mode 100644 index 0000000..73e4c21 --- /dev/null +++ b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/EnumSchemaImpl.java @@ -0,0 +1,109 @@ +package org.galaxyproject.gxformat2.v19_09; + +import org.galaxyproject.gxformat2.v19_09.utils.LoaderInstances; +import org.galaxyproject.gxformat2.v19_09.utils.LoadingOptions; +import org.galaxyproject.gxformat2.v19_09.utils.LoadingOptionsBuilder; +import org.galaxyproject.gxformat2.v19_09.utils.SavableImpl; +import org.galaxyproject.gxformat2.v19_09.utils.ValidationException; + +/** + * Auto-generated class implementation for https://w3id.org/cwl/salad#EnumSchema
+ * + *
+ * + * Define an enumerated type. + * + *
+ */ +public class EnumSchemaImpl extends SavableImpl implements EnumSchema { + private LoadingOptions loadingOptions_ = new LoadingOptionsBuilder().build(); + private java.util.Map extensionFields_ = new java.util.HashMap(); + + private java.util.List symbols; + + /** + * Getter for property https://w3id.org/cwl/salad#symbols
+ * + *
+ * + * Defines the set of valid symbols. * + * + *
+ */ + public java.util.List getSymbols() { + return this.symbols; + } + + private enum_d961d79c225752b9fadb617367615ab176b47d77 type; + + /** + * Getter for property https://w3id.org/cwl/salad#type
+ * + *
+ * + * Must be `enum` * + * + *
+ */ + public enum_d961d79c225752b9fadb617367615ab176b47d77 getType() { + return this.type; + } + + /** + * Used by {@link org.galaxyproject.gxformat2.v19_09.utils.RootLoader} to construct instances of + * EnumSchemaImpl. + * + * @param __doc_ Document fragment to load this record object from (presumably a {@link + * java.util.Map}). + * @param __baseUri_ Base URI to generate child document IDs against. + * @param __loadingOptions Context for loading URIs and populating objects. + * @param __docRoot_ ID at this position in the document (if available) (maybe?) + * @throws ValidationException If the document fragment is not a {@link java.util.Map} or + * validation of fields fails. + */ + public EnumSchemaImpl( + final Object __doc_, + final String __baseUri_, + LoadingOptions __loadingOptions, + final String __docRoot_) { + super(__doc_, __baseUri_, __loadingOptions, __docRoot_); + // Prefix plumbing variables with '__' to reduce likelihood of collision with + // generated names. + String __baseUri = __baseUri_; + String __docRoot = __docRoot_; + if (!(__doc_ instanceof java.util.Map)) { + throw new ValidationException("EnumSchemaImpl called on non-map"); + } + final java.util.Map __doc = (java.util.Map) __doc_; + final java.util.List __errors = + new java.util.ArrayList(); + if (__loadingOptions != null) { + this.loadingOptions_ = __loadingOptions; + } + java.util.List symbols; + try { + symbols = + LoaderInstances.uri_array_of_StringInstance_True_False_None.loadField( + __doc.get("symbols"), __baseUri, __loadingOptions); + } catch (ValidationException e) { + symbols = null; // won't be used but prevents compiler from complaining. + final String __message = "the `symbols` field is not valid because:"; + __errors.add(new ValidationException(__message, e)); + } + enum_d961d79c225752b9fadb617367615ab176b47d77 type; + try { + type = + LoaderInstances.typedsl_enum_d961d79c225752b9fadb617367615ab176b47d77_2.loadField( + __doc.get("type"), __baseUri, __loadingOptions); + } catch (ValidationException e) { + type = null; // won't be used but prevents compiler from complaining. 
+ final String __message = "the `type` field is not valid because:"; + __errors.add(new ValidationException(__message, e)); + } + if (!__errors.isEmpty()) { + throw new ValidationException("Trying 'RecordField'", __errors); + } + this.symbols = (java.util.List) symbols; + this.type = (enum_d961d79c225752b9fadb617367615ab176b47d77) type; + } +} diff --git a/java/src/main/java/org/galaxyproject/gxformat2/v19_09/GalaxyType.java b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/GalaxyType.java new file mode 100644 index 0000000..4a5b09f --- /dev/null +++ b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/GalaxyType.java @@ -0,0 +1,25 @@ +package org.galaxyproject.gxformat2.v19_09; + +import org.galaxyproject.gxformat2.v19_09.utils.ValidationException; + +public enum GalaxyType { + FILE("File"), + DATA("data"), + COLLECTION("collection"); + + private static String[] symbols = new String[] {"File", "data", "collection"}; + private String docVal; + + private GalaxyType(final String docVal) { + this.docVal = docVal; + } + + public static GalaxyType fromDocumentVal(final String docVal) { + for (final GalaxyType val : GalaxyType.values()) { + if (val.docVal.equals(docVal)) { + return val; + } + } + throw new ValidationException(String.format("Expected one of %s", GalaxyType.symbols, docVal)); + } +} diff --git a/java/src/main/java/org/galaxyproject/gxformat2/v19_09/GalaxyWorkflow.java b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/GalaxyWorkflow.java new file mode 100644 index 0000000..9a2621f --- /dev/null +++ b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/GalaxyWorkflow.java @@ -0,0 +1,90 @@ +package org.galaxyproject.gxformat2.v19_09; + +import org.galaxyproject.gxformat2.v19_09.utils.Savable; + +/** + * Auto-generated interface for https://galaxyproject.org/gxformat2/v19_09#GalaxyWorkflow
+ * This interface is implemented by {@link GalaxyWorkflowImpl}
+ * + *
+ * + * This is documentation for a workflow! + * + *
+ */ +public interface GalaxyWorkflow extends Process, Savable { + /** + * Getter for property https://w3id.org/cwl/cwl#Identified/id
+ * + *
+ * + * The unique identifier for this object. * + * + *
+ */ + java.util.Optional getId(); + /** + * Getter for property https://w3id.org/cwl/cwl#Labeled/label
+ * + *
+ * + * A short, human-readable label of this object. * + * + *
+ */ + java.util.Optional getLabel(); + /** + * Getter for property https://w3id.org/cwl/salad#Documented/doc
+ * + *
+ * + * A documentation string for this object, or an array of strings which should be concatenated. * + * + *
+ */ + Object getDoc(); + /** + * Getter for property https://w3id.org/cwl/cwl#inputs
+ * + *
+ * + * Defines the input parameters of the process. The process is ready to run when all required + * input parameters are associated with concrete values. Input parameters include a schema for + * each parameter which is used to validate the input object. It may also be used to build a user + * interface for constructing the input object. + * + *

When accepting an input object, all input parameters must have a value. If an input + * parameter is missing from the input object, it must be assigned a value of `null` (or the value + * of `default` for that parameter, if provided) for the purposes of validation and evaluation of + * expressions. * + * + *

+ */ + java.util.List getInputs(); + /** + * Getter for property https://w3id.org/cwl/cwl#outputs
+ * + *
+ * + * Defines the parameters representing the output of the process. May be used to generate and/or + * validate the output object. * + * + *
+ */ + java.util.List getOutputs(); + /** + * Getter for property https://galaxyproject.org/gxformat2/v19_09#GalaxyWorkflow/class
+ */ + String getClass_(); + /** + * Getter for property https://galaxyproject.org/gxformat2/v19_09#GalaxyWorkflow/steps
+ * + *
+ * + * The individual steps that make up the workflow. Each step is executed when all of its input + * data links are fulfilled. * + * + *
+ */ + java.util.List getSteps(); +} diff --git a/java/src/main/java/org/galaxyproject/gxformat2/v19_09/GalaxyWorkflowImpl.java b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/GalaxyWorkflowImpl.java new file mode 100644 index 0000000..b6a2192 --- /dev/null +++ b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/GalaxyWorkflowImpl.java @@ -0,0 +1,271 @@ +package org.galaxyproject.gxformat2.v19_09; + +import org.galaxyproject.gxformat2.v19_09.utils.LoaderInstances; +import org.galaxyproject.gxformat2.v19_09.utils.LoadingOptions; +import org.galaxyproject.gxformat2.v19_09.utils.LoadingOptionsBuilder; +import org.galaxyproject.gxformat2.v19_09.utils.SavableImpl; +import org.galaxyproject.gxformat2.v19_09.utils.ValidationException; + +/** + * Auto-generated class implementation for + * https://galaxyproject.org/gxformat2/v19_09#GalaxyWorkflow
+ * + *
+ * + * This is documentation for a workflow! + * + *
+ */ +public class GalaxyWorkflowImpl extends SavableImpl implements GalaxyWorkflow { + private LoadingOptions loadingOptions_ = new LoadingOptionsBuilder().build(); + private java.util.Map extensionFields_ = new java.util.HashMap(); + + private java.util.Optional id; + + /** + * Getter for property https://w3id.org/cwl/cwl#Identified/id
+ * + *
+ * + * The unique identifier for this object. * + * + *
+ */ + public java.util.Optional getId() { + return this.id; + } + + private java.util.Optional label; + + /** + * Getter for property https://w3id.org/cwl/cwl#Labeled/label
+ * + *
+ * + * A short, human-readable label of this object. * + * + *
+ */ + public java.util.Optional getLabel() { + return this.label; + } + + private Object doc; + + /** + * Getter for property https://w3id.org/cwl/salad#Documented/doc
+ * + *
+ * + * A documentation string for this object, or an array of strings which should be concatenated. * + * + *
+ */ + public Object getDoc() { + return this.doc; + } + + private java.util.List inputs; + + /** + * Getter for property https://w3id.org/cwl/cwl#inputs
+ * + *
+ * + * Defines the input parameters of the process. The process is ready to run when all required + * input parameters are associated with concrete values. Input parameters include a schema for + * each parameter which is used to validate the input object. It may also be used to build a user + * interface for constructing the input object. + * + *

When accepting an input object, all input parameters must have a value. If an input + * parameter is missing from the input object, it must be assigned a value of `null` (or the value + * of `default` for that parameter, if provided) for the purposes of validation and evaluation of + * expressions. * + * + *

+ */ + public java.util.List getInputs() { + return this.inputs; + } + + private java.util.List outputs; + + /** + * Getter for property https://w3id.org/cwl/cwl#outputs
+ * + *
+ * + * Defines the parameters representing the output of the process. May be used to generate and/or + * validate the output object. * + * + *
+ */ + public java.util.List getOutputs() { + return this.outputs; + } + + private String class_; + + /** + * Getter for property https://galaxyproject.org/gxformat2/v19_09#GalaxyWorkflow/class
+ */ + public String getClass_() { + return this.class_; + } + + private java.util.List steps; + + /** + * Getter for property https://galaxyproject.org/gxformat2/v19_09#GalaxyWorkflow/steps
+ * + *
+ * + * The individual steps that make up the workflow. Each step is executed when all of its input + * data links are fulfilled. * + * + *
+ */ + public java.util.List getSteps() { + return this.steps; + } + + /** + * Used by {@link org.galaxyproject.gxformat2.v19_09.utils.RootLoader} to construct instances of + * GalaxyWorkflowImpl. + * + * @param __doc_ Document fragment to load this record object from (presumably a {@link + * java.util.Map}). + * @param __baseUri_ Base URI to generate child document IDs against. + * @param __loadingOptions Context for loading URIs and populating objects. + * @param __docRoot_ ID at this position in the document (if available) (maybe?) + * @throws ValidationException If the document fragment is not a {@link java.util.Map} or + * validation of fields fails. + */ + public GalaxyWorkflowImpl( + final Object __doc_, + final String __baseUri_, + LoadingOptions __loadingOptions, + final String __docRoot_) { + super(__doc_, __baseUri_, __loadingOptions, __docRoot_); + // Prefix plumbing variables with '__' to reduce likelihood of collision with + // generated names. + String __baseUri = __baseUri_; + String __docRoot = __docRoot_; + if (!(__doc_ instanceof java.util.Map)) { + throw new ValidationException("GalaxyWorkflowImpl called on non-map"); + } + final java.util.Map __doc = (java.util.Map) __doc_; + final java.util.List __errors = + new java.util.ArrayList(); + if (__loadingOptions != null) { + this.loadingOptions_ = __loadingOptions; + } + java.util.Optional id; + + if (__doc.containsKey("id")) { + try { + id = + LoaderInstances.uri_optional_StringInstance_True_False_None.loadField( + __doc.get("id"), __baseUri, __loadingOptions); + } catch (ValidationException e) { + id = null; // won't be used but prevents compiler from complaining. + final String __message = "the `id` field is not valid because:"; + __errors.add(new ValidationException(__message, e)); + } + + } else { + id = null; + } + + if (id == null) { + if (__docRoot != null) { + id = java.util.Optional.of(__docRoot); + } else { + id = java.util.Optional.of("_:" + java.util.UUID.randomUUID().toString()); + } + } + __baseUri = (String) id.orElse(null); + java.util.Optional label; + + if (__doc.containsKey("label")) { + try { + label = + LoaderInstances.optional_StringInstance.loadField( + __doc.get("label"), __baseUri, __loadingOptions); + } catch (ValidationException e) { + label = null; // won't be used but prevents compiler from complaining. + final String __message = "the `label` field is not valid because:"; + __errors.add(new ValidationException(__message, e)); + } + + } else { + label = null; + } + Object doc; + + if (__doc.containsKey("doc")) { + try { + doc = + LoaderInstances.union_of_NullInstance_or_StringInstance_or_array_of_StringInstance + .loadField(__doc.get("doc"), __baseUri, __loadingOptions); + } catch (ValidationException e) { + doc = null; // won't be used but prevents compiler from complaining. + final String __message = "the `doc` field is not valid because:"; + __errors.add(new ValidationException(__message, e)); + } + + } else { + doc = null; + } + java.util.List inputs; + try { + inputs = + LoaderInstances.idmap_inputs_array_of_WorkflowInputParameter.loadField( + __doc.get("inputs"), __baseUri, __loadingOptions); + } catch (ValidationException e) { + inputs = null; // won't be used but prevents compiler from complaining. 
+ final String __message = "the `inputs` field is not valid because:"; + __errors.add(new ValidationException(__message, e)); + } + java.util.List outputs; + try { + outputs = + LoaderInstances.idmap_outputs_array_of_WorkflowOutputParameter.loadField( + __doc.get("outputs"), __baseUri, __loadingOptions); + } catch (ValidationException e) { + outputs = null; // won't be used but prevents compiler from complaining. + final String __message = "the `outputs` field is not valid because:"; + __errors.add(new ValidationException(__message, e)); + } + String class_; + try { + class_ = + LoaderInstances.uri_StringInstance_False_True_None.loadField( + __doc.get("class"), __baseUri, __loadingOptions); + } catch (ValidationException e) { + class_ = null; // won't be used but prevents compiler from complaining. + final String __message = "the `class` field is not valid because:"; + __errors.add(new ValidationException(__message, e)); + } + java.util.List steps; + try { + steps = + LoaderInstances.idmap_steps_array_of_WorkflowStep.loadField( + __doc.get("steps"), __baseUri, __loadingOptions); + } catch (ValidationException e) { + steps = null; // won't be used but prevents compiler from complaining. + final String __message = "the `steps` field is not valid because:"; + __errors.add(new ValidationException(__message, e)); + } + if (!__errors.isEmpty()) { + throw new ValidationException("Trying 'RecordField'", __errors); + } + this.id = (java.util.Optional) id; + this.label = (java.util.Optional) label; + this.doc = (Object) doc; + this.inputs = (java.util.List) inputs; + this.outputs = (java.util.List) outputs; + this.class_ = (String) class_; + this.steps = (java.util.List) steps; + } +} diff --git a/java/src/main/java/org/galaxyproject/gxformat2/v19_09/HasStepPosition.java b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/HasStepPosition.java new file mode 100644 index 0000000..893e10d --- /dev/null +++ b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/HasStepPosition.java @@ -0,0 +1,15 @@ +package org.galaxyproject.gxformat2.v19_09; + +import org.galaxyproject.gxformat2.v19_09.utils.Savable; + +/** + * Auto-generated interface for + * https://galaxyproject.org/gxformat2/gxformat2common#HasStepPosition
+ */ +public interface HasStepPosition extends Savable { + /** + * Getter for property + * https://galaxyproject.org/gxformat2/gxformat2common#HasStepPosition/position
+ */ + java.util.Optional getPosition(); +} diff --git a/java/src/main/java/org/galaxyproject/gxformat2/v19_09/Identified.java b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/Identified.java new file mode 100644 index 0000000..32399f2 --- /dev/null +++ b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/Identified.java @@ -0,0 +1,6 @@ +package org.galaxyproject.gxformat2.v19_09; + +import org.galaxyproject.gxformat2.v19_09.utils.Savable; + +/** Auto-generated interface for https://w3id.org/cwl/cwl#Identified
*/ +public interface Identified extends Savable {} diff --git a/java/src/main/java/org/galaxyproject/gxformat2/v19_09/InputParameter.java b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/InputParameter.java new file mode 100644 index 0000000..fc2b630 --- /dev/null +++ b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/InputParameter.java @@ -0,0 +1,29 @@ +package org.galaxyproject.gxformat2.v19_09; + +import org.galaxyproject.gxformat2.v19_09.utils.Savable; + +/** Auto-generated interface for https://w3id.org/cwl/cwl#InputParameter
*/ +public interface InputParameter extends Parameter, Savable { + /** + * Getter for property https://w3id.org/cwl/salad#Documented/doc
+ * + *
+ * + * A documentation string for this object, or an array of strings which should be concatenated. * + * + *
+ */ + Object getDoc(); + /** + * Getter for property https://w3id.org/cwl/salad#default
+ * + *
+ * + * The default value to use for this parameter if the parameter is missing from the input object, + * or if the value of the parameter in the input object is `null`. Default values are applied + * before evaluating expressions (e.g. dependent `valueFrom` fields). * + * + *
+ */ + java.util.Optional getDefault(); +} diff --git a/java/src/main/java/org/galaxyproject/gxformat2/v19_09/Labeled.java b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/Labeled.java new file mode 100644 index 0000000..bd1665b --- /dev/null +++ b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/Labeled.java @@ -0,0 +1,17 @@ +package org.galaxyproject.gxformat2.v19_09; + +import org.galaxyproject.gxformat2.v19_09.utils.Savable; + +/** Auto-generated interface for https://w3id.org/cwl/cwl#Labeled
*/ +public interface Labeled extends Savable { + /** + * Getter for property https://w3id.org/cwl/cwl#Labeled/label
+ * + *
+ * + * A short, human-readable label of this object. * + * + *
+ */ + java.util.Optional getLabel(); +} diff --git a/java/src/main/java/org/galaxyproject/gxformat2/v19_09/OutputParameter.java b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/OutputParameter.java new file mode 100644 index 0000000..f7099fb --- /dev/null +++ b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/OutputParameter.java @@ -0,0 +1,17 @@ +package org.galaxyproject.gxformat2.v19_09; + +import org.galaxyproject.gxformat2.v19_09.utils.Savable; + +/** Auto-generated interface for https://w3id.org/cwl/cwl#OutputParameter
*/ +public interface OutputParameter extends Parameter, Savable { + /** + * Getter for property https://w3id.org/cwl/salad#Documented/doc
+ * + *
+ * + * A documentation string for this object, or an array of strings which should be concatenated. * + * + *
+ */ + Object getDoc(); +} diff --git a/java/src/main/java/org/galaxyproject/gxformat2/v19_09/Parameter.java b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/Parameter.java new file mode 100644 index 0000000..b512ccb --- /dev/null +++ b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/Parameter.java @@ -0,0 +1,25 @@ +package org.galaxyproject.gxformat2.v19_09; + +import org.galaxyproject.gxformat2.v19_09.utils.Savable; + +/** + * Auto-generated interface for https://w3id.org/cwl/cwl#Parameter
+ * + *
+ * + * Define an input or output parameter to a process. + * + *
+ */ +public interface Parameter extends Documented, Identified, Savable { + /** + * Getter for property https://w3id.org/cwl/salad#Documented/doc
+ * + *
+ * + * A documentation string for this object, or an array of strings which should be concatenated. * + * + *
+ */ + Object getDoc(); +} diff --git a/java/src/main/java/org/galaxyproject/gxformat2/v19_09/PrimitiveType.java b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/PrimitiveType.java new file mode 100644 index 0000000..8538e20 --- /dev/null +++ b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/PrimitiveType.java @@ -0,0 +1,31 @@ +package org.galaxyproject.gxformat2.v19_09; + +import org.galaxyproject.gxformat2.v19_09.utils.ValidationException; + +public enum PrimitiveType { + NULL("null"), + BOOLEAN("boolean"), + INT("int"), + LONG("long"), + FLOAT("float"), + DOUBLE("double"), + STRING("string"); + + private static String[] symbols = + new String[] {"null", "boolean", "int", "long", "float", "double", "string"}; + private String docVal; + + private PrimitiveType(final String docVal) { + this.docVal = docVal; + } + + public static PrimitiveType fromDocumentVal(final String docVal) { + for (final PrimitiveType val : PrimitiveType.values()) { + if (val.docVal.equals(docVal)) { + return val; + } + } + throw new ValidationException( + String.format("Expected one of %s", PrimitiveType.symbols, docVal)); + } +} diff --git a/java/src/main/java/org/galaxyproject/gxformat2/v19_09/Process.java b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/Process.java new file mode 100644 index 0000000..ce97934 --- /dev/null +++ b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/Process.java @@ -0,0 +1,65 @@ +package org.galaxyproject.gxformat2.v19_09; + +import org.galaxyproject.gxformat2.v19_09.utils.Savable; + +/** + * Auto-generated interface for https://w3id.org/cwl/cwl#Process
+ * + *
+ * + * The base executable type in CWL is the `Process` object defined by the document. Note that the + * `Process` object is abstract and cannot be directly executed. + * + *
+ */ +public interface Process extends Identified, Labeled, Documented, Savable { + /** + * Getter for property https://w3id.org/cwl/cwl#Labeled/label
+ * + *
+ * + * A short, human-readable label of this object. * + * + *
+ */ + java.util.Optional getLabel(); + /** + * Getter for property https://w3id.org/cwl/salad#Documented/doc
+ * + *
+ * + * A documentation string for this object, or an array of strings which should be concatenated. * + * + *
+ */ + Object getDoc(); + /** + * Getter for property https://w3id.org/cwl/cwl#inputs
+ * + *
+ * + * Defines the input parameters of the process. The process is ready to run when all required + * input parameters are associated with concrete values. Input parameters include a schema for + * each parameter which is used to validate the input object. It may also be used to build a user + * interface for constructing the input object. + * + *

When accepting an input object, all input parameters must have a value. If an input + * parameter is missing from the input object, it must be assigned a value of `null` (or the value + * of `default` for that parameter, if provided) for the purposes of validation and evaluation of + * expressions. * + * + *

+ */ + java.util.List getInputs(); + /** + * Getter for property https://w3id.org/cwl/cwl#outputs
+ * + *
+ * + * Defines the parameters representing the output of the process. May be used to generate and/or + * validate the output object. * + * + *
+ */ + java.util.List getOutputs(); +} diff --git a/java/src/main/java/org/galaxyproject/gxformat2/v19_09/RecordField.java b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/RecordField.java new file mode 100644 index 0000000..30f351e --- /dev/null +++ b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/RecordField.java @@ -0,0 +1,46 @@ +package org.galaxyproject.gxformat2.v19_09; + +import org.galaxyproject.gxformat2.v19_09.utils.Savable; + +/** + * Auto-generated interface for https://w3id.org/cwl/salad#RecordField
+ * This interface is implemented by {@link RecordFieldImpl}
+ * + *
+ * + * A field of a record. + * + *
+ */ +public interface RecordField extends Documented, Savable { + /** + * Getter for property https://w3id.org/cwl/salad#RecordField/name
+ * + *
+ * + * The name of the field * + * + *
+ */ + String getName(); + /** + * Getter for property https://w3id.org/cwl/salad#Documented/doc
+ * + *
+ * + * A documentation string for this object, or an array of strings which should be concatenated. * + * + *
+ */ + Object getDoc(); + /** + * Getter for property https://w3id.org/cwl/salad#type
+ * + *
+ * + * The field type * + * + *
+ */ + Object getType(); +} diff --git a/java/src/main/java/org/galaxyproject/gxformat2/v19_09/RecordFieldImpl.java b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/RecordFieldImpl.java new file mode 100644 index 0000000..7904e97 --- /dev/null +++ b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/RecordFieldImpl.java @@ -0,0 +1,157 @@ +package org.galaxyproject.gxformat2.v19_09; + +import org.galaxyproject.gxformat2.v19_09.utils.LoaderInstances; +import org.galaxyproject.gxformat2.v19_09.utils.LoadingOptions; +import org.galaxyproject.gxformat2.v19_09.utils.LoadingOptionsBuilder; +import org.galaxyproject.gxformat2.v19_09.utils.SavableImpl; +import org.galaxyproject.gxformat2.v19_09.utils.ValidationException; + +/** + * Auto-generated class implementation for https://w3id.org/cwl/salad#RecordField
+ * + *
+ * + * A field of a record. + * + *
+ */ +public class RecordFieldImpl extends SavableImpl implements RecordField { + private LoadingOptions loadingOptions_ = new LoadingOptionsBuilder().build(); + private java.util.Map extensionFields_ = new java.util.HashMap(); + + private String name; + + /** + * Getter for property https://w3id.org/cwl/salad#RecordField/name
+ * + *
+ * + * The name of the field * + * + *
+ */ + public String getName() { + return this.name; + } + + private Object doc; + + /** + * Getter for property https://w3id.org/cwl/salad#Documented/doc
+ * + *
+ * + * A documentation string for this object, or an array of strings which should be concatenated. * + * + *
+ */ + public Object getDoc() { + return this.doc; + } + + private Object type; + + /** + * Getter for property https://w3id.org/cwl/salad#type
+ * + *
+ * + * The field type * + * + *
+ */ + public Object getType() { + return this.type; + } + + /** + * Used by {@link org.galaxyproject.gxformat2.v19_09.utils.RootLoader} to construct instances of + * RecordFieldImpl. + * + * @param __doc_ Document fragment to load this record object from (presumably a {@link + * java.util.Map}). + * @param __baseUri_ Base URI to generate child document IDs against. + * @param __loadingOptions Context for loading URIs and populating objects. + * @param __docRoot_ ID at this position in the document (if available) (maybe?) + * @throws ValidationException If the document fragment is not a {@link java.util.Map} or + * validation of fields fails. + */ + public RecordFieldImpl( + final Object __doc_, + final String __baseUri_, + LoadingOptions __loadingOptions, + final String __docRoot_) { + super(__doc_, __baseUri_, __loadingOptions, __docRoot_); + // Prefix plumbing variables with '__' to reduce likelihood of collision with + // generated names. + String __baseUri = __baseUri_; + String __docRoot = __docRoot_; + if (!(__doc_ instanceof java.util.Map)) { + throw new ValidationException("RecordFieldImpl called on non-map"); + } + final java.util.Map __doc = (java.util.Map) __doc_; + final java.util.List __errors = + new java.util.ArrayList(); + if (__loadingOptions != null) { + this.loadingOptions_ = __loadingOptions; + } + String name; + + if (__doc.containsKey("name")) { + try { + name = + LoaderInstances.uri_StringInstance_True_False_None.loadField( + __doc.get("name"), __baseUri, __loadingOptions); + } catch (ValidationException e) { + name = null; // won't be used but prevents compiler from complaining. + final String __message = "the `name` field is not valid because:"; + __errors.add(new ValidationException(__message, e)); + } + + } else { + name = null; + } + + if (name == null) { + if (__docRoot != null) { + name = __docRoot; + } else { + throw new ValidationException("Missing name"); + } + } + __baseUri = (String) name; + Object doc; + + if (__doc.containsKey("doc")) { + try { + doc = + LoaderInstances.union_of_NullInstance_or_StringInstance_or_array_of_StringInstance + .loadField(__doc.get("doc"), __baseUri, __loadingOptions); + } catch (ValidationException e) { + doc = null; // won't be used but prevents compiler from complaining. + final String __message = "the `doc` field is not valid because:"; + __errors.add(new ValidationException(__message, e)); + } + + } else { + doc = null; + } + Object type; + try { + type = + LoaderInstances + .typedsl_union_of_PrimitiveType_or_RecordSchema_or_EnumSchema_or_ArraySchema_or_StringInstance_or_array_of_union_of_PrimitiveType_or_RecordSchema_or_EnumSchema_or_ArraySchema_or_StringInstance_2 + .loadField(__doc.get("type"), __baseUri, __loadingOptions); + } catch (ValidationException e) { + type = null; // won't be used but prevents compiler from complaining. 
+ final String __message = "the `type` field is not valid because:"; + __errors.add(new ValidationException(__message, e)); + } + if (!__errors.isEmpty()) { + throw new ValidationException("Trying 'RecordField'", __errors); + } + this.doc = (Object) doc; + this.name = (String) name; + this.type = (Object) type; + } +} diff --git a/java/src/main/java/org/galaxyproject/gxformat2/v19_09/RecordSchema.java b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/RecordSchema.java new file mode 100644 index 0000000..f03cd25 --- /dev/null +++ b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/RecordSchema.java @@ -0,0 +1,30 @@ +package org.galaxyproject.gxformat2.v19_09; + +import org.galaxyproject.gxformat2.v19_09.utils.Savable; + +/** + * Auto-generated interface for https://w3id.org/cwl/salad#RecordSchema
+ * This interface is implemented by {@link RecordSchemaImpl}
+ */ +public interface RecordSchema extends Savable { + /** + * Getter for property https://w3id.org/cwl/salad#fields
+ * + *
+ * + * Defines the fields of the record. * + * + *
+ */ + java.util.Optional> getFields(); + /** + * Getter for property https://w3id.org/cwl/salad#type
+ * + *
+ * + * Must be `record` * + * + *
+ */ + enum_d9cba076fca539106791a4f46d198c7fcfbdb779 getType(); +} diff --git a/java/src/main/java/org/galaxyproject/gxformat2/v19_09/RecordSchemaImpl.java b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/RecordSchemaImpl.java new file mode 100644 index 0000000..0f590d3 --- /dev/null +++ b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/RecordSchemaImpl.java @@ -0,0 +1,107 @@ +package org.galaxyproject.gxformat2.v19_09; + +import org.galaxyproject.gxformat2.v19_09.utils.LoaderInstances; +import org.galaxyproject.gxformat2.v19_09.utils.LoadingOptions; +import org.galaxyproject.gxformat2.v19_09.utils.LoadingOptionsBuilder; +import org.galaxyproject.gxformat2.v19_09.utils.SavableImpl; +import org.galaxyproject.gxformat2.v19_09.utils.ValidationException; + +/** Auto-generated class implementation for https://w3id.org/cwl/salad#RecordSchema
*/ +public class RecordSchemaImpl extends SavableImpl implements RecordSchema { + private LoadingOptions loadingOptions_ = new LoadingOptionsBuilder().build(); + private java.util.Map extensionFields_ = new java.util.HashMap(); + + private java.util.Optional> fields; + + /** + * Getter for property https://w3id.org/cwl/salad#fields
+ * + *
+ * + * Defines the fields of the record. * + * + *
+ */ + public java.util.Optional> getFields() { + return this.fields; + } + + private enum_d9cba076fca539106791a4f46d198c7fcfbdb779 type; + + /** + * Getter for property https://w3id.org/cwl/salad#type
+ * + *
+ * + * Must be `record` * + * + *
+ */ + public enum_d9cba076fca539106791a4f46d198c7fcfbdb779 getType() { + return this.type; + } + + /** + * Used by {@link org.galaxyproject.gxformat2.v19_09.utils.RootLoader} to construct instances of + * RecordSchemaImpl. + * + * @param __doc_ Document fragment to load this record object from (presumably a {@link + * java.util.Map}). + * @param __baseUri_ Base URI to generate child document IDs against. + * @param __loadingOptions Context for loading URIs and populating objects. + * @param __docRoot_ ID at this position in the document (if available) (maybe?) + * @throws ValidationException If the document fragment is not a {@link java.util.Map} or + * validation of fields fails. + */ + public RecordSchemaImpl( + final Object __doc_, + final String __baseUri_, + LoadingOptions __loadingOptions, + final String __docRoot_) { + super(__doc_, __baseUri_, __loadingOptions, __docRoot_); + // Prefix plumbing variables with '__' to reduce likelihood of collision with + // generated names. + String __baseUri = __baseUri_; + String __docRoot = __docRoot_; + if (!(__doc_ instanceof java.util.Map)) { + throw new ValidationException("RecordSchemaImpl called on non-map"); + } + final java.util.Map __doc = (java.util.Map) __doc_; + final java.util.List __errors = + new java.util.ArrayList(); + if (__loadingOptions != null) { + this.loadingOptions_ = __loadingOptions; + } + java.util.Optional> fields; + + if (__doc.containsKey("fields")) { + try { + fields = + LoaderInstances.idmap_fields_optional_array_of_RecordField.loadField( + __doc.get("fields"), __baseUri, __loadingOptions); + } catch (ValidationException e) { + fields = null; // won't be used but prevents compiler from complaining. + final String __message = "the `fields` field is not valid because:"; + __errors.add(new ValidationException(__message, e)); + } + + } else { + fields = null; + } + enum_d9cba076fca539106791a4f46d198c7fcfbdb779 type; + try { + type = + LoaderInstances.typedsl_enum_d9cba076fca539106791a4f46d198c7fcfbdb779_2.loadField( + __doc.get("type"), __baseUri, __loadingOptions); + } catch (ValidationException e) { + type = null; // won't be used but prevents compiler from complaining. + final String __message = "the `type` field is not valid because:"; + __errors.add(new ValidationException(__message, e)); + } + if (!__errors.isEmpty()) { + throw new ValidationException("Trying 'RecordField'", __errors); + } + this.fields = (java.util.Optional>) fields; + this.type = (enum_d9cba076fca539106791a4f46d198c7fcfbdb779) type; + } +} diff --git a/java/src/main/java/org/galaxyproject/gxformat2/v19_09/ReferencesTool.java b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/ReferencesTool.java new file mode 100644 index 0000000..600be3e --- /dev/null +++ b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/ReferencesTool.java @@ -0,0 +1,46 @@ +package org.galaxyproject.gxformat2.v19_09; + +import org.galaxyproject.gxformat2.v19_09.utils.Savable; + +/** + * Auto-generated interface for + * https://galaxyproject.org/gxformat2/gxformat2common#ReferencesTool
+ */ +public interface ReferencesTool extends Savable { + /** + * Getter for property + * https://galaxyproject.org/gxformat2/gxformat2common#ReferencesTool/tool_id
+ * + *
+ * + * The tool ID used to run this step of the workflow (e.g. 'cat1' or + * 'toolshed.g2.bx.psu.edu/repos/nml/collapse_collections/collapse_dataset/4.0'). * + * + *
+ */ + java.util.Optional getTool_id(); + /** + * Getter for property + * https://galaxyproject.org/gxformat2/gxformat2common#ReferencesTool/tool_shed_repository + *
+ * + *
+ * + * The Galaxy Tool Shed repository that should be installed in order to use this tool. * + * + *
+ */ + java.util.Optional getTool_shed_repository(); + /** + * Getter for property + * https://galaxyproject.org/gxformat2/gxformat2common#ReferencesTool/tool_version
+ * + *
+ * + * The version of the tool used to run this step of the workflow. For tool shed installed + * tools, the ID generally uniquely specifies a version and this field is optional. * + * + *&#13;
+ */ + java.util.Optional getTool_version(); +} diff --git a/java/src/main/java/org/galaxyproject/gxformat2/v19_09/Sink.java b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/Sink.java new file mode 100644 index 0000000..223be75 --- /dev/null +++ b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/Sink.java @@ -0,0 +1,18 @@ +package org.galaxyproject.gxformat2.v19_09; + +import org.galaxyproject.gxformat2.v19_09.utils.Savable; + +/** Auto-generated interface for https://galaxyproject.org/gxformat2/v19_09#Sink
*/ +public interface Sink extends Savable { + /** + * Getter for property https://w3id.org/cwl/cwl#source
+ * + *
+ * + * Specifies one or more workflow parameters that will provide input to the underlying step + * parameter. * + * + *
+ */ + Object getSource(); +} diff --git a/java/src/main/java/org/galaxyproject/gxformat2/v19_09/StepPosition.java b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/StepPosition.java new file mode 100644 index 0000000..291280d --- /dev/null +++ b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/StepPosition.java @@ -0,0 +1,39 @@ +package org.galaxyproject.gxformat2.v19_09; + +import org.galaxyproject.gxformat2.v19_09.utils.Savable; + +/** + * Auto-generated interface for + * https://galaxyproject.org/gxformat2/gxformat2common#StepPosition
+ * This interface is implemented by {@link StepPositionImpl}
+ * + *
+ * + * This field specifies the location of the step's node when rendered in the workflow editor. + * + *
+ */ +public interface StepPosition extends Savable { + /** + * Getter for property https://galaxyproject.org/gxformat2/gxformat2common#StepPosition/top + *
+ * + *
+ * + * Relative vertical position of the step's node when rendered in the workflow editor. * + * + *
+ */ + Float getTop(); + /** + * Getter for property + * https://galaxyproject.org/gxformat2/gxformat2common#StepPosition/left
+ * + *
+ * + * Relative horizontal position of the step's node when rendered in the workflow editor. * + * + *
+ */ + Float getLeft(); +} diff --git a/java/src/main/java/org/galaxyproject/gxformat2/v19_09/StepPositionImpl.java b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/StepPositionImpl.java new file mode 100644 index 0000000..c219af5 --- /dev/null +++ b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/StepPositionImpl.java @@ -0,0 +1,109 @@ +package org.galaxyproject.gxformat2.v19_09; + +import org.galaxyproject.gxformat2.v19_09.utils.LoaderInstances; +import org.galaxyproject.gxformat2.v19_09.utils.LoadingOptions; +import org.galaxyproject.gxformat2.v19_09.utils.LoadingOptionsBuilder; +import org.galaxyproject.gxformat2.v19_09.utils.SavableImpl; +import org.galaxyproject.gxformat2.v19_09.utils.ValidationException; + +/** + * Auto-generated class implementation for + * https://galaxyproject.org/gxformat2/gxformat2common#StepPosition
+ * + *
+ * + * This field specifies the location of the step's node when rendered in the workflow editor. + * + *
+ */ +public class StepPositionImpl extends SavableImpl implements StepPosition { + private LoadingOptions loadingOptions_ = new LoadingOptionsBuilder().build(); + private java.util.Map extensionFields_ = new java.util.HashMap(); + + private Float top; + + /** + * Getter for property https://galaxyproject.org/gxformat2/gxformat2common#StepPosition/top + *
+ * + *
+ * + * Relative vertical position of the step's node when rendered in the workflow editor. * + * + *
+ */ + public Float getTop() { + return this.top; + } + + private Float left; + + /** + * Getter for property + * https://galaxyproject.org/gxformat2/gxformat2common#StepPosition/left
+ * + *
+ * + * Relative horizontal position of the step's node when rendered in the workflow editor. * + * + *
+ */ + public Float getLeft() { + return this.left; + } + + /** + * Used by {@link org.galaxyproject.gxformat2.v19_09.utils.RootLoader} to construct instances of + * StepPositionImpl. + * + * @param __doc_ Document fragment to load this record object from (presumably a {@link + * java.util.Map}). + * @param __baseUri_ Base URI to generate child document IDs against. + * @param __loadingOptions Context for loading URIs and populating objects. + * @param __docRoot_ ID at this position in the document (if available) (maybe?) + * @throws ValidationException If the document fragment is not a {@link java.util.Map} or + * validation of fields fails. + */ + public StepPositionImpl( + final Object __doc_, + final String __baseUri_, + LoadingOptions __loadingOptions, + final String __docRoot_) { + super(__doc_, __baseUri_, __loadingOptions, __docRoot_); + // Prefix plumbing variables with '__' to reduce likelihood of collision with + // generated names. + String __baseUri = __baseUri_; + String __docRoot = __docRoot_; + if (!(__doc_ instanceof java.util.Map)) { + throw new ValidationException("StepPositionImpl called on non-map"); + } + final java.util.Map __doc = (java.util.Map) __doc_; + final java.util.List __errors = + new java.util.ArrayList(); + if (__loadingOptions != null) { + this.loadingOptions_ = __loadingOptions; + } + Float top; + try { + top = LoaderInstances.FloatInstance.loadField(__doc.get("top"), __baseUri, __loadingOptions); + } catch (ValidationException e) { + top = null; // won't be used but prevents compiler from complaining. + final String __message = "the `top` field is not valid because:"; + __errors.add(new ValidationException(__message, e)); + } + Float left; + try { + left = + LoaderInstances.FloatInstance.loadField(__doc.get("left"), __baseUri, __loadingOptions); + } catch (ValidationException e) { + left = null; // won't be used but prevents compiler from complaining. + final String __message = "the `left` field is not valid because:"; + __errors.add(new ValidationException(__message, e)); + } + if (!__errors.isEmpty()) { + throw new ValidationException("Trying 'RecordField'", __errors); + } + this.top = (Float) top; + this.left = (Float) left; + } +} diff --git a/java/src/main/java/org/galaxyproject/gxformat2/v19_09/ToolShedRepository.java b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/ToolShedRepository.java new file mode 100644 index 0000000..d21f112 --- /dev/null +++ b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/ToolShedRepository.java @@ -0,0 +1,57 @@ +package org.galaxyproject.gxformat2.v19_09; + +import org.galaxyproject.gxformat2.v19_09.utils.Savable; + +/** + * Auto-generated interface for + * https://galaxyproject.org/gxformat2/gxformat2common#ToolShedRepository
+ * This interface is implemented by {@link ToolShedRepositoryImpl}
+ */ +public interface ToolShedRepository extends Savable { + /** + * Getter for property + * https://galaxyproject.org/gxformat2/gxformat2common#ToolShedRepository/name
+ * + *
+ * + * The name of the tool shed repository this tool can be found in. * + * + *
+ */ + String getName(); + /** + * Getter for property + * https://galaxyproject.org/gxformat2/gxformat2common#ToolShedRepository/changeset_revision + *
+ * + *
+ * + * The revision of the tool shed repository this tool can be found in. * + * + *
+ */ + String getChangeset_revision(); + /** + * Getter for property + * https://galaxyproject.org/gxformat2/gxformat2common#ToolShedRepository/owner
+ * + *
+ * + * The owner of the tool shed repository this tool can be found in. * + * + *
+ */ + String getOwner(); + /** + * Getter for property + * https://galaxyproject.org/gxformat2/gxformat2common#ToolShedRepository/tool_shed
+ * + *
+ * + * The URI of the tool shed containing the repository this tool can be found in - typically this + * should be toolshed.g2.bx.psu.edu. * + * + *
+ */ + String getTool_shed(); +} diff --git a/java/src/main/java/org/galaxyproject/gxformat2/v19_09/ToolShedRepositoryImpl.java b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/ToolShedRepositoryImpl.java new file mode 100644 index 0000000..f55bb78 --- /dev/null +++ b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/ToolShedRepositoryImpl.java @@ -0,0 +1,176 @@ +package org.galaxyproject.gxformat2.v19_09; + +import org.galaxyproject.gxformat2.v19_09.utils.LoaderInstances; +import org.galaxyproject.gxformat2.v19_09.utils.LoadingOptions; +import org.galaxyproject.gxformat2.v19_09.utils.LoadingOptionsBuilder; +import org.galaxyproject.gxformat2.v19_09.utils.SavableImpl; +import org.galaxyproject.gxformat2.v19_09.utils.ValidationException; + +/** + * Auto-generated class implementation for + * https://galaxyproject.org/gxformat2/gxformat2common#ToolShedRepository
+ */ +public class ToolShedRepositoryImpl extends SavableImpl implements ToolShedRepository { + private LoadingOptions loadingOptions_ = new LoadingOptionsBuilder().build(); + private java.util.Map extensionFields_ = new java.util.HashMap(); + + private String name; + + /** + * Getter for property + * https://galaxyproject.org/gxformat2/gxformat2common#ToolShedRepository/name
+ * + *
+ * + * The name of the tool shed repository this tool can be found in. * + * + *
+ */ + public String getName() { + return this.name; + } + + private String changeset_revision; + + /** + * Getter for property + * https://galaxyproject.org/gxformat2/gxformat2common#ToolShedRepository/changeset_revision + *
+ * + *
+ * + * The revision of the tool shed repository this tool can be found in. * + * + *
+ */ + public String getChangeset_revision() { + return this.changeset_revision; + } + + private String owner; + + /** + * Getter for property + * https://galaxyproject.org/gxformat2/gxformat2common#ToolShedRepository/owner
+ * + *
+ * + * The owner of the tool shed repository this tool can be found in. * + * + *
+ */ + public String getOwner() { + return this.owner; + } + + private String tool_shed; + + /** + * Getter for property + * https://galaxyproject.org/gxformat2/gxformat2common#ToolShedRepository/tool_shed
+ * + *
+ * + * The URI of the tool shed containing the repository this tool can be found in - typically this + * should be toolshed.g2.bx.psu.edu. * + * + *
+ */ + public String getTool_shed() { + return this.tool_shed; + } + + /** + * Used by {@link org.galaxyproject.gxformat2.v19_09.utils.RootLoader} to construct instances of + * ToolShedRepositoryImpl. + * + * @param __doc_ Document fragment to load this record object from (presumably a {@link + * java.util.Map}). + * @param __baseUri_ Base URI to generate child document IDs against. + * @param __loadingOptions Context for loading URIs and populating objects. + * @param __docRoot_ ID at this position in the document (if available) (maybe?) + * @throws ValidationException If the document fragment is not a {@link java.util.Map} or + * validation of fields fails. + */ + public ToolShedRepositoryImpl( + final Object __doc_, + final String __baseUri_, + LoadingOptions __loadingOptions, + final String __docRoot_) { + super(__doc_, __baseUri_, __loadingOptions, __docRoot_); + // Prefix plumbing variables with '__' to reduce likelihood of collision with + // generated names. + String __baseUri = __baseUri_; + String __docRoot = __docRoot_; + if (!(__doc_ instanceof java.util.Map)) { + throw new ValidationException("ToolShedRepositoryImpl called on non-map"); + } + final java.util.Map __doc = (java.util.Map) __doc_; + final java.util.List __errors = + new java.util.ArrayList(); + if (__loadingOptions != null) { + this.loadingOptions_ = __loadingOptions; + } + String name; + + if (__doc.containsKey("name")) { + try { + name = + LoaderInstances.uri_StringInstance_True_False_None.loadField( + __doc.get("name"), __baseUri, __loadingOptions); + } catch (ValidationException e) { + name = null; // won't be used but prevents compiler from complaining. + final String __message = "the `name` field is not valid because:"; + __errors.add(new ValidationException(__message, e)); + } + + } else { + name = null; + } + + if (name == null) { + if (__docRoot != null) { + name = __docRoot; + } else { + throw new ValidationException("Missing name"); + } + } + __baseUri = (String) name; + String changeset_revision; + try { + changeset_revision = + LoaderInstances.StringInstance.loadField( + __doc.get("changeset_revision"), __baseUri, __loadingOptions); + } catch (ValidationException e) { + changeset_revision = null; // won't be used but prevents compiler from complaining. + final String __message = "the `changeset_revision` field is not valid because:"; + __errors.add(new ValidationException(__message, e)); + } + String owner; + try { + owner = + LoaderInstances.StringInstance.loadField(__doc.get("owner"), __baseUri, __loadingOptions); + } catch (ValidationException e) { + owner = null; // won't be used but prevents compiler from complaining. + final String __message = "the `owner` field is not valid because:"; + __errors.add(new ValidationException(__message, e)); + } + String tool_shed; + try { + tool_shed = + LoaderInstances.StringInstance.loadField( + __doc.get("tool_shed"), __baseUri, __loadingOptions); + } catch (ValidationException e) { + tool_shed = null; // won't be used but prevents compiler from complaining. 
+ final String __message = "the `tool_shed` field is not valid because:"; + __errors.add(new ValidationException(__message, e)); + } + if (!__errors.isEmpty()) { + throw new ValidationException("Trying 'RecordField'", __errors); + } + this.changeset_revision = (String) changeset_revision; + this.name = (String) name; + this.owner = (String) owner; + this.tool_shed = (String) tool_shed; + } +} diff --git a/java/src/main/java/org/galaxyproject/gxformat2/v19_09/WorkflowInputParameter.java b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/WorkflowInputParameter.java new file mode 100644 index 0000000..a30bca0 --- /dev/null +++ b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/WorkflowInputParameter.java @@ -0,0 +1,53 @@ +package org.galaxyproject.gxformat2.v19_09; + +import org.galaxyproject.gxformat2.v19_09.utils.Savable; + +/** + * Auto-generated interface for + * https://galaxyproject.org/gxformat2/v19_09#WorkflowInputParameter
+ * This interface is implemented by {@link WorkflowInputParameterImpl}
+ */ +public interface WorkflowInputParameter extends InputParameter, Savable { + /** + * Getter for property https://w3id.org/cwl/cwl#Identified/id
+ * + *
+ * + * The unique identifier for this object. * + * + *
+ */ + java.util.Optional getId(); + /** + * Getter for property https://w3id.org/cwl/salad#Documented/doc
+ * + *
+ * + * A documentation string for this object, or an array of strings which should be concatenated. * + * + *
+ */ + Object getDoc(); + /** + * Getter for property https://w3id.org/cwl/salad#default
+ * + *
+ * + * The default value to use for this parameter if the parameter is missing from the input object, + * or if the value of the parameter in the input object is `null`. Default values are applied + * before evaluating expressions (e.g. dependent `valueFrom` fields). * + * + *
+ */ + java.util.Optional getDefault(); + /** + * Getter for property https://w3id.org/cwl/salad#type
+ * + *
+ * + * Specify valid types of data that may be assigned to this parameter. * + * + *
+ */ + java.util.Optional getType(); +} diff --git a/java/src/main/java/org/galaxyproject/gxformat2/v19_09/WorkflowInputParameterImpl.java b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/WorkflowInputParameterImpl.java new file mode 100644 index 0000000..d19baf5 --- /dev/null +++ b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/WorkflowInputParameterImpl.java @@ -0,0 +1,191 @@ +package org.galaxyproject.gxformat2.v19_09; + +import org.galaxyproject.gxformat2.v19_09.utils.LoaderInstances; +import org.galaxyproject.gxformat2.v19_09.utils.LoadingOptions; +import org.galaxyproject.gxformat2.v19_09.utils.LoadingOptionsBuilder; +import org.galaxyproject.gxformat2.v19_09.utils.SavableImpl; +import org.galaxyproject.gxformat2.v19_09.utils.ValidationException; + +/** + * Auto-generated class implementation for + * https://galaxyproject.org/gxformat2/v19_09#WorkflowInputParameter
+ */ +public class WorkflowInputParameterImpl extends SavableImpl implements WorkflowInputParameter { + private LoadingOptions loadingOptions_ = new LoadingOptionsBuilder().build(); + private java.util.Map extensionFields_ = new java.util.HashMap(); + + private java.util.Optional id; + + /** + * Getter for property https://w3id.org/cwl/cwl#Identified/id
+ * + *
+ * + * The unique identifier for this object. * + * + *
+ */ + public java.util.Optional getId() { + return this.id; + } + + private Object doc; + + /** + * Getter for property https://w3id.org/cwl/salad#Documented/doc
+ * + *
+ * + * A documentation string for this object, or an array of strings which should be concatenated. * + * + *
+ */ + public Object getDoc() { + return this.doc; + } + + private java.util.Optional default_; + + /** + * Getter for property https://w3id.org/cwl/salad#default
+ * + *
+ * + * The default value to use for this parameter if the parameter is missing from the input object, + * or if the value of the parameter in the input object is `null`. Default values are applied + * before evaluating expressions (e.g. dependent `valueFrom` fields). * + * + *
+ */ + public java.util.Optional getDefault() { + return this.default_; + } + + private java.util.Optional type; + + /** + * Getter for property https://w3id.org/cwl/salad#type
+ * + *
+ * + * Specify valid types of data that may be assigned to this parameter. * + * + *
+ */ + public java.util.Optional getType() { + return this.type; + } + + /** + * Used by {@link org.galaxyproject.gxformat2.v19_09.utils.RootLoader} to construct instances of + * WorkflowInputParameterImpl. + * + * @param __doc_ Document fragment to load this record object from (presumably a {@link + * java.util.Map}). + * @param __baseUri_ Base URI to generate child document IDs against. + * @param __loadingOptions Context for loading URIs and populating objects. + * @param __docRoot_ ID at this position in the document (if available) (maybe?) + * @throws ValidationException If the document fragment is not a {@link java.util.Map} or + * validation of fields fails. + */ + public WorkflowInputParameterImpl( + final Object __doc_, + final String __baseUri_, + LoadingOptions __loadingOptions, + final String __docRoot_) { + super(__doc_, __baseUri_, __loadingOptions, __docRoot_); + // Prefix plumbing variables with '__' to reduce likelihood of collision with + // generated names. + String __baseUri = __baseUri_; + String __docRoot = __docRoot_; + if (!(__doc_ instanceof java.util.Map)) { + throw new ValidationException("WorkflowInputParameterImpl called on non-map"); + } + final java.util.Map __doc = (java.util.Map) __doc_; + final java.util.List __errors = + new java.util.ArrayList(); + if (__loadingOptions != null) { + this.loadingOptions_ = __loadingOptions; + } + java.util.Optional id; + + if (__doc.containsKey("id")) { + try { + id = + LoaderInstances.uri_optional_StringInstance_True_False_None.loadField( + __doc.get("id"), __baseUri, __loadingOptions); + } catch (ValidationException e) { + id = null; // won't be used but prevents compiler from complaining. + final String __message = "the `id` field is not valid because:"; + __errors.add(new ValidationException(__message, e)); + } + + } else { + id = null; + } + + if (id == null) { + if (__docRoot != null) { + id = java.util.Optional.of(__docRoot); + } else { + id = java.util.Optional.of("_:" + java.util.UUID.randomUUID().toString()); + } + } + __baseUri = (String) id.orElse(null); + Object doc; + + if (__doc.containsKey("doc")) { + try { + doc = + LoaderInstances.union_of_NullInstance_or_StringInstance_or_array_of_StringInstance + .loadField(__doc.get("doc"), __baseUri, __loadingOptions); + } catch (ValidationException e) { + doc = null; // won't be used but prevents compiler from complaining. + final String __message = "the `doc` field is not valid because:"; + __errors.add(new ValidationException(__message, e)); + } + + } else { + doc = null; + } + java.util.Optional default_; + + if (__doc.containsKey("default")) { + try { + default_ = + LoaderInstances.optional_AnyInstance.loadField( + __doc.get("default"), __baseUri, __loadingOptions); + } catch (ValidationException e) { + default_ = null; // won't be used but prevents compiler from complaining. + final String __message = "the `default` field is not valid because:"; + __errors.add(new ValidationException(__message, e)); + } + + } else { + default_ = null; + } + java.util.Optional type; + + if (__doc.containsKey("type")) { + try { + type = + LoaderInstances.typedsl_optional_GalaxyType_2.loadField( + __doc.get("type"), __baseUri, __loadingOptions); + } catch (ValidationException e) { + type = null; // won't be used but prevents compiler from complaining. 
+ final String __message = "the `type` field is not valid because:"; + __errors.add(new ValidationException(__message, e)); + } + + } else { + type = null; + } + if (!__errors.isEmpty()) { + throw new ValidationException("Trying 'RecordField'", __errors); + } + this.doc = (Object) doc; + this.id = (java.util.Optional) id; + this.default_ = (java.util.Optional) default_; + this.type = (java.util.Optional) type; + } +} diff --git a/java/src/main/java/org/galaxyproject/gxformat2/v19_09/WorkflowOutputParameter.java b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/WorkflowOutputParameter.java new file mode 100644 index 0000000..9a4ff9e --- /dev/null +++ b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/WorkflowOutputParameter.java @@ -0,0 +1,60 @@ +package org.galaxyproject.gxformat2.v19_09; + +import org.galaxyproject.gxformat2.v19_09.utils.Savable; + +/** + * Auto-generated interface for + * https://galaxyproject.org/gxformat2/v19_09#WorkflowOutputParameter
+ * This interface is implemented by {@link WorkflowOutputParameterImpl}
+ * + *
+ * + * Describe an output parameter of a workflow. The parameter must be connected to one parameter + * defined in the workflow that will provide the value of the output parameter. It is legal to + * connect a WorkflowInputParameter to a WorkflowOutputParameter. + * + *
+ */ +public interface WorkflowOutputParameter extends OutputParameter, Savable { + /** + * Getter for property https://w3id.org/cwl/cwl#Identified/id
+ * + *
+ * + * The unique identifier for this object. * + * + *
+ */ + java.util.Optional getId(); + /** + * Getter for property https://w3id.org/cwl/salad#Documented/doc
+ * + *
+ * + * A documentation string for this object, or an array of strings which should be concatenated. * + * + *
+ */ + Object getDoc(); + /** + * Getter for property + * https://galaxyproject.org/gxformat2/v19_09#WorkflowOutputParameter/outputSource
+ * + *
+ * + * Specifies the workflow parameter that supplies the value of the output parameter. * + * + *&#13;
+ */ + java.util.Optional getOutputSource(); + /** + * Getter for property https://w3id.org/cwl/salad#type
+ * + *
+ * + * Specify valid types of data that may be assigned to this parameter. * + * + *
+ */ + java.util.Optional getType(); +} diff --git a/java/src/main/java/org/galaxyproject/gxformat2/v19_09/WorkflowOutputParameterImpl.java b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/WorkflowOutputParameterImpl.java new file mode 100644 index 0000000..e318cea --- /dev/null +++ b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/WorkflowOutputParameterImpl.java @@ -0,0 +1,198 @@ +package org.galaxyproject.gxformat2.v19_09; + +import org.galaxyproject.gxformat2.v19_09.utils.LoaderInstances; +import org.galaxyproject.gxformat2.v19_09.utils.LoadingOptions; +import org.galaxyproject.gxformat2.v19_09.utils.LoadingOptionsBuilder; +import org.galaxyproject.gxformat2.v19_09.utils.SavableImpl; +import org.galaxyproject.gxformat2.v19_09.utils.ValidationException; + +/** + * Auto-generated class implementation for + * https://galaxyproject.org/gxformat2/v19_09#WorkflowOutputParameter
+ * + *
+ * + * Describe an output parameter of a workflow. The parameter must be connected to one parameter + * defined in the workflow that will provide the value of the output parameter. It is legal to + * connect a WorkflowInputParameter to a WorkflowOutputParameter. + * + *
+ */ +public class WorkflowOutputParameterImpl extends SavableImpl implements WorkflowOutputParameter { + private LoadingOptions loadingOptions_ = new LoadingOptionsBuilder().build(); + private java.util.Map extensionFields_ = new java.util.HashMap(); + + private java.util.Optional id; + + /** + * Getter for property https://w3id.org/cwl/cwl#Identified/id
+ * + *
+ * + * The unique identifier for this object. * + * + *
+ */ + public java.util.Optional getId() { + return this.id; + } + + private Object doc; + + /** + * Getter for property https://w3id.org/cwl/salad#Documented/doc
+ * + *
+ * + * A documentation string for this object, or an array of strings which should be concatenated. * + * + *
+ */ + public Object getDoc() { + return this.doc; + } + + private java.util.Optional outputSource; + + /** + * Getter for property + * https://galaxyproject.org/gxformat2/v19_09#WorkflowOutputParameter/outputSource
+ * + *
+ * + * Specifies the workflow parameter that supplies the value of the output parameter. * + * + *&#13;
+ */ + public java.util.Optional getOutputSource() { + return this.outputSource; + } + + private java.util.Optional type; + + /** + * Getter for property https://w3id.org/cwl/salad#type
+ * + *
+ * + * Specify valid types of data that may be assigned to this parameter. * + * + *
+ */ + public java.util.Optional getType() { + return this.type; + } + + /** + * Used by {@link org.galaxyproject.gxformat2.v19_09.utils.RootLoader} to construct instances of + * WorkflowOutputParameterImpl. + * + * @param __doc_ Document fragment to load this record object from (presumably a {@link + * java.util.Map}). + * @param __baseUri_ Base URI to generate child document IDs against. + * @param __loadingOptions Context for loading URIs and populating objects. + * @param __docRoot_ ID at this position in the document (if available) (maybe?) + * @throws ValidationException If the document fragment is not a {@link java.util.Map} or + * validation of fields fails. + */ + public WorkflowOutputParameterImpl( + final Object __doc_, + final String __baseUri_, + LoadingOptions __loadingOptions, + final String __docRoot_) { + super(__doc_, __baseUri_, __loadingOptions, __docRoot_); + // Prefix plumbing variables with '__' to reduce likelihood of collision with + // generated names. + String __baseUri = __baseUri_; + String __docRoot = __docRoot_; + if (!(__doc_ instanceof java.util.Map)) { + throw new ValidationException("WorkflowOutputParameterImpl called on non-map"); + } + final java.util.Map __doc = (java.util.Map) __doc_; + final java.util.List __errors = + new java.util.ArrayList(); + if (__loadingOptions != null) { + this.loadingOptions_ = __loadingOptions; + } + java.util.Optional id; + + if (__doc.containsKey("id")) { + try { + id = + LoaderInstances.uri_optional_StringInstance_True_False_None.loadField( + __doc.get("id"), __baseUri, __loadingOptions); + } catch (ValidationException e) { + id = null; // won't be used but prevents compiler from complaining. + final String __message = "the `id` field is not valid because:"; + __errors.add(new ValidationException(__message, e)); + } + + } else { + id = null; + } + + if (id == null) { + if (__docRoot != null) { + id = java.util.Optional.of(__docRoot); + } else { + id = java.util.Optional.of("_:" + java.util.UUID.randomUUID().toString()); + } + } + __baseUri = (String) id.orElse(null); + Object doc; + + if (__doc.containsKey("doc")) { + try { + doc = + LoaderInstances.union_of_NullInstance_or_StringInstance_or_array_of_StringInstance + .loadField(__doc.get("doc"), __baseUri, __loadingOptions); + } catch (ValidationException e) { + doc = null; // won't be used but prevents compiler from complaining. + final String __message = "the `doc` field is not valid because:"; + __errors.add(new ValidationException(__message, e)); + } + + } else { + doc = null; + } + java.util.Optional outputSource; + + if (__doc.containsKey("outputSource")) { + try { + outputSource = + LoaderInstances.optional_StringInstance.loadField( + __doc.get("outputSource"), __baseUri, __loadingOptions); + } catch (ValidationException e) { + outputSource = null; // won't be used but prevents compiler from complaining. + final String __message = "the `outputSource` field is not valid because:"; + __errors.add(new ValidationException(__message, e)); + } + + } else { + outputSource = null; + } + java.util.Optional type; + + if (__doc.containsKey("type")) { + try { + type = + LoaderInstances.typedsl_optional_GalaxyType_2.loadField( + __doc.get("type"), __baseUri, __loadingOptions); + } catch (ValidationException e) { + type = null; // won't be used but prevents compiler from complaining. 
+ final String __message = "the `type` field is not valid because:"; + __errors.add(new ValidationException(__message, e)); + } + + } else { + type = null; + } + if (!__errors.isEmpty()) { + throw new ValidationException("Trying 'RecordField'", __errors); + } + this.doc = (Object) doc; + this.id = (java.util.Optional) id; + this.outputSource = (java.util.Optional) outputSource; + this.type = (java.util.Optional) type; + } +} diff --git a/java/src/main/java/org/galaxyproject/gxformat2/v19_09/WorkflowStep.java b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/WorkflowStep.java new file mode 100644 index 0000000..892c06a --- /dev/null +++ b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/WorkflowStep.java @@ -0,0 +1,142 @@ +package org.galaxyproject.gxformat2.v19_09; + +import org.galaxyproject.gxformat2.v19_09.utils.Savable; + +/** + * Auto-generated interface for https://galaxyproject.org/gxformat2/v19_09#WorkflowStep
+ * This interface is implemented by {@link WorkflowStepImpl}
+ * + *
+ * + * Workflow step. + * + *
+ */ +public interface WorkflowStep + extends Identified, Labeled, Documented, HasStepPosition, ReferencesTool, Savable { + /** + * Getter for property https://w3id.org/cwl/cwl#Identified/id
+ * + *
+ * + * The unique identifier for this object. * + * + *
+ */ + java.util.Optional getId(); + /** + * Getter for property https://w3id.org/cwl/cwl#Labeled/label
+ * + *
+ * + * A short, human-readable label of this object. * + * + *
+ */ + java.util.Optional getLabel(); + /** + * Getter for property https://w3id.org/cwl/salad#Documented/doc
+ * + *
+ * + * A documentation string for this object, or an array of strings which should be concatenated. * + * + *
+ */ + Object getDoc(); + /** + * Getter for property + * https://galaxyproject.org/gxformat2/gxformat2common#HasStepPosition/position
+ */ + java.util.Optional getPosition(); + /** + * Getter for property + * https://galaxyproject.org/gxformat2/gxformat2common#ReferencesTool/tool_id
+ * + *
+ * + * The tool ID used to run this step of the workflow (e.g. 'cat1' or + * 'toolshed.g2.bx.psu.edu/repos/nml/collapse_collections/collapse_dataset/4.0'). * + * + *
+ */ + java.util.Optional getTool_id(); + /** + * Getter for property + * https://galaxyproject.org/gxformat2/gxformat2common#ReferencesTool/tool_shed_repository + *
+ * + *
+ * + * The Galaxy Tool Shed repository that should be installed in order to use this tool. * + * + *
+ */ + java.util.Optional getTool_shed_repository(); + /** + * Getter for property + * https://galaxyproject.org/gxformat2/gxformat2common#ReferencesTool/tool_version
+ * + *
+ * + * The version of the tool used to run this step of the workflow. For tool shed installed + * tools, the ID generally uniquely specifies a version and this field is optional. * + * + *&#13;
+ */ + java.util.Optional getTool_version(); + /** + * Getter for property https://galaxyproject.org/gxformat2/v19_09#in
+ * + *
+ * + * Defines the input parameters of the workflow step. The process is ready to run when all + * required input parameters are associated with concrete values. Input parameters include a + * schema for each parameter which is used to validate the input object. It may also be used to build + * a user interface for constructing the input object. * + * + *&#13;
+ */ + java.util.Optional> getIn(); + /** + * Getter for property https://galaxyproject.org/gxformat2/v19_09#out
+ * + *
+ * + * Defines the parameters representing the output of the process. May be used to generate and/or + * validate the output object. * + * + *
+ */ + java.util.Optional> getOut(); + /** + * Getter for property https://galaxyproject.org/gxformat2/v19_09#WorkflowStep/state
+ * + *
+ * + * Structured tool state. * + * + *
+ */ + java.util.Optional getState(); + /** + * Getter for property https://w3id.org/cwl/salad#type
+ * + *
+ * + * Workflow step module's type (defaults to 'tool'). * + * + *
+ */ + java.util.Optional getType(); + /** + * Getter for property https://w3id.org/cwl/cwl#run
+ * + *
+ * + * Specifies a subworkflow to run. * + * + *
+ */ + java.util.Optional getRun(); +} diff --git a/java/src/main/java/org/galaxyproject/gxformat2/v19_09/WorkflowStepImpl.java b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/WorkflowStepImpl.java new file mode 100644 index 0000000..11bd3af --- /dev/null +++ b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/WorkflowStepImpl.java @@ -0,0 +1,457 @@ +package org.galaxyproject.gxformat2.v19_09; + +import org.galaxyproject.gxformat2.v19_09.utils.LoaderInstances; +import org.galaxyproject.gxformat2.v19_09.utils.LoadingOptions; +import org.galaxyproject.gxformat2.v19_09.utils.LoadingOptionsBuilder; +import org.galaxyproject.gxformat2.v19_09.utils.SavableImpl; +import org.galaxyproject.gxformat2.v19_09.utils.ValidationException; + +/** + * Auto-generated class implementation for + * https://galaxyproject.org/gxformat2/v19_09#WorkflowStep
+ * + *
+ * + * Workflow step. + * + *
+ */ +public class WorkflowStepImpl extends SavableImpl implements WorkflowStep { + private LoadingOptions loadingOptions_ = new LoadingOptionsBuilder().build(); + private java.util.Map extensionFields_ = new java.util.HashMap(); + + private java.util.Optional id; + + /** + * Getter for property https://w3id.org/cwl/cwl#Identified/id
+ * + *
+ * + * The unique identifier for this object. * + * + *
+ */ + public java.util.Optional getId() { + return this.id; + } + + private java.util.Optional label; + + /** + * Getter for property https://w3id.org/cwl/cwl#Labeled/label
+ * + *
+ * + * A short, human-readable label of this object. * + * + *
+ */ + public java.util.Optional getLabel() { + return this.label; + } + + private Object doc; + + /** + * Getter for property https://w3id.org/cwl/salad#Documented/doc
+ * + *
+ * + * A documentation string for this object, or an array of strings which should be concatenated. * + * + *
+ */ + public Object getDoc() { + return this.doc; + } + + private java.util.Optional position; + + /** + * Getter for property + * https://galaxyproject.org/gxformat2/gxformat2common#HasStepPosition/position
+ */ + public java.util.Optional getPosition() { + return this.position; + } + + private java.util.Optional tool_id; + + /** + * Getter for property + * https://galaxyproject.org/gxformat2/gxformat2common#ReferencesTool/tool_id
+ * + *
+ * + * The tool ID used to run this step of the workflow (e.g. 'cat1' or + * 'toolshed.g2.bx.psu.edu/repos/nml/collapse_collections/collapse_dataset/4.0'). * + * + *
+ */ + public java.util.Optional getTool_id() { + return this.tool_id; + } + + private java.util.Optional tool_shed_repository; + + /** + * Getter for property + * https://galaxyproject.org/gxformat2/gxformat2common#ReferencesTool/tool_shed_repository + *
+ * + *
+ * + * The Galaxy Tool Shed repository that should be installed in order to use this tool. * + * + *
+ */ + public java.util.Optional getTool_shed_repository() { + return this.tool_shed_repository; + } + + private java.util.Optional tool_version; + + /** + * Getter for property + * https://galaxyproject.org/gxformat2/gxformat2common#ReferencesTool/tool_version
+ * + *
+ * + * The version of the tool used to run this step of the workflow. For tool shed installed + * tools, the ID generally uniquely specifies a version and this field is optional. * + * + *&#13;
+ */ + public java.util.Optional getTool_version() { + return this.tool_version; + } + + private java.util.Optional> in; + + /** + * Getter for property https://galaxyproject.org/gxformat2/v19_09#in
+ * + *
+ * + * Defines the input parameters of the workflow step. The process is ready to run when all + * required input parameters are associated with concrete values. Input parameters include a + * schema for each parameter which is used to validate the input object. It may also be used to build + * a user interface for constructing the input object. * + * + *&#13;
+ */ + public java.util.Optional> getIn() { + return this.in; + } + + private java.util.Optional> out; + + /** + * Getter for property https://galaxyproject.org/gxformat2/v19_09#out
+ * + *
+ * + * Defines the parameters representing the output of the process. May be used to generate and/or + * validate the output object. * + * + *
+ */ + public java.util.Optional> getOut() { + return this.out; + } + + private java.util.Optional state; + + /** + * Getter for property https://galaxyproject.org/gxformat2/v19_09#WorkflowStep/state
+ * + *
+ * + * Structured tool state. * + * + *
+ */ + public java.util.Optional getState() { + return this.state; + } + + private java.util.Optional type; + + /** + * Getter for property https://w3id.org/cwl/salad#type
+ * + *
+ * + * Workflow step module's type (defaults to 'tool'). * + * + *
+ */ + public java.util.Optional getType() { + return this.type; + } + + private java.util.Optional run; + + /** + * Getter for property https://w3id.org/cwl/cwl#run
+ * + *
+ * + * Specifies a subworkflow to run. * + * + *
+ */ + public java.util.Optional getRun() { + return this.run; + } + + /** + * Used by {@link org.galaxyproject.gxformat2.v19_09.utils.RootLoader} to construct instances of + * WorkflowStepImpl. + * + * @param __doc_ Document fragment to load this record object from (presumably a {@link + * java.util.Map}). + * @param __baseUri_ Base URI to generate child document IDs against. + * @param __loadingOptions Context for loading URIs and populating objects. + * @param __docRoot_ ID at this position in the document (if available) (maybe?) + * @throws ValidationException If the document fragment is not a {@link java.util.Map} or + * validation of fields fails. + */ + public WorkflowStepImpl( + final Object __doc_, + final String __baseUri_, + LoadingOptions __loadingOptions, + final String __docRoot_) { + super(__doc_, __baseUri_, __loadingOptions, __docRoot_); + // Prefix plumbing variables with '__' to reduce likelihood of collision with + // generated names. + String __baseUri = __baseUri_; + String __docRoot = __docRoot_; + if (!(__doc_ instanceof java.util.Map)) { + throw new ValidationException("WorkflowStepImpl called on non-map"); + } + final java.util.Map __doc = (java.util.Map) __doc_; + final java.util.List __errors = + new java.util.ArrayList(); + if (__loadingOptions != null) { + this.loadingOptions_ = __loadingOptions; + } + java.util.Optional id; + + if (__doc.containsKey("id")) { + try { + id = + LoaderInstances.uri_optional_StringInstance_True_False_None.loadField( + __doc.get("id"), __baseUri, __loadingOptions); + } catch (ValidationException e) { + id = null; // won't be used but prevents compiler from complaining. + final String __message = "the `id` field is not valid because:"; + __errors.add(new ValidationException(__message, e)); + } + + } else { + id = null; + } + + if (id == null) { + if (__docRoot != null) { + id = java.util.Optional.of(__docRoot); + } else { + id = java.util.Optional.of("_:" + java.util.UUID.randomUUID().toString()); + } + } + __baseUri = (String) id.orElse(null); + java.util.Optional label; + + if (__doc.containsKey("label")) { + try { + label = + LoaderInstances.optional_StringInstance.loadField( + __doc.get("label"), __baseUri, __loadingOptions); + } catch (ValidationException e) { + label = null; // won't be used but prevents compiler from complaining. + final String __message = "the `label` field is not valid because:"; + __errors.add(new ValidationException(__message, e)); + } + + } else { + label = null; + } + Object doc; + + if (__doc.containsKey("doc")) { + try { + doc = + LoaderInstances.union_of_NullInstance_or_StringInstance_or_array_of_StringInstance + .loadField(__doc.get("doc"), __baseUri, __loadingOptions); + } catch (ValidationException e) { + doc = null; // won't be used but prevents compiler from complaining. + final String __message = "the `doc` field is not valid because:"; + __errors.add(new ValidationException(__message, e)); + } + + } else { + doc = null; + } + java.util.Optional position; + + if (__doc.containsKey("position")) { + try { + position = + LoaderInstances.optional_StepPosition.loadField( + __doc.get("position"), __baseUri, __loadingOptions); + } catch (ValidationException e) { + position = null; // won't be used but prevents compiler from complaining. 
+ final String __message = "the `position` field is not valid because:"; + __errors.add(new ValidationException(__message, e)); + } + + } else { + position = null; + } + java.util.Optional tool_id; + + if (__doc.containsKey("tool_id")) { + try { + tool_id = + LoaderInstances.optional_StringInstance.loadField( + __doc.get("tool_id"), __baseUri, __loadingOptions); + } catch (ValidationException e) { + tool_id = null; // won't be used but prevents compiler from complaining. + final String __message = "the `tool_id` field is not valid because:"; + __errors.add(new ValidationException(__message, e)); + } + + } else { + tool_id = null; + } + java.util.Optional tool_shed_repository; + + if (__doc.containsKey("tool_shed_repository")) { + try { + tool_shed_repository = + LoaderInstances.optional_ToolShedRepository.loadField( + __doc.get("tool_shed_repository"), __baseUri, __loadingOptions); + } catch (ValidationException e) { + tool_shed_repository = null; // won't be used but prevents compiler from complaining. + final String __message = "the `tool_shed_repository` field is not valid because:"; + __errors.add(new ValidationException(__message, e)); + } + + } else { + tool_shed_repository = null; + } + java.util.Optional tool_version; + + if (__doc.containsKey("tool_version")) { + try { + tool_version = + LoaderInstances.optional_StringInstance.loadField( + __doc.get("tool_version"), __baseUri, __loadingOptions); + } catch (ValidationException e) { + tool_version = null; // won't be used but prevents compiler from complaining. + final String __message = "the `tool_version` field is not valid because:"; + __errors.add(new ValidationException(__message, e)); + } + + } else { + tool_version = null; + } + java.util.Optional> in; + + if (__doc.containsKey("in")) { + try { + in = + LoaderInstances.idmap_in_optional_array_of_WorkflowStepInput.loadField( + __doc.get("in"), __baseUri, __loadingOptions); + } catch (ValidationException e) { + in = null; // won't be used but prevents compiler from complaining. + final String __message = "the `in` field is not valid because:"; + __errors.add(new ValidationException(__message, e)); + } + + } else { + in = null; + } + java.util.Optional> out; + + if (__doc.containsKey("out")) { + try { + out = + LoaderInstances + .uri_optional_array_of_union_of_StringInstance_or_WorkflowStepOutput_True_False_None + .loadField(__doc.get("out"), __baseUri, __loadingOptions); + } catch (ValidationException e) { + out = null; // won't be used but prevents compiler from complaining. + final String __message = "the `out` field is not valid because:"; + __errors.add(new ValidationException(__message, e)); + } + + } else { + out = null; + } + java.util.Optional state; + + if (__doc.containsKey("state")) { + try { + state = + LoaderInstances.optional_AnyInstance.loadField( + __doc.get("state"), __baseUri, __loadingOptions); + } catch (ValidationException e) { + state = null; // won't be used but prevents compiler from complaining. + final String __message = "the `state` field is not valid because:"; + __errors.add(new ValidationException(__message, e)); + } + + } else { + state = null; + } + java.util.Optional type; + + if (__doc.containsKey("type")) { + try { + type = + LoaderInstances.typedsl_optional_WorkflowStepType_2.loadField( + __doc.get("type"), __baseUri, __loadingOptions); + } catch (ValidationException e) { + type = null; // won't be used but prevents compiler from complaining. 
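
The pattern above repeats for every field of the step record: each field is read through a named loader, failures are collected rather than thrown immediately, and the constructor raises a single aggregated ValidationException at the end. A minimal sketch of driving this constructor directly, not part of the generated sources in this diff; the step values and the file:///example base URI are illustrative assumptions:

    import java.util.HashMap;
    import java.util.Map;
    import org.galaxyproject.gxformat2.v19_09.WorkflowStepImpl;
    import org.galaxyproject.gxformat2.v19_09.utils.LoadingOptionsBuilder;
    import org.galaxyproject.gxformat2.v19_09.utils.ValidationException;

    public class WorkflowStepSketch {
      public static void main(final String[] args) {
        // Illustrative values only; not taken from the schema or tests in this change.
        final Map<String, Object> stepDoc = new HashMap<String, Object>();
        stepDoc.put("id", "cat1");
        stepDoc.put("type", "tool");
        stepDoc.put("tool_id", "cat1");
        try {
          final WorkflowStepImpl step =
              new WorkflowStepImpl(
                  stepDoc, "file:///example", new LoadingOptionsBuilder().build(), null);
          System.out.println(step.getType()); // e.g. Optional[TOOL]
        } catch (final ValidationException e) {
          // Per-field problems arrive as one aggregated ValidationException.
          System.err.println(e.getMessage());
        }
      }
    }

In normal use these constructors are only reached through RootLoader and the RecordLoader instances, as the Javadoc above notes; calling them directly like this is mainly a way to see the validation behaviour in isolation.
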
+ final String __message = "the `type` field is not valid because:"; + __errors.add(new ValidationException(__message, e)); + } + + } else { + type = null; + } + java.util.Optional run; + + if (__doc.containsKey("run")) { + try { + run = + LoaderInstances.uri_optional_GalaxyWorkflow_False_False_None.loadField( + __doc.get("run"), __baseUri, __loadingOptions); + } catch (ValidationException e) { + run = null; // won't be used but prevents compiler from complaining. + final String __message = "the `run` field is not valid because:"; + __errors.add(new ValidationException(__message, e)); + } + + } else { + run = null; + } + if (!__errors.isEmpty()) { + throw new ValidationException("Trying 'RecordField'", __errors); + } + this.id = (java.util.Optional) id; + this.label = (java.util.Optional) label; + this.doc = (Object) doc; + this.position = (java.util.Optional) position; + this.tool_id = (java.util.Optional) tool_id; + this.tool_shed_repository = (java.util.Optional) tool_shed_repository; + this.tool_version = (java.util.Optional) tool_version; + this.in = (java.util.Optional>) in; + this.out = (java.util.Optional>) out; + this.state = (java.util.Optional) state; + this.type = (java.util.Optional) type; + this.run = (java.util.Optional) run; + } +} diff --git a/java/src/main/java/org/galaxyproject/gxformat2/v19_09/WorkflowStepInput.java b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/WorkflowStepInput.java new file mode 100644 index 0000000..7bb75b1 --- /dev/null +++ b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/WorkflowStepInput.java @@ -0,0 +1,60 @@ +package org.galaxyproject.gxformat2.v19_09; + +import org.galaxyproject.gxformat2.v19_09.utils.Savable; + +/** + * Auto-generated interface for https://galaxyproject.org/gxformat2/v19_09#WorkflowStepInput + *
+ * This interface is implemented by {@link WorkflowStepInputImpl}
+ * + *
+ * + * TODO: + * + *
+ */ +public interface WorkflowStepInput extends Identified, Sink, Labeled, Savable { + /** + * Getter for property https://w3id.org/cwl/cwl#Identified/id
+ * + *
+ * + * The unique identifier for this object. * + * + *
+ */ + java.util.Optional getId(); + /** + * Getter for property https://w3id.org/cwl/cwl#source
+ * + *
+ * + * Specifies one or more workflow parameters that will provide input to the underlying step + * parameter. * + * + *
+ */ + Object getSource(); + /** + * Getter for property https://w3id.org/cwl/cwl#Labeled/label
+ * + *
+ * + * A short, human-readable label of this object. * + * + *
+ */ + java.util.Optional getLabel(); + /** + * Getter for property https://w3id.org/cwl/salad#default
+ * + *
+ * + * The default value for this parameter to use if either there is no `source` field, or the value + * produced by the `source` is `null`. The default must be applied prior to scattering or + * evaluating `valueFrom`. * + * + *
+ */ + java.util.Optional getDefault(); +} diff --git a/java/src/main/java/org/galaxyproject/gxformat2/v19_09/WorkflowStepInputImpl.java b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/WorkflowStepInputImpl.java new file mode 100644 index 0000000..47cda94 --- /dev/null +++ b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/WorkflowStepInputImpl.java @@ -0,0 +1,199 @@ +package org.galaxyproject.gxformat2.v19_09; + +import org.galaxyproject.gxformat2.v19_09.utils.LoaderInstances; +import org.galaxyproject.gxformat2.v19_09.utils.LoadingOptions; +import org.galaxyproject.gxformat2.v19_09.utils.LoadingOptionsBuilder; +import org.galaxyproject.gxformat2.v19_09.utils.SavableImpl; +import org.galaxyproject.gxformat2.v19_09.utils.ValidationException; + +/** + * Auto-generated class implementation for + * https://galaxyproject.org/gxformat2/v19_09#WorkflowStepInput
+ * + *
+ * + * TODO: + * + *
+ */ +public class WorkflowStepInputImpl extends SavableImpl implements WorkflowStepInput { + private LoadingOptions loadingOptions_ = new LoadingOptionsBuilder().build(); + private java.util.Map extensionFields_ = new java.util.HashMap(); + + private java.util.Optional id; + + /** + * Getter for property https://w3id.org/cwl/cwl#Identified/id
+ * + *
+ * + * The unique identifier for this object. * + * + *
+ */ + public java.util.Optional getId() { + return this.id; + } + + private Object source; + + /** + * Getter for property https://w3id.org/cwl/cwl#source
+ * + *
+ * + * Specifies one or more workflow parameters that will provide input to the underlying step + * parameter. * + * + *
+ */ + public Object getSource() { + return this.source; + } + + private java.util.Optional label; + + /** + * Getter for property https://w3id.org/cwl/cwl#Labeled/label
+ * + *
+ * + * A short, human-readable label of this object. * + * + *
+ */ + public java.util.Optional getLabel() { + return this.label; + } + + private java.util.Optional default_; + + /** + * Getter for property https://w3id.org/cwl/salad#default
+ * + *
+ * + * The default value for this parameter to use if either there is no `source` field, or the value + * produced by the `source` is `null`. The default must be applied prior to scattering or + * evaluating `valueFrom`. * + * + *
+ */ + public java.util.Optional getDefault() { + return this.default_; + } + + /** + * Used by {@link org.galaxyproject.gxformat2.v19_09.utils.RootLoader} to construct instances of + * WorkflowStepInputImpl. + * + * @param __doc_ Document fragment to load this record object from (presumably a {@link + * java.util.Map}). + * @param __baseUri_ Base URI to generate child document IDs against. + * @param __loadingOptions Context for loading URIs and populating objects. + * @param __docRoot_ ID at this position in the document (if available) (maybe?) + * @throws ValidationException If the document fragment is not a {@link java.util.Map} or + * validation of fields fails. + */ + public WorkflowStepInputImpl( + final Object __doc_, + final String __baseUri_, + LoadingOptions __loadingOptions, + final String __docRoot_) { + super(__doc_, __baseUri_, __loadingOptions, __docRoot_); + // Prefix plumbing variables with '__' to reduce likelihood of collision with + // generated names. + String __baseUri = __baseUri_; + String __docRoot = __docRoot_; + if (!(__doc_ instanceof java.util.Map)) { + throw new ValidationException("WorkflowStepInputImpl called on non-map"); + } + final java.util.Map __doc = (java.util.Map) __doc_; + final java.util.List __errors = + new java.util.ArrayList(); + if (__loadingOptions != null) { + this.loadingOptions_ = __loadingOptions; + } + java.util.Optional id; + + if (__doc.containsKey("id")) { + try { + id = + LoaderInstances.uri_optional_StringInstance_True_False_None.loadField( + __doc.get("id"), __baseUri, __loadingOptions); + } catch (ValidationException e) { + id = null; // won't be used but prevents compiler from complaining. + final String __message = "the `id` field is not valid because:"; + __errors.add(new ValidationException(__message, e)); + } + + } else { + id = null; + } + + if (id == null) { + if (__docRoot != null) { + id = java.util.Optional.of(__docRoot); + } else { + id = java.util.Optional.of("_:" + java.util.UUID.randomUUID().toString()); + } + } + __baseUri = (String) id.orElse(null); + Object source; + + if (__doc.containsKey("source")) { + try { + source = + LoaderInstances + .uri_union_of_NullInstance_or_StringInstance_or_array_of_StringInstance_False_False_2 + .loadField(__doc.get("source"), __baseUri, __loadingOptions); + } catch (ValidationException e) { + source = null; // won't be used but prevents compiler from complaining. + final String __message = "the `source` field is not valid because:"; + __errors.add(new ValidationException(__message, e)); + } + + } else { + source = null; + } + java.util.Optional label; + + if (__doc.containsKey("label")) { + try { + label = + LoaderInstances.optional_StringInstance.loadField( + __doc.get("label"), __baseUri, __loadingOptions); + } catch (ValidationException e) { + label = null; // won't be used but prevents compiler from complaining. + final String __message = "the `label` field is not valid because:"; + __errors.add(new ValidationException(__message, e)); + } + + } else { + label = null; + } + java.util.Optional default_; + + if (__doc.containsKey("default")) { + try { + default_ = + LoaderInstances.optional_AnyInstance.loadField( + __doc.get("default"), __baseUri, __loadingOptions); + } catch (ValidationException e) { + default_ = null; // won't be used but prevents compiler from complaining. 
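
Loaded step inputs expose `id`, `source`, `label` and `default` through the interface getters shown above. A small sketch, not part of the generated sources in this diff; the map contents and base URI below are illustrative assumptions:

    import java.util.HashMap;
    import java.util.Map;
    import org.galaxyproject.gxformat2.v19_09.WorkflowStepInput;
    import org.galaxyproject.gxformat2.v19_09.WorkflowStepInputImpl;
    import org.galaxyproject.gxformat2.v19_09.utils.LoadingOptionsBuilder;

    public class WorkflowStepInputSketch {
      public static void main(final String[] args) {
        // Illustrative values only; not taken from the schema or tests in this change.
        final Map<String, Object> inputDoc = new HashMap<String, Object>();
        inputDoc.put("id", "input1");
        inputDoc.put("source", "upstream_step/output1");
        inputDoc.put("default", 42);
        final WorkflowStepInput in =
            new WorkflowStepInputImpl(
                inputDoc, "file:///example", new LoadingOptionsBuilder().build(), null);
        System.out.println(in.getId());      // e.g. Optional[file:///example#input1]
        System.out.println(in.getSource());  // e.g. file:///example#upstream_step/output1
        System.out.println(in.getDefault()); // e.g. Optional[42]
      }
    }

The `id` and `source` values come back as URIs resolved against the base URI by the URI loaders, while `default` is passed through untouched by the Any loader.
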
+ final String __message = "the `default` field is not valid because:"; + __errors.add(new ValidationException(__message, e)); + } + + } else { + default_ = null; + } + if (!__errors.isEmpty()) { + throw new ValidationException("Trying 'RecordField'", __errors); + } + this.id = (java.util.Optional) id; + this.source = (Object) source; + this.label = (java.util.Optional) label; + this.default_ = (java.util.Optional) default_; + } +} diff --git a/java/src/main/java/org/galaxyproject/gxformat2/v19_09/WorkflowStepOutput.java b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/WorkflowStepOutput.java new file mode 100644 index 0000000..05c466c --- /dev/null +++ b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/WorkflowStepOutput.java @@ -0,0 +1,32 @@ +package org.galaxyproject.gxformat2.v19_09; + +import org.galaxyproject.gxformat2.v19_09.utils.Savable; + +/** + * Auto-generated interface for https://galaxyproject.org/gxformat2/v19_09#WorkflowStepOutput + *
+ * This interface is implemented by {@link WorkflowStepOutputImpl}
+ * + *
+ * + * Associate an output parameter of the underlying process with a workflow parameter. The workflow + * parameter (given in the `id` field) may be used as a `source` to connect with input parameters + * of other workflow steps, or with an output parameter of the process. + * + *

A unique identifier for this workflow output parameter. This is the identifier to use in the + * `source` field of `WorkflowStepInput` to connect the output value to downstream parameters. + * + *

+ */ +public interface WorkflowStepOutput extends Identified, Savable { + /** + * Getter for property https://w3id.org/cwl/cwl#Identified/id
+ * + *
+ * + * The unique identifier for this object. * + * + *
+ */ + java.util.Optional getId(); +} diff --git a/java/src/main/java/org/galaxyproject/gxformat2/v19_09/WorkflowStepOutputImpl.java b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/WorkflowStepOutputImpl.java new file mode 100644 index 0000000..2e2b5bf --- /dev/null +++ b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/WorkflowStepOutputImpl.java @@ -0,0 +1,104 @@ +package org.galaxyproject.gxformat2.v19_09; + +import org.galaxyproject.gxformat2.v19_09.utils.LoaderInstances; +import org.galaxyproject.gxformat2.v19_09.utils.LoadingOptions; +import org.galaxyproject.gxformat2.v19_09.utils.LoadingOptionsBuilder; +import org.galaxyproject.gxformat2.v19_09.utils.SavableImpl; +import org.galaxyproject.gxformat2.v19_09.utils.ValidationException; + +/** + * Auto-generated class implementation for + * https://galaxyproject.org/gxformat2/v19_09#WorkflowStepOutput
+ * + *
+ * + * Associate an output parameter of the underlying process with a workflow parameter. The workflow + * parameter (given in the `id` field) may be used as a `source` to connect with input parameters + * of other workflow steps, or with an output parameter of the process. + * + *

A unique identifier for this workflow output parameter. This is the identifier to use in the + * `source` field of `WorkflowStepInput` to connect the output value to downstream parameters. + * + *

+ */ +public class WorkflowStepOutputImpl extends SavableImpl implements WorkflowStepOutput { + private LoadingOptions loadingOptions_ = new LoadingOptionsBuilder().build(); + private java.util.Map extensionFields_ = new java.util.HashMap(); + + private java.util.Optional id; + + /** + * Getter for property https://w3id.org/cwl/cwl#Identified/id
+ * + *
+ * + * The unique identifier for this object. * + * + *
+ */ + public java.util.Optional getId() { + return this.id; + } + + /** + * Used by {@link org.galaxyproject.gxformat2.v19_09.utils.RootLoader} to construct instances of + * WorkflowStepOutputImpl. + * + * @param __doc_ Document fragment to load this record object from (presumably a {@link + * java.util.Map}). + * @param __baseUri_ Base URI to generate child document IDs against. + * @param __loadingOptions Context for loading URIs and populating objects. + * @param __docRoot_ ID at this position in the document (if available) (maybe?) + * @throws ValidationException If the document fragment is not a {@link java.util.Map} or + * validation of fields fails. + */ + public WorkflowStepOutputImpl( + final Object __doc_, + final String __baseUri_, + LoadingOptions __loadingOptions, + final String __docRoot_) { + super(__doc_, __baseUri_, __loadingOptions, __docRoot_); + // Prefix plumbing variables with '__' to reduce likelihood of collision with + // generated names. + String __baseUri = __baseUri_; + String __docRoot = __docRoot_; + if (!(__doc_ instanceof java.util.Map)) { + throw new ValidationException("WorkflowStepOutputImpl called on non-map"); + } + final java.util.Map __doc = (java.util.Map) __doc_; + final java.util.List __errors = + new java.util.ArrayList(); + if (__loadingOptions != null) { + this.loadingOptions_ = __loadingOptions; + } + java.util.Optional id; + + if (__doc.containsKey("id")) { + try { + id = + LoaderInstances.uri_optional_StringInstance_True_False_None.loadField( + __doc.get("id"), __baseUri, __loadingOptions); + } catch (ValidationException e) { + id = null; // won't be used but prevents compiler from complaining. + final String __message = "the `id` field is not valid because:"; + __errors.add(new ValidationException(__message, e)); + } + + } else { + id = null; + } + + if (id == null) { + if (__docRoot != null) { + id = java.util.Optional.of(__docRoot); + } else { + id = java.util.Optional.of("_:" + java.util.UUID.randomUUID().toString()); + } + } + __baseUri = (String) id.orElse(null); + if (!__errors.isEmpty()) { + throw new ValidationException("Trying 'RecordField'", __errors); + } + this.id = (java.util.Optional) id; + } +} diff --git a/java/src/main/java/org/galaxyproject/gxformat2/v19_09/WorkflowStepType.java b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/WorkflowStepType.java new file mode 100644 index 0000000..08ba51c --- /dev/null +++ b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/WorkflowStepType.java @@ -0,0 +1,26 @@ +package org.galaxyproject.gxformat2.v19_09; + +import org.galaxyproject.gxformat2.v19_09.utils.ValidationException; + +public enum WorkflowStepType { + TOOL("tool"), + SUBWORKFLOW("subworkflow"), + PAUSE("pause"); + + private static String[] symbols = new String[] {"tool", "subworkflow", "pause"}; + private String docVal; + + private WorkflowStepType(final String docVal) { + this.docVal = docVal; + } + + public static WorkflowStepType fromDocumentVal(final String docVal) { + for (final WorkflowStepType val : WorkflowStepType.values()) { + if (val.docVal.equals(docVal)) { + return val; + } + } + throw new ValidationException( + String.format("Expected one of %s", WorkflowStepType.symbols, docVal)); + } +} diff --git a/java/src/main/java/org/galaxyproject/gxformat2/v19_09/enum_d062602be0b4b8fd33e69e29a841317b6ab665bc.java b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/enum_d062602be0b4b8fd33e69e29a841317b6ab665bc.java new file mode 100644 index 0000000..b902730 --- /dev/null +++ 
b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/enum_d062602be0b4b8fd33e69e29a841317b6ab665bc.java @@ -0,0 +1,26 @@ +package org.galaxyproject.gxformat2.v19_09; + +import org.galaxyproject.gxformat2.v19_09.utils.ValidationException; + +public enum enum_d062602be0b4b8fd33e69e29a841317b6ab665bc { + ARRAY("array"); + + private static String[] symbols = new String[] {"array"}; + private String docVal; + + private enum_d062602be0b4b8fd33e69e29a841317b6ab665bc(final String docVal) { + this.docVal = docVal; + } + + public static enum_d062602be0b4b8fd33e69e29a841317b6ab665bc fromDocumentVal(final String docVal) { + for (final enum_d062602be0b4b8fd33e69e29a841317b6ab665bc val : + enum_d062602be0b4b8fd33e69e29a841317b6ab665bc.values()) { + if (val.docVal.equals(docVal)) { + return val; + } + } + throw new ValidationException( + String.format( + "Expected one of %s", enum_d062602be0b4b8fd33e69e29a841317b6ab665bc.symbols, docVal)); + } +} diff --git a/java/src/main/java/org/galaxyproject/gxformat2/v19_09/enum_d961d79c225752b9fadb617367615ab176b47d77.java b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/enum_d961d79c225752b9fadb617367615ab176b47d77.java new file mode 100644 index 0000000..8eddf33 --- /dev/null +++ b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/enum_d961d79c225752b9fadb617367615ab176b47d77.java @@ -0,0 +1,26 @@ +package org.galaxyproject.gxformat2.v19_09; + +import org.galaxyproject.gxformat2.v19_09.utils.ValidationException; + +public enum enum_d961d79c225752b9fadb617367615ab176b47d77 { + ENUM("enum"); + + private static String[] symbols = new String[] {"enum"}; + private String docVal; + + private enum_d961d79c225752b9fadb617367615ab176b47d77(final String docVal) { + this.docVal = docVal; + } + + public static enum_d961d79c225752b9fadb617367615ab176b47d77 fromDocumentVal(final String docVal) { + for (final enum_d961d79c225752b9fadb617367615ab176b47d77 val : + enum_d961d79c225752b9fadb617367615ab176b47d77.values()) { + if (val.docVal.equals(docVal)) { + return val; + } + } + throw new ValidationException( + String.format( + "Expected one of %s", enum_d961d79c225752b9fadb617367615ab176b47d77.symbols, docVal)); + } +} diff --git a/java/src/main/java/org/galaxyproject/gxformat2/v19_09/enum_d9cba076fca539106791a4f46d198c7fcfbdb779.java b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/enum_d9cba076fca539106791a4f46d198c7fcfbdb779.java new file mode 100644 index 0000000..498eec1 --- /dev/null +++ b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/enum_d9cba076fca539106791a4f46d198c7fcfbdb779.java @@ -0,0 +1,26 @@ +package org.galaxyproject.gxformat2.v19_09; + +import org.galaxyproject.gxformat2.v19_09.utils.ValidationException; + +public enum enum_d9cba076fca539106791a4f46d198c7fcfbdb779 { + RECORD("record"); + + private static String[] symbols = new String[] {"record"}; + private String docVal; + + private enum_d9cba076fca539106791a4f46d198c7fcfbdb779(final String docVal) { + this.docVal = docVal; + } + + public static enum_d9cba076fca539106791a4f46d198c7fcfbdb779 fromDocumentVal(final String docVal) { + for (final enum_d9cba076fca539106791a4f46d198c7fcfbdb779 val : + enum_d9cba076fca539106791a4f46d198c7fcfbdb779.values()) { + if (val.docVal.equals(docVal)) { + return val; + } + } + throw new ValidationException( + String.format( + "Expected one of %s", enum_d9cba076fca539106791a4f46d198c7fcfbdb779.symbols, docVal)); + } +} diff --git a/java/src/main/java/org/galaxyproject/gxformat2/v19_09/package.html 
b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/package.html new file mode 100644 index 0000000..3f7b047 --- /dev/null +++ b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/package.html @@ -0,0 +1,4 @@ + +Package for the auto-generated Java objects corresponding generated by Schema Salad +for https://galaxyproject.org/gxformat2/v19_09#. + diff --git a/java/src/main/java/org/galaxyproject/gxformat2/v19_09/utils/AnyLoader.java b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/utils/AnyLoader.java new file mode 100644 index 0000000..ab50022 --- /dev/null +++ b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/utils/AnyLoader.java @@ -0,0 +1,15 @@ +package org.galaxyproject.gxformat2.v19_09.utils; + +public class AnyLoader implements Loader { + + public Object load( + final Object doc, + final String baseUri, + final LoadingOptions loadingOptions, + final String docRoot) { + if (doc == null) { + throw new ValidationException("Expected non-null"); + } + return doc; + } +} diff --git a/java/src/main/java/org/galaxyproject/gxformat2/v19_09/utils/ArrayLoader.java b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/utils/ArrayLoader.java new file mode 100644 index 0000000..04b5281 --- /dev/null +++ b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/utils/ArrayLoader.java @@ -0,0 +1,42 @@ +package org.galaxyproject.gxformat2.v19_09.utils; + +import java.util.ArrayList; +import java.util.List; + +public class ArrayLoader implements Loader> { + private final Loader itemLoader; + + public ArrayLoader(Loader itemLoader) { + this.itemLoader = itemLoader; + } + + public List load( + final Object doc, + final String baseUri, + final LoadingOptions loadingOptions, + final String docRoot) { + final List docList = (List) Loader.validateOfJavaType(List.class, doc); + final List r = new ArrayList(); + final List loaders = new ArrayList(); + loaders.add(this); + loaders.add(this.itemLoader); + final UnionLoader unionLoader = new UnionLoader(loaders); + final List errors = new ArrayList(); + for (final Object el : docList) { + try { + final Object loadedField = unionLoader.loadField(el, baseUri, loadingOptions); + if (loadedField instanceof List) { + r.addAll((List) loadedField); + } else { + r.add((T) loadedField); + } + } catch (final ValidationException e) { + errors.add(e); + } + } + if (!errors.isEmpty()) { + throw new ValidationException("", errors); + } + return r; + } +} diff --git a/java/src/main/java/org/galaxyproject/gxformat2/v19_09/utils/ConstantMaps.java b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/utils/ConstantMaps.java new file mode 100644 index 0000000..dfb66df --- /dev/null +++ b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/utils/ConstantMaps.java @@ -0,0 +1,117 @@ +package org.galaxyproject.gxformat2.v19_09.utils; + +import java.util.HashMap; + +public class ConstantMaps { + // declare as HashMap for clone(). 
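
The `vocab`/`rvocab` tables that follow are the two-way mapping between the short names used in workflow documents and their full IRIs; `LoadingOptions.expandUrl` consults them when resolving fields. Two illustrative lookups, not part of the generated file, using keys that are real entries from the table below:

    import org.galaxyproject.gxformat2.v19_09.utils.ConstantMaps;

    public class VocabSketch {
      public static void main(final String[] args) {
        // Illustrative only; the keys are real entries from the generated table.
        System.out.println(ConstantMaps.vocab.get("tool"));
        // e.g. https://galaxyproject.org/gxformat2/v19_09#WorkflowStepType/tool
        System.out.println(ConstantMaps.rvocab.get("https://w3id.org/cwl/cwl#Identified"));
        // e.g. Identified
      }
    }
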
+ public static final HashMap vocab = new HashMap(); + public static final HashMap rvocab = new HashMap(); + + static { + vocab.put("Any", "https://w3id.org/cwl/salad#Any"); + vocab.put("ArraySchema", "https://w3id.org/cwl/salad#ArraySchema"); + vocab.put("Documented", "https://w3id.org/cwl/salad#Documented"); + vocab.put("EnumSchema", "https://w3id.org/cwl/salad#EnumSchema"); + vocab.put("File", "https://galaxyproject.org/gxformat2/v19_09#GalaxyType/File"); + vocab.put("GalaxyType", "https://galaxyproject.org/gxformat2/v19_09#GalaxyType"); + vocab.put("GalaxyWorkflow", "https://galaxyproject.org/gxformat2/v19_09#GalaxyWorkflow"); + vocab.put( + "HasStepPosition", "https://galaxyproject.org/gxformat2/gxformat2common#HasStepPosition"); + vocab.put("Identified", "https://w3id.org/cwl/cwl#Identified"); + vocab.put("InputParameter", "https://w3id.org/cwl/cwl#InputParameter"); + vocab.put("Labeled", "https://w3id.org/cwl/cwl#Labeled"); + vocab.put("OutputParameter", "https://w3id.org/cwl/cwl#OutputParameter"); + vocab.put("Parameter", "https://w3id.org/cwl/cwl#Parameter"); + vocab.put("PrimitiveType", "https://w3id.org/cwl/salad#PrimitiveType"); + vocab.put("Process", "https://w3id.org/cwl/cwl#Process"); + vocab.put("RecordField", "https://w3id.org/cwl/salad#RecordField"); + vocab.put("RecordSchema", "https://w3id.org/cwl/salad#RecordSchema"); + vocab.put( + "ReferencesTool", "https://galaxyproject.org/gxformat2/gxformat2common#ReferencesTool"); + vocab.put("Sink", "https://galaxyproject.org/gxformat2/v19_09#Sink"); + vocab.put("StepPosition", "https://galaxyproject.org/gxformat2/gxformat2common#StepPosition"); + vocab.put( + "ToolShedRepository", + "https://galaxyproject.org/gxformat2/gxformat2common#ToolShedRepository"); + vocab.put( + "WorkflowInputParameter", + "https://galaxyproject.org/gxformat2/v19_09#WorkflowInputParameter"); + vocab.put( + "WorkflowOutputParameter", + "https://galaxyproject.org/gxformat2/v19_09#WorkflowOutputParameter"); + vocab.put("WorkflowStep", "https://galaxyproject.org/gxformat2/v19_09#WorkflowStep"); + vocab.put("WorkflowStepInput", "https://galaxyproject.org/gxformat2/v19_09#WorkflowStepInput"); + vocab.put( + "WorkflowStepOutput", "https://galaxyproject.org/gxformat2/v19_09#WorkflowStepOutput"); + vocab.put("WorkflowStepType", "https://galaxyproject.org/gxformat2/v19_09#WorkflowStepType"); + vocab.put("array", "array"); + vocab.put("boolean", "http://www.w3.org/2001/XMLSchema#boolean"); + vocab.put("collection", "https://galaxyproject.org/gxformat2/v19_09#GalaxyType/collection"); + vocab.put("data", "https://galaxyproject.org/gxformat2/v19_09#GalaxyType/data"); + vocab.put("double", "http://www.w3.org/2001/XMLSchema#double"); + vocab.put("enum", "enum"); + vocab.put("float", "http://www.w3.org/2001/XMLSchema#float"); + vocab.put("int", "http://www.w3.org/2001/XMLSchema#int"); + vocab.put("long", "http://www.w3.org/2001/XMLSchema#long"); + vocab.put("null", "https://w3id.org/cwl/salad#null"); + vocab.put("pause", "https://galaxyproject.org/gxformat2/v19_09#WorkflowStepType/pause"); + vocab.put("record", "record"); + vocab.put("string", "http://www.w3.org/2001/XMLSchema#string"); + vocab.put( + "subworkflow", "https://galaxyproject.org/gxformat2/v19_09#WorkflowStepType/subworkflow"); + vocab.put("tool", "https://galaxyproject.org/gxformat2/v19_09#WorkflowStepType/tool"); + + rvocab.put("https://w3id.org/cwl/salad#Any", "Any"); + rvocab.put("https://w3id.org/cwl/salad#ArraySchema", "ArraySchema"); + rvocab.put("https://w3id.org/cwl/salad#Documented", 
"Documented"); + rvocab.put("https://w3id.org/cwl/salad#EnumSchema", "EnumSchema"); + rvocab.put("https://galaxyproject.org/gxformat2/v19_09#GalaxyType/File", "File"); + rvocab.put("https://galaxyproject.org/gxformat2/v19_09#GalaxyType", "GalaxyType"); + rvocab.put("https://galaxyproject.org/gxformat2/v19_09#GalaxyWorkflow", "GalaxyWorkflow"); + rvocab.put( + "https://galaxyproject.org/gxformat2/gxformat2common#HasStepPosition", "HasStepPosition"); + rvocab.put("https://w3id.org/cwl/cwl#Identified", "Identified"); + rvocab.put("https://w3id.org/cwl/cwl#InputParameter", "InputParameter"); + rvocab.put("https://w3id.org/cwl/cwl#Labeled", "Labeled"); + rvocab.put("https://w3id.org/cwl/cwl#OutputParameter", "OutputParameter"); + rvocab.put("https://w3id.org/cwl/cwl#Parameter", "Parameter"); + rvocab.put("https://w3id.org/cwl/salad#PrimitiveType", "PrimitiveType"); + rvocab.put("https://w3id.org/cwl/cwl#Process", "Process"); + rvocab.put("https://w3id.org/cwl/salad#RecordField", "RecordField"); + rvocab.put("https://w3id.org/cwl/salad#RecordSchema", "RecordSchema"); + rvocab.put( + "https://galaxyproject.org/gxformat2/gxformat2common#ReferencesTool", "ReferencesTool"); + rvocab.put("https://galaxyproject.org/gxformat2/v19_09#Sink", "Sink"); + rvocab.put("https://galaxyproject.org/gxformat2/gxformat2common#StepPosition", "StepPosition"); + rvocab.put( + "https://galaxyproject.org/gxformat2/gxformat2common#ToolShedRepository", + "ToolShedRepository"); + rvocab.put( + "https://galaxyproject.org/gxformat2/v19_09#WorkflowInputParameter", + "WorkflowInputParameter"); + rvocab.put( + "https://galaxyproject.org/gxformat2/v19_09#WorkflowOutputParameter", + "WorkflowOutputParameter"); + rvocab.put("https://galaxyproject.org/gxformat2/v19_09#WorkflowStep", "WorkflowStep"); + rvocab.put("https://galaxyproject.org/gxformat2/v19_09#WorkflowStepInput", "WorkflowStepInput"); + rvocab.put( + "https://galaxyproject.org/gxformat2/v19_09#WorkflowStepOutput", "WorkflowStepOutput"); + rvocab.put("https://galaxyproject.org/gxformat2/v19_09#WorkflowStepType", "WorkflowStepType"); + rvocab.put("array", "array"); + rvocab.put("http://www.w3.org/2001/XMLSchema#boolean", "boolean"); + rvocab.put("https://galaxyproject.org/gxformat2/v19_09#GalaxyType/collection", "collection"); + rvocab.put("https://galaxyproject.org/gxformat2/v19_09#GalaxyType/data", "data"); + rvocab.put("http://www.w3.org/2001/XMLSchema#double", "double"); + rvocab.put("enum", "enum"); + rvocab.put("http://www.w3.org/2001/XMLSchema#float", "float"); + rvocab.put("http://www.w3.org/2001/XMLSchema#int", "int"); + rvocab.put("http://www.w3.org/2001/XMLSchema#long", "long"); + rvocab.put("https://w3id.org/cwl/salad#null", "null"); + rvocab.put("https://galaxyproject.org/gxformat2/v19_09#WorkflowStepType/pause", "pause"); + rvocab.put("record", "record"); + rvocab.put("http://www.w3.org/2001/XMLSchema#string", "string"); + rvocab.put( + "https://galaxyproject.org/gxformat2/v19_09#WorkflowStepType/subworkflow", "subworkflow"); + rvocab.put("https://galaxyproject.org/gxformat2/v19_09#WorkflowStepType/tool", "tool"); + } +} diff --git a/java/src/main/java/org/galaxyproject/gxformat2/v19_09/utils/DefaultFetcher.java b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/utils/DefaultFetcher.java new file mode 100644 index 0000000..679282e --- /dev/null +++ b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/utils/DefaultFetcher.java @@ -0,0 +1,32 @@ +package org.galaxyproject.gxformat2.v19_09.utils; + +import java.net.URI; + +public class DefaultFetcher 
implements Fetcher { + + public String urlJoin(final String baseUrl, final String url) { + if (url.startsWith("_:")) { + return url; + } + + final URI baseUri = Uris.toUri(baseUrl); + final URI uri = Uris.toUri(url); + if (baseUri.getScheme() != null + && !baseUri.getScheme().equals("file") + && "file".equals(uri.getScheme())) { + throw new ValidationException( + String.format( + "Not resolving potential remote exploit %s from base %s".format(url, baseUrl))); + } + String result = baseUri.resolve(uri).toString(); + if (result.startsWith("file:")) { + // Well this is gross - needed for http as well? + result = "file://" + result.substring("file:".length()); + } + return result; + } + + public String fetchText(final String url) { + return "fetched"; + } +} diff --git a/java/src/main/java/org/galaxyproject/gxformat2/v19_09/utils/EnumLoader.java b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/utils/EnumLoader.java new file mode 100644 index 0000000..0a4885a --- /dev/null +++ b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/utils/EnumLoader.java @@ -0,0 +1,26 @@ +package org.galaxyproject.gxformat2.v19_09.utils; + +import java.lang.reflect.Method; + +public class EnumLoader implements Loader { + private final Class symbolEnumClass; + + public EnumLoader(final Class symbolEnumClass) { + this.symbolEnumClass = symbolEnumClass; + } + + public T load( + final Object doc, + final String baseUri, + final LoadingOptions loadingOptions, + final String docRoot) { + final String docString = Loader.validateOfJavaType(String.class, doc); + try { + final Method m = symbolEnumClass.getMethod("fromDocumentVal", String.class); + final T val = (T) m.invoke(null, docString); + return val; + } catch (final ReflectiveOperationException e) { + throw new RuntimeException(e); + } + } +} diff --git a/java/src/main/java/org/galaxyproject/gxformat2/v19_09/utils/Fetcher.java b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/utils/Fetcher.java new file mode 100644 index 0000000..ef1840d --- /dev/null +++ b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/utils/Fetcher.java @@ -0,0 +1,8 @@ +package org.galaxyproject.gxformat2.v19_09.utils; + +public interface Fetcher { + + public abstract String urlJoin(final String baseUrl, final String url); + + public abstract String fetchText(final String url); +} diff --git a/java/src/main/java/org/galaxyproject/gxformat2/v19_09/utils/IdMapLoader.java b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/utils/IdMapLoader.java new file mode 100644 index 0000000..03a1c25 --- /dev/null +++ b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/utils/IdMapLoader.java @@ -0,0 +1,53 @@ +package org.galaxyproject.gxformat2.v19_09.utils; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.TreeSet; + +public class IdMapLoader implements Loader { + private final Loader innerLoader; + private final String mapSubject; + private final String mapPredicate; + + public IdMapLoader( + final Loader innerLoader, final String mapSubject, final String mapPredicate) { + this.innerLoader = innerLoader; + this.mapSubject = mapSubject; + this.mapPredicate = mapPredicate; + } + + public T load( + final Object doc_, + final String baseUri, + final LoadingOptions loadingOptions, + final String docRoot) { + Object doc = doc_; + if (doc instanceof Map) { + final Map docMap = (Map) doc; + final List asList = new ArrayList(); + final TreeSet sortedKeys = new TreeSet(); + sortedKeys.addAll(docMap.keySet()); + for 
(final String key : sortedKeys) { + final Object el = docMap.get(key); + if (el instanceof Map) { + final Map v2 = new HashMap((Map) el); + v2.put(this.mapSubject, key); + asList.add(v2); + } else { + if (this.mapPredicate != null) { + final Map v3 = new HashMap(); + v3.put(this.mapPredicate, el); + v3.put(this.mapSubject, key); + asList.add(v3); + } else { + throw new ValidationException("No mapPredicate"); + } + } + } + doc = asList; + } + return this.innerLoader.load(doc, baseUri, loadingOptions); + } +} diff --git a/java/src/main/java/org/galaxyproject/gxformat2/v19_09/utils/Loader.java b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/utils/Loader.java new file mode 100644 index 0000000..8e0d649 --- /dev/null +++ b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/utils/Loader.java @@ -0,0 +1,111 @@ +package org.galaxyproject.gxformat2.v19_09.utils; + +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +public interface Loader { + + abstract T load( + final Object doc, + final String baseUri, + final LoadingOptions loadingOptions, + final String docRoot); + + default T load(final Object doc, final String baseUri, final LoadingOptions loadingOptions) { + return load(doc, baseUri, loadingOptions, null); + } + + default T documentLoad( + final String doc, final String baseUri, final LoadingOptions loadingOptions) { + return load(doc, baseUri, loadingOptions); + } + + default T documentLoad( + final Map doc_, final String baseUri_, final LoadingOptions loadingOptions_) { + Map doc = doc_; + LoadingOptions loadingOptions = loadingOptions_; + if (doc.containsKey("$namespaces")) { + final Map namespaces = (Map) doc.get("$namespaces"); + loadingOptions = + new LoadingOptionsBuilder().copiedFrom(loadingOptions).setNamespaces(namespaces).build(); + doc = copyWithoutKey(doc, "$namespaces"); + } + String baseUri = baseUri_; + if (doc.containsKey("$base")) { + baseUri = (String) doc.get("$base"); + } + if (doc.containsKey("$graph")) { + return load(doc.get("$graph"), baseUri, loadingOptions); + } else { + return load(doc, baseUri, loadingOptions, baseUri); + } + } + + default T documentLoad( + final List doc, final String baseUri, final LoadingOptions loadingOptions) { + return load(doc, baseUri, loadingOptions); + } + + default T documentLoadByUrl(final String url, final LoadingOptions loadingOptions) { + if (loadingOptions.idx.containsKey(url)) { + return documentLoad(loadingOptions.idx.get(url), url, loadingOptions); + } + + final String text = loadingOptions.fetcher.fetchText(url); + final Map result = YamlUtils.mapFromString(text); + loadingOptions.idx.put(url, result); + final LoadingOptionsBuilder urlLoadingOptions = + new LoadingOptionsBuilder().copiedFrom(loadingOptions).setFileUri(url); + return documentLoad(result, url, urlLoadingOptions.build()); + } + + default T loadField( + final Object val_, final String baseUri, final LoadingOptions loadingOptions) { + Object val = val_; + if (val instanceof Map) { + Map valMap = (Map) val; + if (valMap.containsKey("$import")) { + if (loadingOptions.fileUri == null) { + throw new ValidationException("Cannot load $import without fileuri"); + } + return documentLoadByUrl( + loadingOptions.fetcher.urlJoin(loadingOptions.fileUri, (String) valMap.get("$import")), + loadingOptions); + } else if (valMap.containsKey("$include")) { + if (loadingOptions.fileUri == null) { + throw new ValidationException("Cannot load $import without fileuri"); + } + val = + loadingOptions.fetcher.fetchText( + loadingOptions.fetcher.urlJoin( 
+ loadingOptions.fileUri, (String) valMap.get("$include"))); + } + } + return load(val, baseUri, loadingOptions); + } + + private Map copyWithoutKey(final Map doc, final String key) { + final Map result = new HashMap(); + for (final Map.Entry entry : doc.entrySet()) { + if (!entry.getKey().equals(key)) { + result.put(entry.getKey(), entry.getValue()); + } + } + return result; + } + + public static T validateOfJavaType(final Class clazz, final Object doc) { + if (!clazz.isInstance(doc)) { + String className = "null"; + if (doc != null) { + className = doc.getClass().getName(); + } + final String message = + String.format( + "Expected object with Java type of %s but got %s", clazz.getName(), className); + throw new ValidationException(message); + } + return (T) doc; + } +} diff --git a/java/src/main/java/org/galaxyproject/gxformat2/v19_09/utils/LoaderInstances.java b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/utils/LoaderInstances.java new file mode 100644 index 0000000..d6e2907 --- /dev/null +++ b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/utils/LoaderInstances.java @@ -0,0 +1,218 @@ +package org.galaxyproject.gxformat2.v19_09.utils; + +import org.galaxyproject.gxformat2.v19_09.*; + +public class LoaderInstances { + public static Loader IntegerInstance = new PrimitiveLoader(Integer.class); + public static Loader BooleanInstance = new PrimitiveLoader(Boolean.class); + public static Loader StringInstance = new PrimitiveLoader(String.class); + public static Loader AnyInstance = new AnyLoader(); + public static Loader LongInstance = new PrimitiveLoader(Long.class); + public static Loader FloatInstance = new PrimitiveLoader(Float.class); + public static Loader NullInstance = new NullLoader(); + public static Loader DoubleInstance = new PrimitiveLoader(Double.class); + public static Loader Documented = + new RecordLoader( + org.galaxyproject.gxformat2.v19_09.Documented.class); + public static Loader PrimitiveType = new EnumLoader(PrimitiveType.class); + public static Loader Any = new EnumLoader(Any.class); + public static Loader RecordField = + new RecordLoader( + org.galaxyproject.gxformat2.v19_09.RecordFieldImpl.class); + public static Loader RecordSchema = + new RecordLoader( + org.galaxyproject.gxformat2.v19_09.RecordSchemaImpl.class); + public static Loader EnumSchema = + new RecordLoader( + org.galaxyproject.gxformat2.v19_09.EnumSchemaImpl.class); + public static Loader ArraySchema = + new RecordLoader( + org.galaxyproject.gxformat2.v19_09.ArraySchemaImpl.class); + public static Loader Labeled = + new RecordLoader( + org.galaxyproject.gxformat2.v19_09.Labeled.class); + public static Loader Identified = + new RecordLoader( + org.galaxyproject.gxformat2.v19_09.Identified.class); + public static Loader Parameter = + new RecordLoader( + org.galaxyproject.gxformat2.v19_09.Parameter.class); + public static Loader InputParameter = + new RecordLoader( + org.galaxyproject.gxformat2.v19_09.InputParameter.class); + public static Loader OutputParameter = + new RecordLoader( + org.galaxyproject.gxformat2.v19_09.OutputParameter.class); + public static Loader Process = + new RecordLoader( + org.galaxyproject.gxformat2.v19_09.Process.class); + public static Loader HasStepPosition = + new RecordLoader( + org.galaxyproject.gxformat2.v19_09.HasStepPosition.class); + public static Loader StepPosition = + new RecordLoader( + org.galaxyproject.gxformat2.v19_09.StepPositionImpl.class); + public static Loader ReferencesTool = + new RecordLoader( + 
org.galaxyproject.gxformat2.v19_09.ReferencesTool.class); + public static Loader ToolShedRepository = + new RecordLoader( + org.galaxyproject.gxformat2.v19_09.ToolShedRepositoryImpl.class); + public static Loader GalaxyType = new EnumLoader(GalaxyType.class); + public static Loader WorkflowStepType = new EnumLoader(WorkflowStepType.class); + public static Loader + WorkflowInputParameter = + new RecordLoader( + org.galaxyproject.gxformat2.v19_09.WorkflowInputParameterImpl.class); + public static Loader + WorkflowOutputParameter = + new RecordLoader( + org.galaxyproject.gxformat2.v19_09.WorkflowOutputParameterImpl.class); + public static Loader WorkflowStep = + new RecordLoader( + org.galaxyproject.gxformat2.v19_09.WorkflowStepImpl.class); + public static Loader Sink = + new RecordLoader( + org.galaxyproject.gxformat2.v19_09.Sink.class); + public static Loader WorkflowStepInput = + new RecordLoader( + org.galaxyproject.gxformat2.v19_09.WorkflowStepInputImpl.class); + public static Loader WorkflowStepOutput = + new RecordLoader( + org.galaxyproject.gxformat2.v19_09.WorkflowStepOutputImpl.class); + public static Loader GalaxyWorkflow = + new RecordLoader( + org.galaxyproject.gxformat2.v19_09.GalaxyWorkflowImpl.class); + public static Loader> array_of_StringInstance = + new ArrayLoader(StringInstance); + public static Loader union_of_NullInstance_or_StringInstance_or_array_of_StringInstance = + new UnionLoader(new Loader[] {NullInstance, StringInstance, array_of_StringInstance}); + public static Loader uri_StringInstance_True_False_None = + new UriLoader(StringInstance, true, false, null); + public static Loader + union_of_PrimitiveType_or_RecordSchema_or_EnumSchema_or_ArraySchema_or_StringInstance = + new UnionLoader( + new Loader[] {PrimitiveType, RecordSchema, EnumSchema, ArraySchema, StringInstance}); + public static Loader> + array_of_union_of_PrimitiveType_or_RecordSchema_or_EnumSchema_or_ArraySchema_or_StringInstance = + new ArrayLoader( + union_of_PrimitiveType_or_RecordSchema_or_EnumSchema_or_ArraySchema_or_StringInstance); + public static Loader + union_of_PrimitiveType_or_RecordSchema_or_EnumSchema_or_ArraySchema_or_StringInstance_or_array_of_union_of_PrimitiveType_or_RecordSchema_or_EnumSchema_or_ArraySchema_or_StringInstance = + new UnionLoader( + new Loader[] { + PrimitiveType, + RecordSchema, + EnumSchema, + ArraySchema, + StringInstance, + array_of_union_of_PrimitiveType_or_RecordSchema_or_EnumSchema_or_ArraySchema_or_StringInstance + }); + public static Loader + typedsl_union_of_PrimitiveType_or_RecordSchema_or_EnumSchema_or_ArraySchema_or_StringInstance_or_array_of_union_of_PrimitiveType_or_RecordSchema_or_EnumSchema_or_ArraySchema_or_StringInstance_2 = + new TypeDslLoader( + union_of_PrimitiveType_or_RecordSchema_or_EnumSchema_or_ArraySchema_or_StringInstance_or_array_of_union_of_PrimitiveType_or_RecordSchema_or_EnumSchema_or_ArraySchema_or_StringInstance, + 2); + public static Loader> array_of_RecordField = + new ArrayLoader(RecordField); + public static Loader>> optional_array_of_RecordField = + new OptionalLoader(array_of_RecordField); + public static Loader>> + idmap_fields_optional_array_of_RecordField = + new IdMapLoader(optional_array_of_RecordField, "name", "type"); + public static Loader + enum_d9cba076fca539106791a4f46d198c7fcfbdb779 = + new EnumLoader(enum_d9cba076fca539106791a4f46d198c7fcfbdb779.class); + public static Loader + typedsl_enum_d9cba076fca539106791a4f46d198c7fcfbdb779_2 = + new TypeDslLoader(enum_d9cba076fca539106791a4f46d198c7fcfbdb779, 2); + public 
static Loader> uri_array_of_StringInstance_True_False_None = + new UriLoader(array_of_StringInstance, true, false, null); + public static Loader + enum_d961d79c225752b9fadb617367615ab176b47d77 = + new EnumLoader(enum_d961d79c225752b9fadb617367615ab176b47d77.class); + public static Loader + typedsl_enum_d961d79c225752b9fadb617367615ab176b47d77_2 = + new TypeDslLoader(enum_d961d79c225752b9fadb617367615ab176b47d77, 2); + public static Loader + uri_union_of_PrimitiveType_or_RecordSchema_or_EnumSchema_or_ArraySchema_or_StringInstance_or_array_of_union_of_PrimitiveType_or_RecordSchema_or_EnumSchema_or_ArraySchema_or_StringInstance_False_True_2 = + new UriLoader( + union_of_PrimitiveType_or_RecordSchema_or_EnumSchema_or_ArraySchema_or_StringInstance_or_array_of_union_of_PrimitiveType_or_RecordSchema_or_EnumSchema_or_ArraySchema_or_StringInstance, + false, + true, + 2); + public static Loader + enum_d062602be0b4b8fd33e69e29a841317b6ab665bc = + new EnumLoader(enum_d062602be0b4b8fd33e69e29a841317b6ab665bc.class); + public static Loader + typedsl_enum_d062602be0b4b8fd33e69e29a841317b6ab665bc_2 = + new TypeDslLoader(enum_d062602be0b4b8fd33e69e29a841317b6ab665bc, 2); + public static Loader> optional_StringInstance = + new OptionalLoader(StringInstance); + public static Loader> uri_optional_StringInstance_True_False_None = + new UriLoader(optional_StringInstance, true, false, null); + public static Loader> optional_AnyInstance = + new OptionalLoader(AnyInstance); + public static Loader> array_of_WorkflowInputParameter = + new ArrayLoader(WorkflowInputParameter); + public static Loader> idmap_inputs_array_of_WorkflowInputParameter = + new IdMapLoader(array_of_WorkflowInputParameter, "id", "type"); + public static Loader> array_of_WorkflowOutputParameter = + new ArrayLoader(WorkflowOutputParameter); + public static Loader> idmap_outputs_array_of_WorkflowOutputParameter = + new IdMapLoader(array_of_WorkflowOutputParameter, "id", "type"); + public static Loader> optional_StepPosition = + new OptionalLoader(StepPosition); + public static Loader> optional_ToolShedRepository = + new OptionalLoader(ToolShedRepository); + public static Loader> optional_GalaxyType = + new OptionalLoader(GalaxyType); + public static Loader> typedsl_optional_GalaxyType_2 = + new TypeDslLoader(optional_GalaxyType, 2); + public static Loader> array_of_WorkflowStepInput = + new ArrayLoader(WorkflowStepInput); + public static Loader>> + optional_array_of_WorkflowStepInput = new OptionalLoader(array_of_WorkflowStepInput); + public static Loader>> + idmap_in_optional_array_of_WorkflowStepInput = + new IdMapLoader(optional_array_of_WorkflowStepInput, "id", "source"); + public static Loader union_of_StringInstance_or_WorkflowStepOutput = + new UnionLoader(new Loader[] {StringInstance, WorkflowStepOutput}); + public static Loader> + array_of_union_of_StringInstance_or_WorkflowStepOutput = + new ArrayLoader(union_of_StringInstance_or_WorkflowStepOutput); + public static Loader>> + optional_array_of_union_of_StringInstance_or_WorkflowStepOutput = + new OptionalLoader(array_of_union_of_StringInstance_or_WorkflowStepOutput); + public static Loader>> + uri_optional_array_of_union_of_StringInstance_or_WorkflowStepOutput_True_False_None = + new UriLoader( + optional_array_of_union_of_StringInstance_or_WorkflowStepOutput, true, false, null); + public static Loader> optional_WorkflowStepType = + new OptionalLoader(WorkflowStepType); + public static Loader> typedsl_optional_WorkflowStepType_2 = + new TypeDslLoader(optional_WorkflowStepType, 2); + 
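
Every field loader referenced by name in the record constructors (for example `optional_StringInstance` or `typedsl_optional_WorkflowStepType_2` above) is assembled here by wrapping the primitive, enum and record loaders in Optional/Array/Union/IdMap/TypeDsl/Uri decorators. A small sketch of calling two of them directly, not part of the generated sources; the literal values are illustrative assumptions:

    import java.util.Optional;
    import org.galaxyproject.gxformat2.v19_09.utils.LoaderInstances;
    import org.galaxyproject.gxformat2.v19_09.utils.LoadingOptions;
    import org.galaxyproject.gxformat2.v19_09.utils.LoadingOptionsBuilder;

    public class LoaderSketch {
      public static void main(final String[] args) {
        final LoadingOptions opts = new LoadingOptionsBuilder().build();
        // The primitive loader only validates the Java type and returns the value as-is.
        final String label =
            LoaderInstances.StringInstance.load("My step", "file:///example", opts);
        // The Optional wrapper turns a missing (null) value into Optional.empty()
        // instead of a validation error.
        final Optional<String> missing =
            LoaderInstances.optional_StringInstance.load(null, "file:///example", opts);
        System.out.println(label + " / " + missing.isPresent()); // My step / false
      }
    }
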
public static Loader> optional_GalaxyWorkflow = + new OptionalLoader(GalaxyWorkflow); + public static Loader> + uri_optional_GalaxyWorkflow_False_False_None = + new UriLoader(optional_GalaxyWorkflow, false, false, null); + public static Loader + uri_union_of_NullInstance_or_StringInstance_or_array_of_StringInstance_False_False_2 = + new UriLoader( + union_of_NullInstance_or_StringInstance_or_array_of_StringInstance, false, false, 2); + public static Loader uri_StringInstance_False_True_None = + new UriLoader(StringInstance, false, true, null); + public static Loader> array_of_WorkflowStep = + new ArrayLoader(WorkflowStep); + public static Loader> idmap_steps_array_of_WorkflowStep = + new IdMapLoader(array_of_WorkflowStep, "id", "None"); + public static Loader> array_of_GalaxyWorkflow = + new ArrayLoader(GalaxyWorkflow); + public static Loader< + org.galaxyproject.gxformat2.v19_09.utils.OneOrListOf< + org.galaxyproject.gxformat2.v19_09.GalaxyWorkflow>> + one_or_array_of_GalaxyWorkflow = + new OneOrListOfLoader( + GalaxyWorkflow, array_of_GalaxyWorkflow); +} diff --git a/java/src/main/java/org/galaxyproject/gxformat2/v19_09/utils/LoadingOptions.java b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/utils/LoadingOptions.java new file mode 100644 index 0000000..21a2bb7 --- /dev/null +++ b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/utils/LoadingOptions.java @@ -0,0 +1,124 @@ +package org.galaxyproject.gxformat2.v19_09.utils; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Map; + +public class LoadingOptions { + Fetcher fetcher; + String fileUri; + Map namespaces; + List schemas; + Map> idx; + Map vocab; + Map rvocab; + + LoadingOptions( + final Fetcher fetcher, + final String fileUri, + final Map namespaces, + final List schemas, + final Map> idx) { + this.fetcher = fetcher; + this.fileUri = fileUri; + this.namespaces = namespaces; + this.schemas = schemas; + this.idx = idx; + + if (namespaces != null) { + this.vocab = (Map) ConstantMaps.vocab.clone(); + this.rvocab = (Map) ConstantMaps.rvocab.clone(); + for (Map.Entry namespaceEntry : namespaces.entrySet()) { + this.vocab.put(namespaceEntry.getKey(), namespaceEntry.getValue()); + this.rvocab.put(namespaceEntry.getValue(), namespaceEntry.getKey()); + } + } else { + this.vocab = (Map) ConstantMaps.vocab; + this.rvocab = (Map) ConstantMaps.rvocab; + } + } + + public String expandUrl( + String url_, + final String baseUrl, + final boolean scopedId, + final boolean vocabTerm, + final Integer scopedRef) { + // NOT CONVERTING this - doesn't match type declaration + // if not isinstance(url, string_types): + // return url + String url = url_; + if (url.equals("@id") || url.equals("@type")) { + return url; + } + + if (vocabTerm && this.vocab.containsKey(url)) { + return url; + } + + if (!this.vocab.isEmpty() && url.contains(":")) { + String prefix = url.split(":", 1)[0]; + if (this.vocab.containsKey(prefix)) { + url = this.vocab.get(prefix) + url.substring(prefix.length() + 1); + } + } + + Uris.UriSplit split = Uris.split(url); + final String scheme = split.scheme; + final boolean hasFragment = stringHasContent(split.fragment); + if (scheme != null + && ((scheme.length() > 0 + && (scheme.equals("http") || scheme.equals("https") || scheme.equals("file"))) + || url.startsWith("$(") + || url.startsWith("${"))) { + // pass + } else if (scopedId && !hasFragment) { + final Uris.UriSplit splitbase = Uris.split(baseUrl); + final String frg; + if (stringHasContent(splitbase.fragment)) { + frg = 
splitbase.fragment + "/" + split.path; + } else { + frg = split.path; + } + String pt; + if (!splitbase.path.equals("")) { + pt = splitbase.path; + } else { + pt = "/"; + } + url = Uris.unsplit(splitbase.scheme, splitbase.netloc, pt, splitbase.query, frg); + } else if (scopedRef != null && !hasFragment) { + final Uris.UriSplit splitbase = Uris.split(baseUrl); + final ArrayList sp = new ArrayList(Arrays.asList(splitbase.fragment.split("/"))); + int n = scopedRef; + while (n > 0 && sp.size() > 0) { + sp.remove(0); + n -= 1; + } + sp.add(url); + final String fragment = String.join("/", sp); + url = + Uris.unsplit( + splitbase.scheme, splitbase.netloc, splitbase.path, splitbase.query, fragment); + } else { + url = this.fetcher.urlJoin(baseUrl, url); + } + + if (vocabTerm) { + split = Uris.split(url); + if (stringHasContent(split.scheme)) { + if (this.rvocab.containsKey(url)) { + return this.rvocab.get(url); + } + } else { + throw new ValidationException("Term '{}' not in vocabulary".format(url)); + } + } + return url; + } + + static boolean stringHasContent(final String s) { + return s != null && s.length() > 0; + } +} diff --git a/java/src/main/java/org/galaxyproject/gxformat2/v19_09/utils/LoadingOptionsBuilder.java b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/utils/LoadingOptionsBuilder.java new file mode 100644 index 0000000..2011746 --- /dev/null +++ b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/utils/LoadingOptionsBuilder.java @@ -0,0 +1,62 @@ +package org.galaxyproject.gxformat2.v19_09.utils; + +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Optional; + +public class LoadingOptionsBuilder { + private Optional fetcher = Optional.empty(); + private Optional fileUri = Optional.empty(); + private Optional> namespaces = Optional.empty(); + private Optional> schemas = Optional.empty(); + private Optional copyFrom = Optional.empty(); + + public LoadingOptionsBuilder() {} + + public LoadingOptionsBuilder setFetcher(final Fetcher fetcher) { + this.fetcher = Optional.of(fetcher); + return this; + } + + public LoadingOptionsBuilder copiedFrom(final LoadingOptions copyFrom) { + this.copyFrom = Optional.of(copyFrom); + return this; + } + + public LoadingOptionsBuilder setFileUri(final String fileUri) { + this.fileUri = Optional.of(fileUri); + return this; + } + + public LoadingOptionsBuilder setNamespaces(final Map namespaces) { + this.namespaces = Optional.of(namespaces); + return this; + } + + public LoadingOptions build() { + Fetcher fetcher = this.fetcher.orElse(null); + String fileUri = this.fileUri.orElse(null); + List schemas = this.schemas.orElse(null); + Map namespaces = this.namespaces.orElse(null); + Map> idx = new HashMap>(); + if (this.copyFrom.isPresent()) { + final LoadingOptions copyFrom = this.copyFrom.get(); + idx = copyFrom.idx; + if (fetcher == null) { + fetcher = copyFrom.fetcher; + } + if (fileUri == null) { + fileUri = copyFrom.fileUri; + } + if (namespaces == null) { + namespaces = copyFrom.namespaces; + schemas = copyFrom.schemas; // Bug in Python codegen? 
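
`expandUrl` above implements the scoped-id, scoped-ref and vocabulary resolution rules used by the URI loaders. A short sketch of two of those behaviours against a freshly built `LoadingOptions`, not part of the generated sources; the file URI is an illustrative assumption:

    import org.galaxyproject.gxformat2.v19_09.utils.LoadingOptions;
    import org.galaxyproject.gxformat2.v19_09.utils.LoadingOptionsBuilder;

    public class ExpandUrlSketch {
      public static void main(final String[] args) {
        final LoadingOptions opts =
            new LoadingOptionsBuilder().setFileUri("file:///example/workflow.gxwf.yml").build();
        // Scoped ids become a fragment on the base document:
        System.out.println(
            opts.expandUrl("my_input", "file:///example/workflow.gxwf.yml", true, false, null));
        // e.g. file:///example/workflow.gxwf.yml#my_input
        // Known vocabulary terms are returned unchanged:
        System.out.println(
            opts.expandUrl("tool", "file:///example/workflow.gxwf.yml", false, true, null));
        // e.g. tool
      }
    }
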
+ } + } + if (fetcher == null) { + fetcher = new DefaultFetcher(); + } + return new LoadingOptions(fetcher, fileUri, namespaces, schemas, idx); + } +} diff --git a/java/src/main/java/org/galaxyproject/gxformat2/v19_09/utils/NullLoader.java b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/utils/NullLoader.java new file mode 100644 index 0000000..6d2ee01 --- /dev/null +++ b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/utils/NullLoader.java @@ -0,0 +1,15 @@ +package org.galaxyproject.gxformat2.v19_09.utils; + +public class NullLoader implements Loader { + + public Object load( + final Object doc, + final String baseUri, + final LoadingOptions loadingOptions, + final String docRoot) { + if (doc != null) { + throw new ValidationException("Expected null"); + } + return doc; + } +} diff --git a/java/src/main/java/org/galaxyproject/gxformat2/v19_09/utils/OneOrListOf.java b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/utils/OneOrListOf.java new file mode 100644 index 0000000..90e17b2 --- /dev/null +++ b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/utils/OneOrListOf.java @@ -0,0 +1,47 @@ +package org.galaxyproject.gxformat2.v19_09.utils; + +import java.util.List; +import java.util.Optional; + +public class OneOrListOf { + private Optional object; + private Optional> objects; + + private OneOrListOf(final T object, final List objects) { + this.object = Optional.ofNullable(object); + this.objects = Optional.ofNullable(objects); + } + + public static OneOrListOf oneOf(T object) { + return new OneOrListOf(object, null); + } + + public static OneOrListOf listOf(List objects) { + assert objects != null; + return new OneOrListOf(null, objects); + } + + public boolean isOne() { + return this.getOneOptional().isPresent(); + } + + public boolean isList() { + return this.getListOptional().isPresent(); + } + + public Optional getOneOptional() { + return this.object; + } + + public Optional> getListOptional() { + return this.objects; + } + + public T getOne() { + return this.getOneOptional().get(); + } + + public List getList() { + return this.getListOptional().get(); + } +} diff --git a/java/src/main/java/org/galaxyproject/gxformat2/v19_09/utils/OneOrListOfLoader.java b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/utils/OneOrListOfLoader.java new file mode 100644 index 0000000..9a91a1e --- /dev/null +++ b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/utils/OneOrListOfLoader.java @@ -0,0 +1,33 @@ +package org.galaxyproject.gxformat2.v19_09.utils; + +import java.util.ArrayList; +import java.util.List; + +public class OneOrListOfLoader implements Loader> { + private final Loader oneLoader; + private final Loader> listLoader; + + public OneOrListOfLoader(Loader oneLoader, Loader> listLoader) { + this.oneLoader = oneLoader; + this.listLoader = listLoader; + } + + public OneOrListOf load( + final Object doc, + final String baseUri, + final LoadingOptions loadingOptions, + final String docRoot) { + final List errors = new ArrayList(); + try { + return OneOrListOf.oneOf(this.oneLoader.load(doc, baseUri, loadingOptions, docRoot)); + } catch (ValidationException e) { + errors.add(e); + } + try { + return OneOrListOf.listOf(this.listLoader.load(doc, baseUri, loadingOptions, docRoot)); + } catch (ValidationException e) { + errors.add(e); + } + throw new ValidationException("Failed to one or list of of type", errors); + } +} diff --git a/java/src/main/java/org/galaxyproject/gxformat2/v19_09/utils/OptionalLoader.java 
b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/utils/OptionalLoader.java new file mode 100644 index 0000000..39647c1 --- /dev/null +++ b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/utils/OptionalLoader.java @@ -0,0 +1,22 @@ +package org.galaxyproject.gxformat2.v19_09.utils; + +import java.util.Optional; + +public class OptionalLoader implements Loader> { + private final Loader itemLoader; + + public OptionalLoader(Loader itemLoader) { + this.itemLoader = itemLoader; + } + + public Optional load( + final Object doc, + final String baseUri, + final LoadingOptions loadingOptions, + final String docRoot) { + if (doc == null) { + return Optional.empty(); + } + return Optional.of(itemLoader.load(doc, baseUri, loadingOptions, docRoot)); + } +} diff --git a/java/src/main/java/org/galaxyproject/gxformat2/v19_09/utils/PrimitiveLoader.java b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/utils/PrimitiveLoader.java new file mode 100644 index 0000000..56f5e67 --- /dev/null +++ b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/utils/PrimitiveLoader.java @@ -0,0 +1,17 @@ +package org.galaxyproject.gxformat2.v19_09.utils; + +public class PrimitiveLoader implements Loader { + private Class clazz; + + public PrimitiveLoader(Class clazz) { + this.clazz = clazz; + } + + public T load( + final Object doc, + final String baseUri, + final LoadingOptions loadingOptions, + final String docRoot) { + return Loader.validateOfJavaType(this.clazz, doc); + } +} diff --git a/java/src/main/java/org/galaxyproject/gxformat2/v19_09/utils/RecordLoader.java b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/utils/RecordLoader.java new file mode 100644 index 0000000..c6ab319 --- /dev/null +++ b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/utils/RecordLoader.java @@ -0,0 +1,35 @@ +package org.galaxyproject.gxformat2.v19_09.utils; + +import java.lang.reflect.Constructor; +import java.lang.reflect.InvocationTargetException; + +public class RecordLoader implements Loader { + private final Class savableClass; + + public RecordLoader(final Class savableClass) { + this.savableClass = savableClass; + } + + public T load( + final Object doc, + final String baseUri, + final LoadingOptions loadingOptions, + final String docRoot) { + Loader.validateOfJavaType(java.util.Map.class, doc); + try { + final Constructor constructor = + this.savableClass.getConstructor( + new Class[] {Object.class, String.class, LoadingOptions.class, String.class}); + final T ret = constructor.newInstance(doc, baseUri, loadingOptions, docRoot); + return ret; + } catch (InvocationTargetException e) { + final Throwable cause = e.getCause(); + if (cause instanceof RuntimeException) { + throw (RuntimeException) cause; + } + throw new RuntimeException(e.getCause()); + } catch (ReflectiveOperationException e) { + throw new RuntimeException(e); + } + } +} diff --git a/java/src/main/java/org/galaxyproject/gxformat2/v19_09/utils/RootLoader.java b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/utils/RootLoader.java new file mode 100644 index 0000000..0c3cd77 --- /dev/null +++ b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/utils/RootLoader.java @@ -0,0 +1,132 @@ +package org.galaxyproject.gxformat2.v19_09.utils; + +import java.io.File; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.Map; + +public class RootLoader { + public static org.galaxyproject.gxformat2.v19_09.utils.OneOrListOf< + 
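`PrimitiveLoader`, `OptionalLoader` and `RecordLoader` are meant to be composed; the generated `LoaderInstances` class wires such compositions together for the schema types. A small hand-written composition is sketched below purely as an illustration (not part of the diff); the generic parameters and the example class name are assumptions.

```
import java.util.Optional;
import org.galaxyproject.gxformat2.v19_09.utils.Loader;
import org.galaxyproject.gxformat2.v19_09.utils.LoadingOptions;
import org.galaxyproject.gxformat2.v19_09.utils.LoadingOptionsBuilder;
import org.galaxyproject.gxformat2.v19_09.utils.OptionalLoader;
import org.galaxyproject.gxformat2.v19_09.utils.PrimitiveLoader;

public class LoaderCompositionExample {
  public static void main(String[] args) {
    final LoadingOptions options = new LoadingOptionsBuilder().build();

    // A loader for plain strings...
    final Loader<String> stringLoader = new PrimitiveLoader<String>(String.class);
    // ...wrapped so that a null value becomes Optional.empty() instead of a validation failure.
    final Loader<Optional<String>> optionalString = new OptionalLoader<String>(stringLoader);

    final Optional<String> present =
        optionalString.load("cat1", "file:///tmp/doc.yml", options, null);
    final Optional<String> absent =
        optionalString.load(null, "file:///tmp/doc.yml", options, null);

    System.out.println(present.get());      // cat1
    System.out.println(absent.isPresent()); // false
  }
}
```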
org.galaxyproject.gxformat2.v19_09.GalaxyWorkflow> + loadDocument( + final Map doc, + final String baseUri_, + final LoadingOptions loadingOptions_) { + final String baseUri = ensureBaseUri(baseUri_); + LoadingOptions loadingOptions = loadingOptions_; + if (loadingOptions == null) { + loadingOptions = new LoadingOptionsBuilder().build(); + } + return LoaderInstances.one_or_array_of_GalaxyWorkflow.documentLoad( + doc, baseUri, loadingOptions); + } + + public static org.galaxyproject.gxformat2.v19_09.utils.OneOrListOf< + org.galaxyproject.gxformat2.v19_09.GalaxyWorkflow> + loadDocument(final Map doc, final String baseUri) { + return loadDocument(doc, baseUri, null); + } + + public static org.galaxyproject.gxformat2.v19_09.utils.OneOrListOf< + org.galaxyproject.gxformat2.v19_09.GalaxyWorkflow> + loadDocument(final Map doc) { + return loadDocument(doc, ensureBaseUri(null)); + } + + public static org.galaxyproject.gxformat2.v19_09.utils.OneOrListOf< + org.galaxyproject.gxformat2.v19_09.GalaxyWorkflow> + loadDocument(final Path path) { + return loadDocument(readPath(path)); + } + + public static org.galaxyproject.gxformat2.v19_09.utils.OneOrListOf< + org.galaxyproject.gxformat2.v19_09.GalaxyWorkflow> + loadDocument(final Path path, String baseUri) { + return loadDocument(readPath(path), baseUri); + } + + public static org.galaxyproject.gxformat2.v19_09.utils.OneOrListOf< + org.galaxyproject.gxformat2.v19_09.GalaxyWorkflow> + loadDocument(final Path path, LoadingOptions loadingOptions) { + return loadDocument(readPath(path), loadingOptions); + } + + public static org.galaxyproject.gxformat2.v19_09.utils.OneOrListOf< + org.galaxyproject.gxformat2.v19_09.GalaxyWorkflow> + loadDocument(final Path path, String baseUri, LoadingOptions loadingOptions) { + return loadDocument(readPath(path), baseUri, loadingOptions); + } + + public static org.galaxyproject.gxformat2.v19_09.utils.OneOrListOf< + org.galaxyproject.gxformat2.v19_09.GalaxyWorkflow> + loadDocument(final File file) { + return loadDocument(file.toPath()); + } + + public static org.galaxyproject.gxformat2.v19_09.utils.OneOrListOf< + org.galaxyproject.gxformat2.v19_09.GalaxyWorkflow> + loadDocument(final File file, String baseUri) { + return loadDocument(file.toPath(), baseUri); + } + + public static org.galaxyproject.gxformat2.v19_09.utils.OneOrListOf< + org.galaxyproject.gxformat2.v19_09.GalaxyWorkflow> + loadDocument(final File file, LoadingOptions loadingOptions) { + return loadDocument(file.toPath(), loadingOptions); + } + + public static org.galaxyproject.gxformat2.v19_09.utils.OneOrListOf< + org.galaxyproject.gxformat2.v19_09.GalaxyWorkflow> + loadDocument(final File file, String baseUri, LoadingOptions loadingOptions) { + return loadDocument(file.toPath(), baseUri, loadingOptions); + } + + public static org.galaxyproject.gxformat2.v19_09.utils.OneOrListOf< + org.galaxyproject.gxformat2.v19_09.GalaxyWorkflow> + loadDocument(final String doc) { + return loadDocument(doc, ensureBaseUri(null)); + } + + public static org.galaxyproject.gxformat2.v19_09.utils.OneOrListOf< + org.galaxyproject.gxformat2.v19_09.GalaxyWorkflow> + loadDocument(final String doc, final LoadingOptions loadingOptions) { + return loadDocument(doc, ensureBaseUri(null), loadingOptions); + } + + public static org.galaxyproject.gxformat2.v19_09.utils.OneOrListOf< + org.galaxyproject.gxformat2.v19_09.GalaxyWorkflow> + loadDocument(final String doc, final String uri) { + return loadDocument(doc, uri, null); + } + + public static 
org.galaxyproject.gxformat2.v19_09.utils.OneOrListOf< + org.galaxyproject.gxformat2.v19_09.GalaxyWorkflow> + loadDocument(final String doc, final String uri_, final LoadingOptions loadingOptions_) { + final String uri = ensureBaseUri(uri_); + LoadingOptions loadingOptions = loadingOptions_; + if (loadingOptions == null) { + loadingOptions = new LoadingOptionsBuilder().setFileUri(uri).build(); + } + final Map result = YamlUtils.mapFromString(doc); + loadingOptions.idx.put(uri, result); + return loadDocument(result, uri, loadingOptions); + } + + static String readPath(final Path path) { + try { + return new String(Files.readAllBytes(path), "UTF8"); + } catch (final IOException e) { + throw new RuntimeException(e); + } + } + + static String ensureBaseUri(final String baseUri_) { + String baseUri = baseUri_; + if (baseUri == null) { + baseUri = Uris.fileUri(Paths.get(".").toAbsolutePath().normalize().toString()) + "/"; + } + return baseUri; + } +} diff --git a/java/src/main/java/org/galaxyproject/gxformat2/v19_09/utils/Savable.java b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/utils/Savable.java new file mode 100644 index 0000000..2dd02db --- /dev/null +++ b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/utils/Savable.java @@ -0,0 +1,6 @@ +package org.galaxyproject.gxformat2.v19_09.utils; + +public interface Savable { + // TODO: implement writable interface + // public abstract void save(boolean top, String baseUrl, boolean relativeUris); +} diff --git a/java/src/main/java/org/galaxyproject/gxformat2/v19_09/utils/SavableImpl.java b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/utils/SavableImpl.java new file mode 100644 index 0000000..81a430f --- /dev/null +++ b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/utils/SavableImpl.java @@ -0,0 +1,5 @@ +package org.galaxyproject.gxformat2.v19_09.utils; + +public class SavableImpl implements Savable { + public SavableImpl(Object doc, String baseUri, LoadingOptions loadingOptions, String docRoot) {} +} diff --git a/java/src/main/java/org/galaxyproject/gxformat2/v19_09/utils/TypeDslLoader.java b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/utils/TypeDslLoader.java new file mode 100644 index 0000000..93e8687 --- /dev/null +++ b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/utils/TypeDslLoader.java @@ -0,0 +1,81 @@ +package org.galaxyproject.gxformat2.v19_09.utils; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +public class TypeDslLoader implements Loader { + private final Loader innerLoader; + private final Integer refScope; + private static final Pattern TYPE_DSL_REGEX = Pattern.compile("^([^\\[?]+)(\\[\\])?(\\?)?$"); + + public TypeDslLoader(final Loader innerLoader, final Integer refScope) { + this.innerLoader = innerLoader; + this.refScope = refScope; + } + + private Object resolve( + final String doc_, final String baseUri, final LoadingOptions loadingOptions) { + final Matcher m = TYPE_DSL_REGEX.matcher(doc_); + if (m.matches()) { + final String first = + loadingOptions.expandUrl(m.group(1), baseUri, false, true, this.refScope); + Object second = null; + Object third = null; + if (m.group(2) != null && m.group(2).length() > 0) { + HashMap resolvedMap = new HashMap(); + resolvedMap.put("type", "array"); + resolvedMap.put("items", first); + second = resolvedMap; + } + if (m.group(3) != null && m.group(3).length() > 0) { + third = Arrays.asList("null", second != null ? 
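`RootLoader` is the public entry point generated for the schema; the overloads above accept a parsed `Map`, a `Path`, a `File`, or a raw YAML string, and default the base URI to a `file://` URI for the current working directory. A minimal load might look like the sketch below; the wrapper class name and the workflow path are placeholders, and a document that fails schema validation would surface as a `ValidationException`.

```
import java.io.File;
import org.galaxyproject.gxformat2.v19_09.GalaxyWorkflow;
import org.galaxyproject.gxformat2.v19_09.utils.OneOrListOf;
import org.galaxyproject.gxformat2.v19_09.utils.RootLoader;

public class LoadWorkflowExample {
  public static void main(String[] args) {
    // Reads the file, parses the YAML, and validates it against the v19_09 schema.
    final OneOrListOf<GalaxyWorkflow> loaded =
        RootLoader.loadDocument(new File("tests/examples/wf.gxwf.yml")); // placeholder path

    if (loaded.isOne()) {
      System.out.println("Loaded a single GalaxyWorkflow: " + loaded.getOne());
    } else {
      System.out.println("Loaded " + loaded.getList().size() + " workflows");
    }
  }
}
```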
second : first); + } + if (third != null) { + return third; + } else { + return second != null ? second : first; + } + } else { + return doc_; + } + } + + public T load( + final Object doc_, + final String baseUri, + final LoadingOptions loadingOptions, + final String docRoot) { + Object doc = doc_; + if (doc instanceof List) { + final List docList = (List) doc; + final List r = new ArrayList(); + for (final Object d : docList) { + if (d instanceof String) { + Object resolved = this.resolve((String) d, baseUri, loadingOptions); + if (resolved instanceof List) { + List resolvedList = (List) resolved; + for (Object i : resolvedList) { + if (!r.contains(i)) { + r.add(i); + } + } + } else { + if (!r.contains(resolved)) { + r.add(resolved); + } + } + } else { + r.add(d); + } + } + doc = docList; + } else if (doc instanceof String) { + doc = this.resolve((String) doc, baseUri, loadingOptions); + } + return this.innerLoader.load(doc, baseUri, loadingOptions); + } +} diff --git a/java/src/main/java/org/galaxyproject/gxformat2/v19_09/utils/UnionLoader.java b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/utils/UnionLoader.java new file mode 100644 index 0000000..0209250 --- /dev/null +++ b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/utils/UnionLoader.java @@ -0,0 +1,33 @@ +package org.galaxyproject.gxformat2.v19_09.utils; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +public class UnionLoader implements Loader { + private final List alternates; + + public UnionLoader(List alternates) { + this.alternates = alternates; + } + + public UnionLoader(Loader[] alternates) { + this(Arrays.asList(alternates)); + } + + public Object load( + final Object doc, + final String baseUri, + final LoadingOptions loadingOptions, + final String docRoot) { + final List errors = new ArrayList(); + for (final Loader loader : this.alternates) { + try { + return loader.load(doc, baseUri, loadingOptions, docRoot); + } catch (ValidationException e) { + errors.add(e); + } + } + throw new ValidationException("Failed to match union type", errors); + } +} diff --git a/java/src/main/java/org/galaxyproject/gxformat2/v19_09/utils/UriLoader.java b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/utils/UriLoader.java new file mode 100644 index 0000000..58763a1 --- /dev/null +++ b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/utils/UriLoader.java @@ -0,0 +1,52 @@ +package org.galaxyproject.gxformat2.v19_09.utils; + +import java.util.ArrayList; +import java.util.List; + +public class UriLoader implements Loader { + private final Loader innerLoader; + private final boolean scopedId; + private final boolean vocabTerm; + private final Integer scopedRef; + + public UriLoader( + final Loader innerLoader, + final boolean scopedId, + final boolean vocabTerm, + final Integer scopedRef) { + this.innerLoader = innerLoader; + this.scopedId = scopedId; + this.vocabTerm = vocabTerm; + this.scopedRef = scopedRef; + } + + private Object expandUrl( + final Object object, final String baseUri, final LoadingOptions loadingOptions) { + if (object instanceof String) { + return loadingOptions.expandUrl( + (String) object, baseUri, this.scopedId, this.vocabTerm, this.scopedRef); + } else { + return object; + } + } + + public T load( + final Object doc_, + final String baseUri, + final LoadingOptions loadingOptions, + final String docRoot) { + Object doc = doc_; + if (doc instanceof List) { + List docList = (List) doc; + List docWithExpansion = new ArrayList(); + for (final Object el : 
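`TypeDslLoader`'s behaviour is driven entirely by `TYPE_DSL_REGEX` above: a bare name is left alone, a trailing `[]` is expanded into an array schema, and a trailing `?` adds `"null"` to form an optional union. The standalone sketch below is not part of the diff; it only exercises the same pattern to show how the three capture groups are interpreted.

```
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class TypeDslRegexDemo {
  // Same pattern as TypeDslLoader.TYPE_DSL_REGEX.
  private static final Pattern TYPE_DSL = Pattern.compile("^([^\\[?]+)(\\[\\])?(\\?)?$");

  public static void main(String[] args) {
    // "string"    -> base type only
    // "string[]"  -> {type: array, items: string}
    // "string[]?" -> ["null", {type: array, items: string}]
    for (final String dsl : new String[] {"string", "string[]", "string[]?"}) {
      final Matcher m = TYPE_DSL.matcher(dsl);
      if (m.matches()) {
        System.out.printf(
            "%-10s base=%s array=%b optional=%b%n",
            dsl, m.group(1), m.group(2) != null, m.group(3) != null);
      }
    }
  }
}
```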
docList) { + docWithExpansion.add(this.expandUrl(el, baseUri, loadingOptions)); + } + doc = docWithExpansion; + } + if (doc instanceof String) { + doc = this.expandUrl(doc, baseUri, loadingOptions); + } + return this.innerLoader.load(doc, baseUri, loadingOptions); + } +} diff --git a/java/src/main/java/org/galaxyproject/gxformat2/v19_09/utils/Uris.java b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/utils/Uris.java new file mode 100644 index 0000000..ae6fac5 --- /dev/null +++ b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/utils/Uris.java @@ -0,0 +1,117 @@ +package org.galaxyproject.gxformat2.v19_09.utils; + +import java.io.UnsupportedEncodingException; +import java.net.URI; +import java.net.URISyntaxException; +import java.nio.charset.StandardCharsets; + +public class Uris { + + // Emulate Python's urlsplit. + public static class UriSplit { + String scheme; + String netloc; + String path; + String query; + String fragment; + + public UriSplit(String scheme, String netloc, String path, String query, String fragment) { + this.scheme = scheme; + this.netloc = netloc; + this.path = path; + this.query = query; + this.fragment = fragment; + } + + public String toString() { + return String.format( + "UriSplit[%s,%s,%s,%s,%s]", + this.scheme, this.netloc, this.path, this.query, this.fragment); + } + } + + public static String fileUri(final String path) { + return fileUri(path, false); + } + + public static String fileUri(final String path, final boolean splitFrag) { + if (path.equals("file://")) { + return path; + } + String frag; + String urlPath; + if (splitFrag) { + final String[] pathsp = path.split("#", 2); + // is quoting this? + urlPath = Uris.quote(pathsp[0]); + if (pathsp.length == 2) { + frag = "#" + Uris.quote(pathsp[1]); + } else { + frag = ""; + urlPath = Uris.quote(path); + } + } else { + urlPath = Uris.quote(path); + frag = ""; + } + if (urlPath.startsWith("//")) { + return "file:" + urlPath + frag; + } else { + return "file://" + urlPath + frag; + } + } + + public static UriSplit split(final String uriString) { + try { + final URI uri = new URI(uriString); + return new Uris.UriSplit( + uri.getScheme(), uri.getAuthority(), uri.getPath(), uri.getQuery(), uri.getFragment()); + } catch (URISyntaxException e) { + return new Uris.UriSplit(null, null, uriString, null, null); + } + } + + public static String unsplit( + final String scheme, + final String netloc, + final String path, + final String query, + final String fragment) { + try { + return new URI(scheme, netloc, path, query, fragment).toString(); + } catch (URISyntaxException e) { + if (scheme == null && path.startsWith("_:")) { + String uri = path; + if (fragment != null && fragment.length() > 0) { + uri += "#" + fragment; + } + return fragment; + } + throw new RuntimeException(e); + } + } + + public static URI toUri(final String url) { + try { + return new URI(url); + } catch (URISyntaxException e) { + throw new RuntimeException(e); + } + } + + public static String quote(final String uri) { + try { + return java.net.URLDecoder.decode(uri, StandardCharsets.UTF_8.name()); + } catch (UnsupportedEncodingException e) { + throw new RuntimeException(e); + } + } + + public static String unquote(final String uri) { + try { + return java.net.URLEncoder.encode(uri, StandardCharsets.UTF_8.name()); + } catch (UnsupportedEncodingException e) { + throw new RuntimeException(e); + } + } +} diff --git a/java/src/main/java/org/galaxyproject/gxformat2/v19_09/utils/ValidationException.java 
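`Uris` emulates Python's `urlsplit`/`urlunsplit` on top of `java.net.URI`. The illustrative sketch below (not part of the diff; the class name and example URIs are placeholders) shows what `split` and `fileUri` produce for typical inputs.

```
import org.galaxyproject.gxformat2.v19_09.utils.Uris;

public class UrisExample {
  public static void main(String[] args) {
    final Uris.UriSplit parts =
        Uris.split("https://galaxyproject.org/gxformat2/v19_09#WorkflowStep/tool_id");
    // Prints: UriSplit[https,galaxyproject.org,/gxformat2/v19_09,null,WorkflowStep/tool_id]
    System.out.println(parts);

    // fileUri() prefixes file:// (yielding a third slash for absolute paths);
    // with splitFrag=true a trailing #fragment would be handled separately.
    System.out.println(Uris.fileUri("/home/alice/workflow.gxwf.yml"));
    // Prints: file:///home/alice/workflow.gxwf.yml
  }
}
```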
b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/utils/ValidationException.java new file mode 100644 index 0000000..835095e --- /dev/null +++ b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/utils/ValidationException.java @@ -0,0 +1,80 @@ +package org.galaxyproject.gxformat2.v19_09.utils; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +public class ValidationException extends RuntimeException { + private final List children; + private String bullet = ""; + private String currentMessage; + + public ValidationException(final String message) { + this(message, (List) null); + } + + public ValidationException(final String message, final ValidationException child) { + this(message, Arrays.asList(child)); + } + + public ValidationException(final String message, final List children_) { + super(message); + this.currentMessage = message; + final List children = new ArrayList(); + if (children_ != null) { + for (final ValidationException child : children_) { + children.addAll(child.simplify()); + } + } + this.children = children; + } + + public ValidationException withBullet(final String bullet) { + this.bullet = bullet; + return this; + } + + public List simplify() { + if (getMessage().length() > 0) { + return Arrays.asList(this); + } else { + return this.children; + } + } + + public String summary(final int level, final boolean withBullet) { + final int indentPerLevel = 2; + final String spaces = new String(new char[level * indentPerLevel]).replace("\0", " "); + final String bullet; + if (this.bullet.length() > 0 && withBullet) { + bullet = this.bullet; + } else { + bullet = ""; + } + return spaces + bullet + this.currentMessage; + } + + public String prettyStr(final Integer level_) { + Integer level = level_; + if (level == null) { + level = 0; + } + final List parts = new ArrayList(); + int nextLevel; + if (this.currentMessage != null && this.currentMessage.length() > 0) { + parts.add(this.summary(level, true)); + nextLevel = level + 1; + } else { + nextLevel = level; + } + for (final ValidationException child : this.children) { + parts.add(child.prettyStr(nextLevel)); + } + final String ret = String.join("\n", parts); + return ret; + } + + public String getMessage() { + return this.prettyStr(null); + } +} diff --git a/java/src/main/java/org/galaxyproject/gxformat2/v19_09/utils/Validator.java b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/utils/Validator.java new file mode 100644 index 0000000..2337043 --- /dev/null +++ b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/utils/Validator.java @@ -0,0 +1,14 @@ +package org.galaxyproject.gxformat2.v19_09.utils; + +import java.io.File; + +public class Validator { + public static void main(final String[] args) throws Exception { + if (args.length != 1) { + throw new Exception("No argument supplied to validate."); + } + // TODO: allow URLs and such. 
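`ValidationException` collects child failures and renders them as an indented tree through `prettyStr()`/`getMessage()`, which is how `UnionLoader` and `OneOrListOfLoader` report every alternative they tried. A short illustrative sketch of that aggregation (not part of the diff; the messages are made up):

```
import java.util.Arrays;
import org.galaxyproject.gxformat2.v19_09.utils.ValidationException;

public class ValidationExceptionExample {
  public static void main(String[] args) {
    final ValidationException missingTool = new ValidationException("missing tool_id");
    final ValidationException badPosition = new ValidationException("position is not a number");

    // getMessage() indents children two spaces per nesting level:
    //   failed to load step
    //     missing tool_id
    //     position is not a number
    final ValidationException step =
        new ValidationException("failed to load step", Arrays.asList(missingTool, badPosition));
    System.out.println(step.getMessage());
  }
}
```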
+ final File uri = new File(args[0]); + RootLoader.loadDocument(uri); + } +} diff --git a/java/src/main/java/org/galaxyproject/gxformat2/v19_09/utils/YamlUtils.java b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/utils/YamlUtils.java new file mode 100644 index 0000000..f1bdf5f --- /dev/null +++ b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/utils/YamlUtils.java @@ -0,0 +1,13 @@ +package org.galaxyproject.gxformat2.v19_09.utils; + +import java.util.Map; +import org.yaml.snakeyaml.Yaml; + +public class YamlUtils { + + public static Map mapFromString(final String text) { + Yaml yaml = new Yaml(); + final Map result = yaml.load(text); + return result; + } +} diff --git a/java/src/main/java/org/galaxyproject/gxformat2/v19_09/utils/package.html b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/utils/package.html new file mode 100644 index 0000000..6b82709 --- /dev/null +++ b/java/src/main/java/org/galaxyproject/gxformat2/v19_09/utils/package.html @@ -0,0 +1,3 @@ + +This package contains utilities for working the SchemaSalad generated code for the org.galaxyproject.gxformat2.v19_09 package. + diff --git a/java/src/main/javadoc/overview.html b/java/src/main/javadoc/overview.html new file mode 100644 index 0000000..9a31ca8 --- /dev/null +++ b/java/src/main/javadoc/overview.html @@ -0,0 +1,4 @@ + + This project contains Java objects and utilities auto-generated by Schema Salad + for parsing documents corresponding to the https://galaxyproject.org/gxformat2/v19_09# schema. + diff --git a/java/src/main/resources/META-INF/MANIFEST.MF b/java/src/main/resources/META-INF/MANIFEST.MF new file mode 100644 index 0000000..294897d --- /dev/null +++ b/java/src/main/resources/META-INF/MANIFEST.MF @@ -0,0 +1,2 @@ +Manifest-Version: 1.0 +Main-Class: org.galaxyproject.gxformat2.v19_09.utils.Validator diff --git a/java/src/test/java/org/galaxyproject/gxformat2/LintExamplesTest.java b/java/src/test/java/org/galaxyproject/gxformat2/LintExamplesTest.java new file mode 100644 index 0000000..9799fd2 --- /dev/null +++ b/java/src/test/java/org/galaxyproject/gxformat2/LintExamplesTest.java @@ -0,0 +1,25 @@ +package org.galaxyproject.gxformat2; + +import java.io.File; +import org.junit.Test; + +public class LintExamplesTest { + + @Test + public void testLinting() throws Exception { + final File examplesDirectory = new File("../tests/examples"); + assert examplesDirectory.exists(); + assert examplesDirectory.isDirectory(); + for (final File file : examplesDirectory.listFiles()) { + final String path = file.getAbsolutePath(); + final String exitCodeString = file.getName().substring(0, 1); + final int expectedExitCode = Integer.parseInt(exitCodeString); + final int actualExitCode = Lint.lint(new String[] {path}); + if (expectedExitCode != actualExitCode) { + final String template = + "File [%s] didn't lint properly - expected exit code [%d], got [%d]."; + assert false : template.format(template, path, expectedExitCode, actualExitCode); + } + } + } +} diff --git a/java/src/test/java/org/galaxyproject/gxformat2/v19_09/utils/DefaultFetcherTest.java b/java/src/test/java/org/galaxyproject/gxformat2/v19_09/utils/DefaultFetcherTest.java new file mode 100644 index 0000000..217ec16 --- /dev/null +++ b/java/src/test/java/org/galaxyproject/gxformat2/v19_09/utils/DefaultFetcherTest.java @@ -0,0 +1,64 @@ +package org.galaxyproject.gxformat2.v19_09.utils; + +import org.junit.Assert; +import org.junit.Test; + +public class DefaultFetcherTest { + @Test + public void testUnderscoreJoin() { + final DefaultFetcher fetcher = 
new DefaultFetcher(); + Assert.assertEquals(fetcher.urlJoin("http://googl.com/", "_:/moo"), "_:/moo"); + } + + /* + + url = fetcher.urljoin("file:///home/fred/foo.cwl", "../alice/soup.cwl") + assert url == "file:///home/alice/soup.cwl" + # relative from root + url = fetcher.urljoin("file:///home/fred/foo.cwl", "/baz/soup.cwl") + assert url == "file:///baz/soup.cwl" + + url = fetcher.urljoin( + "file:///home/fred/foo.cwl", "http://example.com/bar/soup.cwl" + ) + assert url == "http://example.com/bar/soup.cwl" + + url = fetcher.urljoin("http://example.com/fred/foo.cwl", "soup.cwl") + assert url == "http://example.com/fred/soup.cwl" + + # Root-relative -- here relative to http host, not file:/// + url = fetcher.urljoin("http://example.com/fred/foo.cwl", "/bar/soup.cwl") + assert url == "http://example.com/bar/soup.cwl" + + # Security concern - can't resolve file: from http: + with pytest.raises(ValidationException): + url = fetcher.urljoin( + "http://example.com/fred/foo.cwl", "file:///bar/soup.cwl" + ) + */ + + @Test + public void testUnixJoin() { + final DefaultFetcher fetcher = new DefaultFetcher(); + String url; + + url = fetcher.urlJoin("file:///home/fred/foo.cwl", "soup.cwl"); + Assert.assertEquals(url, "file:///home/fred/soup.cwl"); + + url = fetcher.urlJoin("file:///home/fred/foo.cwl", "../alice/soup.cwl"); + Assert.assertEquals(url, "file:///home/alice/soup.cwl"); + // relative from root + url = fetcher.urlJoin("file:///home/fred/foo.cwl", "/baz/soup.cwl"); + Assert.assertEquals(url, "file:///baz/soup.cwl"); + + url = fetcher.urlJoin("file:///home/fred/foo.cwl", "http://example.com/bar/soup.cwl"); + Assert.assertEquals(url, "http://example.com/bar/soup.cwl"); + + url = fetcher.urlJoin("http://example.com/fred/foo.cwl", "soup.cwl"); + Assert.assertEquals(url, "http://example.com/fred/soup.cwl"); + + // Root-relative -- here relative to http host, not file:/// + url = fetcher.urlJoin("http://example.com/fred/foo.cwl", "/bar/soup.cwl"); + Assert.assertEquals(url, "http://example.com/bar/soup.cwl"); + } +} diff --git a/java/src/test/java/org/galaxyproject/gxformat2/v19_09/utils/ExamplesTest.java b/java/src/test/java/org/galaxyproject/gxformat2/v19_09/utils/ExamplesTest.java new file mode 100644 index 0000000..143b76b --- /dev/null +++ b/java/src/test/java/org/galaxyproject/gxformat2/v19_09/utils/ExamplesTest.java @@ -0,0 +1,4 @@ +package org.galaxyproject.gxformat2.v19_09.utils; + + +public class ExamplesTest {} diff --git a/java/src/test/java/org/galaxyproject/gxformat2/v19_09/utils/YamlUtilsTest.java b/java/src/test/java/org/galaxyproject/gxformat2/v19_09/utils/YamlUtilsTest.java new file mode 100644 index 0000000..a9c8ede --- /dev/null +++ b/java/src/test/java/org/galaxyproject/gxformat2/v19_09/utils/YamlUtilsTest.java @@ -0,0 +1,15 @@ +package org.galaxyproject.gxformat2.v19_09.utils; + +import java.util.Map; +import org.junit.Assert; +import org.junit.Test; + +public class YamlUtilsTest { + @Test + public void testSimpleLoad() { + final String yamlStr = "moo: cow\nbark: dog\n"; + final Map loaded = YamlUtils.mapFromString(yamlStr); + Assert.assertEquals(loaded.get("moo"), "cow"); + Assert.assertEquals(loaded.get("bark"), "dog"); + } +} diff --git a/schema/Process.yml b/schema/Process.yml new file mode 100644 index 0000000..3f367a8 --- /dev/null +++ b/schema/Process.yml @@ -0,0 +1,103 @@ +# Subset of CWL's process, can't use process as is because of types not implemented. 
+saladVersion: v1.1 +$base: "https://w3id.org/cwl/cwl#" + +$namespaces: + cwl: "https://w3id.org/cwl/cwl#" + sld: "https://w3id.org/cwl/salad#" + rdfs: "http://www.w3.org/2000/01/rdf-schema#" + +$graph: + +- name: Labeled + type: record + abstract: true + fields: + - name: label + type: + - "null" + - string + jsonldPredicate: "rdfs:label" + doc: "A short, human-readable label of this object." + +- name: Identified + type: record + abstract: true + fields: + - name: id + type: string? + jsonldPredicate: "@id" + doc: "The unique identifier for this object." + +- name: Parameter + type: record + extends: [sld:Documented, Identified] # Dropped FieldBase. + abstract: true + doc: | + Define an input or output parameter to a process. + +- name: InputParameter + type: record + abstract: true + extends: [Parameter] # , InputFormat, LoadContents] + fields: + - name: default + type: Any? + jsonldPredicate: + _id: sld:default + noLinkCheck: true + doc: | + The default value to use for this parameter if the parameter is missing + from the input object, or if the value of the parameter in the input + object is `null`. Default values are applied before evaluating expressions + (e.g. dependent `valueFrom` fields). + + +- name: OutputParameter + type: record + extends: [Parameter] + abstract: true + +- type: record + name: Process + extends: [Identified, Labeled, sld:Documented] + abstract: true + doc: | + + The base executable type in CWL is the `Process` object defined by the + document. Note that the `Process` object is abstract and cannot be + directly executed. + + fields: + - name: inputs + type: + type: array + items: InputParameter + jsonldPredicate: + _id: "cwl:inputs" + mapSubject: id + mapPredicate: type + doc: | + Defines the input parameters of the process. The process is ready to + run when all required input parameters are associated with concrete + values. Input parameters include a schema for each parameter which is + used to validate the input object. It may also be used to build a user + interface for constructing the input object. + + When accepting an input object, all input parameters must have a value. + If an input parameter is missing from the input object, it must be + assigned a value of `null` (or the value of `default` for that + parameter, if provided) for the purposes of validation and evaluation + of expressions. + + - name: outputs + type: + type: array + items: OutputParameter + jsonldPredicate: + _id: "cwl:outputs" + mapSubject: id + mapPredicate: type + doc: | + Defines the parameters representing the output of the process. May be + used to generate and/or validate the output object. diff --git a/schema/common/common.yml b/schema/common/common.yml new file mode 100644 index 0000000..5375352 --- /dev/null +++ b/schema/common/common.yml @@ -0,0 +1,65 @@ +saladVersion: v1.1 +$base: "https://galaxyproject.org/gxformat2/gxformat2common#" + +$graph: + +- name: HasStepPosition + type: record + abstract: true + fields: + - name: position + type: StepPosition? + +- name: StepPosition + type: record + doc: This field specifies the location of the step's node when rendered in the workflow editor. + fields: + - name: top + type: float + default: 0.0 + doc: | + Relative vertical position of the step's node when rendered in the workflow editor. + - name: left + type: float + default: 0.0 + doc: | + Relative horizontal position of the step's node when rendered in the workflow editor. + +- name: ReferencesTool + type: record + abstract: true + fields: + - name: tool_id + type: string? 
+ doc: | + The tool ID used to run this step of the workflow (e.g. 'cat1' or 'toolshed.g2.bx.psu.edu/repos/nml/collapse_collections/collapse_dataset/4.0'). + - name: tool_shed_repository + type: ToolShedRepository? + doc: | + The Galaxy Tool Shed repository that should be installed in order to use this tool. + - name: tool_version + type: string? + doc: | + The tool version corresponding used to run this step of the workflow. For tool shed installed tools, the ID generally uniquely specifies a version + and this field is optional. + +- name: ToolShedRepository + type: record + fields: + - name: changeset_revision + type: string + doc: | + The revision of the tool shed repository this tool can be found in. + - name: name + type: string + jsonldPredicate: "@id" # will this bite me? + doc: | + The name of the tool shed repository this tool can be found in. + - name: owner + type: string + doc: | + The owner of the tool shed repository this tool can be found in. + - name: tool_shed + type: string + doc: | + The URI of the tool shed containing the repository this tool can be found in - typically this should be toolshed.g2.bx.psu.edu. diff --git a/schema/common/metaschema/field_name.yml b/schema/common/metaschema/field_name.yml new file mode 100644 index 0000000..44e95a2 --- /dev/null +++ b/schema/common/metaschema/field_name.yml @@ -0,0 +1,46 @@ +- | + ## Field name resolution + + The document schema declares the vocabulary of known field names. During + preprocessing traversal, field name in the document which are not part of + the schema vocabulary must be resolved to absolute URIs. Under "strict" + validation, it is an error for a document to include fields which are not + part of the vocabulary and not resolvable to absolute URIs. Fields names + which are not part of the vocabulary are resolved using the following + rules: + + * If an field name URI begins with a namespace prefix declared in the + document context (`@context`) followed by a colon `:`, the prefix and + colon must be replaced by the namespace declared in `@context`. + + * If there is a vocabulary term which maps to the URI of a resolved + field, the field name must be replace with the vocabulary term. + + * If a field name URI is an absolute URI consisting of a scheme and path + and is not part of the vocabulary, no processing occurs. + + Field name resolution is not relative. It must not be affected by the + base URI. 
+ + ### Field name resolution example + + Given the following schema: + + ``` +- $include: field_name_schema.yml +- | + ``` + + Process the following example: + + ``` +- $include: field_name_src.yml +- | + ``` + + This becomes: + + ``` +- $include: field_name_proc.yml +- | + ``` diff --git a/schema/common/metaschema/field_name_proc.yml b/schema/common/metaschema/field_name_proc.yml new file mode 100644 index 0000000..a53ef4b --- /dev/null +++ b/schema/common/metaschema/field_name_proc.yml @@ -0,0 +1,8 @@ + { + "base": "one", + "form": { + "base": "two", + "http://example.com/three": "three", + }, + "http://example.com/acid#four": "four" + } diff --git a/schema/common/metaschema/field_name_schema.yml b/schema/common/metaschema/field_name_schema.yml new file mode 100644 index 0000000..5089c4b --- /dev/null +++ b/schema/common/metaschema/field_name_schema.yml @@ -0,0 +1,14 @@ +{ + "$namespaces": { + "acid": "http://example.com/acid#" + }, + "$graph": [{ + "name": "ExampleType", + "type": "record", + "fields": [{ + "name": "base", + "type": "string", + "jsonldPredicate": "http://example.com/base" + }] + }] +} diff --git a/schema/common/metaschema/field_name_src.yml b/schema/common/metaschema/field_name_src.yml new file mode 100644 index 0000000..1ed79b9 --- /dev/null +++ b/schema/common/metaschema/field_name_src.yml @@ -0,0 +1,8 @@ + { + "base": "one", + "form": { + "http://example.com/base": "two", + "http://example.com/three": "three", + }, + "acid:four": "four" + } diff --git a/schema/common/metaschema/ident_res.yml b/schema/common/metaschema/ident_res.yml new file mode 100644 index 0000000..86aefe6 --- /dev/null +++ b/schema/common/metaschema/ident_res.yml @@ -0,0 +1,70 @@ +- | + ## Identifier resolution + + The schema may designate one or more fields as identifier fields to identify + specific objects. Processing must resolve relative identifiers to absolute + identifiers using the following rules: + + * If an identifier URI begins with `#` it is a current document + fragment identifier. It is resolved relative to the base URI by + setting or replacing the fragment portion of the base URI. + + * If an identifier URI contains `#` in some other position is a + relative URI with fragment identifier. It is resolved relative + to the base URI by stripping the last path segment from the base + URI and adding the identifier followed by the fragment. + + * If an identifier URI does not contain a scheme and does not + contain `#` it is a parent relative fragment identifier. + + * If an identifier URI is a parent relative fragment identifier + and the base URI does not contain a document fragment, set the + document fragment on the base URI. + + * If an identifier URI is a parent relative fragment identifier + and the object containing this identifier is assigned to a + parent object field defined with `subscope` in + `jsonldPredicate`, append a slash `/` to the base URI fragment + followed by the value of the parent field `subscope`. Then + append the identifier as described in the next rule. + + * If an identifier URI is a parent relative fragment identifier + and the base URI contains a document fragment, append a slash + `/` to the fragment followed by the identifier field to the + fragment portion of the base URI. + + * If an identifier URI begins with a namespace prefix declared in + `$namespaces` followed by a colon `:`, the prefix and colon must be + replaced by the namespace declared in `$namespaces`. 
+ + * If an identifier URI is an absolute URI consisting of a scheme and path, + no processing occurs. + + When preprocessing visits a node containing an identifier, that identifier + must be used as the base URI to process child nodes. + + It is an error for more than one object in a document to have the same + absolute URI. + + ### Identifier resolution example + + Given the following schema: + + ``` +- $include: ident_res_schema.yml +- | + ``` + + Process the following example: + + ``` +- $include: ident_res_src.yml +- | + ``` + + This becomes: + + ``` +- $include: ident_res_proc.yml +- | + ``` diff --git a/schema/common/metaschema/ident_res_proc.yml b/schema/common/metaschema/ident_res_proc.yml new file mode 100644 index 0000000..db9d822 --- /dev/null +++ b/schema/common/metaschema/ident_res_proc.yml @@ -0,0 +1,25 @@ +{ + "id": "http://example.com/base", + "form": { + "id": "http://example.com/base#one", + "things": [ + { + "id": "http://example.com/base#one/two" + }, + { + "id": "http://example.com/base#three" + }, + { + "id": "http://example.com/four#five", + }, + { + "id": "http://example.com/acid#six", + }, + { + "subscopeField": { + "id": "http://example.com/base#one/thisIsASubscope/seven" + } + } + ], + } +} diff --git a/schema/common/metaschema/ident_res_schema.yml b/schema/common/metaschema/ident_res_schema.yml new file mode 100644 index 0000000..704116d --- /dev/null +++ b/schema/common/metaschema/ident_res_schema.yml @@ -0,0 +1,23 @@ +{ + "$namespaces": { + "acid": "http://example.com/acid#" + }, + "$graph": [{ + "name": "ExampleType", + "type": "record", + "fields": [{ + "name": "id", + "type": "string", + "jsonldPredicate": "@id" + }]}, { + "name": "SubscopeType", + "type": "record", + "fields": [{ + "name": "subscopeField", + "type": "ExampleType", + "jsonldPredicate": { + "subscope": "thisIsASubscope" + } + }] + }] +} diff --git a/schema/common/metaschema/ident_res_src.yml b/schema/common/metaschema/ident_res_src.yml new file mode 100644 index 0000000..4b634ff --- /dev/null +++ b/schema/common/metaschema/ident_res_src.yml @@ -0,0 +1,25 @@ + { + "id": "http://example.com/base", + "form": { + "id": "one", + "things": [ + { + "id": "two" + }, + { + "id": "#three", + }, + { + "id": "four#five", + }, + { + "id": "acid:six", + }, + { + "subscopeField": { + "id": "seven" + } + } + ], + } + } diff --git a/schema/common/metaschema/import_include.md b/schema/common/metaschema/import_include.md new file mode 100644 index 0000000..0ad06bf --- /dev/null +++ b/schema/common/metaschema/import_include.md @@ -0,0 +1,112 @@ +## Import + +During preprocessing traversal, an implementation must resolve `$import` +directives. An `$import` directive is an object consisting of exactly one +field `$import` specifying resource by URI string. It is an error if there +are additional fields in the `$import` object, such additional fields must +be ignored. + +The URI string must be resolved to an absolute URI using the link +resolution rules described previously. Implementations must support +loading from `file`, `http` and `https` resources. The URI referenced by +`$import` must be loaded and recursively preprocessed as a Salad document. +The external imported document does not inherit the context of the +importing document, and the default base URI for processing the imported +document must be the URI used to retrieve the imported document. If the +`$import` URI includes a document fragment, the fragment must be excluded +from the base URI used to preprocess the imported document. 
+ +Once loaded and processed, the `$import` node is replaced in the document +structure by the object or array yielded from the import operation. + +URIs may reference document fragments which refer to specific an object in +the target document. This indicates that the `$import` node must be +replaced by only the object with the appropriate fragment identifier. + +It is a fatal error if an import directive refers to an external resource +or resource fragment which does not exist or is not accessible. + +### Import example + +import.yml: +``` +{ + "hello": "world" +} + +``` + +parent.yml: +``` +{ + "form": { + "bar": { + "$import": "import.yml" + } + } +} + +``` + +This becomes: + +``` +{ + "form": { + "bar": { + "hello": "world" + } + } +} +``` + +## Include + +During preprocessing traversal, an implementation must resolve `$include` +directives. An `$include` directive is an object consisting of exactly one +field `$include` specifying a URI string. It is an error if there are +additional fields in the `$include` object, such additional fields must be +ignored. + +The URI string must be resolved to an absolute URI using the link +resolution rules described previously. The URI referenced by `$include` must +be loaded as a text data. Implementations must support loading from +`file`, `http` and `https` resources. Implementations may transcode the +character encoding of the text data to match that of the parent document, +but must not interpret or parse the text document in any other way. + +Once loaded, the `$include` node is replaced in the document structure by a +string containing the text data loaded from the resource. + +It is a fatal error if an import directive refers to an external resource +which does not exist or is not accessible. + +### Include example + +parent.yml: +``` +{ + "form": { + "bar": { + "$include": "include.txt" + } + } +} + +``` + +include.txt: +``` +hello world + +``` + +This becomes: + +``` +{ + "form": { + "bar": "hello world" + } +} +``` diff --git a/schema/common/metaschema/link_res.yml b/schema/common/metaschema/link_res.yml new file mode 100644 index 0000000..9346f8a --- /dev/null +++ b/schema/common/metaschema/link_res.yml @@ -0,0 +1,55 @@ +- | + ## Link resolution + + The schema may designate one or more fields as link fields reference other + objects. Processing must resolve links to either absolute URIs using the + following rules: + + * If a reference URI is prefixed with `#` it is a relative + fragment identifier. It is resolved relative to the base URI by setting + or replacing the fragment portion of the base URI. + + * If a reference URI does not contain a scheme and is not prefixed with `#` + it is a path relative reference. If the reference URI contains `#` in any + position other than the first character, the reference URI must be divided + into a path portion and a fragment portion split on the first instance of + `#`. The path portion is resolved relative to the base URI by the following + rule: if the path portion of the base URI ends in a slash `/`, append the + path portion of the reference URI to the path portion of the base URI. If + the path portion of the base URI does not end in a slash, replace the final + path segment with the path portion of the reference URI. Replace the + fragment portion of the base URI with the fragment portion of the reference + URI. 
+ + * If a reference URI begins with a namespace prefix declared in `$namespaces` + followed by a colon `:`, the prefix and colon must be replaced by the + namespace declared in `$namespaces`. + + * If a reference URI is an absolute URI consisting of a scheme and path, + no processing occurs. + + Link resolution must not affect the base URI used to resolve identifiers + and other links. + + ### Link resolution example + + Given the following schema: + + ``` +- $include: link_res_schema.yml +- | + ``` + + Process the following example: + + ``` +- $include: link_res_src.yml +- | + ``` + + This becomes: + + ``` +- $include: link_res_proc.yml +- | + ``` diff --git a/schema/common/metaschema/link_res_proc.yml b/schema/common/metaschema/link_res_proc.yml new file mode 100644 index 0000000..03e539d --- /dev/null +++ b/schema/common/metaschema/link_res_proc.yml @@ -0,0 +1,21 @@ +{ + "$base": "http://example.com/base", + "link": "http://example.com/base/zero", + "form": { + "link": "http://example.com/one", + "things": [ + { + "link": "http://example.com/two" + }, + { + "link": "http://example.com/base#three" + }, + { + "link": "http://example.com/four#five", + }, + { + "link": "http://example.com/acid#six", + } + ] + } +} diff --git a/schema/common/metaschema/link_res_schema.yml b/schema/common/metaschema/link_res_schema.yml new file mode 100644 index 0000000..76420d3 --- /dev/null +++ b/schema/common/metaschema/link_res_schema.yml @@ -0,0 +1,16 @@ +{ + "$namespaces": { + "acid": "http://example.com/acid#" + }, + "$graph": [{ + "name": "ExampleType", + "type": "record", + "fields": [{ + "name": "link", + "type": "string", + "jsonldPredicate": { + "_type": "@id" + } + }] + }] +} diff --git a/schema/common/metaschema/link_res_src.yml b/schema/common/metaschema/link_res_src.yml new file mode 100644 index 0000000..23f7a29 --- /dev/null +++ b/schema/common/metaschema/link_res_src.yml @@ -0,0 +1,21 @@ +{ + "$base": "http://example.com/base", + "link": "http://example.com/base/zero", + "form": { + "link": "one", + "things": [ + { + "link": "two" + }, + { + "link": "#three", + }, + { + "link": "four#five", + }, + { + "link": "acid:six", + } + ] + } +} diff --git a/schema/common/metaschema/map_res.yml b/schema/common/metaschema/map_res.yml new file mode 100644 index 0000000..bbcee48 --- /dev/null +++ b/schema/common/metaschema/map_res.yml @@ -0,0 +1,36 @@ +- | + ## Identifier maps + + The schema may designate certain fields as having a `mapSubject`. If the + value of the field is a JSON object, it must be transformed into an array of + JSON objects. Each key-value pair from the source JSON object is a list + item, each list item must be a JSON objects, and the value of the key is + assigned to the field specified by `mapSubject`. + + Fields which have `mapSubject` specified may also supply a `mapPredicate`. + If the value of a map item is not a JSON object, the item is transformed to a + JSON object with the key assigned to the field specified by `mapSubject` and + the value assigned to the field specified by `mapPredicate`. 
+ + ### Identifier map example + + Given the following schema: + + ``` +- $include: map_res_schema.yml +- | + ``` + + Process the following example: + + ``` +- $include: map_res_src.yml +- | + ``` + + This becomes: + + ``` +- $include: map_res_proc.yml +- | + ``` diff --git a/schema/common/metaschema/map_res_proc.yml b/schema/common/metaschema/map_res_proc.yml new file mode 100644 index 0000000..52e9c22 --- /dev/null +++ b/schema/common/metaschema/map_res_proc.yml @@ -0,0 +1,12 @@ +{ + "mapped": [ + { + "value": "daphne", + "key": "fred" + }, + { + "value": "scooby", + "key": "shaggy" + } + ] +} \ No newline at end of file diff --git a/schema/common/metaschema/map_res_schema.yml b/schema/common/metaschema/map_res_schema.yml new file mode 100644 index 0000000..086cc29 --- /dev/null +++ b/schema/common/metaschema/map_res_schema.yml @@ -0,0 +1,30 @@ +{ + "$graph": [{ + "name": "MappedType", + "type": "record", + "documentRoot": true, + "fields": [{ + "name": "mapped", + "type": { + "type": "array", + "items": "ExampleRecord" + }, + "jsonldPredicate": { + "mapSubject": "key", + "mapPredicate": "value" + } + }], + }, + { + "name": "ExampleRecord", + "type": "record", + "fields": [{ + "name": "key", + "type": "string" + }, { + "name": "value", + "type": "string" + } + ] + }] +} diff --git a/schema/common/metaschema/map_res_src.yml b/schema/common/metaschema/map_res_src.yml new file mode 100644 index 0000000..9df0c35 --- /dev/null +++ b/schema/common/metaschema/map_res_src.yml @@ -0,0 +1,8 @@ +{ + "mapped": { + "shaggy": { + "value": "scooby" + }, + "fred": "daphne" + } +} \ No newline at end of file diff --git a/schema/common/metaschema/metaschema.yml b/schema/common/metaschema/metaschema.yml new file mode 100644 index 0000000..3340c22 --- /dev/null +++ b/schema/common/metaschema/metaschema.yml @@ -0,0 +1,377 @@ +$base: "https://w3id.org/cwl/salad#" + +$namespaces: + sld: "https://w3id.org/cwl/salad#" + dct: "http://purl.org/dc/terms/" + rdf: "http://www.w3.org/1999/02/22-rdf-syntax-ns#" + rdfs: "http://www.w3.org/2000/01/rdf-schema#" + xsd: "http://www.w3.org/2001/XMLSchema#" + +$graph: + +- name: "Semantic_Annotations_for_Linked_Avro_Data" + type: documentation + doc: + - $include: salad.md + - $import: field_name.yml + - $import: ident_res.yml + - $import: link_res.yml + - $import: vocab_res.yml + - $include: import_include.md + - $import: map_res.yml + - $import: typedsl_res.yml + - $import: sfdsl_res.yml + +- name: "Link_Validation" + type: documentation + doc: | + # Link validation + + Once a document has been preprocessed, an implementation may validate + links. The link validation traversal may visit fields which the schema + designates as link fields and check that each URI references an existing + object in the current document, an imported document, file system, or + network resource. Failure to validate links may be a fatal error. Link + validation behavior for individual fields may be modified by `identity` and + `noLinkCheck` in the `jsonldPredicate` section of the field schema. + + +- name: "Schema_Validation" + type: documentation + doc: | + # Validating a document against a schema + + To validate a document against the schema, first [apply + preprocessing](#Document_preprocessing), then, use the following + algorithm. + + 1. The document root must be an object or a list. If the document root is an + object containing the field `$graph` (which must be a list of + objects), then validation applies to each object in the list. + 2. 
For each object, attempt to validate as one of the record types + flagged with `documentRoot: true`. + 3. To validate a record, go through `fields` and recursively + validate each field of the object. + 4. For fields with a list of types (type union), go through each + type in the list and recursively validate the type. For the + field to be valid, at least one type in the union must be valid. + 5. Missing fields are considered `null`. To validate, the allowed types + for the field must include `null` + 6. Primitive types are null, boolean, int, long, float, double, + string. To validate, the value in the document must have one + of these type. For numerics, the value appearing in the + document must fit into the specified type. + 7. To validate an array, the value in the document must be a list, + and each item in the list must recursively validate as a type + in `items`. + 8. To validate an enum, the value in the document be a string, and + the value must be equal to the short name of one of the values + listed in `symbols`. + 9. As a special case, a field with the `Expression` type validates string values + which contain a CWL parameter reference or expression in the form + `$(...)` or `${...}` + +# - name: "JSON_LD_Context" +# type: documentation +# doc: | +# # Generating JSON-LD Context + +# How to generate the json-ld context... + + +- $import: metaschema_base.yml + +- name: JsonldPredicate + type: record + doc: | + Attached to a record field to define how the parent record field is handled for + URI resolution and JSON-LD context generation. + fields: + - name: _id + type: string? + jsonldPredicate: + _id: sld:_id + _type: "@id" + identity: true + doc: | + The predicate URI that this field corresponds to. + Corresponds to JSON-LD `@id` directive. + - name: _type + type: string? + doc: | + The context type hint, corresponds to JSON-LD `@type` directive. + + * If the value of this field is `@id` and `identity` is false or + unspecified, the parent field must be resolved using the link + resolution rules. If `identity` is true, the parent field must be + resolved using the identifier expansion rules. + + * If the value of this field is `@vocab`, the parent field must be + resolved using the vocabulary resolution rules. + + - name: _container + type: string? + doc: | + Structure hint, corresponds to JSON-LD `@container` directive. + - name: identity + type: boolean? + doc: | + If true and `_type` is `@id` this indicates that the parent field must + be resolved according to identity resolution rules instead of link + resolution rules. In addition, the field value is considered an + assertion that the linked value exists; absence of an object in the loaded document + with the URI is not an error. + - name: noLinkCheck + type: boolean? + doc: | + If true, this indicates that link validation traversal must stop at + this field. This field (it is is a URI) or any fields under it (if it + is an object or array) are not subject to link checking. + - name: mapSubject + type: string? + doc: | + If the value of the field is a JSON object, it must be transformed + into an array of JSON objects, where each key-value pair from the + source JSON object is a list item, the list items must be JSON objects, + and the key is assigned to the field specified by `mapSubject`. + - name: mapPredicate + type: string? + doc: | + Only applies if `mapSubject` is also provided. 
If the value of the + field is a JSON object, it is transformed as described in `mapSubject`, + with the addition that when the value of a map item is not an object, + the item is transformed to a JSON object with the key assigned to the + field specified by `mapSubject` and the value assigned to the field + specified by `mapPredicate`. + - name: refScope + type: int? + doc: | + If the field contains a relative reference, it must be resolved by + searching for valid document references in each successive parent scope + in the document fragment. For example, a reference of `foo` in the + context `#foo/bar/baz` will first check for the existence of + `#foo/bar/baz/foo`, followed by `#foo/bar/foo`, then `#foo/foo` and + then finally `#foo`. The first valid URI in the search order shall be + used as the fully resolved value of the identifier. The value of the + refScope field is the specified number of levels from the containing + identifer scope before starting the search, so if `refScope: 2` then + "baz" and "bar" must be stripped to get the base `#foo` and search + `#foo/foo` and the `#foo`. The last scope searched must be the top + level scope before determining if the identifier cannot be resolved. + - name: typeDSL + type: boolean? + doc: | + Field must be expanded based on the the Schema Salad type DSL. + - name: secondaryFilesDSL + type: boolean? + doc: | + Field must be expanded based on the the Schema Salad secondary file DSL. + - name: subscope + type: string? + doc: | + Append the subscope to the current scope when performing + identifier resolution to objects under this field. + +- name: SpecializeDef + type: record + fields: + - name: specializeFrom + type: string + doc: "The data type to be replaced" + jsonldPredicate: + _id: "sld:specializeFrom" + _type: "@id" + refScope: 1 + + - name: specializeTo + type: string + doc: "The new data type to replace with" + jsonldPredicate: + _id: "sld:specializeTo" + _type: "@id" + refScope: 1 + + +- name: NamedType + type: record + abstract: true + docParent: "#Schema" + fields: + - name: name + type: string + jsonldPredicate: "@id" + doc: "The identifier for this type" + - name: inVocab + type: boolean? + default: true + doc: | + If "true" (the default), include the short name of this type + in the vocabulary. The vocabulary are all the symbols (field + names and other identifiers, such as classes and enum values) + which can be used in the document without a namespace prefix. + These are the keys of the JSON-LD context. If false, do not + include the short name in the vocabulary. + + This is useful for specifying schema extensions that will be + included in validation without introducing ambiguity by + introducing non-standard terms into the vocabulary. + + +- name: DocType + type: record + extends: Documented + abstract: true + docParent: "#Schema" + fields: + - name: docParent + type: string? + doc: | + Hint to indicate that during documentation generation, documentation + for this type should appear in a subsection under `docParent`. + jsonldPredicate: + _id: "sld:docParent" + _type: "@id" + + - name: docChild + type: + - string? + - string[]? + doc: | + Hint to indicate that during documentation generation, documentation + for `docChild` should appear in a subsection under this type. + jsonldPredicate: + _id: "sld:docChild" + _type: "@id" + + - name: docAfter + type: string? + doc: | + Hint to indicate that during documentation generation, documentation + for this type should appear after the `docAfter` section at the same + level. 
+ jsonldPredicate: + _id: "sld:docAfter" + _type: "@id" + + +- name: SchemaDefinedType + type: record + extends: DocType + doc: | + Abstract base for schema-defined types. + abstract: true + fields: + - name: jsonldPredicate + type: + - string? + - JsonldPredicate? + doc: | + Annotate this type with linked data context. + jsonldPredicate: sld:jsonldPredicate + + - name: documentRoot + type: boolean? + doc: | + If true, indicates that the type is a valid at the document root. At + least one type in a schema must be tagged with `documentRoot: true`. + + +- name: SaladRecordField + type: record + extends: RecordField + doc: "A field of a record." + fields: + - name: jsonldPredicate + type: + - string? + - JsonldPredicate? + doc: | + Annotate this type with linked data context. + jsonldPredicate: "sld:jsonldPredicate" + - name: default + type: Any? + jsonldPredicate: + _id: sld:default + noLinkCheck: true + doc: | + The default value to use for this field if the field is missing or "null". + + +- name: SaladRecordSchema + docParent: "#Schema" + type: record + extends: [NamedType, RecordSchema, SchemaDefinedType] + documentRoot: true + specialize: + RecordField: SaladRecordField + fields: + - name: abstract + type: boolean? + doc: | + If true, this record is abstract and may be used as a base for other + records, but is not valid on its own. + + - name: extends + type: + - string? + - string[]? + jsonldPredicate: + _id: "sld:extends" + _type: "@id" + refScope: 1 + doc: | + Indicates that this record inherits fields from one or more base records. + + - name: specialize + type: + - SpecializeDef[]? + doc: | + Only applies if `extends` is declared. Apply type specialization using the + base record as a template. For each field inherited from the base + record, replace any instance of the type `specializeFrom` with + `specializeTo`. + jsonldPredicate: + _id: "sld:specialize" + mapSubject: specializeFrom + mapPredicate: specializeTo + +- name: SaladEnumSchema + docParent: "#Schema" + type: record + extends: [NamedType, EnumSchema, SchemaDefinedType] + documentRoot: true + doc: | + Define an enumerated type. + fields: + - name: extends + type: + - string? + - string[]? + jsonldPredicate: + _id: "sld:extends" + _type: "@id" + refScope: 1 + doc: | + Indicates that this enum inherits symbols from a base enum. + + +- name: Documentation + type: record + docParent: "#Schema" + extends: [NamedType, DocType] + documentRoot: true + doc: | + A documentation section. This type exists to facilitate self-documenting + schemas but has no role in formal validation. + fields: + - name: type + doc: "Must be `documentation`" + type: + type: enum + symbols: + - "sld:documentation" + jsonldPredicate: + _id: "sld:type" + _type: "@vocab" + typeDSL: true + refScope: 2 diff --git a/schema/common/metaschema/metaschema_base.yml b/schema/common/metaschema/metaschema_base.yml new file mode 100644 index 0000000..67008b6 --- /dev/null +++ b/schema/common/metaschema/metaschema_base.yml @@ -0,0 +1,176 @@ +$base: "https://w3id.org/cwl/salad#" + +$namespaces: + sld: "https://w3id.org/cwl/salad#" + dct: "http://purl.org/dc/terms/" + rdf: "http://www.w3.org/1999/02/22-rdf-syntax-ns#" + rdfs: "http://www.w3.org/2000/01/rdf-schema#" + xsd: "http://www.w3.org/2001/XMLSchema#" + +$graph: + +- name: "Schema" + type: documentation + doc: | + # Schema + +- name: Documented + type: record + abstract: true + docParent: "#Schema" + fields: + - name: doc + type: + - string? + - string[]? 
+ doc: "A documentation string for this object, or an array of strings which should be concatenated." + jsonldPredicate: "rdfs:comment" + + +- name: PrimitiveType + type: enum + symbols: + - "sld:null" + - "xsd:boolean" + - "xsd:int" + - "xsd:long" + - "xsd:float" + - "xsd:double" + - "xsd:string" + doc: + - | + Salad data types are based on Avro schema declarations. Refer to the + [Avro schema declaration documentation](https://avro.apache.org/docs/current/spec.html#schemas) for + detailed information. + - "null: no value" + - "boolean: a binary value" + - "int: 32-bit signed integer" + - "long: 64-bit signed integer" + - "float: single precision (32-bit) IEEE 754 floating-point number" + - "double: double precision (64-bit) IEEE 754 floating-point number" + - "string: Unicode character sequence" + + +- name: Any + type: enum + symbols: ["#Any"] + docAfter: "#PrimitiveType" + doc: | + The **Any** type validates for any non-null value. + + +- name: RecordField + type: record + extends: Documented + doc: A field of a record. + fields: + - name: name + type: string + jsonldPredicate: "@id" + doc: | + The name of the field + + - name: type + type: + - PrimitiveType + - RecordSchema + - EnumSchema + - ArraySchema + - string + - type: array + items: + - PrimitiveType + - RecordSchema + - EnumSchema + - ArraySchema + - string + jsonldPredicate: + _id: sld:type + _type: "@vocab" + typeDSL: true + refScope: 2 + doc: | + The field type + + +- name: RecordSchema + type: record + fields: + type: + doc: "Must be `record`" + type: + type: enum + symbols: + - "sld:record" + jsonldPredicate: + _id: "sld:type" + _type: "@vocab" + typeDSL: true + refScope: 2 + fields: + type: RecordField[]? + jsonldPredicate: + _id: sld:fields + mapSubject: name + mapPredicate: type + doc: "Defines the fields of the record." + + +- name: EnumSchema + type: record + doc: | + Define an enumerated type. + fields: + type: + doc: "Must be `enum`" + type: + type: enum + symbols: + - "sld:enum" + jsonldPredicate: + _id: "sld:type" + _type: "@vocab" + typeDSL: true + refScope: 2 + symbols: + type: string[] + jsonldPredicate: + _id: "sld:symbols" + _type: "@id" + identity: true + doc: "Defines the set of valid symbols." + + +- name: ArraySchema + type: record + fields: + type: + doc: "Must be `array`" + type: + type: enum + symbols: + - "sld:array" + jsonldPredicate: + _id: "sld:type" + _type: "@vocab" + typeDSL: true + refScope: 2 + items: + type: + - PrimitiveType + - RecordSchema + - EnumSchema + - ArraySchema + - string + - type: array + items: + - PrimitiveType + - RecordSchema + - EnumSchema + - ArraySchema + - string + jsonldPredicate: + _id: "sld:items" + _type: "@vocab" + refScope: 2 + doc: "Defines the type of the array elements." diff --git a/schema/common/metaschema/salad.md b/schema/common/metaschema/salad.md new file mode 100644 index 0000000..68233c9 --- /dev/null +++ b/schema/common/metaschema/salad.md @@ -0,0 +1,306 @@ +# Semantic Annotations for Linked Avro Data (SALAD) + +Author: + +* Peter Amstutz , Veritas Genetics + +Contributors: + +* The developers of Apache Avro +* The developers of JSON-LD +* Nebojša Tijanić , Seven Bridges Genomics + +# Abstract + +Salad is a schema language for describing structured linked data documents +in JSON or YAML documents. A Salad schema provides rules for +preprocessing, structural validation, and link checking for documents +described by a Salad schema. 
Salad builds on JSON-LD and the Apache Avro +data serialization system, and extends Avro with features for rich data +modeling such as inheritance, template specialization, object identifiers, +and object references. Salad was developed to provide a bridge between the +record oriented data modeling supported by Apache Avro and the Semantic +Web. + +# Status of This Document + +This document is the product of the [Common Workflow Language working +group](https://groups.google.com/forum/#!forum/common-workflow-language). The +latest version of this document is available in the "schema_salad" repository at + +https://github.com/common-workflow-language/schema_salad + +The products of the CWL working group (including this document) are made available +under the terms of the Apache License, version 2.0. + + + +# Introduction + +The JSON data model is an extremely popular way to represent structured +data. It is attractive because of its relative simplicity and is a +natural fit with the standard types of many programming languages. +However, this simplicity means that basic JSON lacks expressive features +useful for working with complex data structures and document formats, such +as schemas, object references, and namespaces. + +JSON-LD is a W3C standard providing a way to describe how to interpret a +JSON document as Linked Data by means of a "context". JSON-LD provides a +powerful solution for representing object references and namespaces in JSON +based on standard web URIs, but is not itself a schema language. Without a +schema providing a well defined structure, it is difficult to process an +arbitrary JSON-LD document as idiomatic JSON because there are many ways to +express the same data that are logically equivalent but structurally +distinct. + +Several schema languages exist for describing and validating JSON data, +such as the Apache Avro data serialization system, however none understand +linked data. As a result, to fully take advantage of JSON-LD to build the +next generation of linked data applications, one must maintain separate +JSON schema, JSON-LD context, RDF schema, and human documentation, despite +significant overlap of content and obvious need for these documents to stay +synchronized. + +Schema Salad is designed to address this gap. It provides a schema +language and processing rules for describing structured JSON content +permitting URI resolution and strict document validation. The schema +language supports linked data through annotations that describe the linked +data interpretation of the content, enables generation of JSON-LD context +and RDF schema, and production of RDF triples by applying the JSON-LD +context. The schema language also provides for robust support of inline +documentation. + +## Introduction to v1.1 + +This is the third version of of the Schema Salad specification. It is +developed concurrently with v1.1 of the Common Workflow Language for use in +specifying the Common Workflow Language, however Schema Salad is intended to be +useful to a broader audience. 
Compared to the v1.0 schema salad +specification, the following changes have been made: + +* Support for `default` values on record fields to specify default values +* Add subscoped fields (fields which introduce a new inner scope for identifiers) +* Add the *inVocab* flag (default true) to indicate if a type is added to the vocabulary of well known terms or must be prefixed +* Add *secondaryFilesDSL* micro DSL (domain specific language) to convert text strings to a secondaryFiles record type used in CWL +* The `$mixin` feature has been removed from the specification, as it + is poorly documented, not included in conformance testing, + and not widely supported. + +## References to Other Specifications + +**Javascript Object Notation (JSON)**: http://json.org + +**JSON Linked Data (JSON-LD)**: http://json-ld.org + +**YAML**: https://yaml.org/spec/1.2/spec.html + +**Avro**: https://avro.apache.org/docs/current/spec.html + +**Uniform Resource Identifier (URI) Generic Syntax**: https://tools.ietf.org/html/rfc3986) + +**Resource Description Framework (RDF)**: http://www.w3.org/RDF/ + +**UTF-8**: https://www.ietf.org/rfc/rfc2279.txt) + +## Scope + +This document describes the syntax, data model, algorithms, and schema +language for working with Salad documents. It is not intended to document +a specific implementation of Salad, however it may serve as a reference for +the behavior of conforming implementations. + +## Terminology + +The terminology used to describe Salad documents is defined in the Concepts +section of the specification. The terms defined in the following list are +used in building those definitions and in describing the actions of an +Salad implementation: + +**may**: Conforming Salad documents and Salad implementations are permitted but +not required to be interpreted as described. + +**must**: Conforming Salad documents and Salad implementations are required +to be interpreted as described; otherwise they are in error. + +**error**: A violation of the rules of this specification; results are +undefined. Conforming implementations may detect and report an error and may +recover from it. + +**fatal error**: A violation of the rules of this specification; results +are undefined. Conforming implementations must not continue to process the +document and may report an error. + +**at user option**: Conforming software may or must (depending on the modal verb in +the sentence) behave as described; if it does, it must provide users a means to +enable or disable the behavior described. + +# Document model + +## Data concepts + +An **object** is a data structure equivalent to the "object" type in JSON, +consisting of a unordered set of name/value pairs (referred to here as +**fields**) and where the name is a string and the value is a string, number, +boolean, array, or object. + +A **document** is a file containing a serialized object, or an array of +objects. + +A **document type** is a class of files that share a common structure and +semantics. + +A **document schema** is a formal description of the grammar of a document type. + +A **base URI** is a context-dependent URI used to resolve relative references. + +An **identifier** is a URI that designates a single document or single +object within a document. + +A **vocabulary** is the set of symbolic field names and enumerated symbols defined +by a document schema, where each term maps to absolute URI. + +## Syntax + +Conforming Salad v1.1 documents are serialized and loaded using a +subset of YAML 1.2 syntax and UTF-8 text encoding. 
Salad documents +are written using the [JSON-compatible subset of YAML described in +section 10.2](https://yaml.org/spec/1.2/spec.html#id2803231). The +following features of YAML must not be used in conforming Salad +documents: + +* Use of explicit node tags with leading `!` or `!!` +* Use of anchors with leading `&` and aliases with leading `*` +* %YAML directives +* %TAG directives + +It is a fatal error if the document is not valid YAML. + +A Salad document must consist only of either a single root object or an +array of objects. + +## Document context + +### Implied context + +The implicit context consists of the vocabulary defined by the schema and +the base URI. By default, the base URI must be the URI that was used to +load the document. It may be overridden by an explicit context. + +### Explicit context + +If a document consists of a root object, this object may contain the +fields `$base`, `$namespaces`, `$schemas`, and `$graph`: + + * `$base`: Must be a string. Set the base URI for the document used to + resolve relative references. + + * `$namespaces`: Must be an object with strings as values. The keys of + the object are namespace prefixes used in the document; the values of + the object are the prefix expansions. + + * `$schemas`: Must be an array of strings. This field may list URI + references to documents in RDF-XML format which will be queried for RDF + schema data. The subjects and predicates described by the RDF schema + may provide additional semantic context for the document, and may be + used for validation of prefixed extension fields found in the document. + +Other directives beginning with `$` must be ignored. + +## Document graph + +If a document consists of a single root object, this object may contain the +field `$graph`. This field must be an array of objects. If present, this +field holds the primary content of the document. A document that consists +of array of objects at the root is an implicit graph. + +## Document metadata + +If a document consists of a single root object, metadata about the +document, such as authorship, may be declared in the root object. + +## Document schema + +Document preprocessing, link validation and schema validation require a +document schema. A schema may consist of: + + * At least one record definition object which defines valid fields that + make up a record type. Record field definitions include the valid types + that may be assigned to each field and annotations to indicate fields + that represent identifiers and links, described below in "Semantic + Annotations". + + * Any number of enumerated type objects which define a set of finite set of symbols that are + valid value of the type. + + * Any number of documentation objects which allow in-line documentation of the schema. + +The schema for defining a salad schema (the metaschema) is described in +detail in the [Schema](#Schema) section. + +## Record field annotations + +In a document schema, record field definitions may include the field +`jsonldPredicate`, which may be either a string or object. Implementations +must use the following document preprocessing of fields by the following +rules: + + * If the value of `jsonldPredicate` is `@id`, the field is an identifier + field. + + * If the value of `jsonldPredicate` is an object, and contains that + object contains the field `_type` with the value `@id`, the field is a + link field subject to [link validation](#Link_validation). 
+ + * If the value of `jsonldPredicate` is an object which contains the + field `_type` with the value `@vocab`, the field value is subject to + [vocabulary resolution](#Vocabulary_resolution). + +## Document traversal + +To perform document document preprocessing, link validation and schema +validation, the document must be traversed starting from the fields or +array items of the root object or array and recursively visiting each child +item which contains an object or arrays. + +## Short names + +The "short name" of an fully qualified identifier is the portion of +the identifier following the final slash `/` of either the fragment +identifier following `#` or the path portion, if there is no fragment. +Some examples: + +* the short name of `http://example.com/foo` is `foo` +* the short name of `http://example.com/#bar` is `bar` +* the short name of `http://example.com/foo/bar` is `bar` +* the short name of `http://example.com/foo#bar` is `bar` +* the short name of `http://example.com/#foo/bar` is `bar` +* the short name of `http://example.com/foo#bar/baz` is `baz` + +## Inheritance and specialization + +A record definition may inherit from one or more record definitions +with the `extends` field. This copies the fields defined in the +parent record(s) as the base for the new record. A record definition +may `specialize` type declarations of the fields inherited from the +base record. For each field inherited from the base record, any +instance of the type in `specializeFrom` is replaced with the type in +`specializeTo`. The type in `specializeTo` should extend from the +type in `specializeFrom`. + +A record definition may be `abstract`. This means the record +definition is not used for validation on its own, but may be extended +by other definitions. If an abstract type appears in a field +definition, it is logically replaced with a union of all concrete +subtypes of the abstract type. In other words, the field value does +not validate as the abstract type, but must validate as some concrete +type that inherits from the abstract type. + +# Document preprocessing + +After processing the explicit context (if any), document preprocessing +begins. Starting from the document root, object fields values or array +items which contain objects or arrays are recursively traversed +depth-first. For each visited object, field names, identifier fields, link +fields, vocabulary fields, and `$import` and `$include` directives must be +processed as described in this section. The order of traversal of child +nodes within a parent node is undefined. diff --git a/schema/common/metaschema/sfdsl_res.yml b/schema/common/metaschema/sfdsl_res.yml new file mode 100644 index 0000000..f619495 --- /dev/null +++ b/schema/common/metaschema/sfdsl_res.yml @@ -0,0 +1,34 @@ +- | + ## Domain Specific Language for secondary files + + Fields may be tagged `secondaryFilesDSL: true` in `jsonldPredicate`. 
If so, the field is expanded using the + following micro-DSL for secondary files: + + * If the value is a string, it is transformed to an object with two fields `pattern` and `required` + * By default, the value of `required` is `null` (this indicates default behavior, which may be based on the context) + * If the value ends with a question mark `?` the question mark is + stripped off and the value of the field `required` is set to `False` + * The remaining value is assigned to the field `pattern` + + ### Type DSL example + + Given the following schema: + + ``` +- $include: sfdsl_res_schema.yml +- | + ``` + + Process the following example: + + ``` +- $include: sfdsl_res_src.yml +- | + ``` + + This becomes: + + ``` +- $include: sfdsl_res_proc.yml +- | + ``` diff --git a/schema/common/metaschema/sfdsl_res_proc.yml b/schema/common/metaschema/sfdsl_res_proc.yml new file mode 100644 index 0000000..2d27a68 --- /dev/null +++ b/schema/common/metaschema/sfdsl_res_proc.yml @@ -0,0 +1,21 @@ +[ + { + "secondaryFiles": { + "pattern": ".bai", + "required": null + }, + { + "secondaryFiles": { + "pattern": ".bai", + "required": false + }, + { + "secondaryFiles": { + "pattern": ".bai?" + }, + { + "secondaryFiles": { + "pattern": ".bai?", + "required": true + }, +] diff --git a/schema/common/metaschema/sfdsl_res_schema.yml b/schema/common/metaschema/sfdsl_res_schema.yml new file mode 100644 index 0000000..549128a --- /dev/null +++ b/schema/common/metaschema/sfdsl_res_schema.yml @@ -0,0 +1,16 @@ +{ + "$graph": [ + { + "name": "SecondaryFilesDSLExample", + "type": "record", + "documentRoot": true, + "fields": [{ + "name": "secondaryFiles", + "type": "string", + "jsonldPredicate": { + _type: "@vocab", + "secondaryFilesDSL": true + } + }] + }] +} diff --git a/schema/common/metaschema/sfdsl_res_src.yml b/schema/common/metaschema/sfdsl_res_src.yml new file mode 100644 index 0000000..dc4c5f3 --- /dev/null +++ b/schema/common/metaschema/sfdsl_res_src.yml @@ -0,0 +1,12 @@ +[{ + "secondaryFiles": ".bai" +}, { + "secondaryFiles": ".bai?" +}, { + "secondaryFiles": { + "pattern": ".bai?" +}, { + "secondaryFiles": { + "pattern": ".bai?", + "required": true +}] diff --git a/schema/common/metaschema/typedsl_res.yml b/schema/common/metaschema/typedsl_res.yml new file mode 100644 index 0000000..c30c60b --- /dev/null +++ b/schema/common/metaschema/typedsl_res.yml @@ -0,0 +1,33 @@ +- | + ## Domain Specific Language for types + + Fields may be tagged `typeDSL: true` in `jsonldPredicate`. If so, the field is expanded using the + following micro-DSL for schema salad types: + + * If the type ends with a question mark `?`, the question mark is stripped off and the type is expanded to a union with `null` + * If the type ends with square brackets `[]` it is expanded to an array with items of the preceeding type symbol + * The type may end with both `[]?` to indicate it is an optional array. + * Identifier resolution is applied after type DSL expansion. 
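Read as code, the type DSL rules above amount to a small string-to-schema rewrite. The following is a minimal Python sketch of that expansion, offered only as an illustration of the rules just listed (the generated `_TypeDSLLoader` in the codegen output uses the same pattern and additionally applies identifier resolution to the base symbol); the canonical worked example follows in the next section.

```python
import re

# Minimal sketch of the type DSL expansion rules above; the generated
# _TypeDSLLoader applies the same pattern and then resolves the base symbol.
_TYPE_DSL = re.compile(r"^([^[?]+)(\[\])?(\?)?$")

def expand_type_dsl(type_string):
    """Expand `t`, `t?`, `t[]` and `t[]?` into explicit types."""
    match = _TYPE_DSL.match(type_string)
    if not match:
        return type_string
    base, array_suffix, optional_suffix = match.groups()
    # A trailing `[]` wraps the base symbol in an array schema.
    expanded = {"type": "array", "items": base} if array_suffix else base
    # A trailing `?` widens the result to a union with `null`.
    return ["null", expanded] if optional_suffix else expanded

assert expand_type_dsl("string") == "string"
assert expand_type_dsl("string?") == ["null", "string"]
assert expand_type_dsl("string[]") == {"type": "array", "items": "string"}
assert expand_type_dsl("string[]?") == ["null", {"type": "array", "items": "string"}]
```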
+ + ### Type DSL example + + Given the following schema: + + ``` +- $include: typedsl_res_schema.yml +- | + ``` + + Process the following example: + + ``` +- $include: typedsl_res_src.yml +- | + ``` + + This becomes: + + ``` +- $include: typedsl_res_proc.yml +- | + ``` diff --git a/schema/common/metaschema/typedsl_res_proc.yml b/schema/common/metaschema/typedsl_res_proc.yml new file mode 100644 index 0000000..8097a6a --- /dev/null +++ b/schema/common/metaschema/typedsl_res_proc.yml @@ -0,0 +1,26 @@ +[ + { + "extype": "string" + }, + { + "extype": [ + "null", + "string" + ] + }, + { + "extype": { + "type": "array", + "items": "string" + } + }, + { + "extype": [ + "null", + { + "type": "array", + "items": "string" + } + ] + } +] diff --git a/schema/common/metaschema/typedsl_res_schema.yml b/schema/common/metaschema/typedsl_res_schema.yml new file mode 100644 index 0000000..52459a6 --- /dev/null +++ b/schema/common/metaschema/typedsl_res_schema.yml @@ -0,0 +1,17 @@ +{ + "$graph": [ + {"$import": "metaschema_base.yml"}, + { + "name": "TypeDSLExample", + "type": "record", + "documentRoot": true, + "fields": [{ + "name": "extype", + "type": "string", + "jsonldPredicate": { + _type: "@vocab", + "typeDSL": true + } + }] + }] +} diff --git a/schema/common/metaschema/typedsl_res_src.yml b/schema/common/metaschema/typedsl_res_src.yml new file mode 100644 index 0000000..6ecbd50 --- /dev/null +++ b/schema/common/metaschema/typedsl_res_src.yml @@ -0,0 +1,9 @@ +[{ + "extype": "string" +}, { + "extype": "string?" +}, { + "extype": "string[]" +}, { + "extype": "string[]?" +}] diff --git a/schema/common/metaschema/vocab_res.yml b/schema/common/metaschema/vocab_res.yml new file mode 100644 index 0000000..726bef0 --- /dev/null +++ b/schema/common/metaschema/vocab_res.yml @@ -0,0 +1,36 @@ +- | + ## Vocabulary resolution + + The schema may designate one or more vocabulary fields which use + terms defined in the vocabulary. The vocabulary are the short + names of all the identifiers in the schema. Processing must + resolve vocabulary fields to either vocabulary terms or absolute + URIs by first applying the link resolution rules defined above, + then applying the following additional rule: + + * If a reference URI is a vocabulary field, and there is a vocabulary + term which maps to the resolved URI, the reference must be replace with + the vocabulary term. 
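As a rough illustration of this rule, the sketch below resolves a vocabulary field using the `acid` namespace from the example that follows. The `vocab` and `rvocab` dictionaries are stand-ins for the term tables a loader builds from the schema (compare the `_vocab`/`_rvocab` dictionaries in the generated Python module); the link-resolution step is deliberately simplified to prefix expansion.

```python
# Sketch of vocabulary resolution: expand the value via (simplified) link
# resolution, then replace it with a vocabulary term if one maps to that URI.
vocab = {"red": "http://example.com/acid#red"}
rvocab = {uri: term for term, uri in vocab.items()}
namespaces = {"acid": "http://example.com/acid#"}

def resolve_vocab_field(value):
    if value in vocab:               # already a vocabulary term
        return value
    if ":" in value:                 # simplified link resolution: expand a known prefix
        prefix, rest = value.split(":", 1)
        if prefix in namespaces:
            value = namespaces[prefix] + rest
    return rvocab.get(value, value)  # map resolved URIs back to their short term

assert resolve_vocab_field("red") == "red"
assert resolve_vocab_field("http://example.com/acid#red") == "red"
assert resolve_vocab_field("http://example.com/acid#blue") == "http://example.com/acid#blue"
```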
+ + ### Vocabulary resolution example + + Given the following schema: + + ``` +- $include: vocab_res_schema.yml +- | + ``` + + Process the following example: + + ``` +- $include: vocab_res_src.yml +- | + ``` + + This becomes: + + ``` +- $include: vocab_res_proc.yml +- | + ``` diff --git a/schema/common/metaschema/vocab_res_proc.yml b/schema/common/metaschema/vocab_res_proc.yml new file mode 100644 index 0000000..d13ab15 --- /dev/null +++ b/schema/common/metaschema/vocab_res_proc.yml @@ -0,0 +1,15 @@ + { + "form": { + "things": [ + { + "voc": "red", + }, + { + "voc": "red", + }, + { + "voc": "http://example.com/acid#blue", + } + ] + } + } diff --git a/schema/common/metaschema/vocab_res_schema.yml b/schema/common/metaschema/vocab_res_schema.yml new file mode 100644 index 0000000..92b271e --- /dev/null +++ b/schema/common/metaschema/vocab_res_schema.yml @@ -0,0 +1,21 @@ +{ + "$namespaces": { + "acid": "http://example.com/acid#" + }, + "$graph": [{ + "name": "Colors", + "type": "enum", + "symbols": ["acid:red"] + }, + { + "name": "ExampleType", + "type": "record", + "fields": [{ + "name": "voc", + "type": "string", + "jsonldPredicate": { + "_type": "@vocab" + } + }] + }] +} diff --git a/schema/common/metaschema/vocab_res_src.yml b/schema/common/metaschema/vocab_res_src.yml new file mode 100644 index 0000000..82954f1 --- /dev/null +++ b/schema/common/metaschema/vocab_res_src.yml @@ -0,0 +1,15 @@ + { + "form": { + "things": [ + { + "voc": "red", + }, + { + "voc": "http://example.com/acid#red", + }, + { + "voc": "http://example.com/acid#blue", + } + ] + } + } diff --git a/schema/common/steps_description.txt b/schema/common/steps_description.txt new file mode 100644 index 0000000..dfc2bbf --- /dev/null +++ b/schema/common/steps_description.txt @@ -0,0 +1,2 @@ +The individual steps that make up the workflow. Each step is executed when all of its +input data links are fulfilled. diff --git a/schema/icon.png b/schema/icon.png new file mode 100644 index 0000000..1e5e609 Binary files /dev/null and b/schema/icon.png differ diff --git a/schema/v19.09/Process.yml b/schema/v19.09/Process.yml new file mode 100644 index 0000000..3f367a8 --- /dev/null +++ b/schema/v19.09/Process.yml @@ -0,0 +1,103 @@ +# Subset of CWL's process, can't use process as is because of types not implemented. +saladVersion: v1.1 +$base: "https://w3id.org/cwl/cwl#" + +$namespaces: + cwl: "https://w3id.org/cwl/cwl#" + sld: "https://w3id.org/cwl/salad#" + rdfs: "http://www.w3.org/2000/01/rdf-schema#" + +$graph: + +- name: Labeled + type: record + abstract: true + fields: + - name: label + type: + - "null" + - string + jsonldPredicate: "rdfs:label" + doc: "A short, human-readable label of this object." + +- name: Identified + type: record + abstract: true + fields: + - name: id + type: string? + jsonldPredicate: "@id" + doc: "The unique identifier for this object." + +- name: Parameter + type: record + extends: [sld:Documented, Identified] # Dropped FieldBase. + abstract: true + doc: | + Define an input or output parameter to a process. + +- name: InputParameter + type: record + abstract: true + extends: [Parameter] # , InputFormat, LoadContents] + fields: + - name: default + type: Any? + jsonldPredicate: + _id: sld:default + noLinkCheck: true + doc: | + The default value to use for this parameter if the parameter is missing + from the input object, or if the value of the parameter in the input + object is `null`. Default values are applied before evaluating expressions + (e.g. dependent `valueFrom` fields). 
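For illustration only, a hypothetical Python sketch of the `default` semantics described above: a parameter that is missing from the input object, or whose value is `null`, receives its default before validation and expression evaluation. The helper name and the example parameters are invented for this sketch and are not part of the schema.

```python
# Hypothetical sketch of applying InputParameter defaults to an input object.
def apply_input_defaults(input_parameters, input_object):
    resolved = {}
    for param_id, param in input_parameters.items():
        value = input_object.get(param_id)
        if value is None:
            # Missing or null values fall back to the parameter's default,
            # staying None when no default is declared.
            value = param.get("default")
        resolved[param_id] = value
    return resolved

params = {
    "num_lines": {"type": "int", "default": 10},
    "input_dataset": {"type": "data"},
}
assert apply_input_defaults(params, {"input_dataset": "mydata"}) == {
    "num_lines": 10,
    "input_dataset": "mydata",
}
```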
+ + +- name: OutputParameter + type: record + extends: [Parameter] + abstract: true + +- type: record + name: Process + extends: [Identified, Labeled, sld:Documented] + abstract: true + doc: | + + The base executable type in CWL is the `Process` object defined by the + document. Note that the `Process` object is abstract and cannot be + directly executed. + + fields: + - name: inputs + type: + type: array + items: InputParameter + jsonldPredicate: + _id: "cwl:inputs" + mapSubject: id + mapPredicate: type + doc: | + Defines the input parameters of the process. The process is ready to + run when all required input parameters are associated with concrete + values. Input parameters include a schema for each parameter which is + used to validate the input object. It may also be used to build a user + interface for constructing the input object. + + When accepting an input object, all input parameters must have a value. + If an input parameter is missing from the input object, it must be + assigned a value of `null` (or the value of `default` for that + parameter, if provided) for the purposes of validation and evaluation + of expressions. + + - name: outputs + type: + type: array + items: OutputParameter + jsonldPredicate: + _id: "cwl:outputs" + mapSubject: id + mapPredicate: type + doc: | + Defines the parameters representing the output of the process. May be + used to generate and/or validate the output object. diff --git a/schema/v19.09/schema.py b/schema/v19.09/schema.py new file mode 100644 index 0000000..7726c2b --- /dev/null +++ b/schema/v19.09/schema.py @@ -0,0 +1,2717 @@ +# +# This file was autogenerated using schema-salad-tool --codegen=python +# The code itself is released under the Apache 2.0 license and the help text is +# subject to the license of the original schema. +# +import copy +import os +import re +import uuid # pylint: disable=unused-import # noqa: F401 +from typing import ( + Any, + Dict, + List, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from six import iteritems, string_types, text_type +from six.moves import StringIO, urllib +from typing_extensions import Text # pylint: disable=unused-import + +from ruamel import yaml +from ruamel.yaml.comments import CommentedMap +from schema_salad.ref_resolver import Fetcher +from schema_salad.sourceline import SourceLine, add_lc_filename +from schema_salad.exceptions import SchemaSaladException, ValidationException + +# move to a regular typing import when Python 3.3-3.6 is no longer supported + +_vocab = {} # type: Dict[Text, Text] +_rvocab = {} # type: Dict[Text, Text] + + +class Savable(object): + @classmethod + def fromDoc(cls, _doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, Text, LoadingOptions, Optional[Text]) -> Savable + pass + + def save(self, top=False, base_url="", relative_uris=True): + # type: (bool, Text, bool) -> Dict[Text, Text] + pass + + +class LoadingOptions(object): + def __init__( + self, + fetcher=None, # type: Optional[Fetcher] + namespaces=None, # type: Optional[Dict[Text, Text]] + fileuri=None, # type: Optional[Text] + copyfrom=None, # type: Optional[LoadingOptions] + original_doc=None, # type: Optional[Any] + ): # type: (...) 
-> None + self.idx = {} # type: Dict[Text, Dict[Text, Any]] + self.fileuri = fileuri # type: Optional[Text] + self.namespaces = namespaces + self.original_doc = original_doc + if copyfrom is not None: + self.idx = copyfrom.idx + if fetcher is None: + fetcher = copyfrom.fetcher + if fileuri is None: + self.fileuri = copyfrom.fileuri + if namespaces is None: + self.namespaces = copyfrom.namespaces + + if fetcher is None: + import requests + from cachecontrol.wrapper import CacheControl + from cachecontrol.caches import FileCache + from schema_salad.ref_resolver import DefaultFetcher + + if "HOME" in os.environ: + session = CacheControl( + requests.Session(), + cache=FileCache( + os.path.join(os.environ["HOME"], ".cache", "salad") + ), + ) + elif "TMPDIR" in os.environ: + session = CacheControl( + requests.Session(), + cache=FileCache( + os.path.join(os.environ["TMPDIR"], ".cache", "salad") + ), + ) + else: + session = CacheControl( + requests.Session(), cache=FileCache("/tmp", ".cache", "salad") + ) + self.fetcher = DefaultFetcher({}, session) # type: Fetcher + else: + self.fetcher = fetcher + + self.vocab = _vocab + self.rvocab = _rvocab + + if namespaces is not None: + self.vocab = self.vocab.copy() + self.rvocab = self.rvocab.copy() + for k, v in iteritems(namespaces): + self.vocab[k] = v + self.rvocab[v] = k + + +def load_field(val, fieldtype, baseuri, loadingOptions): + # type: (Union[Text, Dict[Text, Text]], _Loader, Text, LoadingOptions) -> Any + if isinstance(val, MutableMapping): + if "$import" in val: + if loadingOptions.fileuri is None: + raise SchemaSaladException("Cannot load $import without fileuri") + return _document_load_by_url( + fieldtype, + loadingOptions.fetcher.urljoin(loadingOptions.fileuri, val["$import"]), + loadingOptions, + ) + elif "$include" in val: + if loadingOptions.fileuri is None: + raise SchemaSaladException("Cannot load $import without fileuri") + val = loadingOptions.fetcher.fetch_text( + loadingOptions.fetcher.urljoin(loadingOptions.fileuri, val["$include"]) + ) + return fieldtype.load(val, baseuri, loadingOptions) + + +save_type = Union[ + Dict[Text, Text], List[Union[Dict[Text, Text], List[Any], None]], None +] + + +def save( + val, # type: Optional[Union[Savable, MutableSequence[Savable]]] + top=True, # type: bool + base_url="", # type: Text + relative_uris=True, # type: bool +): # type: (...) -> save_type + + if isinstance(val, Savable): + return val.save(top=top, base_url=base_url, relative_uris=relative_uris) + if isinstance(val, MutableSequence): + return [ + save(v, top=False, base_url=base_url, relative_uris=relative_uris) + for v in val + ] + if isinstance(val, MutableMapping): + newdict = {} + for key in val: + newdict[key] = save( + val[key], top=False, base_url=base_url, relative_uris=relative_uris + ) + return newdict + return val + + +def expand_url( + url, # type: Union[str, Text] + base_url, # type: Union[str, Text] + loadingOptions, # type: LoadingOptions + scoped_id=False, # type: bool + vocab_term=False, # type: bool + scoped_ref=None, # type: Optional[int] +): + # type: (...) 
-> Text + url = Text(url) + + if url in (u"@id", u"@type"): + return url + + if vocab_term and url in loadingOptions.vocab: + return url + + if bool(loadingOptions.vocab) and u":" in url: + prefix = url.split(u":")[0] + if prefix in loadingOptions.vocab: + url = loadingOptions.vocab[prefix] + url[len(prefix) + 1 :] + + split = urllib.parse.urlsplit(url) + + if ( + (bool(split.scheme) and split.scheme in [u"http", u"https", u"file"]) + or url.startswith(u"$(") + or url.startswith(u"${") + ): + pass + elif scoped_id and not bool(split.fragment): + splitbase = urllib.parse.urlsplit(base_url) + frg = u"" + if bool(splitbase.fragment): + frg = splitbase.fragment + u"/" + split.path + else: + frg = split.path + pt = splitbase.path if splitbase.path != "" else "/" + url = urllib.parse.urlunsplit( + (splitbase.scheme, splitbase.netloc, pt, splitbase.query, frg) + ) + elif scoped_ref is not None and not bool(split.fragment): + splitbase = urllib.parse.urlsplit(base_url) + sp = splitbase.fragment.split(u"/") + n = scoped_ref + while n > 0 and len(sp) > 0: + sp.pop() + n -= 1 + sp.append(url) + url = urllib.parse.urlunsplit( + ( + splitbase.scheme, + splitbase.netloc, + splitbase.path, + splitbase.query, + u"/".join(sp), + ) + ) + else: + url = loadingOptions.fetcher.urljoin(base_url, url) + + if vocab_term: + split = urllib.parse.urlsplit(url) + if bool(split.scheme): + if url in loadingOptions.rvocab: + return loadingOptions.rvocab[url] + else: + raise ValidationException("Term '{}' not in vocabulary".format(url)) + + return url + + +class _Loader(object): + def load(self, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, Text, LoadingOptions, Optional[Text]) -> Any + pass + + +class _AnyLoader(_Loader): + def load(self, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, Text, LoadingOptions, Optional[Text]) -> Any + if doc is not None: + return doc + raise ValidationException("Expected non-null") + + +class _PrimitiveLoader(_Loader): + def __init__(self, tp): + # type: (Union[type, Tuple[Type[Text], Type[Text]]]) -> None + self.tp = tp + + def load(self, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, Text, LoadingOptions, Optional[Text]) -> Any + if not isinstance(doc, self.tp): + raise ValidationException( + "Expected a {} but got {}".format( + self.tp.__class__.__name__, doc.__class__.__name__ + ) + ) + return doc + + def __repr__(self): # type: () -> str + return str(self.tp) + + +class _ArrayLoader(_Loader): + def __init__(self, items): + # type: (_Loader) -> None + self.items = items + + def load(self, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, Text, LoadingOptions, Optional[Text]) -> Any + if not isinstance(doc, MutableSequence): + raise ValidationException("Expected a list") + r = [] # type: List[Any] + errors = [] # type: List[SchemaSaladException] + for i in range(0, len(doc)): + try: + lf = load_field( + doc[i], _UnionLoader((self, self.items)), baseuri, loadingOptions + ) + if isinstance(lf, MutableSequence): + r.extend(lf) + else: + r.append(lf) + except ValidationException as e: + errors.append(e.with_sourceline(SourceLine(doc, i, str))) + if errors: + raise ValidationException("", None, errors) + return r + + def __repr__(self): # type: () -> str + return "array<{}>".format(self.items) + + +class _EnumLoader(_Loader): + def __init__(self, symbols): + # type: (Sequence[Text]) -> None + self.symbols = symbols + + def load(self, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, Text, LoadingOptions, Optional[Text]) -> Any + 
if doc in self.symbols: + return doc + else: + raise ValidationException("Expected one of {}".format(self.symbols)) + + +class _RecordLoader(_Loader): + def __init__(self, classtype): + # type: (Type[Savable]) -> None + self.classtype = classtype + + def load(self, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, Text, LoadingOptions, Optional[Text]) -> Any + if not isinstance(doc, MutableMapping): + raise ValidationException("Expected a dict") + return self.classtype.fromDoc(doc, baseuri, loadingOptions, docRoot=docRoot) + + def __repr__(self): # type: () -> str + return str(self.classtype) + + +class _UnionLoader(_Loader): + def __init__(self, alternates): + # type: (Sequence[_Loader]) -> None + self.alternates = alternates + + def load(self, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, Text, LoadingOptions, Optional[Text]) -> Any + errors = [] + for t in self.alternates: + try: + return t.load(doc, baseuri, loadingOptions, docRoot=docRoot) + except ValidationException as e: + errors.append( + ValidationException( + u"tried {} but".format(t.__class__.__name__), None, [e] + ) + ) + raise ValidationException("", None, errors, u"-") + + def __repr__(self): # type: () -> str + return " | ".join(str(a) for a in self.alternates) + + +class _URILoader(_Loader): + def __init__(self, inner, scoped_id, vocab_term, scoped_ref): + # type: (_Loader, bool, bool, Union[int, None]) -> None + self.inner = inner + self.scoped_id = scoped_id + self.vocab_term = vocab_term + self.scoped_ref = scoped_ref + + def load(self, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, Text, LoadingOptions, Optional[Text]) -> Any + if isinstance(doc, MutableSequence): + doc = [ + expand_url( + i, + baseuri, + loadingOptions, + self.scoped_id, + self.vocab_term, + self.scoped_ref, + ) + for i in doc + ] + if isinstance(doc, string_types): + doc = expand_url( + doc, + baseuri, + loadingOptions, + self.scoped_id, + self.vocab_term, + self.scoped_ref, + ) + return self.inner.load(doc, baseuri, loadingOptions) + + +class _TypeDSLLoader(_Loader): + typeDSLregex = re.compile(r"^([^[?]+)(\[\])?(\?)?$") + + def __init__(self, inner, refScope): + # type: (_Loader, Union[int, None]) -> None + self.inner = inner + self.refScope = refScope + + def resolve( + self, + doc, # type: Text + baseuri, # type: Text + loadingOptions, # type: LoadingOptions + ): + # type: (...) 
-> Union[List[Union[Dict[Text, Text], Text]], Dict[Text, Text], Text] + m = self.typeDSLregex.match(doc) + if m: + first = expand_url( + m.group(1), baseuri, loadingOptions, False, True, self.refScope + ) + second = third = None + if bool(m.group(2)): + second = {u"type": u"array", u"items": first} + # second = CommentedMap((("type", "array"), + # ("items", first))) + # second.lc.add_kv_line_col("type", lc) + # second.lc.add_kv_line_col("items", lc) + # second.lc.filename = filename + if bool(m.group(3)): + third = [u"null", second or first] + # third = CommentedSeq([u"null", second or first]) + # third.lc.add_kv_line_col(0, lc) + # third.lc.add_kv_line_col(1, lc) + # third.lc.filename = filename + return third or second or first + return doc + + def load(self, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, Text, LoadingOptions, Optional[Text]) -> Any + if isinstance(doc, MutableSequence): + r = [] # type: List[Any] + for d in doc: + if isinstance(d, string_types): + resolved = self.resolve(d, baseuri, loadingOptions) + if isinstance(resolved, MutableSequence): + for i in resolved: + if i not in r: + r.append(i) + else: + if resolved not in r: + r.append(resolved) + else: + r.append(d) + doc = r + elif isinstance(doc, string_types): + doc = self.resolve(doc, baseuri, loadingOptions) + + return self.inner.load(doc, baseuri, loadingOptions) + + +class _IdMapLoader(_Loader): + def __init__(self, inner, mapSubject, mapPredicate): + # type: (_Loader, Text, Union[Text, None]) -> None + self.inner = inner + self.mapSubject = mapSubject + self.mapPredicate = mapPredicate + + def load(self, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, Text, LoadingOptions, Optional[Text]) -> Any + if isinstance(doc, MutableMapping): + r = [] # type: List[Any] + for k in sorted(doc.keys()): + val = doc[k] + if isinstance(val, CommentedMap): + v = copy.copy(val) + v.lc.data = val.lc.data + v.lc.filename = val.lc.filename + v[self.mapSubject] = k + r.append(v) + elif isinstance(val, MutableMapping): + v2 = copy.copy(val) + v2[self.mapSubject] = k + r.append(v2) + else: + if self.mapPredicate: + v3 = {self.mapPredicate: val} + v3[self.mapSubject] = k + r.append(v3) + else: + raise ValidationException("No mapPredicate") + doc = r + return self.inner.load(doc, baseuri, loadingOptions) + + +def _document_load(loader, doc, baseuri, loadingOptions): + # type: (_Loader, Any, Text, LoadingOptions) -> Any + if isinstance(doc, string_types): + return _document_load_by_url( + loader, loadingOptions.fetcher.urljoin(baseuri, doc), loadingOptions + ) + + if isinstance(doc, MutableMapping): + if "$namespaces" in doc: + loadingOptions = LoadingOptions( + copyfrom=loadingOptions, namespaces=doc["$namespaces"] + ) + doc = {k: v for k, v in doc.items() if k != "$namespaces"} + + if "$base" in doc: + baseuri = doc["$base"] + + if "$graph" in doc: + return loader.load(doc["$graph"], baseuri, loadingOptions) + else: + return loader.load(doc, baseuri, loadingOptions, docRoot=baseuri) + + if isinstance(doc, MutableSequence): + return loader.load(doc, baseuri, loadingOptions) + + raise ValidationException("Oops, we shouldn't be here!") + + +def _document_load_by_url(loader, url, loadingOptions): + # type: (_Loader, Text, LoadingOptions) -> Any + if url in loadingOptions.idx: + return _document_load(loader, loadingOptions.idx[url], url, loadingOptions) + + text = loadingOptions.fetcher.fetch_text(url) + if isinstance(text, bytes): + textIO = StringIO(text.decode("utf-8")) + else: + textIO = StringIO(text) + 
textIO.name = str(url) + result = yaml.round_trip_load(textIO, preserve_quotes=True) + add_lc_filename(result, url) + + loadingOptions.idx[url] = result + + loadingOptions = LoadingOptions(copyfrom=loadingOptions, fileuri=url) + + return _document_load(loader, result, url, loadingOptions) + + +def file_uri(path, split_frag=False): # type: (str, bool) -> str + if path.startswith("file://"): + return path + if split_frag: + pathsp = path.split("#", 2) + frag = "#" + urllib.parse.quote(str(pathsp[1])) if len(pathsp) == 2 else "" + urlpath = urllib.request.pathname2url(str(pathsp[0])) + else: + urlpath = urllib.request.pathname2url(path) + frag = "" + if urlpath.startswith("//"): + return "file:{}{}".format(urlpath, frag) + else: + return "file://{}{}".format(urlpath, frag) + + +def prefix_url(url, namespaces): # type: (Text, Dict[Text, Text]) -> Text + for k, v in namespaces.items(): + if url.startswith(v): + return k + ":" + url[len(v) :] + return url + + +def save_relative_uri(uri, base_url, scoped_id, ref_scope, relative_uris): + # type: (Text, Text, bool, Optional[int], bool) -> Union[Text, List[Text]] + if not relative_uris or uri == base_url: + return uri + if isinstance(uri, MutableSequence): + return [ + save_relative_uri(u, base_url, scoped_id, ref_scope, relative_uris) + for u in uri + ] + elif isinstance(uri, text_type): + urisplit = urllib.parse.urlsplit(uri) + basesplit = urllib.parse.urlsplit(base_url) + if urisplit.scheme == basesplit.scheme and urisplit.netloc == basesplit.netloc: + if urisplit.path != basesplit.path: + p = os.path.relpath(urisplit.path, os.path.dirname(basesplit.path)) + if urisplit.fragment: + p = p + "#" + urisplit.fragment + return p + + basefrag = basesplit.fragment + "/" + if ref_scope: + sp = basefrag.split("/") + i = 0 + while i < ref_scope: + sp.pop() + i += 1 + basefrag = "/".join(sp) + + if urisplit.fragment.startswith(basefrag): + return urisplit.fragment[len(basefrag) :] + else: + return urisplit.fragment + return uri + else: + return save(uri, top=False, base_url=base_url) + + +class Documented(Savable): + pass + + +class RecordField(Documented): + """ +A field of a record. + """ + def __init__( + self, + doc, # type: Any + name, # type: Any + type, # type: Any + extension_fields=None, # type: Optional[Dict[Text, Any]] + loadingOptions=None # type: Optional[LoadingOptions] + ): # type: (...) 
-> None + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = yaml.comments.CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.doc = doc + self.name = name + self.type = type + + @classmethod + def fromDoc(cls, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, Text, LoadingOptions, Optional[Text]) -> RecordField + + _doc = copy.copy(doc) + if hasattr(doc, 'lc'): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + errors = [] + if 'name' in _doc: + try: + name = load_field(_doc.get( + 'name'), uri_strtype_True_False_None, baseuri, loadingOptions) + except ValidationException as e: + errors.append( + ValidationException( + "the `name` field is not valid because:", + SourceLine(_doc, 'name', str), + [e] + ) + ) + else: + name = None + + if name is None: + if docRoot is not None: + name = docRoot + else: + raise ValidationException("Missing name") + baseuri = name + if 'doc' in _doc: + try: + doc = load_field(_doc.get( + 'doc'), union_of_None_type_or_strtype_or_array_of_strtype, baseuri, loadingOptions) + except ValidationException as e: + errors.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, 'doc', str), + [e] + ) + ) + else: + doc = None + try: + type = load_field(_doc.get( + 'type'), typedsl_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_2, baseuri, loadingOptions) + except ValidationException as e: + errors.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, 'type', str), + [e] + ) + ) + + extension_fields = yaml.comments.CommentedMap() + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url(k, + u"", + loadingOptions, + scoped_id=False, + vocab_term=False) + extension_fields[ex] = _doc[k] + else: + errors.append( + ValidationException( + "invalid field `%s`, expected one of: `doc`, `name`, `type`" % (k), + SourceLine(_doc, k, str) + ) + ) + break + + if errors: + raise ValidationException("Trying 'RecordField'", None, errors) + loadingOptions = copy.deepcopy(loadingOptions) + loadingOptions.original_doc = _doc + return cls(doc, name, type, extension_fields=extension_fields, loadingOptions=loadingOptions) + + def save(self, top=False, base_url="", relative_uris=True): + # type: (bool, Text, bool) -> Dict[Text, Any] + r = yaml.comments.CommentedMap() # type: Dict[Text, Any] + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + + if self.name is not None: + u = save_relative_uri( + self.name, + base_url, + True, + None, + relative_uris) + if u: + r['name'] = u + + if self.doc is not None: + r['doc'] = save( + self.doc, + top=False, + base_url=self.name, + relative_uris=relative_uris) + + if self.type is not None: + r['type'] = save( + self.type, + top=False, + base_url=self.name, + relative_uris=relative_uris) + + if top and self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + + return r + + attrs = frozenset([u'doc', u'name', u'type']) + + +class RecordSchema(Savable): + def __init__( + self, + fields, # type: Any + type, # type: Any + extension_fields=None, # type: Optional[Dict[Text, Any]] + loadingOptions=None # type: Optional[LoadingOptions] + ): # type: (...) 
-> None + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = yaml.comments.CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.fields = fields + self.type = type + + @classmethod + def fromDoc(cls, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, Text, LoadingOptions, Optional[Text]) -> RecordSchema + + _doc = copy.copy(doc) + if hasattr(doc, 'lc'): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + errors = [] + if 'fields' in _doc: + try: + fields = load_field(_doc.get( + 'fields'), idmap_fields_union_of_None_type_or_array_of_RecordFieldLoader, baseuri, loadingOptions) + except ValidationException as e: + errors.append( + ValidationException( + "the `fields` field is not valid because:", + SourceLine(_doc, 'fields', str), + [e] + ) + ) + else: + fields = None + try: + type = load_field(_doc.get( + 'type'), typedsl_enum_d9cba076fca539106791a4f46d198c7fcfbdb779Loader_2, baseuri, loadingOptions) + except ValidationException as e: + errors.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, 'type', str), + [e] + ) + ) + + extension_fields = yaml.comments.CommentedMap() + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url(k, + u"", + loadingOptions, + scoped_id=False, + vocab_term=False) + extension_fields[ex] = _doc[k] + else: + errors.append( + ValidationException( + "invalid field `%s`, expected one of: `fields`, `type`" % (k), + SourceLine(_doc, k, str) + ) + ) + break + + if errors: + raise ValidationException("Trying 'RecordSchema'", None, errors) + loadingOptions = copy.deepcopy(loadingOptions) + loadingOptions.original_doc = _doc + return cls(fields, type, extension_fields=extension_fields, loadingOptions=loadingOptions) + + def save(self, top=False, base_url="", relative_uris=True): + # type: (bool, Text, bool) -> Dict[Text, Any] + r = yaml.comments.CommentedMap() # type: Dict[Text, Any] + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + + if self.fields is not None: + r['fields'] = save( + self.fields, + top=False, + base_url=base_url, + relative_uris=relative_uris) + + if self.type is not None: + r['type'] = save( + self.type, + top=False, + base_url=base_url, + relative_uris=relative_uris) + + if top and self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + + return r + + attrs = frozenset([u'fields', u'type']) + + +class EnumSchema(Savable): + """ +Define an enumerated type. + + """ + def __init__( + self, + symbols, # type: Any + type, # type: Any + extension_fields=None, # type: Optional[Dict[Text, Any]] + loadingOptions=None # type: Optional[LoadingOptions] + ): # type: (...) 
-> None + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = yaml.comments.CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.symbols = symbols + self.type = type + + @classmethod + def fromDoc(cls, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, Text, LoadingOptions, Optional[Text]) -> EnumSchema + + _doc = copy.copy(doc) + if hasattr(doc, 'lc'): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + errors = [] + try: + symbols = load_field(_doc.get( + 'symbols'), uri_array_of_strtype_True_False_None, baseuri, loadingOptions) + except ValidationException as e: + errors.append( + ValidationException( + "the `symbols` field is not valid because:", + SourceLine(_doc, 'symbols', str), + [e] + ) + ) + try: + type = load_field(_doc.get( + 'type'), typedsl_enum_d961d79c225752b9fadb617367615ab176b47d77Loader_2, baseuri, loadingOptions) + except ValidationException as e: + errors.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, 'type', str), + [e] + ) + ) + + extension_fields = yaml.comments.CommentedMap() + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url(k, + u"", + loadingOptions, + scoped_id=False, + vocab_term=False) + extension_fields[ex] = _doc[k] + else: + errors.append( + ValidationException( + "invalid field `%s`, expected one of: `symbols`, `type`" % (k), + SourceLine(_doc, k, str) + ) + ) + break + + if errors: + raise ValidationException("Trying 'EnumSchema'", None, errors) + loadingOptions = copy.deepcopy(loadingOptions) + loadingOptions.original_doc = _doc + return cls(symbols, type, extension_fields=extension_fields, loadingOptions=loadingOptions) + + def save(self, top=False, base_url="", relative_uris=True): + # type: (bool, Text, bool) -> Dict[Text, Any] + r = yaml.comments.CommentedMap() # type: Dict[Text, Any] + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + + if self.symbols is not None: + u = save_relative_uri( + self.symbols, + base_url, + True, + None, + relative_uris) + if u: + r['symbols'] = u + + if self.type is not None: + r['type'] = save( + self.type, + top=False, + base_url=base_url, + relative_uris=relative_uris) + + if top and self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + + return r + + attrs = frozenset([u'symbols', u'type']) + + +class ArraySchema(Savable): + def __init__( + self, + items, # type: Any + type, # type: Any + extension_fields=None, # type: Optional[Dict[Text, Any]] + loadingOptions=None # type: Optional[LoadingOptions] + ): # type: (...) 
-> None + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = yaml.comments.CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.items = items + self.type = type + + @classmethod + def fromDoc(cls, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, Text, LoadingOptions, Optional[Text]) -> ArraySchema + + _doc = copy.copy(doc) + if hasattr(doc, 'lc'): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + errors = [] + try: + items = load_field(_doc.get( + 'items'), uri_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_False_True_2, baseuri, loadingOptions) + except ValidationException as e: + errors.append( + ValidationException( + "the `items` field is not valid because:", + SourceLine(_doc, 'items', str), + [e] + ) + ) + try: + type = load_field(_doc.get( + 'type'), typedsl_enum_d062602be0b4b8fd33e69e29a841317b6ab665bcLoader_2, baseuri, loadingOptions) + except ValidationException as e: + errors.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, 'type', str), + [e] + ) + ) + + extension_fields = yaml.comments.CommentedMap() + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url(k, + u"", + loadingOptions, + scoped_id=False, + vocab_term=False) + extension_fields[ex] = _doc[k] + else: + errors.append( + ValidationException( + "invalid field `%s`, expected one of: `items`, `type`" % (k), + SourceLine(_doc, k, str) + ) + ) + break + + if errors: + raise ValidationException("Trying 'ArraySchema'", None, errors) + loadingOptions = copy.deepcopy(loadingOptions) + loadingOptions.original_doc = _doc + return cls(items, type, extension_fields=extension_fields, loadingOptions=loadingOptions) + + def save(self, top=False, base_url="", relative_uris=True): + # type: (bool, Text, bool) -> Dict[Text, Any] + r = yaml.comments.CommentedMap() # type: Dict[Text, Any] + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + + if self.items is not None: + u = save_relative_uri( + self.items, + base_url, + False, + 2, + relative_uris) + if u: + r['items'] = u + + if self.type is not None: + r['type'] = save( + self.type, + top=False, + base_url=base_url, + relative_uris=relative_uris) + + if top and self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + + return r + + attrs = frozenset([u'items', u'type']) + + +class Labeled(Savable): + pass + + +class Identified(Savable): + pass + + +class Parameter(Documented, Identified): + """ +Define an input or output parameter to a process. + + """ + pass + + +class InputParameter(Parameter): + pass + + +class OutputParameter(Parameter): + pass + + +class Process(Identified, Labeled, Documented): + """ + +The base executable type in CWL is the `Process` object defined by the +document. Note that the `Process` object is abstract and cannot be +directly executed. + + """ + pass + + +class HasStepPosition(Savable): + pass + + +class StepPosition(Savable): + """ +This field specifies the location of the step's node when rendered in the workflow editor. 
+ """ + def __init__( + self, + top, # type: Any + left, # type: Any + extension_fields=None, # type: Optional[Dict[Text, Any]] + loadingOptions=None # type: Optional[LoadingOptions] + ): # type: (...) -> None + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = yaml.comments.CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.top = top + self.left = left + + @classmethod + def fromDoc(cls, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, Text, LoadingOptions, Optional[Text]) -> StepPosition + + _doc = copy.copy(doc) + if hasattr(doc, 'lc'): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + errors = [] + try: + top = load_field(_doc.get( + 'top'), floattype, baseuri, loadingOptions) + except ValidationException as e: + errors.append( + ValidationException( + "the `top` field is not valid because:", + SourceLine(_doc, 'top', str), + [e] + ) + ) + try: + left = load_field(_doc.get( + 'left'), floattype, baseuri, loadingOptions) + except ValidationException as e: + errors.append( + ValidationException( + "the `left` field is not valid because:", + SourceLine(_doc, 'left', str), + [e] + ) + ) + + extension_fields = yaml.comments.CommentedMap() + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url(k, + u"", + loadingOptions, + scoped_id=False, + vocab_term=False) + extension_fields[ex] = _doc[k] + else: + errors.append( + ValidationException( + "invalid field `%s`, expected one of: `top`, `left`" % (k), + SourceLine(_doc, k, str) + ) + ) + break + + if errors: + raise ValidationException("Trying 'StepPosition'", None, errors) + loadingOptions = copy.deepcopy(loadingOptions) + loadingOptions.original_doc = _doc + return cls(top, left, extension_fields=extension_fields, loadingOptions=loadingOptions) + + def save(self, top=False, base_url="", relative_uris=True): + # type: (bool, Text, bool) -> Dict[Text, Any] + r = yaml.comments.CommentedMap() # type: Dict[Text, Any] + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + + if self.top is not None: + r['top'] = save( + self.top, + top=False, + base_url=base_url, + relative_uris=relative_uris) + + if self.left is not None: + r['left'] = save( + self.left, + top=False, + base_url=base_url, + relative_uris=relative_uris) + + if top and self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + + return r + + attrs = frozenset([u'top', u'left']) + + +class ReferencesTool(Savable): + pass + + +class ToolShedRepository(Savable): + def __init__( + self, + changeset_revision, # type: Any + name, # type: Any + owner, # type: Any + tool_shed, # type: Any + extension_fields=None, # type: Optional[Dict[Text, Any]] + loadingOptions=None # type: Optional[LoadingOptions] + ): # type: (...) 
-> None + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = yaml.comments.CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.changeset_revision = changeset_revision + self.name = name + self.owner = owner + self.tool_shed = tool_shed + + @classmethod + def fromDoc(cls, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, Text, LoadingOptions, Optional[Text]) -> ToolShedRepository + + _doc = copy.copy(doc) + if hasattr(doc, 'lc'): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + errors = [] + if 'name' in _doc: + try: + name = load_field(_doc.get( + 'name'), uri_strtype_True_False_None, baseuri, loadingOptions) + except ValidationException as e: + errors.append( + ValidationException( + "the `name` field is not valid because:", + SourceLine(_doc, 'name', str), + [e] + ) + ) + else: + name = None + + if name is None: + if docRoot is not None: + name = docRoot + else: + raise ValidationException("Missing name") + baseuri = name + try: + changeset_revision = load_field(_doc.get( + 'changeset_revision'), strtype, baseuri, loadingOptions) + except ValidationException as e: + errors.append( + ValidationException( + "the `changeset_revision` field is not valid because:", + SourceLine(_doc, 'changeset_revision', str), + [e] + ) + ) + try: + owner = load_field(_doc.get( + 'owner'), strtype, baseuri, loadingOptions) + except ValidationException as e: + errors.append( + ValidationException( + "the `owner` field is not valid because:", + SourceLine(_doc, 'owner', str), + [e] + ) + ) + try: + tool_shed = load_field(_doc.get( + 'tool_shed'), strtype, baseuri, loadingOptions) + except ValidationException as e: + errors.append( + ValidationException( + "the `tool_shed` field is not valid because:", + SourceLine(_doc, 'tool_shed', str), + [e] + ) + ) + + extension_fields = yaml.comments.CommentedMap() + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url(k, + u"", + loadingOptions, + scoped_id=False, + vocab_term=False) + extension_fields[ex] = _doc[k] + else: + errors.append( + ValidationException( + "invalid field `%s`, expected one of: `changeset_revision`, `name`, `owner`, `tool_shed`" % (k), + SourceLine(_doc, k, str) + ) + ) + break + + if errors: + raise ValidationException("Trying 'ToolShedRepository'", None, errors) + loadingOptions = copy.deepcopy(loadingOptions) + loadingOptions.original_doc = _doc + return cls(changeset_revision, name, owner, tool_shed, extension_fields=extension_fields, loadingOptions=loadingOptions) + + def save(self, top=False, base_url="", relative_uris=True): + # type: (bool, Text, bool) -> Dict[Text, Any] + r = yaml.comments.CommentedMap() # type: Dict[Text, Any] + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + + if self.name is not None: + u = save_relative_uri( + self.name, + base_url, + True, + None, + relative_uris) + if u: + r['name'] = u + + if self.changeset_revision is not None: + r['changeset_revision'] = save( + self.changeset_revision, + top=False, + base_url=self.name, + relative_uris=relative_uris) + + if self.owner is not None: + r['owner'] = save( + self.owner, + top=False, + base_url=self.name, + relative_uris=relative_uris) + + if self.tool_shed is not None: + r['tool_shed'] = save( + self.tool_shed, + top=False, + base_url=self.name, + relative_uris=relative_uris) + + if top and self.loadingOptions.namespaces: + 
r["$namespaces"] = self.loadingOptions.namespaces + + return r + + attrs = frozenset([u'changeset_revision', u'name', u'owner', u'tool_shed']) + + +class WorkflowInputParameter(InputParameter): + def __init__( + self, + doc, # type: Any + id, # type: Any + default, # type: Any + type, # type: Any + extension_fields=None, # type: Optional[Dict[Text, Any]] + loadingOptions=None # type: Optional[LoadingOptions] + ): # type: (...) -> None + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = yaml.comments.CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.doc = doc + self.id = id + self.default = default + self.type = type + + @classmethod + def fromDoc(cls, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, Text, LoadingOptions, Optional[Text]) -> WorkflowInputParameter + + _doc = copy.copy(doc) + if hasattr(doc, 'lc'): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + errors = [] + if 'id' in _doc: + try: + id = load_field(_doc.get( + 'id'), uri_union_of_None_type_or_strtype_True_False_None, baseuri, loadingOptions) + except ValidationException as e: + errors.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, 'id', str), + [e] + ) + ) + else: + id = None + + if id is None: + if docRoot is not None: + id = docRoot + else: + id = "_:" + str(uuid.uuid4()) + baseuri = id + if 'doc' in _doc: + try: + doc = load_field(_doc.get( + 'doc'), union_of_None_type_or_strtype_or_array_of_strtype, baseuri, loadingOptions) + except ValidationException as e: + errors.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, 'doc', str), + [e] + ) + ) + else: + doc = None + if 'default' in _doc: + try: + default = load_field(_doc.get( + 'default'), union_of_None_type_or_Any_type, baseuri, loadingOptions) + except ValidationException as e: + errors.append( + ValidationException( + "the `default` field is not valid because:", + SourceLine(_doc, 'default', str), + [e] + ) + ) + else: + default = None + if 'type' in _doc: + try: + type = load_field(_doc.get( + 'type'), typedsl_union_of_None_type_or_GalaxyTypeLoader_2, baseuri, loadingOptions) + except ValidationException as e: + errors.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, 'type', str), + [e] + ) + ) + else: + type = None + + extension_fields = yaml.comments.CommentedMap() + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url(k, + u"", + loadingOptions, + scoped_id=False, + vocab_term=False) + extension_fields[ex] = _doc[k] + else: + errors.append( + ValidationException( + "invalid field `%s`, expected one of: `doc`, `id`, `default`, `type`" % (k), + SourceLine(_doc, k, str) + ) + ) + break + + if errors: + raise ValidationException("Trying 'WorkflowInputParameter'", None, errors) + loadingOptions = copy.deepcopy(loadingOptions) + loadingOptions.original_doc = _doc + return cls(doc, id, default, type, extension_fields=extension_fields, loadingOptions=loadingOptions) + + def save(self, top=False, base_url="", relative_uris=True): + # type: (bool, Text, bool) -> Dict[Text, Any] + r = yaml.comments.CommentedMap() # type: Dict[Text, Any] + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + + if self.id is not None: + u = save_relative_uri( + self.id, + base_url, + True, + None, + relative_uris) + if 
u: + r['id'] = u + + if self.doc is not None: + r['doc'] = save( + self.doc, + top=False, + base_url=self.id, + relative_uris=relative_uris) + + if self.default is not None: + r['default'] = save( + self.default, + top=False, + base_url=self.id, + relative_uris=relative_uris) + + if self.type is not None: + r['type'] = save( + self.type, + top=False, + base_url=self.id, + relative_uris=relative_uris) + + if top and self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + + return r + + attrs = frozenset([u'doc', u'id', u'default', u'type']) + + +class WorkflowOutputParameter(OutputParameter): + """ +Describe an output parameter of a workflow. The parameter must be +connected to one parameter defined in the workflow that +will provide the value of the output parameter. It is legal to +connect a WorkflowInputParameter to a WorkflowOutputParameter. + + """ + def __init__( + self, + doc, # type: Any + id, # type: Any + outputSource, # type: Any + type, # type: Any + extension_fields=None, # type: Optional[Dict[Text, Any]] + loadingOptions=None # type: Optional[LoadingOptions] + ): # type: (...) -> None + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = yaml.comments.CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.doc = doc + self.id = id + self.outputSource = outputSource + self.type = type + + @classmethod + def fromDoc(cls, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, Text, LoadingOptions, Optional[Text]) -> WorkflowOutputParameter + + _doc = copy.copy(doc) + if hasattr(doc, 'lc'): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + errors = [] + if 'id' in _doc: + try: + id = load_field(_doc.get( + 'id'), uri_union_of_None_type_or_strtype_True_False_None, baseuri, loadingOptions) + except ValidationException as e: + errors.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, 'id', str), + [e] + ) + ) + else: + id = None + + if id is None: + if docRoot is not None: + id = docRoot + else: + id = "_:" + str(uuid.uuid4()) + baseuri = id + if 'doc' in _doc: + try: + doc = load_field(_doc.get( + 'doc'), union_of_None_type_or_strtype_or_array_of_strtype, baseuri, loadingOptions) + except ValidationException as e: + errors.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, 'doc', str), + [e] + ) + ) + else: + doc = None + if 'outputSource' in _doc: + try: + outputSource = load_field(_doc.get( + 'outputSource'), union_of_None_type_or_strtype, baseuri, loadingOptions) + except ValidationException as e: + errors.append( + ValidationException( + "the `outputSource` field is not valid because:", + SourceLine(_doc, 'outputSource', str), + [e] + ) + ) + else: + outputSource = None + if 'type' in _doc: + try: + type = load_field(_doc.get( + 'type'), typedsl_union_of_None_type_or_GalaxyTypeLoader_2, baseuri, loadingOptions) + except ValidationException as e: + errors.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, 'type', str), + [e] + ) + ) + else: + type = None + + extension_fields = yaml.comments.CommentedMap() + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url(k, + u"", + loadingOptions, + scoped_id=False, + vocab_term=False) + extension_fields[ex] = _doc[k] + else: + errors.append( + ValidationException( + "invalid field `%s`, expected one of: `doc`, `id`, 
`outputSource`, `type`" % (k), + SourceLine(_doc, k, str) + ) + ) + break + + if errors: + raise ValidationException("Trying 'WorkflowOutputParameter'", None, errors) + loadingOptions = copy.deepcopy(loadingOptions) + loadingOptions.original_doc = _doc + return cls(doc, id, outputSource, type, extension_fields=extension_fields, loadingOptions=loadingOptions) + + def save(self, top=False, base_url="", relative_uris=True): + # type: (bool, Text, bool) -> Dict[Text, Any] + r = yaml.comments.CommentedMap() # type: Dict[Text, Any] + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + + if self.id is not None: + u = save_relative_uri( + self.id, + base_url, + True, + None, + relative_uris) + if u: + r['id'] = u + + if self.doc is not None: + r['doc'] = save( + self.doc, + top=False, + base_url=self.id, + relative_uris=relative_uris) + + if self.outputSource is not None: + r['outputSource'] = save( + self.outputSource, + top=False, + base_url=self.id, + relative_uris=relative_uris) + + if self.type is not None: + r['type'] = save( + self.type, + top=False, + base_url=self.id, + relative_uris=relative_uris) + + if top and self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + + return r + + attrs = frozenset([u'doc', u'id', u'outputSource', u'type']) + + +class WorkflowStep(Identified, Labeled, Documented, HasStepPosition, ReferencesTool): + """ +Workflow step. + + """ + def __init__( + self, + id, # type: Any + label, # type: Any + doc, # type: Any + position, # type: Any + tool_id, # type: Any + tool_shed_repository, # type: Any + tool_version, # type: Any + in_, # type: Any + out, # type: Any + type, # type: Any + extension_fields=None, # type: Optional[Dict[Text, Any]] + loadingOptions=None # type: Optional[LoadingOptions] + ): # type: (...) 
-> None + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = yaml.comments.CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.id = id + self.label = label + self.doc = doc + self.position = position + self.tool_id = tool_id + self.tool_shed_repository = tool_shed_repository + self.tool_version = tool_version + self.in_ = in_ + self.out = out + self.type = type + + @classmethod + def fromDoc(cls, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, Text, LoadingOptions, Optional[Text]) -> WorkflowStep + + _doc = copy.copy(doc) + if hasattr(doc, 'lc'): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + errors = [] + if 'id' in _doc: + try: + id = load_field(_doc.get( + 'id'), uri_union_of_None_type_or_strtype_True_False_None, baseuri, loadingOptions) + except ValidationException as e: + errors.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, 'id', str), + [e] + ) + ) + else: + id = None + + if id is None: + if docRoot is not None: + id = docRoot + else: + id = "_:" + str(uuid.uuid4()) + baseuri = id + if 'label' in _doc: + try: + label = load_field(_doc.get( + 'label'), union_of_None_type_or_strtype, baseuri, loadingOptions) + except ValidationException as e: + errors.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, 'label', str), + [e] + ) + ) + else: + label = None + if 'doc' in _doc: + try: + doc = load_field(_doc.get( + 'doc'), union_of_None_type_or_strtype_or_array_of_strtype, baseuri, loadingOptions) + except ValidationException as e: + errors.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, 'doc', str), + [e] + ) + ) + else: + doc = None + if 'position' in _doc: + try: + position = load_field(_doc.get( + 'position'), union_of_None_type_or_StepPositionLoader, baseuri, loadingOptions) + except ValidationException as e: + errors.append( + ValidationException( + "the `position` field is not valid because:", + SourceLine(_doc, 'position', str), + [e] + ) + ) + else: + position = None + if 'tool_id' in _doc: + try: + tool_id = load_field(_doc.get( + 'tool_id'), union_of_None_type_or_strtype, baseuri, loadingOptions) + except ValidationException as e: + errors.append( + ValidationException( + "the `tool_id` field is not valid because:", + SourceLine(_doc, 'tool_id', str), + [e] + ) + ) + else: + tool_id = None + if 'tool_shed_repository' in _doc: + try: + tool_shed_repository = load_field(_doc.get( + 'tool_shed_repository'), union_of_None_type_or_ToolShedRepositoryLoader, baseuri, loadingOptions) + except ValidationException as e: + errors.append( + ValidationException( + "the `tool_shed_repository` field is not valid because:", + SourceLine(_doc, 'tool_shed_repository', str), + [e] + ) + ) + else: + tool_shed_repository = None + if 'tool_version' in _doc: + try: + tool_version = load_field(_doc.get( + 'tool_version'), union_of_None_type_or_strtype, baseuri, loadingOptions) + except ValidationException as e: + errors.append( + ValidationException( + "the `tool_version` field is not valid because:", + SourceLine(_doc, 'tool_version', str), + [e] + ) + ) + else: + tool_version = None + if 'in' in _doc: + try: + in_ = load_field(_doc.get( + 'in'), idmap_in__union_of_None_type_or_array_of_WorkflowStepInputLoader, baseuri, loadingOptions) + except ValidationException as e: + errors.append( + ValidationException( + 
"the `in` field is not valid because:", + SourceLine(_doc, 'in', str), + [e] + ) + ) + else: + in_ = None + if 'out' in _doc: + try: + out = load_field(_doc.get( + 'out'), uri_union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader_or_None_type_True_False_None, baseuri, loadingOptions) + except ValidationException as e: + errors.append( + ValidationException( + "the `out` field is not valid because:", + SourceLine(_doc, 'out', str), + [e] + ) + ) + else: + out = None + if 'type' in _doc: + try: + type = load_field(_doc.get( + 'type'), typedsl_union_of_None_type_or_WorkflowStepTypeLoader_2, baseuri, loadingOptions) + except ValidationException as e: + errors.append( + ValidationException( + "the `type` field is not valid because:", + SourceLine(_doc, 'type', str), + [e] + ) + ) + else: + type = None + + extension_fields = yaml.comments.CommentedMap() + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url(k, + u"", + loadingOptions, + scoped_id=False, + vocab_term=False) + extension_fields[ex] = _doc[k] + else: + errors.append( + ValidationException( + "invalid field `%s`, expected one of: `id`, `label`, `doc`, `position`, `tool_id`, `tool_shed_repository`, `tool_version`, `in`, `out`, `type`" % (k), + SourceLine(_doc, k, str) + ) + ) + break + + if errors: + raise ValidationException("Trying 'WorkflowStep'", None, errors) + loadingOptions = copy.deepcopy(loadingOptions) + loadingOptions.original_doc = _doc + return cls(id, label, doc, position, tool_id, tool_shed_repository, tool_version, in_, out, type, extension_fields=extension_fields, loadingOptions=loadingOptions) + + def save(self, top=False, base_url="", relative_uris=True): + # type: (bool, Text, bool) -> Dict[Text, Any] + r = yaml.comments.CommentedMap() # type: Dict[Text, Any] + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + + if self.id is not None: + u = save_relative_uri( + self.id, + base_url, + True, + None, + relative_uris) + if u: + r['id'] = u + + if self.label is not None: + r['label'] = save( + self.label, + top=False, + base_url=self.id, + relative_uris=relative_uris) + + if self.doc is not None: + r['doc'] = save( + self.doc, + top=False, + base_url=self.id, + relative_uris=relative_uris) + + if self.position is not None: + r['position'] = save( + self.position, + top=False, + base_url=self.id, + relative_uris=relative_uris) + + if self.tool_id is not None: + r['tool_id'] = save( + self.tool_id, + top=False, + base_url=self.id, + relative_uris=relative_uris) + + if self.tool_shed_repository is not None: + r['tool_shed_repository'] = save( + self.tool_shed_repository, + top=False, + base_url=self.id, + relative_uris=relative_uris) + + if self.tool_version is not None: + r['tool_version'] = save( + self.tool_version, + top=False, + base_url=self.id, + relative_uris=relative_uris) + + if self.in_ is not None: + r['in'] = save( + self.in_, + top=False, + base_url=self.id, + relative_uris=relative_uris) + + if self.out is not None: + u = save_relative_uri( + self.out, + self.id, + True, + None, + relative_uris) + if u: + r['out'] = u + + if self.type is not None: + r['type'] = save( + self.type, + top=False, + base_url=self.id, + relative_uris=relative_uris) + + if top and self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + + return r + + attrs = frozenset([u'id', u'label', u'doc', u'position', u'tool_id', u'tool_shed_repository', u'tool_version', u'in', u'out', u'type']) + + +class 
Sink(Savable): + pass + + +class WorkflowStepInput(Identified, Sink, Labeled): + """ +TODO: + + """ + def __init__( + self, + id, # type: Any + source, # type: Any + label, # type: Any + default, # type: Any + extension_fields=None, # type: Optional[Dict[Text, Any]] + loadingOptions=None # type: Optional[LoadingOptions] + ): # type: (...) -> None + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = yaml.comments.CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.id = id + self.source = source + self.label = label + self.default = default + + @classmethod + def fromDoc(cls, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, Text, LoadingOptions, Optional[Text]) -> WorkflowStepInput + + _doc = copy.copy(doc) + if hasattr(doc, 'lc'): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + errors = [] + if 'id' in _doc: + try: + id = load_field(_doc.get( + 'id'), uri_union_of_None_type_or_strtype_True_False_None, baseuri, loadingOptions) + except ValidationException as e: + errors.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, 'id', str), + [e] + ) + ) + else: + id = None + + if id is None: + if docRoot is not None: + id = docRoot + else: + id = "_:" + str(uuid.uuid4()) + baseuri = id + if 'source' in _doc: + try: + source = load_field(_doc.get( + 'source'), uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_2, baseuri, loadingOptions) + except ValidationException as e: + errors.append( + ValidationException( + "the `source` field is not valid because:", + SourceLine(_doc, 'source', str), + [e] + ) + ) + else: + source = None + if 'label' in _doc: + try: + label = load_field(_doc.get( + 'label'), union_of_None_type_or_strtype, baseuri, loadingOptions) + except ValidationException as e: + errors.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, 'label', str), + [e] + ) + ) + else: + label = None + if 'default' in _doc: + try: + default = load_field(_doc.get( + 'default'), union_of_None_type_or_Any_type, baseuri, loadingOptions) + except ValidationException as e: + errors.append( + ValidationException( + "the `default` field is not valid because:", + SourceLine(_doc, 'default', str), + [e] + ) + ) + else: + default = None + + extension_fields = yaml.comments.CommentedMap() + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url(k, + u"", + loadingOptions, + scoped_id=False, + vocab_term=False) + extension_fields[ex] = _doc[k] + else: + errors.append( + ValidationException( + "invalid field `%s`, expected one of: `id`, `source`, `label`, `default`" % (k), + SourceLine(_doc, k, str) + ) + ) + break + + if errors: + raise ValidationException("Trying 'WorkflowStepInput'", None, errors) + loadingOptions = copy.deepcopy(loadingOptions) + loadingOptions.original_doc = _doc + return cls(id, source, label, default, extension_fields=extension_fields, loadingOptions=loadingOptions) + + def save(self, top=False, base_url="", relative_uris=True): + # type: (bool, Text, bool) -> Dict[Text, Any] + r = yaml.comments.CommentedMap() # type: Dict[Text, Any] + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + + if self.id is not None: + u = save_relative_uri( + self.id, + base_url, + True, + None, + relative_uris) + if u: + r['id'] = u + + if self.source is not None: + u = 
save_relative_uri( + self.source, + self.id, + False, + 2, + relative_uris) + if u: + r['source'] = u + + if self.label is not None: + r['label'] = save( + self.label, + top=False, + base_url=self.id, + relative_uris=relative_uris) + + if self.default is not None: + r['default'] = save( + self.default, + top=False, + base_url=self.id, + relative_uris=relative_uris) + + if top and self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + + return r + + attrs = frozenset([u'id', u'source', u'label', u'default']) + + +class WorkflowStepOutput(Identified): + """ +Associate an output parameter of the underlying process with a workflow +parameter. The workflow parameter (given in the `id` field) be may be used +as a `source` to connect with input parameters of other workflow steps, or +with an output parameter of the process. + +A unique identifier for this workflow output parameter. This is +the identifier to use in the `source` field of `WorkflowStepInput` +to connect the output value to downstream parameters. + + """ + def __init__( + self, + id, # type: Any + extension_fields=None, # type: Optional[Dict[Text, Any]] + loadingOptions=None # type: Optional[LoadingOptions] + ): # type: (...) -> None + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = yaml.comments.CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.id = id + + @classmethod + def fromDoc(cls, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, Text, LoadingOptions, Optional[Text]) -> WorkflowStepOutput + + _doc = copy.copy(doc) + if hasattr(doc, 'lc'): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + errors = [] + if 'id' in _doc: + try: + id = load_field(_doc.get( + 'id'), uri_union_of_None_type_or_strtype_True_False_None, baseuri, loadingOptions) + except ValidationException as e: + errors.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, 'id', str), + [e] + ) + ) + else: + id = None + + if id is None: + if docRoot is not None: + id = docRoot + else: + id = "_:" + str(uuid.uuid4()) + baseuri = id + + extension_fields = yaml.comments.CommentedMap() + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url(k, + u"", + loadingOptions, + scoped_id=False, + vocab_term=False) + extension_fields[ex] = _doc[k] + else: + errors.append( + ValidationException( + "invalid field `%s`, expected one of: `id`" % (k), + SourceLine(_doc, k, str) + ) + ) + break + + if errors: + raise ValidationException("Trying 'WorkflowStepOutput'", None, errors) + loadingOptions = copy.deepcopy(loadingOptions) + loadingOptions.original_doc = _doc + return cls(id, extension_fields=extension_fields, loadingOptions=loadingOptions) + + def save(self, top=False, base_url="", relative_uris=True): + # type: (bool, Text, bool) -> Dict[Text, Any] + r = yaml.comments.CommentedMap() # type: Dict[Text, Any] + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + + if self.id is not None: + u = save_relative_uri( + self.id, + base_url, + True, + None, + relative_uris) + if u: + r['id'] = u + + if top and self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + + return r + + attrs = frozenset([u'id']) + + +class GalaxyWorkflow(Process): + """ +This is documentation for a workflow! 
+ """ + def __init__( + self, + id, # type: Any + label, # type: Any + doc, # type: Any + inputs, # type: Any + outputs, # type: Any + name, # type: Any + steps, # type: Any + extension_fields=None, # type: Optional[Dict[Text, Any]] + loadingOptions=None # type: Optional[LoadingOptions] + ): # type: (...) -> None + + if extension_fields: + self.extension_fields = extension_fields + else: + self.extension_fields = yaml.comments.CommentedMap() + if loadingOptions: + self.loadingOptions = loadingOptions + else: + self.loadingOptions = LoadingOptions() + self.id = id + self.label = label + self.doc = doc + self.inputs = inputs + self.outputs = outputs + self.name = name + self.class_ = "GalaxyWorkflow" + self.steps = steps + + @classmethod + def fromDoc(cls, doc, baseuri, loadingOptions, docRoot=None): + # type: (Any, Text, LoadingOptions, Optional[Text]) -> GalaxyWorkflow + + _doc = copy.copy(doc) + if hasattr(doc, 'lc'): + _doc.lc.data = doc.lc.data + _doc.lc.filename = doc.lc.filename + errors = [] + + if _doc.get('class') != 'GalaxyWorkflow': + raise ValidationException("Not a GalaxyWorkflow") + + if 'id' in _doc: + try: + id = load_field(_doc.get( + 'id'), uri_union_of_None_type_or_strtype_True_False_None, baseuri, loadingOptions) + except ValidationException as e: + errors.append( + ValidationException( + "the `id` field is not valid because:", + SourceLine(_doc, 'id', str), + [e] + ) + ) + else: + id = None + + if id is None: + if docRoot is not None: + id = docRoot + else: + id = "_:" + str(uuid.uuid4()) + baseuri = id + if 'label' in _doc: + try: + label = load_field(_doc.get( + 'label'), union_of_None_type_or_strtype, baseuri, loadingOptions) + except ValidationException as e: + errors.append( + ValidationException( + "the `label` field is not valid because:", + SourceLine(_doc, 'label', str), + [e] + ) + ) + else: + label = None + if 'doc' in _doc: + try: + doc = load_field(_doc.get( + 'doc'), union_of_None_type_or_strtype_or_array_of_strtype, baseuri, loadingOptions) + except ValidationException as e: + errors.append( + ValidationException( + "the `doc` field is not valid because:", + SourceLine(_doc, 'doc', str), + [e] + ) + ) + else: + doc = None + try: + inputs = load_field(_doc.get( + 'inputs'), idmap_inputs_array_of_WorkflowInputParameterLoader, baseuri, loadingOptions) + except ValidationException as e: + errors.append( + ValidationException( + "the `inputs` field is not valid because:", + SourceLine(_doc, 'inputs', str), + [e] + ) + ) + try: + outputs = load_field(_doc.get( + 'outputs'), idmap_outputs_array_of_WorkflowOutputParameterLoader, baseuri, loadingOptions) + except ValidationException as e: + errors.append( + ValidationException( + "the `outputs` field is not valid because:", + SourceLine(_doc, 'outputs', str), + [e] + ) + ) + try: + steps = load_field(_doc.get( + 'steps'), idmap_steps_union_of_array_of_WorkflowStepLoader, baseuri, loadingOptions) + except ValidationException as e: + errors.append( + ValidationException( + "the `steps` field is not valid because:", + SourceLine(_doc, 'steps', str), + [e] + ) + ) + + extension_fields = yaml.comments.CommentedMap() + for k in _doc.keys(): + if k not in cls.attrs: + if ":" in k: + ex = expand_url(k, + u"", + loadingOptions, + scoped_id=False, + vocab_term=False) + extension_fields[ex] = _doc[k] + else: + errors.append( + ValidationException( + "invalid field `%s`, expected one of: `id`, `label`, `doc`, `inputs`, `outputs`, `name`, `class`, `steps`" % (k), + SourceLine(_doc, k, str) + ) + ) + break + + if errors: + 
raise ValidationException("Trying 'GalaxyWorkflow'", None, errors) + loadingOptions = copy.deepcopy(loadingOptions) + loadingOptions.original_doc = _doc + return cls(id, label, doc, inputs, outputs, name, steps, extension_fields=extension_fields, loadingOptions=loadingOptions) + + def save(self, top=False, base_url="", relative_uris=True): + # type: (bool, Text, bool) -> Dict[Text, Any] + r = yaml.comments.CommentedMap() # type: Dict[Text, Any] + for ef in self.extension_fields: + r[prefix_url(ef, self.loadingOptions.vocab)] = self.extension_fields[ef] + + r['class'] = 'GalaxyWorkflow' + + if self.id is not None: + u = save_relative_uri( + self.id, + self.name, + True, + None, + relative_uris) + if u: + r['id'] = u + + if self.label is not None: + r['label'] = save( + self.label, + top=False, + base_url=self.name, + relative_uris=relative_uris) + + if self.doc is not None: + r['doc'] = save( + self.doc, + top=False, + base_url=self.name, + relative_uris=relative_uris) + + if self.inputs is not None: + r['inputs'] = save( + self.inputs, + top=False, + base_url=self.name, + relative_uris=relative_uris) + + if self.outputs is not None: + r['outputs'] = save( + self.outputs, + top=False, + base_url=self.name, + relative_uris=relative_uris) + + if self.steps is not None: + r['steps'] = save( + self.steps, + top=False, + base_url=self.name, + relative_uris=relative_uris) + + if top and self.loadingOptions.namespaces: + r["$namespaces"] = self.loadingOptions.namespaces + + return r + + attrs = frozenset([u'id', u'label', u'doc', u'inputs', u'outputs', u'name', u'class', u'steps']) + + +_vocab = { + "Any": "https://w3id.org/cwl/salad#Any", + "ArraySchema": "https://w3id.org/cwl/salad#ArraySchema", + "Documented": "https://w3id.org/cwl/salad#Documented", + "EnumSchema": "https://w3id.org/cwl/salad#EnumSchema", + "File": "https://galaxyproject.org/gxformat2/v19.09#GalaxyType/File", + "GalaxyType": "https://galaxyproject.org/gxformat2/v19.09#GalaxyType", + "GalaxyWorkflow": "https://galaxyproject.org/gxformat2/v19.09#GalaxyWorkflow", + "HasStepPosition": "https://galaxyproject.org/gxformat2/gxformat2common#HasStepPosition", + "Identified": "https://w3id.org/cwl/cwl#Identified", + "InputParameter": "https://w3id.org/cwl/cwl#InputParameter", + "Labeled": "https://w3id.org/cwl/cwl#Labeled", + "OutputParameter": "https://w3id.org/cwl/cwl#OutputParameter", + "Parameter": "https://w3id.org/cwl/cwl#Parameter", + "PrimitiveType": "https://w3id.org/cwl/salad#PrimitiveType", + "Process": "https://w3id.org/cwl/cwl#Process", + "RecordField": "https://w3id.org/cwl/salad#RecordField", + "RecordSchema": "https://w3id.org/cwl/salad#RecordSchema", + "ReferencesTool": "https://galaxyproject.org/gxformat2/gxformat2common#ReferencesTool", + "Sink": "https://galaxyproject.org/gxformat2/v19.09#Sink", + "StepPosition": "https://galaxyproject.org/gxformat2/gxformat2common#StepPosition", + "ToolShedRepository": "https://galaxyproject.org/gxformat2/gxformat2common#ToolShedRepository", + "WorkflowInputParameter": "https://galaxyproject.org/gxformat2/v19.09#WorkflowInputParameter", + "WorkflowOutputParameter": "https://galaxyproject.org/gxformat2/v19.09#WorkflowOutputParameter", + "WorkflowStep": "https://galaxyproject.org/gxformat2/v19.09#WorkflowStep", + "WorkflowStepInput": "https://galaxyproject.org/gxformat2/v19.09#WorkflowStepInput", + "WorkflowStepOutput": "https://galaxyproject.org/gxformat2/v19.09#WorkflowStepOutput", + "WorkflowStepType": "https://galaxyproject.org/gxformat2/v19.09#WorkflowStepType", + "array": 
"https://w3id.org/cwl/salad#array", + "boolean": "http://www.w3.org/2001/XMLSchema#boolean", + "collection": "https://galaxyproject.org/gxformat2/v19.09#GalaxyType/collection", + "data": "https://galaxyproject.org/gxformat2/v19.09#GalaxyType/data", + "double": "http://www.w3.org/2001/XMLSchema#double", + "enum": "https://w3id.org/cwl/salad#enum", + "float": "http://www.w3.org/2001/XMLSchema#float", + "int": "http://www.w3.org/2001/XMLSchema#int", + "long": "http://www.w3.org/2001/XMLSchema#long", + "null": "https://w3id.org/cwl/salad#null", + "pause": "https://galaxyproject.org/gxformat2/v19.09#WorkflowStepType/pause", + "record": "https://w3id.org/cwl/salad#record", + "string": "http://www.w3.org/2001/XMLSchema#string", + "subworkflow": "https://galaxyproject.org/gxformat2/v19.09#WorkflowStepType/subworkflow", + "tool": "https://galaxyproject.org/gxformat2/v19.09#WorkflowStepType/tool", +} +_rvocab = { + "https://w3id.org/cwl/salad#Any": "Any", + "https://w3id.org/cwl/salad#ArraySchema": "ArraySchema", + "https://w3id.org/cwl/salad#Documented": "Documented", + "https://w3id.org/cwl/salad#EnumSchema": "EnumSchema", + "https://galaxyproject.org/gxformat2/v19.09#GalaxyType/File": "File", + "https://galaxyproject.org/gxformat2/v19.09#GalaxyType": "GalaxyType", + "https://galaxyproject.org/gxformat2/v19.09#GalaxyWorkflow": "GalaxyWorkflow", + "https://galaxyproject.org/gxformat2/gxformat2common#HasStepPosition": "HasStepPosition", + "https://w3id.org/cwl/cwl#Identified": "Identified", + "https://w3id.org/cwl/cwl#InputParameter": "InputParameter", + "https://w3id.org/cwl/cwl#Labeled": "Labeled", + "https://w3id.org/cwl/cwl#OutputParameter": "OutputParameter", + "https://w3id.org/cwl/cwl#Parameter": "Parameter", + "https://w3id.org/cwl/salad#PrimitiveType": "PrimitiveType", + "https://w3id.org/cwl/cwl#Process": "Process", + "https://w3id.org/cwl/salad#RecordField": "RecordField", + "https://w3id.org/cwl/salad#RecordSchema": "RecordSchema", + "https://galaxyproject.org/gxformat2/gxformat2common#ReferencesTool": "ReferencesTool", + "https://galaxyproject.org/gxformat2/v19.09#Sink": "Sink", + "https://galaxyproject.org/gxformat2/gxformat2common#StepPosition": "StepPosition", + "https://galaxyproject.org/gxformat2/gxformat2common#ToolShedRepository": "ToolShedRepository", + "https://galaxyproject.org/gxformat2/v19.09#WorkflowInputParameter": "WorkflowInputParameter", + "https://galaxyproject.org/gxformat2/v19.09#WorkflowOutputParameter": "WorkflowOutputParameter", + "https://galaxyproject.org/gxformat2/v19.09#WorkflowStep": "WorkflowStep", + "https://galaxyproject.org/gxformat2/v19.09#WorkflowStepInput": "WorkflowStepInput", + "https://galaxyproject.org/gxformat2/v19.09#WorkflowStepOutput": "WorkflowStepOutput", + "https://galaxyproject.org/gxformat2/v19.09#WorkflowStepType": "WorkflowStepType", + "https://w3id.org/cwl/salad#array": "array", + "http://www.w3.org/2001/XMLSchema#boolean": "boolean", + "https://galaxyproject.org/gxformat2/v19.09#GalaxyType/collection": "collection", + "https://galaxyproject.org/gxformat2/v19.09#GalaxyType/data": "data", + "http://www.w3.org/2001/XMLSchema#double": "double", + "https://w3id.org/cwl/salad#enum": "enum", + "http://www.w3.org/2001/XMLSchema#float": "float", + "http://www.w3.org/2001/XMLSchema#int": "int", + "http://www.w3.org/2001/XMLSchema#long": "long", + "https://w3id.org/cwl/salad#null": "null", + "https://galaxyproject.org/gxformat2/v19.09#WorkflowStepType/pause": "pause", + "https://w3id.org/cwl/salad#record": "record", + 
"http://www.w3.org/2001/XMLSchema#string": "string", + "https://galaxyproject.org/gxformat2/v19.09#WorkflowStepType/subworkflow": "subworkflow", + "https://galaxyproject.org/gxformat2/v19.09#WorkflowStepType/tool": "tool", +} + +inttype = _PrimitiveLoader(int) +booltype = _PrimitiveLoader(bool) +strtype = _PrimitiveLoader((str, text_type)) +Any_type = _AnyLoader() +floattype = _PrimitiveLoader(float) +None_type = _PrimitiveLoader(type(None)) +DocumentedLoader = _RecordLoader(Documented) +PrimitiveTypeLoader = _EnumLoader(("null", "boolean", "int", "long", "float", "double", "string",)) +AnyLoader = _EnumLoader(("Any",)) +RecordFieldLoader = _RecordLoader(RecordField) +RecordSchemaLoader = _RecordLoader(RecordSchema) +EnumSchemaLoader = _RecordLoader(EnumSchema) +ArraySchemaLoader = _RecordLoader(ArraySchema) +LabeledLoader = _RecordLoader(Labeled) +IdentifiedLoader = _RecordLoader(Identified) +ParameterLoader = _RecordLoader(Parameter) +InputParameterLoader = _RecordLoader(InputParameter) +OutputParameterLoader = _RecordLoader(OutputParameter) +ProcessLoader = _RecordLoader(Process) +HasStepPositionLoader = _RecordLoader(HasStepPosition) +StepPositionLoader = _RecordLoader(StepPosition) +ReferencesToolLoader = _RecordLoader(ReferencesTool) +ToolShedRepositoryLoader = _RecordLoader(ToolShedRepository) +GalaxyTypeLoader = _EnumLoader(("File", "data", "collection",)) +WorkflowStepTypeLoader = _EnumLoader(("tool", "subworkflow", "pause",)) +WorkflowInputParameterLoader = _RecordLoader(WorkflowInputParameter) +WorkflowOutputParameterLoader = _RecordLoader(WorkflowOutputParameter) +WorkflowStepLoader = _RecordLoader(WorkflowStep) +SinkLoader = _RecordLoader(Sink) +WorkflowStepInputLoader = _RecordLoader(WorkflowStepInput) +WorkflowStepOutputLoader = _RecordLoader(WorkflowStepOutput) +GalaxyWorkflowLoader = _RecordLoader(GalaxyWorkflow) +array_of_strtype = _ArrayLoader(strtype) +union_of_None_type_or_strtype_or_array_of_strtype = _UnionLoader((None_type, strtype, array_of_strtype,)) +uri_strtype_True_False_None = _URILoader(strtype, True, False, None) +union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype = _UnionLoader((PrimitiveTypeLoader, RecordSchemaLoader, EnumSchemaLoader, ArraySchemaLoader, strtype,)) +array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype = _ArrayLoader(union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype) +union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype = _UnionLoader((PrimitiveTypeLoader, RecordSchemaLoader, EnumSchemaLoader, ArraySchemaLoader, strtype, array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype,)) +typedsl_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_2 = _TypeDSLLoader(union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype, 2) +array_of_RecordFieldLoader = _ArrayLoader(RecordFieldLoader) +union_of_None_type_or_array_of_RecordFieldLoader = 
_UnionLoader((None_type, array_of_RecordFieldLoader,)) +idmap_fields_union_of_None_type_or_array_of_RecordFieldLoader = _IdMapLoader(union_of_None_type_or_array_of_RecordFieldLoader, 'name', 'type') +enum_d9cba076fca539106791a4f46d198c7fcfbdb779Loader = _EnumLoader(("record",)) +typedsl_enum_d9cba076fca539106791a4f46d198c7fcfbdb779Loader_2 = _TypeDSLLoader(enum_d9cba076fca539106791a4f46d198c7fcfbdb779Loader, 2) +uri_array_of_strtype_True_False_None = _URILoader(array_of_strtype, True, False, None) +enum_d961d79c225752b9fadb617367615ab176b47d77Loader = _EnumLoader(("enum",)) +typedsl_enum_d961d79c225752b9fadb617367615ab176b47d77Loader_2 = _TypeDSLLoader(enum_d961d79c225752b9fadb617367615ab176b47d77Loader, 2) +uri_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_False_True_2 = _URILoader(union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype_or_array_of_union_of_PrimitiveTypeLoader_or_RecordSchemaLoader_or_EnumSchemaLoader_or_ArraySchemaLoader_or_strtype, False, True, 2) +enum_d062602be0b4b8fd33e69e29a841317b6ab665bcLoader = _EnumLoader(("array",)) +typedsl_enum_d062602be0b4b8fd33e69e29a841317b6ab665bcLoader_2 = _TypeDSLLoader(enum_d062602be0b4b8fd33e69e29a841317b6ab665bcLoader, 2) +union_of_None_type_or_strtype = _UnionLoader((None_type, strtype,)) +uri_union_of_None_type_or_strtype_True_False_None = _URILoader(union_of_None_type_or_strtype, True, False, None) +union_of_None_type_or_Any_type = _UnionLoader((None_type, Any_type,)) +union_of_WorkflowInputParameterLoader = _UnionLoader((WorkflowInputParameterLoader,)) +array_of_union_of_WorkflowInputParameterLoader = _ArrayLoader(union_of_WorkflowInputParameterLoader) +idmap_inputs_array_of_union_of_WorkflowInputParameterLoader = _IdMapLoader(array_of_union_of_WorkflowInputParameterLoader, 'id', 'type') +union_of_WorkflowOutputParameterLoader = _UnionLoader((WorkflowOutputParameterLoader,)) +array_of_union_of_WorkflowOutputParameterLoader = _ArrayLoader(union_of_WorkflowOutputParameterLoader) +idmap_outputs_array_of_union_of_WorkflowOutputParameterLoader = _IdMapLoader(array_of_union_of_WorkflowOutputParameterLoader, 'id', 'type') +union_of_None_type_or_StepPositionLoader = _UnionLoader((None_type, StepPositionLoader,)) +union_of_None_type_or_ToolShedRepositoryLoader = _UnionLoader((None_type, ToolShedRepositoryLoader,)) +union_of_None_type_or_GalaxyTypeLoader = _UnionLoader((None_type, GalaxyTypeLoader,)) +typedsl_union_of_None_type_or_GalaxyTypeLoader_2 = _TypeDSLLoader(union_of_None_type_or_GalaxyTypeLoader, 2) +array_of_WorkflowStepInputLoader = _ArrayLoader(WorkflowStepInputLoader) +union_of_None_type_or_array_of_WorkflowStepInputLoader = _UnionLoader((None_type, array_of_WorkflowStepInputLoader,)) +idmap_in__union_of_None_type_or_array_of_WorkflowStepInputLoader = _IdMapLoader(union_of_None_type_or_array_of_WorkflowStepInputLoader, 'id', 'source') +union_of_strtype_or_WorkflowStepOutputLoader = _UnionLoader((strtype, WorkflowStepOutputLoader,)) +array_of_union_of_strtype_or_WorkflowStepOutputLoader = _ArrayLoader(union_of_strtype_or_WorkflowStepOutputLoader) +union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader_or_None_type = _UnionLoader((array_of_union_of_strtype_or_WorkflowStepOutputLoader, None_type,)) +uri_union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader_or_None_type_True_False_None = 
_URILoader(union_of_array_of_union_of_strtype_or_WorkflowStepOutputLoader_or_None_type, True, False, None)
+union_of_None_type_or_WorkflowStepTypeLoader = _UnionLoader((None_type, WorkflowStepTypeLoader,))
+typedsl_union_of_None_type_or_WorkflowStepTypeLoader_2 = _TypeDSLLoader(union_of_None_type_or_WorkflowStepTypeLoader, 2)
+uri_union_of_None_type_or_strtype_or_array_of_strtype_False_False_2 = _URILoader(union_of_None_type_or_strtype_or_array_of_strtype, False, False, 2)
+array_of_WorkflowInputParameterLoader = _ArrayLoader(WorkflowInputParameterLoader)
+idmap_inputs_array_of_WorkflowInputParameterLoader = _IdMapLoader(array_of_WorkflowInputParameterLoader, 'id', 'type')
+array_of_WorkflowOutputParameterLoader = _ArrayLoader(WorkflowOutputParameterLoader)
+idmap_outputs_array_of_WorkflowOutputParameterLoader = _IdMapLoader(array_of_WorkflowOutputParameterLoader, 'id', 'type')
+uri_strtype_False_True_None = _URILoader(strtype, False, True, None)
+array_of_WorkflowStepLoader = _ArrayLoader(WorkflowStepLoader)
+union_of_array_of_WorkflowStepLoader = _UnionLoader((array_of_WorkflowStepLoader,))
+idmap_steps_union_of_array_of_WorkflowStepLoader = _IdMapLoader(union_of_array_of_WorkflowStepLoader, 'id', 'None')
+union_of_GalaxyWorkflowLoader = _UnionLoader((GalaxyWorkflowLoader,))
+array_of_union_of_GalaxyWorkflowLoader = _ArrayLoader(union_of_GalaxyWorkflowLoader)
+union_of_GalaxyWorkflowLoader_or_array_of_union_of_GalaxyWorkflowLoader = _UnionLoader((GalaxyWorkflowLoader, array_of_union_of_GalaxyWorkflowLoader,))
+
+
+def load_document(doc, baseuri=None, loadingOptions=None):
+    # type: (Any, Optional[Text], Optional[LoadingOptions]) -> Any
+    if baseuri is None:
+        baseuri = file_uri(os.getcwd()) + "/"
+    if loadingOptions is None:
+        loadingOptions = LoadingOptions()
+    return _document_load(union_of_GalaxyWorkflowLoader_or_array_of_union_of_GalaxyWorkflowLoader, doc, baseuri, loadingOptions)
+
+
+def load_document_by_string(string, uri, loadingOptions=None):
+    # type: (Any, Text, Optional[LoadingOptions]) -> Any
+    result = yaml.round_trip_load(string, preserve_quotes=True)
+    add_lc_filename(result, uri)
+
+    if loadingOptions is None:
+        loadingOptions = LoadingOptions(fileuri=uri)
+    loadingOptions.idx[uri] = result
+
+    return _document_load(union_of_GalaxyWorkflowLoader_or_array_of_union_of_GalaxyWorkflowLoader, result, uri, loadingOptions)
diff --git a/schema/v19.09/workflow.yml b/schema/v19.09/workflow.yml
new file mode 100644
index 0000000..95920d1
--- /dev/null
+++ b/schema/v19.09/workflow.yml
@@ -0,0 +1,237 @@
+saladVersion: v1.1
+$base: "https://galaxyproject.org/gxformat2/v19_09#"
+
+$namespaces:
+  gxformat2: "https://galaxyproject.org/gxformat2/v19_09#"
+  gxformat2common: "https://galaxyproject.org/gxformat2/gxformat2common#"
+  cwl: "https://w3id.org/cwl/cwl#"
+  sld: "https://w3id.org/cwl/salad#"
+  rdfs: "http://www.w3.org/2000/01/rdf-schema#"
+
+$graph:
+- name: "WorkflowDoc"
+  type: documentation
+  doc:
+    - |
+      # Galaxy Workflow Format 2 Description
+
+      The traditional Galaxy workflow description (.ga) is not meant to be concise and is neither readily human readable nor human writable.
+      Format 2 addresses all three of these limitations while also converging (where it makes sense without sacrificing these other goals)
+      with the workflow description used by the Common Workflow Language.
+
+      This standard is in active development and a moving target in many ways, but we will try to keep what is ingestible by Galaxy
+      backward-compatible going forward.
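
For orientation while reading this diff: the generated `load_document_by_string` helper above, together with the `GalaxyWorkflow` record this schema defines, is what ultimately parses Format 2 documents. The snippet below is only an editorial sketch, not part of the change; the step, input, and output names and the `file://` URI are illustrative, and it assumes the generated `gxformat2.schema.v19_09` module and its schema-salad dependencies are importable.

```python
# A minimal sketch of loading a Format 2 workflow with the generated loader.
# Names below (cat_step, the_input, the_output, the URI) are illustrative only.
from gxformat2.schema.v19_09 import load_document_by_string

FORMAT2_EXAMPLE = """
class: GalaxyWorkflow
inputs:
  the_input: data
outputs:
  the_output:
    outputSource: cat_step/out_file1
steps:
  cat_step:
    tool_id: cat1
    in:
      input1: the_input
"""

# The URI is used for id resolution and error reporting; any file:// URI works here.
workflow = load_document_by_string(FORMAT2_EXAMPLE, "file:///example/workflow.gxwf.yml")
print(type(workflow).__name__)  # expected: GalaxyWorkflow
```

Documents that violate the schema surface as `ValidationException`s raised by the generated `fromDoc` methods shown earlier in this file.
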
+ +- $import: "../common/metaschema/metaschema_base.yml" +- $import: "./Process.yml" # trimmed down version of cwl's Process.yml +- $import: "../common/common.yml" + +- name: GalaxyType + type: enum + extends: "sld:PrimitiveType" + symbols: + - File + - data + - collection + doc: + - "Extends primitive types with the native Galaxy concepts such datasets and collections." + - "File: an alias for data - there are subtle differences between a plain file, the CWL concept of 'File', and the Galaxy concept of a dataset - this may have subtly difference semantics in the future" + - "data: a Galaxy dataset" + - "collection: a Galaxy dataset collection" + +- name: WorkflowStepType + type: enum + symbols: + - tool + - subworkflow + - pause + doc: + - | + Module types used by Galaxy steps. Galaxy's native format allows additional types such as data_input, data_input_collection, and parameter_type + but these should be represented as ``inputs`` in Format2. + - "tool: Run a tool." + - "subworkflow: Run a subworkflow." + - "pause: Pause computation on this branch of workflow until user allows it to continue." + +- name: WorkflowInputParameter + type: record + extends: cwl:InputParameter + docParent: "#GalaxyWorkflow" + fields: + - name: type + type: GalaxyType? + default: data + jsonldPredicate: + "_id": "sld:type" + "_type": "@vocab" + refScope: 2 + typeDSL: True + doc: | + Specify valid types of data that may be assigned to this parameter. + +- name: WorkflowOutputParameter + type: record + extends: cwl:OutputParameter + docParent: "#GalaxyWorkflow" + doc: | + Describe an output parameter of a workflow. The parameter must be + connected to one parameter defined in the workflow that + will provide the value of the output parameter. It is legal to + connect a WorkflowInputParameter to a WorkflowOutputParameter. + fields: + - name: outputSource + doc: | + Specifies workflow parameter that supply the value of to + the output parameter. + # Steps don't reference outputs in gxformat2 (yet anyway). + # Can we just link the step if before the / + #jsonldPredicate: + # "_id": "gxformat2:outputSource" + # "_type": "@id" + # refScope: 0 + type: + - string? + - name: type + type: GalaxyType? + default: data + jsonldPredicate: + "_id": "sld:type" + "_type": "@vocab" + refScope: 2 + typeDSL: True + doc: | + Specify valid types of data that may be assigned to this parameter. + +- name: WorkflowStep + type: record + extends: [cwl:Identified, cwl:Labeled, sld:Documented, gxformat2common:HasStepPosition, gxformat2common:ReferencesTool] + docParent: "#GalaxyWorkflow" + doc: | + Workflow step. + fields: + - name: in + type: WorkflowStepInput[]? + jsonldPredicate: + _id: "gxformat2:in" + mapSubject: id + mapPredicate: source + doc: | + Defines the input parameters of the workflow step. The process is ready to + run when all required input parameters are associated with concrete + values. Input parameters include a schema for each parameter which is + used to validate the input object. It may also be used build a user + interface for constructing the input object. + - name: out + type: + - type: array + items: [string, WorkflowStepOutput] + - "null" + jsonldPredicate: + _id: "gxformat2:out" + _type: "@id" + identity: true + doc: | + Defines the parameters representing the output of the process. May be + used to generate and/or validate the output object. + - name: state + type: Any? + doc: | + Structured tool state. + - name: type + type: WorkflowStepType? 
+ jsonldPredicate: + "_id": "sld:type" + "_type": "@vocab" + refScope: 2 + typeDSL: True + default: tool + doc: | + Workflow step module's type (defaults to 'tool'). + - name: run + type: + - "null" + - GalaxyWorkflow + jsonldPredicate: + _id: "cwl:run" + _type: "@id" + subscope: run + doc: | + Specifies a subworkflow to run. + +- name: Sink + type: record + abstract: true + fields: + - name: source + doc: | + Specifies one or more workflow parameters that will provide input to + the underlying step parameter. + jsonldPredicate: + "_id": "cwl:source" + "_type": "@id" + refScope: 2 + type: + - string? + - string[]? + +- type: record + name: WorkflowStepInput + extends: [cwl:Identified, Sink, cwl:Labeled] + docParent: "#WorkflowStep" + doc: | + TODO: + + fields: + - name: default + type: ["null", Any] + doc: | + The default value for this parameter to use if either there is no + `source` field, or the value produced by the `source` is `null`. The + default must be applied prior to scattering or evaluating `valueFrom`. + jsonldPredicate: + _id: "sld:default" + noLinkCheck: true + +- type: record + name: WorkflowStepOutput + docParent: "#WorkflowStep" + extends: cwl:Identified + doc: | + Associate an output parameter of the underlying process with a workflow + parameter. The workflow parameter (given in the `id` field) be may be used + as a `source` to connect with input parameters of other workflow steps, or + with an output parameter of the process. + + A unique identifier for this workflow output parameter. This is + the identifier to use in the `source` field of `WorkflowStepInput` + to connect the output value to downstream parameters. + + +- name: GalaxyWorkflow + type: record + extends: cwl:Process + specialize: + - specializeFrom: cwl:InputParameter + specializeTo: WorkflowInputParameter + - specializeFrom: cwl:OutputParameter + specializeTo: WorkflowOutputParameter + documentRoot: true + doc: This is documentation for a workflow! + fields: + - name: name + type: string? + jsonldPredicate: "@id" # will this bite me? + doc: | + The name of the workflow. 
+ - name: "class" + jsonldPredicate: + "_id": "@type" + "_type": "@vocab" + type: string + - name: steps + doc: {$include: ../common/steps_description.txt} + type: + - type: array + items: "#WorkflowStep" + jsonldPredicate: + mapSubject: id diff --git a/setup.cfg b/setup.cfg index 888cdda..be0fa22 100644 --- a/setup.cfg +++ b/setup.cfg @@ -15,3 +15,4 @@ max-line-length = 150 max-complexity = 30 import-order-style = google application-import-names = gxformat2 +exclude = gxformat2/schema/v19_09.py diff --git a/setup.py b/setup.py index 82fda38..5ea5b60 100644 --- a/setup.py +++ b/setup.py @@ -32,6 +32,7 @@ def get_var(var_name): PROJECT_DESCRIPTION = 'Galaxy Workflow Format 2 Descriptions' PACKAGES = [ 'gxformat2', + 'gxformat2.schema', ] ENTRY_POINTS = ''' ''' diff --git a/tests/test_lint.py b/tests/test_lint.py index 8169bf7..df8347f 100644 --- a/tests/test_lint.py +++ b/tests/test_lint.py @@ -7,6 +7,7 @@ from gxformat2._yaml import ordered_dump, ordered_load from ._helpers import assert_valid_native, to_native +_deep_copy = copy.deepcopy TEST_PATH = os.path.abspath(os.path.dirname(__file__)) TEST_EXAMPLES = os.path.join(TEST_PATH, "examples") @@ -81,6 +82,49 @@ """ +NESTED_WORKFLOW = """ +class: GalaxyWorkflow +inputs: + outer_input: data +outputs: + outer_output: + outputSource: second_cat/out_file1 +steps: + first_cat: + tool_id: cat1 + in: + input1: outer_input + nested_workflow: + run: + class: GalaxyWorkflow + inputs: + inner_input: data + outputs: + workflow_output: + outputSource: random_lines/out_file1 + steps: + random_lines: + tool_id: random_lines1 + state: + num_lines: 2 + input: + $link: inner_input + seed_source: + seed_source_selector: set_seed + seed: asdf + in: + inner_input: first_cat/out_file1 + split: + tool_id: split + in: + input1: nested_workflow/workflow_output + second_cat: + tool_id: cat_list + in: + input1: split/output +""" + + def setup_module(module): # Setup an examples directory with examples we want to correspond to what exit codes, # do this so we can run same tests in Java. 
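
An editorial aside on the `NESTED_WORKFLOW` fixture added above: it is ordinary YAML, so the nesting that the new subworkflow checks exercise is easy to see once loaded. The snippet below is illustrative only and simply restates the fixture's structure using `ordered_load`, the same helper the tests already use.

```python
# Illustrative only: NESTED_WORKFLOW parses to a plain mapping in which the
# inner workflow sits under steps -> nested_workflow -> run.
from gxformat2._yaml import ordered_load

nested = ordered_load(NESTED_WORKFLOW)
inner = nested["steps"]["nested_workflow"]["run"]
assert inner["class"] == "GalaxyWorkflow"
assert "random_lines" in inner["steps"]
assert nested["steps"]["split"]["in"]["input1"] == "nested_workflow/workflow_output"
```
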
@@ -90,30 +134,45 @@ def setup_module(module): assert_valid_native(green_native) _dump_with_exit_code(green_native, 0, "basic_native") - invalid_format2_no_format_dict = copy.deepcopy(green_format2) + invalid_format2_no_format_dict = _deep_copy(green_format2) del invalid_format2_no_format_dict["class"] _dump_with_exit_code(invalid_format2_no_format_dict, 2, "format2_no_class") - invalid_ga_no_format_dict = copy.deepcopy(green_native) + invalid_ga_no_format_dict = _deep_copy(green_native) del invalid_ga_no_format_dict["a_galaxy_workflow"] _dump_with_exit_code(invalid_ga_no_format_dict, 2, "native_no_class") - red_ga_no_outputs = copy.deepcopy(green_native) + invalid_ga_steps_not_order_index_dict = _deep_copy(green_native) + steps = invalid_ga_steps_not_order_index_dict["steps"] + step_0 = steps.pop("0") + steps["moo_0"] = step_0 + _dump_with_exit_code(invalid_ga_steps_not_order_index_dict, 2, "native_step_not_order_index") + + invalid_ga_no_steps = _deep_copy(green_native) + invalid_ga_no_steps.pop("steps") + _dump_with_exit_code(invalid_ga_no_steps, 2, "native_no_steps") + + invalid_format2_no_steps_dict = _deep_copy(green_format2) + del invalid_format2_no_steps_dict["steps"] + _dump_with_exit_code(invalid_format2_no_steps_dict, 2, "format2_no_steps") + + red_ga_no_outputs = _deep_copy(green_native) red_ga_no_outputs_steps = red_ga_no_outputs.get("steps") for step in red_ga_no_outputs_steps.values(): step.pop("workflow_outputs", None) _dump_with_exit_code(red_ga_no_outputs, 1, "native_no_outputs") - red_ga_no_output_labels = copy.deepcopy(green_native) + red_ga_no_output_labels = _deep_copy(green_native) red_ga_no_output_labels_steps = red_ga_no_output_labels.get("steps") for step in red_ga_no_output_labels_steps.values(): for workflow_output in step.get("workflow_outputs", []): workflow_output["label"] = None _dump_with_exit_code(red_ga_no_outputs, 1, "native_no_output_labels") - red_format2_no_outputs = copy.deepcopy(green_format2) + # gotta call this a format error to implement Process in schema... + red_format2_no_outputs = _deep_copy(green_format2) del red_format2_no_outputs["outputs"] - _dump_with_exit_code(red_format2_no_outputs, 1, "format2_no_output") + _dump_with_exit_code(red_format2_no_outputs, 2, "format2_no_output") green_format2_rules = ordered_load(RULES_TOOL) _dump_with_exit_code(green_format2_rules, 0, "format2_rules") @@ -126,6 +185,19 @@ def setup_module(module): green_native_rules = to_native(WORKFLOW_WITH_REPEAT) _dump_with_exit_code(green_native_rules, 0, "native_repeat") + green_format2_nested = ordered_load(NESTED_WORKFLOW) + _dump_with_exit_code(green_format2_nested, 0, "format2_nested") + green_native_nested = to_native(NESTED_WORKFLOW) + _dump_with_exit_code(green_native_nested, 0, "native_nested") + + invalid_format2_nested = _deep_copy(green_format2_nested) + del invalid_format2_nested["steps"]["nested_workflow"]["run"]["steps"] + _dump_with_exit_code(invalid_format2_nested, 2, "format2_nested_no_steps") + + invalid_native_nested = _deep_copy(green_native_nested) + del invalid_native_nested["steps"]['2']['subworkflow']['steps'] + _dump_with_exit_code(invalid_native_nested, 2, "native_nested_no_steps") + def test_lint_ga_basic(): assert main(["lint", os.path.join(TEST_PATH, "wf3-shed-tools-raw.ga")]) == 1 # no outputs
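
A possible follow-up (editorial, not part of this change) would be direct lint tests over the nested examples dumped by `setup_module` above. The file names and extensions written by `_dump_with_exit_code` are not visible in this hunk, so the paths in the sketch below are assumptions and would need to match that helper's real output.

```python
# Sketch of follow-up tests for the nested examples created in setup_module.
# The example file names/extensions are assumptions, not taken from the diff.
def test_lint_nested_examples():
    examples = os.path.join(TEST_PATH, "examples")
    # format2_nested was dumped with expected exit code 0 above.
    assert main(["lint", os.path.join(examples, "format2_nested.yml")]) == 0
    # native_nested_no_steps was dumped with expected exit code 2 above.
    assert main(["lint", os.path.join(examples, "native_nested_no_steps.yml")]) == 2
```
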