diff --git a/.gitignore b/.gitignore index 7fd723c9d..7ec8e427b 100644 --- a/.gitignore +++ b/.gitignore @@ -15,7 +15,8 @@ dist/ downloads/ eggs/ .eggs/ -lib/ +# need to comment out otherwise the swagger js libs don't get copied in the gh-pages deploy +#lib/ lib64/ parts/ sdist/ diff --git a/.travis.yml b/.travis.yml index 25e78dbf8..39095e842 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,60 +1,62 @@ language: python - python: - '2.7' - '3.6' - -# Specifying `branches.only = ['master']` can cause tagged builds to -# not deploy. See travis-ci/travis-ci#2498 and travis-ci/travis-ci#1675. -# We can fix this by only build master and branches/tags that follow the -# format x.y.z. branches: only: - - master - - /^\d+\.\d+(\.\d+)?(-\S*)?$/ - + - master + - develop + - "/^\\d+\\.\\d+(\\.\\d+)?(-\\S*)?$/" + - "/^feature\\/issue-\\d+(-\\S*)?$/" + - "/^release\\/\\d+\\.\\d+(\\.\\d+)?$/" stages: - - linting - - test - # The deploy stage only has `deploy` actions which will only run on commits to master - # (and not pull requests). That said, we still explicitly skip this stage because - # if we don't, Travis will only stop execution once the build stage has already been - # set up. If we skip the deploy stage for pull requests at the stage level, we can - # shave off a couple minutes of runtime. - - name: deploy - if: type != pull_request - +- linting +- test +- name: deploy + if: type != pull_request && branch = "master" jobs: include: - # If the linting stage fails, then none of the other stages will run. - - stage: linting - python: '3.6' - script: - # Travis will install requirements.txt by default - - flake8 --select=E121,E123,E126,E226,E24,E704,W503,W504 --ignore=E501 app.py tests - - # Deploy to PyPI on every tagged commit - - stage: deploy - python: '3.6' - script: ignore - before_install: ignore - deploy: - - provider: pypi - on: - tags: true - branch: master - python: '3.6' - repo: ga4gh/data-object-service-schemas - user: david4096 - password: - secure: LlQn8ZBAb5ekujHnoDrmzrmXaM6TpyzByNHPH4FTbbdnJ8lkDPb/ZhYvdmqrOvXPQg81/IoYKlIvP7fY9kc3oGUJ2IXhcPFqiw8njsRE5Qaebp+YppQO7C3IWGlHoZtXNtC608ZSA4x0oneNeNy+Y8KYnqKbmOlbuvrYRlNYfe9/8z7yLPH8wdmp0GyvbViedr3p7PXhtQVUKAgPpgjffZnSA7P/Y6AdkvjHHv2xMAzWP/QmOFWZNxUXjg0miR0K7eGFeGBNMM/+QsVXrGOu/TCtPtJ4JXyD86nzrZUbsOluyAblxwGlrv05se5ImVhR210OC5zvSW2902y/lxCw5uek+xg4/tcSA1ckshxLeu02GfDygCktMUtqtKVIZ+qvU7H4dEQ6Jnz9yBvZW5M6V94Ew3wBFy0RB5I9k3MMQY21FdynIUEZzBgJbOChCbmlIDT1varBHvWBiwg8EwPOVuJt1CsOoptJxUsoJND4tAOPIvXMNI17qGJ+VWAVMVNn7cVUuhEeGXwQF4urrkFBA7WIYOp6O9R8Ipg6WnQdxVdnqb3NsEc19SRdFXQ82SYibKfIZxjpdmYVgKzTYsJGMhfG6fTw9D4JABhggfgShsnByrFtbbkn/9g64jXDOjwPLeRXwXYZe6ZV6M69PDWdo0o326Qq/OHBG5eU7z2plNI= - + - stage: linting + python: '3.6' + script: + - flake8 --select=E121,E123,E126,E226,E24,E704,W503,W504 --ignore=E501 app.py + tests + - stage: build_pages + language: java + jdk: oraclejdk8 + before_install: + - chmod +x gradlew + - chmod +x scripts/fetchpages.sh + - chmod +x scripts/stagepages.sh + script: + - "./scripts/fetchpages.sh" + - "./gradlew installSwagger buildSwagger asciidoctor" + - "./scripts/stagepages.sh" + deploy: + provider: pages + skip-cleanup: true + github-token: "$GITHUB_TOKEN" + on: + all_branches: true + - stage: deploy + python: '3.6' + script: ignore + before_install: ignore + deploy: + - provider: pypi + on: + tags: true + branch: master + python: '3.6' + repo: ga4gh/data-repository-service-schemas + user: ga4gh-cloud-workstream + password: + secure: 
O3xHQwUG2pEhU0FXmeaWzDY70iRz0J+LeXGhyH9UJz84WCSSfmwEyxH4gJlucHLEmUBV+nuwxbe6vRzN2jiO34qSXrdSV7fippw2ZCuYM+hwOz0rB42B0fUVqMBIMjAusKdoumaeOS15spOV+06qSAtzB+bhI0ZqhSEFy7yeX1qIdx33bJUSqB73r9be+gndafXG9vK2lqz0C8tMA3cYFze6alX3vde8dWYuTeq65XhXyqrwELhCMTVTYQmHit9Y7qMqcJdb5a/O/Q8QcmhxiowxfZXqzXtRKfAFaukq1FobG05WYVcz3Uwsog6EVkqlQ9WZ7LNv2ZskLRut+o/bq2zuYGoiEtjLu4SVaPQrgRrcAf4HisMx6elJIjQffNjaaZTJ89kMJq8lGV/t+HfBSEUwxU6gK3ueDIAHDLbnaPbGTPs8bBNo2XZ+7rRU3xCkiDB/ZwOktuwguGFBSOA7UT129JOp8yQ3y4G2SvwwMNo7KUKr2R4PDY5q6FG2XoZZqJHOkPP/iOvy3e+3gpUg3tGYxsQF2XJaarvzqOfkrHMMnlKmCrkcueVeIRyxbMfp/FylxAL4D4AY6xdM2Otl7+hZjB49ZruoRitoa+N5w6AEG0CaIQoJWQ3k2UIGektjdhrDszJS47Q65efjiO93XEcUiEXh2bM4KJfYDxHI13M= before_install: - python setup.py sdist -- pip install dist/ga4gh_dos_schemas-*.tar.gz +- pip install dist/ga4gh_drs_schemas-*.tar.gz - npm install -g swagger2openapi - script: - make schemas - nosetests python/ -- ga4gh_dos_client +- ga4gh_drs_client diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index c051f26c9..900830767 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -1,9 +1,9 @@ -How to contribute to the GA4GH Schemas +How to contribute to the DRS GA4GH Schema ====================================== Thank you for taking the time to contribute. We appreciate it! -The GA4GH schemas define an API for sharing genomic and phenotypic data. +The GA4GH DRS schema defines an API for sharing data. There are two ways to contribute to the effort - via issues, which are used for discussion, and pull requests, which are concrete proposals of @@ -41,18 +41,25 @@ repositories each with the same name is a branch set, e.g. the master branch in each repository forms the master branch set. Some general rules to follow: - -- [Fork](https://help.github.com/articles/fork-a-repo) the main - project into your personal GitHub space to work on. -- Create a branch for each update that you're working on. These - branches are often called "feature" or "topic" branches. Any changes - that you push to your feature branch will automatically be shown in - the pull request. -- If necessary, replicate the last two steps' fork-and-branch process - for each of the compliance and server repositories to create a - branch set - each constituent repository branch will be - identically named. -- Coordinate pull requests across the branch set by making them as +- Create an issue in Github to track your work and start a conversation. Make a note of the number, you'll + need it when naming your feature branch below. +- We follow [HubFlow](https://datasift.github.io/gitflow/) which means we use + a feature branch strategy with pull requests always going to `develop` + and releases happening from `master`. **Please read the HubFlow guide linked above, it's a quick read and will give you a really good idea of how our branches work. Do not make pull requests to `master`!** +- If you are a core developer with write access to the repo, make a feature + branch following HubFlow conventions in the repo (see next step). Otherwise + [fork](https://help.github.com/articles/fork-a-repo) the repo into your personal GitHub space to work on. +- Create a "feature" branch for each update that you're working on (either in the main repo or your fork depending + on the previous step). These branches should start with "feature/issue-[number]-[some-description]". For example + "feature/issue-123-improving-the-docs". 
Most devs will use the HubFlow command line tools to do this however, if you + make a feature branch in GitHub's UI, then please make sure you follow this naming convention. +- If you are creating a feature branch in the main repo and you follow this + convention nice things will happen e.g. TravisCI will check your branch and the documentation and swagger will be built + for you, see the [README.md](README.md) for how to construct a URL to view these for your feature branch. +- When you're happy with your feature branch, make a [Pull Request](https://help.github.com/articles/about-pull-requests/) + in GitHub from your feature branch (or fork with a feature branch) to develop. Pick at least one other person to review + and write up a good message that links back to the issue you started this whole process with. +- If you have multiple related pull requests, coordinate pull requests across the branch set by making them as simultaneously as possible, and [cross referencing them](http://stackoverflow.com/questions/23019608/github-commit-syntax-to-link-a-pull-request-issue). - Keep your pull requests as small as possible. Large pull requests @@ -68,40 +75,32 @@ will automatically run tests to ensure valid schema syntax. If your pull request fails to pass tests, review the test log, make changes and then push them to your feature branch to be tested again. -Issue Resolution -================ +Builds with Travis-CI +===================== + +We use Travis for CI testing. If you create a fork and feature branch +this will not automatically be built from our Travis. However, if you +are a developer and have created a feature branch following the naming +convention above, you should see automated builds. + +Check https://travis-ci.org/ga4gh/data-repository-service-schemas/builds to see the status of the builds. + +Pull Request Voting Process +=========================== + +DRS is very much focused on meeting the needs of our Driver Projects +so this voting process is focused on their needs. + +1) We always have an issue created before a PR, this is where a description and initial conversation takes place + +2) Someone is assigned the ticket, they bring together one (or more) pull requests... they might do it themselves or ask for help. Multiple pull requests could be used if there are different approaches that need to be explored + +3) David, Brian, and Rishi review the PRs every week on the call (and also ping the mailing list), set a deadline by which drivers (and a few key non-drivers) need to respond with a +1, 0, or -1 by. A non-vote means 0 so neutral. We try for no "-1"s. Strive to reach consensus with our drivers. We ask that a -1 give us details why. + +4) David and Brian as Work Stream leads retain a veto if something goes off the rails + +5) We merge or discard depending on the vote/veto by the date we set when the PR was shared with the group -Once a pull request or issue have been submitted, anyone can comment or -vote on to express their opinion following the Apache voting system. -Quick summary: - -- **+1** something you agree with -- **-1** if you have a strong objection to an issue, which will be - taken very seriously. A -1 vote should provide an - alternative solution. -- **+0** or **-0** for neutral comments or weak opinions. 
-- It's okay to have input without voting -- Silence gives assent - -A pull request with at least two **+1** votes, no **-1** votes, that has -been open for at least 3 days, and whose cross-referenced pull requests -to server and compliance have similarly been upvoted is ready to be -merged. The merge should be done by someone from a different -organization than the submitter. (We sometimes waive the 3 days for -cosmetic-only changes -- use good judgment.) A pull request to either -the schemas, servers or compliance repository that involves changes to -the others should not be merged without coordinating, mergable pull -requests to the other repositories. Conversely, when merging a pull -request the other pull requests in the branch set must be merged at the -same time (In practise, when merging a branch set including the schemas -repository, merge the pull request to schemas first to avoid the -continuous integration build issues). If an issue gets any **-1** votes, -the comments on the issue need to reach consensus before the issue can -be resolved one way or the other. There isn't any strict time limit on a -contentious issue. - -The project will strive for full consensus on everything until it runs -into a problem with this model. Syntax Style and Conventions ============================ @@ -186,12 +185,12 @@ resolution](#issue%20resolution). Release Branches ================ -From time to time the group will make a release. This is achieved by -creating a branch set including all the repositories named -"release-foo", where foo is the release name. Only bug fixes are allowed -to release branch sets. To refer to a specific version of a release -branch set either the commit id can be used, or alternatively (better), -a tag can be created (which should be replicated across repositories). +From time to time the group will make a release, this is done with the HubFlow +release process which generally involves creating a branch +"release-foo", where foo is the release name. And following the HubFlow +tooling for pushing this to master/develop and taggging in GitHub. +Only bug fixes are allowed +to the release branch and the release branch is removed after a successful HubFlow release. Retired Task Teams ================== diff --git a/DOCSBUILD.md b/DOCSBUILD.md new file mode 100644 index 000000000..d1a16f17f --- /dev/null +++ b/DOCSBUILD.md @@ -0,0 +1,346 @@ +# Documentation Build Process + +This doc (from James Eddy) describes the build process for Swagger UI and static docs (in Travis CI) and subsequent deployment to GitHub Pages (cc @briandoconnor, @denis-yuen, @david4096, @natanlao). 
+ +These instructions are based on the current configuration for the [**Workflow Execution Service (WES) API schema repo**](https://github.com/ga4gh/workflow-execution-service-schemas), which includes: + ++ **When code is merged into the `master` branch of this repository**, artifacts are created and hosted at the following paths: + + ga4gh.github.io/[repo]/swagger-ui/ — Swagger UI for the API spec + + ga4gh.github.io/[repo]/docs/ — reference docs for the API + + ga4gh.github.io/[repo]/swagger.json — API spec in JSON format + + ga4gh.github.io/[repo]/swagger.yaml — API spec in YAML format + ++ **For non-`master` branches**, reviewers can preview documentation and other pages under "ga4gh.github.io/[repo]/preview/[branch-name]/" + + swagger-ui/ — Swagger UI preview for current version of [branch-name] + + docs/ — docs preview for current version of [branch-name] + + swagger.json — spec (JSON) preview for current version of [branch-name] + + swagger.yaml — spec (YAML) preview for current version of [branch-name] + ++ When changes are pushed to branches on a fork of the main repo (and the user has set up Travis for their forked repo), the same path apply but should be relative to "[user-or-org].github.io/[repo]/". + ++ `README.md` and `CONTRIBUTING.md` updated with all of the above links + ++ `README.md` badges indicating Travis CI build status and Swagger/OpenAPI validation status + + +--- + +## Reference docs with `asciidoctor` and `swagger2markup` + +Uses the swagger2markup gradle plugin and asciidoctor to (1) automatically generate human-readable asciidoc files from the contract-first OpenAPI (swagger) yaml spec; and (2) incorporate +manual content to build an overall document in HTML and PDF formats. + +You'll need gradle installed to test locally. + +
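For orientation, this is the end-to-end local build the rest of this section works toward, using only the gradle tasks described below (a sketch that assumes the wrapper, `build.gradle`, and `docs/asciidoc` sources from the following steps are already in place):

```shell
# Convert the contract-first swagger YAML into AsciiDoc fragments under docs/asciidoc/swagger2markup/
./gradlew convertSwagger2markup

# Render index.adoc plus the generated fragments to HTML and PDF
./gradlew asciidoctor

# The output lands under docs/
ls docs/html5/index.html docs/pdf/index.pdf
```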
+ +Steps + +### Set up directory + +I started with the setup used in [**this template**](https://github.com/Swagger2Markup/swagger2markup-gradle-project-template) and copied over files for the Swagger2Markup [**gradle plugin**](http://swagger2markup.github.io/swagger2markup/1.3.1/#_gradle_plugin). + +**Note:** the choice of directory structure used here was my own, and is somewhat arbitrary. You can reorganize however you like, but you'll need to keep track of paths across various scripts and config files. + +```terminal +. # top level repo directory, e.g., 'workflow-execution-service-schemas/' +├── build.gradle +├── gradle +│   └── wrapper +│   ├── gradle-wrapper.jar +│   └── gradle-wrapper.properties +└── gradlew +``` + +### Update `gradle.settings` + +Change root project name (to the name of your repo's project): +```groovy +rootProject.name = 'workflow-execution-service-schemas' +``` + +### Update `build.gradle` + +Add `asciiDocDir` to `ext`: +```groovy +ext { + asciiDocDir = file("docs/asciidoc") + asciiDocOutputDir = file("docs/asciidoc/swagger2markup") +} +``` + +Update paths in `convertSwagger2markup`: +```groovy +convertSwagger2markup { + swaggerInput file("openapi/workflow_execution_service.swagger.yaml").getAbsolutePath() + outputDir asciiDocOutputDir + config = ['swagger2markup.markupLanguage' : 'ASCIIDOC', + 'swagger2markup.extensions.dynamicDefinitions.contentPath' : file('docs/asciidoc/swagger2markup/definitions').absolutePath, + 'swagger2markup.extensions.dynamicOverview.contentPath' : file('docs/asciidoc/swagger2markup/overview').absolutePath, + 'swagger2markup.extensions.dynamicPaths.contentPath' : file('docs/asciidoc/swagger2markup/paths').absolutePath, + 'swagger2markup.extensions.dynamicSecurity.contentPath' : file('docs/asciidoc/swagger2markup/security').absolutePath] +} +``` + +Add `sourceDir` and `outputDir` to `asciidoctor`: +```groovy +asciidoctor { + dependsOn convertSwagger2markup + sourceDir asciiDocDir + outputDir file("docs") + sources { + include 'index.adoc' + } + backends = ['html5', 'pdf'] + attributes = [ + doctype: 'book', + toc: 'left', + toclevels: '3', + numbered: '', + sectlinks: '', + sectanchors: '', + hardbreaks: '', + generated: asciiDocOutputDir + ] +} +``` + +Update paths in `watch`: +```groovy +watch { + asciidoc { + files fileTree('docs/asciidoc') + tasks 'asciidoctor' + } +} +``` + +### Generate AsciiDoc docs + +Run `./gradlew convertSwagger2markup` to convert swagger YAML to AsciiDoc files and initialize the `docs` folder: +```terminal +. +└── docs +    └── asciidoc +       └── swagger2markup +       ├── definitions.adoc +       ├── overview.adoc +       ├── paths.adoc +       └── security.adoc +``` + +### Add `index.adoc` and `front_matter.adoc` + +The [index file](https://github.com/ga4gh/workflow-execution-service-schemas/blob/master/docs/asciidoc/index.adoc) allows you to control the order in which pages are built for HTML and PDF docs; it looks like this: +```adoc +include::{generated}/overview.adoc[] +include::front_matter.adoc[] +include::{generated}/paths.adoc[] +include::{generated}/definitions.adoc[] +``` + +The ["front matter" file](https://github.com/ga4gh/workflow-execution-service-schemas/blob/master/docs/asciidoc/front_matter.adoc) is where you can add any manual content that you want to integrate with the +generated docs. This content needs to be composed using AsciiDoc (`.adoc`) format: + +```adoc +== Section header + +Some summary text. + +Features: + +* feature 1 +* feature 2 + +== Another section header + +More text... 
+``` + +### Build reference docs + +Run `./gradlew asciidoctor` to test. Check `docs/asciidoc/html5/index.html` to see the generated HTML report or `docs/asciidoc/pdf/index.pdf` to see the generated PDF report. + +```terminal +. +└── docs +    ├── README.md +    ├── asciidoc +    │   ├── front_matter.adoc +    │   ├── index.adoc +    │   └── swagger2markup +    │   ├── definitions.adoc +    │   ├── overview.adoc +    │   ├── paths.adoc +    │   └── security.adoc +    ├── html5 +    │   └── index.html +    └── pdf +    └── index.pdf +``` + +You can also add a `README.md` to the `docs` folder with a link to where generated docs will be hosted: +```md +View the full [Reference Documentation](https://ga4gh.github.io/workflow-execution-service-schemas/docs/) for the Workflow Execution Service API. +``` + +
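The `build.gradle` added in this PR also applies the gradle watch and livereload plugins, so you can get a rough edit-and-preview loop while writing the manual AsciiDoc content. A sketch, assuming the plugin-provided `watch` and `liveReload` tasks and the default LiveReload port:

```shell
# Re-run asciidoctor whenever anything under docs/asciidoc changes
./gradlew watch &

# Serve the rendered output with livereload (docRoot points at the asciidoctor output dir)
./gradlew liveReload
```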
+ +## Swagger UI + +I initially used the node package [**generator-openapi-repo**](https://github.com/Rebilly/generator-openapi-repo) to set up the Swagger UI pieces for the repo. However, the generator did far more than I needed and added behavior I couldn't control, so I cut out the excess pieces and rewrote the associated scripts to minimize the amount of one-off JS code. + +Also, with help from @coverbeck, I updated the gradle build to install and provide an interface to the Swagger UI node components (eliminating the need for `gulpfile.js`). +
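As a concrete illustration of that gradle-to-npm interface, the `installSwagger` and `buildSwagger` tasks in this PR's `build.gradle` are thin `NpmTask` wrappers, so the Swagger UI bundle can be built locally with the wrapper alone (assuming, as `stagepages.sh` below implies, that the npm `build` script writes its output to `web_deploy/`):

```shell
# Downloads a pinned node (8.9.0) and runs `npm install` for the package.json dependencies
./gradlew installSwagger

# Runs `npm run build` to assemble the Swagger UI bundle
./gradlew buildSwagger

# The staged UI should now be under web_deploy/
ls web_deploy/
```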
+ +Steps + +### Set up directory + +```terminal +. +├── gulpfile.js # deprecated; need to remove +├── package.json +└── scripts +    ├── buildui.js # deprecated; need to remove +    ├── fetchpages.sh +    └── stagepages.sh +``` + +### Add/edit `package.json` + +You should be able to copy the contents of [`package.json`](https://github.com/ga4gh/workflow-execution-service-schemas/blob/master/package.json) from the WES repo to get started. Update `name` and `version` to match the information for your repo. + +### Edit `stagepages.sh` + +This script builds Swagger UI and sets up various elements in their target locations for deployment to GitHub pages. The path to the swagger YAML is hardcoded in a couple lines, so you'll need to change that. + +```shell +#!/usr/bin/env bash + +set -e +set -v + +if [ "$TRAVIS_BRANCH" == "master" ]; then + cp docs/html5/index.html docs/ + cp openapi/workflow_execution_service.swagger.yaml ./swagger.yaml + cp -R web_deploy/* . +elif [ "$TRAVIS_BRANCH" != "gh-pages" ]; then + branch=$(echo "$TRAVIS_BRANCH" | awk '{print tolower($0)}') + branchpath="preview/$branch" + mkdir -p "$branchpath/docs" + cp docs/html5/index.html "$branchpath/docs/" + cp openapi/workflow_execution_service.swagger.yaml "$branchpath/swagger.yaml" + cp -R web_deploy/* "$branchpath/" +fi +``` + +
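To sanity-check the staging logic before handing it to Travis, one option (purely illustrative, not part of the repo) is to run the script by hand with the only variable it reads, after the docs and Swagger UI have been built:

```shell
# Pretend we are on a feature branch; stagepages.sh keys off $TRAVIS_BRANCH only
export TRAVIS_BRANCH=feature/issue-123-improving-the-docs

bash scripts/stagepages.sh

# Staged files should now sit under preview/<lowercased branch name>/
ls preview/
```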
+ +### Tweak `.gitignore` + +You need to remove the following line from `.gitignore`, otherwise +the Swagger UI JavaScript libraries won't be copied during the +gh-pages deploy: + + lib/ + + +## Configure Travis CI for repo + +This last step should be pretty straightforward — even though it was the hardest and most time-consuming to troubleshoot. :) Here, you'll extend the `.travis.yml` config to run additional steps (after your API spec code has been built and tested) to build, set up, and deploy docs and Swagger elements. +
+ +Steps + +### Create/add GitHub token + +Follow [instructions](https://docs.travis-ci.com/user/deployment/pages/#setting-the-github-token) from Travis CI docs. + +### Update `travis.yml` + +If you already have a build/test/deply job configured in Travis, you can separate this as a separate stage in `jobs/include` — it's OK for different stages to use different environments. I believe you could also use `matrix` here, but this seems to work. + +```yaml +jobs: + include: + - stage: test + language: python + python: + - '2.7' + before_install: + - sudo apt-get update -qq + - pip install . --process-dependency-links + - pip install -r python/dev-requirements.txt + script: + - nosetests python/ + - flake8 python + - ga4gh_wes_client + deploy: + ... + + - stage: build_pages + ... +``` + +Add docs/swagger build commands for Java-based stage: + +**Note:** the `fetchpages.sh` step here effectively acts to retrieve the current state of the `gh-pages` branch and store it to be re-pushed along with the newly generated pages — rather than overwritten. + +```yaml +jobs: + include: + - stage: test + ... + + - stage: build_pages + language: java + jdk: oraclejdk8 + before_install: + - chmod +x gradlew + - chmod +x scripts/fetchpages.sh + - chmod +x scripts/stagepages.sh + script: + - "./scripts/fetchpages.sh" + - "./gradlew installSwagger buildSwagger asciidoctor" + - "./scripts/stagepages.sh" +``` + +Add deploy instructions for GitHub pages: + +**Note:** It is important that all of your build/deploy steps for docs and Swagger elements use the same language for the build environment (and preferably part of the same job/stage). Travis *does not* cache information between jobs of different languages, and so pushing to `gh-pages` without missing or overwriting something from a previous job gets really complicated. + +```yaml +jobs: + include: + - stage: test + ... + + - stage: build_pages + language: java + jdk: oraclejdk8 + before_install: + - chmod +x gradlew + - chmod +x scripts/fetchpages.sh + - chmod +x scripts/stagepages.sh + script: + - "./scripts/fetchpages.sh" + - "./gradlew installSwagger buildSwagger asciidoctor" + - "./scripts/stagepages.sh" + deploy: + provider: pages + skip-cleanup: true + github-token: $GITHUB_TOKEN + on: + all_branches: true +``` + +Push to your repo and cross your fingers... + +
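The PR references `scripts/fetchpages.sh` but doesn't reproduce it; based on the description above (grab whatever is already published on `gh-pages` so the deploy step re-pushes it alongside the new pages), a minimal stand-in could look like the sketch below. Treat it as an illustration of the idea, not the actual script:

```shell
#!/usr/bin/env bash
# Illustrative only -- the real scripts/fetchpages.sh lives in the repo.
set -e

# If gh-pages doesn't exist yet there is nothing to preserve.
git fetch origin gh-pages:gh-pages || exit 0

# Copy the currently published files into the working tree so the
# `pages` deploy pushes them back along with the newly generated output.
git checkout gh-pages -- .
```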
+ +## Update README links/badges + +For ideas on how to set up references to docs and Swagger elements in your main `README.md`, refer to the [**README**](https://github.com/ga4gh/workflow-execution-service-schemas/blob/master/README.md) for the WES API repo. diff --git a/Makefile b/Makefile index f595f89ec..6764ed83a 100644 --- a/Makefile +++ b/Makefile @@ -1,7 +1,7 @@ -SWAGGER_PATH = openapi/data_object_service.swagger.yaml -OPENAPI3_PATH = openapi/data_object_service.openapi.yaml -SMARTAPI_PATH = openapi/data_object_service.smartapi.yaml -SMARTAPI_PART_PATH = openapi/data_object_service.smartapi.yaml.part +SWAGGER_PATH = openapi/data_repository_service.swagger.yaml +OPENAPI3_PATH = openapi/data_repository_service.openapi.yaml +SMARTAPI_PATH = openapi/data_repository_service.smartapi.yaml +SMARTAPI_PART_PATH = openapi/data_repository_service.smartapi.yaml.part SWAGGER2OPENAPI_PATH = swagger2openapi $(OPENAPI3_PATH) : $(SWAGGER_PATH) @@ -14,4 +14,3 @@ schemas : $(OPENAPI3_PATH) $(SMARTAPI_PATH) true .PHONY: schemas - diff --git a/README.md b/README.md index 57cf6e95b..fa110807d 100644 --- a/README.md +++ b/README.md @@ -1,11 +1,22 @@ -# Schemas for the Data Repository Service (DRS) API -
-[![Build Status](https://travis-ci.org/ga4gh/data-object-service-schemas.svg?branch=master)](https://travis-ci.org/ga4gh/data-object-service-schemas) -[![Swagger Validator](https://img.shields.io/swagger/valid/2.0/https/raw.githubusercontent.com/OAI/OpenAPI-Specification/master/examples/v2.0/json/petstore-expanded.json.svg)](https://raw.githubusercontent.com/ga4gh/data-object-service-schemas/master/openapi/data_object_service.swagger.yaml) -[![Read the Docs badge](https://readthedocs.org/projects/data-object-service/badge/)](https://data-object-service.readthedocs.io/en/latest) -![PyPI - Python Version](https://img.shields.io/pypi/pyversions/ga4gh-dos-schemas.svg) +GA4GH Logo -[View the schemas in Swagger UI](http://ga4gh.github.io/data-repository-service-schemas/) +# Data Repository Service (DRS) API + +`master` branch status: [![Build Status](https://travis-ci.org/ga4gh/data-repository-service-schemas.svg?branch=master)](https://travis-ci.org/ga4gh/data-repository-service-schemas?branch=master) +Swagger Validator + + +The [Global Alliance for Genomics and Health](http://genomicsandhealth.org/) (GA4GH) is an international coalition, formed to enable the sharing of genomic and clinical data. + +# About the GA4GH Cloud Work Stream + +The GA4GH [Cloud Work Stream](https://ga4gh.cloud) helps the genomics and health communities take full advantage of modern cloud environments. +Our initial focus is on “bringing the algorithms to the data”, by creating standards for defining, sharing, and executing portable workflows. + +We work with platform development partners and industry leaders to develop standards that will facilitate interoperability. + +# What is DRS? The goal of DRS is to create a generic API on top of existing object storage systems so workflow systems can access data in a single, standard way regardless of where it's @@ -17,19 +28,46 @@ The API is split into two sections: * **Data Object management**, which enables the creation, updating, deletion, versioning, and unique identification of files and data bundles (flat collections of files); and -* **Data Object querying**, which can locate data objects across different cloud environments - and DRS implementations. +* **Data Object access**, which can locate data objects across different cloud + and other environments. + +# API Definition + +See the human-readable **Reference Documentation** + +* [Released (master)](https://ga4gh.github.io/data-repository-service-schemas/docs/): the current release +* [Release 0.0.1](https://ga4gh.github.io/data-repository-service-schemas/preview/release/0.0.1/docs/): the initial DRS after the rename from DOS. +* [Stable Development (develop)](https://ga4gh.github.io/data-repository-service-schemas/preview/develop/docs/): This is where feature branches are merged into, it is our stable development branch. + +See the **[OpenAPI YAML description](openapi/data_repository_service.swagger.yaml)**. You can also explore the specification in the Swagger UI: + +* [Released (master)](https://ga4gh.github.io/data-repository-service-schemas/swagger-ui/) +* [Release 0.0.1](https://ga4gh.github.io/data-repository-service-schemas/preview/release/0.0.1/swagger-ui/) +* [Stable Development (develop)](https://ga4gh.github.io/data-repository-service-schemas/preview/develop/swagger-ui/) + +> All documentation and pages hosted at 'ga4gh.github.io/data-repository-service-schemas' reflect the latest API release from the `master` branch. 
To monitor the latest development work on various branches, add 'preview/\' to the URLs above (e.g., 'https://ga4gh.github.io/workflow-execution-service/preview/\/docs'). + +# Use Cases + +See the [Use Cases](USECASES.md) document for DRS use cases and possible +future directions. + +# Example DRS Server and Client ## Getting started Installing is as easy as: ``` -$ pip install ga4gh-dos-schemas +$ pip install ga4gh-drs-schemas ``` This will install both a demonstration server and a Python client that will allow you to -manage Data Objects in a local server. You can start the demo server using `ga4gh_dos_server`. +manage Data Objects in a local server. + +## Sample Service + +You can start the demo server using `ga4gh_drs_server`. This starts a Data Repository Service at http://localhost:8080. ``` @@ -49,12 +87,23 @@ curl http://localhost:8080/ga4gh/dos/v1/dataobjects/hg38-chr22 curl -X GET http://localhost:8080/ga4gh/dos/v1/dataobjects -d checksum=41b47ce1cc21b558409c19b892e1c0d1 ``` +## For More Information on the Sample Service and Client + For more on getting started, check out the -[quickstart guide](https://data-object-service.readthedocs.io/en/latest/quickstart.html) -or the rest of the documentation at [ReadtheDocs](https://data-object-service.readthedocs.io/en/latest/)! +[quickstart guide](https://data-repository-service.readthedocs.io/en/latest/quickstart.html) +or the rest of the documentation at [ReadtheDocs](https://data-repository-service.readthedocs.io/en/latest/)! + +# How to Contribute Changes + +See [CONTRIBUTING.md](CONTRIBUTING.md). + +If a security issue is identified with the specification, please send an email to security-notification@ga4gh.org detailing your concerns. + +# License + +See the [LICENSE](LICENSE). -## Getting involved! +# More Information -The Data Repository Service Schemas are Apache 2 Licensed Open Source software. Please join us -in the [issues](https://github.com/ga4gh/data-object-service-schemas/issues) or check out the -contributing docs! +* [Global Alliance for Genomics and Health](http://genomicsandhealth.org) +* [GA4GH Cloud Work Stream](https://ga4gh.cloud) diff --git a/USECASES.md b/USECASES.md index 037a7a28f..3b63b7094 100644 --- a/USECASES.md +++ b/USECASES.md @@ -1,20 +1,28 @@ # Overview -A place to document our use cases for Data Object Service (DOS). +A place to document our use cases for Data Repository Service (DRS). -## Presentation wtih an Overview +## Overview from Driver Projects + +See [this spreadsheet](https://docs.google.com/spreadsheets/d/1BoigMy4I44Wbd0y-GRvLtUCkGmRk5RAlJYf6zJGlRWg/edit#gid=0) for an overview of what use cases our Driver Projects would like DRS to support. + +## Older Information + +The sections below are from efforts in 2018 and earlier when DRS was referred to as Data Object Service (DOS). + +### Presentation with an Overview https://docs.google.com/presentation/d/18vB5wDvvvW4nlZDtbidcY5Sv5TbbegLRTT2Ar1a38-U/edit#slide=id.g1cffa4d16d_15_0 -## Google Doc with Use Cases +### Google Doc with Use Cases https://docs.google.com/document/d/1KNKYhMLzzbbS4x79PZE_GgVPaPSa4zNpZiey4fF1-Vo/edit -## Another Google Doc from Jonathan with Use Cases +### Another Google Doc from Jonathan with Use Cases https://docs.google.com/document/d/1UyqzlFpV-jzkB16wWjbwL4OL9HiQZRaqdzHDha7XOmU/edit?ts=59168060#heading=h.a8u6rulb7fu -## Cromwell +### Cromwell The Cromwell group has 4 instances via 2 projects where DOS is being considered. These are all similar enough to effectively be a single use case. 
We need to model an institution's local storage as a private cloud exposing basic read/write/sizeof functionality akin to GCS or S3. For instance instead of `gs:this/is/my/bucket` it'd be `site1:this/is/my/bucket` or `site2:this/is/my/bucket`. While these use cases don't *need* human readability the stakeholders have expressed preferences towards this over UUID approaches. Similar stances have been taken regarding putting a redirection utility in front of these private clouds. diff --git a/build.gradle b/build.gradle new file mode 100644 index 000000000..af804d0b7 --- /dev/null +++ b/build.gradle @@ -0,0 +1,106 @@ +buildscript { + repositories { + jcenter() + mavenCentral() + maven { url 'http://oss.jfrog.org/artifactory/oss-snapshot-local/' } + maven { + url "https://plugins.gradle.org/m2/" + } + //mavenLocal() + } + dependencies { + classpath 'org.asciidoctor:asciidoctor-gradle-plugin:1.5.3' + classpath 'io.github.swagger2markup:swagger2markup-gradle-plugin:1.3.1' + classpath "io.github.swagger2markup:swagger2markup:1.3.1" + classpath "io.github.swagger2markup:swagger2markup-import-files-ext:1.3.1" + classpath "com.bluepapa32:gradle-watch-plugin:0.1.5" + classpath "org.kordamp.gradle:livereload-gradle-plugin:0.2.1" + classpath "com.moowork.gradle:gradle-node-plugin:1.2.0" + } +} + +apply plugin: 'org.asciidoctor.convert' +apply plugin: 'com.bluepapa32.watch' +apply plugin: 'org.kordamp.gradle.livereload' +apply plugin: 'io.github.swagger2markup' +apply plugin: 'com.moowork.node' +node { + version = '8.9.0' + download = true +} + +group 'io.github.swagger2markup' +version '1.3.1' + +repositories { + jcenter() + mavenCentral() +} + +ext { + asciiDocDir = file("docs/asciidoc") + asciiDocOutputDir = file("docs/asciidoc/swagger2markup") +} + +convertSwagger2markup { + swaggerInput file("openapi/data_repository_service.swagger.yaml").getAbsolutePath() + outputDir asciiDocOutputDir + config = ['swagger2markup.markupLanguage' : 'ASCIIDOC', + 'swagger2markup.extensions.dynamicDefinitions.contentPath' : file('docs/asciidoc/swagger2markup/definitions').absolutePath, + 'swagger2markup.extensions.dynamicOverview.contentPath' : file('docs/asciidoc/swagger2markup/overview').absolutePath, + 'swagger2markup.extensions.dynamicPaths.contentPath' : file('docs/asciidoc/swagger2markup/paths').absolutePath, + 'swagger2markup.extensions.dynamicSecurity.contentPath' : file('docs/asciidoc/swagger2markup/security').absolutePath] +} + +asciidoctorj { + version = '1.5.5' +} + +asciidoctor { + dependsOn convertSwagger2markup + sourceDir asciiDocDir + outputDir file("docs") + sources { + include 'index.adoc' + } + backends = ['html5', 'pdf'] + attributes = [ + doctype: 'book', + toc: 'left', + toclevels: '3', + numbered: '', + sectlinks: '', + sectanchors: '', + hardbreaks: '', + generated: asciiDocOutputDir + ] +} + +dependencies { + // add converters and extensions using `asciidoctor` configuration + asciidoctor 'org.asciidoctor:asciidoctorj-pdf:1.5.0-alpha.15' +} + +watch { + asciidoc { + files fileTree('docs/asciidoc') + tasks 'asciidoctor' + } +} + +liveReload { + docRoot asciidoctor.outputDir.canonicalPath +} + + +task wrapper(type: Wrapper) { + gradleVersion = '3.5' +} + +task installSwagger(type: NpmTask) { + npmCommand = ["install"] +} + +task buildSwagger(type: NpmTask) { + npmCommand = ["run", "build"] +} diff --git a/docs/README.md b/docs/README.md new file mode 100644 index 000000000..d28a1541a --- /dev/null +++ b/docs/README.md @@ -0,0 +1 @@ +View the main [reference documentation 
page](https://ga4gh.github.io/data-repository-service-schemas/) which is generated from these templates for the Data Repository Service. diff --git a/docs/asciidoc/back_matter.adoc b/docs/asciidoc/back_matter.adoc new file mode 100644 index 000000000..714f24601 --- /dev/null +++ b/docs/asciidoc/back_matter.adoc @@ -0,0 +1,39 @@ +== Appendix: Motivation + +[cols="40a,60a"] +|=== +|Data sharing requires portable data, consistent with the FAIR data principles (findable, accessible, interoperable, reusable). Today’s researchers and clinicians are surrounded by potentially useful data, but often need bespoke tools and processes to work with each dataset. And today’s data publishers don’t have a reliable way to make their data useful to all (and only) the people they choose. +|image::figure1.png[] +_Figure 1: there’s an ocean of data, with many different tools to drink from it, but no guarantee that any tool will work with any subset of the data_ +|=== + +[cols="40a,60a"] +|=== +|We need a standard way for data producers to make their data available to data consumers, that supports the control needs of the former and the access needs of the latter. And we need it to be interoperable, so anyone who builds access tools and systems can be confident they'll work with all the data out there, and anyone who publishes data can be confident it will work with all the tools out there. +|image::figure2.png[] +_Figure 2: by defining a standard Data Repository API, and adapting tools to use it, every data publisher can now make their data useful to every data consumer_ +|=== + + +[cols="75a,25a"] +|=== + +|We envision a world where: + +* there are many many **data consumers**, working in research and in care, who can use the tools of their choice to access any all data that they have permission to see +* there are many **data access tools** and platforms, supporting discovery, visualization, analysis, and collaboration +* there are many **data repositories**, each with their own policies and characteristics, which can be accessed by a variety of tools +* there are many **data publishing tools** and platforms, supporting a variety of data lifecycles and formats +* there are many many **data producers**, generating data of all types, who can use the tools of their choice to make their data as widely available as is appropriate + +|image::figure3.png[] +_Figure 3: a standard Data Repository API enables an ecosystem of data producers and consumers_ +|=== + +This spec defines a standard **Data Repository Service (DRS) API** (“the yellow box”), to enable that ecosystem of data producers and consumers. Our goal is that all data consumers need to know about a data repo is "here's the DRS endpoint to access it", and all data publishers need to know about tapping into the world of consumption tools is "here's how to tell it where my DRS endpoint lives". + +=== Federation + +The world's biomedical data is controlled by groups with very different policies and restrictions on where their data lives and how it can be accessed. A primary purpose of DRS is to support unified access to disparate and distributed data. (As opposed to the alternative centralized model of "let's just bring all the data into one single data repository”, which would be technically easier but is no more realistic than “let’s just bring all the websites into one single web host”.) + +In a DRS-enabled world, tool builders don’t have to worry about where the data their tools operate on lives -- they can count on DRS to give them access. 
And tool users only need to know which DRS server is managing the data they need, and whether they have permissions; they don’t have to worry about how to physically get access to, or (worse) make a copy of the data. For example, if I have appropriate permissions, I can run a pooled analysis where I run a single tool across data managed by different DRS servers, potentially in different locations. diff --git a/docs/asciidoc/figure1.png b/docs/asciidoc/figure1.png new file mode 100644 index 000000000..17bafc83e Binary files /dev/null and b/docs/asciidoc/figure1.png differ diff --git a/docs/asciidoc/figure2.png b/docs/asciidoc/figure2.png new file mode 100644 index 000000000..15f5eeade Binary files /dev/null and b/docs/asciidoc/figure2.png differ diff --git a/docs/asciidoc/figure3.png b/docs/asciidoc/figure3.png new file mode 100644 index 000000000..062f0b74b Binary files /dev/null and b/docs/asciidoc/figure3.png differ diff --git a/docs/asciidoc/front_matter.adoc b/docs/asciidoc/front_matter.adoc new file mode 100644 index 000000000..48c3cb888 --- /dev/null +++ b/docs/asciidoc/front_matter.adoc @@ -0,0 +1,46 @@ +== Introduction + +The Data Repository Service (DRS) API provides a generic interface to data repositories so data consumers, including workflow systems, can access data in a single, standard way regardless of where it's stored and how it's managed. This document describes the DRS API and provides details on the specific endpoints, request formats, and responses. It is intended for developers of DRS-compatible services and of clients that will call these DRS services. + +The primary functionality of DRS is to map a logical ID to a means for physically retrieving the data represented by the ID. The sections below describe the characteristics of those IDs, the types of data supported, and how the mapping works. + +**NOTE**: this document represents a work in progress towards DRS 1.0.0. It may not be fully in sync +with the OpenAPI schema since both are being worked on. The 0.0.1 release represents the +schema as it existed at the time of the transition from the DOS to DRS name and is subject to +change as we evolve it toward DRS 1.0.0. + +== DRS API Principles + +=== DRS IDs + +Each implementation of DRS can choose its own id scheme, as long as it follows these guidelines: + +* DRS IDs are URL-safe text strings made up of alphanumeric characters and any of [.-_/] +* One DRS ID MUST always return the same object data (or, in the case of a collection, the same set of objects). This constraint aids with reproducibility. +* DRS does NOT support semantics around multiple versions of an object. (For example, there’s no notion of “get latest version” or “list all versions” in DRS v1.) Individual implementations MAY choose an ID scheme that includes version hints. +* DRS implementations MAY have more than one ID that maps to the same object. + +=== DRS Datatypes + +DRS v1 supports two datatypes: + +* Blobs -- these are file-like objects +* Collections -- these are sets of other DRS objects (either Blobs or Collections) + +=== Read-only + +DRS v1 is a read-only API. We expect that each implementation will define its own mechanisms and interfaces (graphical and/or programmatic) for adding and updating data. + +=== URI convention (WORK IN PROGRESS) + +For convenience, we define a recommended syntax for fully referencing DRS-accessible objects. Strings of the form drs://<server>/<id> mean “make a DRS call to the HTTP address at <server>, passing in the DRS id <id>, to retrieve the object”.
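To make that concrete (the hostname below is made up; the object id and REST path are the ones the demo server elsewhere in this PR exposes, and the mapping may change as the schema evolves toward DRS 1.0.0):

[source,shell]
----
# Hypothetical resolution of drs://drs.example.org/hg38-chr22 :
# call the DRS server at drs.example.org and ask for the object with id "hg38-chr22".
curl https://drs.example.org/ga4gh/dos/v1/dataobjects/hg38-chr22
----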
For example, these strings are useful when passing objects to a WES server for processing. + +=== Standards + +The DRS API specification is written in OpenAPI and embodies a RESTful service philosophy. It uses JSON in requests and responses and standard HTTP/HTTPS for information transport. + +== Authorization & Authentication (WORK IN PROGRESS) + +Users must supply credentials that establish their identity and authorization in order to use a DRS endpoint. We recommend that DRS implementations use an OAuth2 https://oauth.net/2/bearer-tokens/[bearer token], although they can choose other mechanisms if appropriate. DRS callers can use the `auth_instructions_url` from the https://ga4gh.github.io/data-repository-service-schemas/#/DataRepositoryService/GetServiceInfo[service-info endpoint] to learn how to obtain and use a bearer token for a particular implementation. + +The DRS implementation is responsible for checking that a user is authorized to submit requests. The particular authorization policy is up to the DRS implementer. diff --git a/docs/asciidoc/index.adoc b/docs/asciidoc/index.adoc new file mode 100644 index 000000000..bfcac0c61 --- /dev/null +++ b/docs/asciidoc/index.adoc @@ -0,0 +1,5 @@ +include::{generated}/overview.adoc[] +include::front_matter.adoc[] +include::{generated}/paths.adoc[] +include::{generated}/definitions.adoc[] +include::back_matter.adoc[] diff --git a/docs/html5/index.html b/docs/html5/index.html new file mode 100644 index 000000000..d31d49033 --- /dev/null +++ b/docs/html5/index.html @@ -0,0 +1,1709 @@ + + + + + + + +Workflow Execution Service + + + + + +
+
+

1. Overview

+
+
+

1.1. Version information

+
+

Version : 0.3.0

+
+
+
+

1.2. URI scheme

+
+

BasePath : /ga4gh/wes/v1
+Schemes : HTTP, HTTPS

+
+
+
+

1.3. Consumes

+
+
    +
  • +

    application/json

    +
  • +
+
+
+
+

1.4. Produces

+
+
    +
  • +

    application/json

    +
  • +
+
+
+
+
+
+

2. Executive Summary

+
+
+

An executive summary summarising the major points of the document. To be added for issue #37.

+
+
+
+
+

3. Introduction

+
+
+

Introduction setting the document’s scope.

+
+
+
+
+

4. Standards

+
+
+

Standards incorporated (?)

+
+
+

4.1. Authorization & Authentication

+
+

Security issues around authentication and authorization need to be resolved and added into the specification. To be added for issue #36.

+
+
+
+
+
+

5. Paths

+
+
+

5.1. Run a workflow.

+
+
+
POST /runs
+
+
+
+

5.1.1. Description

+
+

This endpoint creates a new workflow run and
+returns the workflow ID to monitor its progress.

+
+
+

The request may upload files that are required to execute the
+workflow identified as workflow_attachment. The parts
+supplied in workflow_attachment may include the primary
+workflow, tools imported by the workflow, other files
+referenced by the workflow, or files which are part of the
+input. The implementation should stage these files to a
+temporary directory and execute the workflow from there.
+These parts must have a Content-Disposition header with a
+"filename" provided for each part. Filenames may include
+subdirectories, but must not include references to parent
+directories with '..', implementations should guard against
+maliciously constructed filenames.

+
+
+

The workflow_url is either an absolute URL to a workflow
+file that is accessible by the WES endpoint, or a relative URL
+corresponding to one of the files attached using
+workflow_attachment.

+
+
+

The workflow_params JSON object specifies input parameters,
+such as input files. The exact format of the JSON object
+depends on the conventions of the workflow language being
+used. Input files should either be absolute URLs, or relative
+URLs corresponding to files uploaded using
+workflow_attachment. The WES endpoint must understand and
+be able to access URLs supplied in the input. This is
+implementation specific.

+
+
+

See documentation for WorkflowRequest for detail about other
+fields.

+
+
+
+

5.1.2. Parameters

+ +++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
TypeNameSchema

FormData

tags
+optional

string (application/json)

FormData

workflow_attachment
+optional

< string (binary) > array

FormData

workflow_engine_parameters
+optional

string (application/json)

FormData

workflow_params
+optional

string (application/json)

FormData

workflow_type
+optional

string

FormData

workflow_type_version
+optional

string

FormData

workflow_url
+optional

string

+
+
+

5.1.3. Responses

+ +++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
HTTP CodeDescriptionSchema

200

RunId

400

The request is malformed.

ErrorResponse

401

The request is unauthorized.

ErrorResponse

403

The requester is not authorized to perform this action.

ErrorResponse

500

An unexpected error occurred.

ErrorResponse

+
+
+

5.1.4. Consumes

+
+
    +
  • +

    multipart/form-data

    +
  • +
+
+
+
+

5.1.5. Tags

+
+
    +
  • +

    WorkflowExecutionService

    +
  • +
+
+
+
+
+

5.2. List the workflow runs.

+
+
+
GET /runs
+
+
+
+

5.2.1. Description

+
+

This should be provided in a stable
+ordering, however the ordering of this list is implementation
+dependent. When paging through the list, the client should
+not make assumptions about live updates, but should assume the
+contents of the list reflect the workflow list at the moment
+that the first page is requested. To monitor a specific
+workflow run, use GetRunStatus or GetRunLog.

+
+
+
+

5.2.2. Parameters

+ ++++++ + + + + + + + + + + + + + + + + + + + + + + +
TypeNameDescriptionSchema

Query

page_size
+optional

OPTIONAL
+The preferred number of workflow runs to return in a page.
+If not provided, the implementation should use a default page size.
+The implementation must not return more items
+than "page_size", but it may return fewer. Clients should
+not assume that if fewer than "page_size" items is
+returned that all items have been returned. The
+availability of additional pages is indicated by the value
+of "next_page_token" in the response.

integer (int64)

Query

page_token
+optional

OPTIONAL
+Token to use to indicate where to start getting results. If unspecified, return the first
+page of results.

string

+
+
+

5.2.3. Responses

+ +++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
HTTP CodeDescriptionSchema

200

RunListResponse

400

The request is malformed.

ErrorResponse

401

The request is unauthorized.

ErrorResponse

403

The requester is not authorized to perform this action.

ErrorResponse

500

An unexpected error occurred.

ErrorResponse

+
+
+

5.2.4. Tags

+
+
    +
  • +

    WorkflowExecutionService

    +
  • +
+
+
+
+
+

5.3. Get detailed info about a workflow run.

+
+
+
GET /runs/{run_id}
+
+
+
+

5.3.1. Parameters

+ +++++ + + + + + + + + + + + + + + +
TypeNameSchema

Path

run_id
+required

string

+
+
+

5.3.2. Responses

+ +++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
HTTP CodeDescriptionSchema

200

RunLog

401

The request is unauthorized.

ErrorResponse

403

The requester is not authorized to perform this action.

ErrorResponse

404

The requested workflow run not found.

ErrorResponse

500

An unexpected error occurred.

ErrorResponse

+
+
+

5.3.3. Tags

+
+
    +
  • +

    WorkflowExecutionService

    +
  • +
+
+
+
+
+

5.4. Cancel a running workflow.

+
+
+
DELETE /runs/{run_id}
+
+
+
+

5.4.1. Parameters

+ +++++ + + + + + + + + + + + + + + +
TypeNameSchema

Path

run_id
+required

string

+
+
+

5.4.2. Responses

+ +++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
HTTP CodeDescriptionSchema

200

RunId

401

The request is unauthorized.

ErrorResponse

403

The requester is not authorized to perform this action.

ErrorResponse

404

The requested workflow run wasn’t found.

ErrorResponse

500

An unexpected error occurred.

ErrorResponse

+
+
+

5.4.3. Tags

+
+
    +
  • +

    WorkflowExecutionService

    +
  • +
+
+
+
+
+

5.5. Get quick status info about a workflow run.

+
+
+
GET /runs/{run_id}/status
+
+
+
+

5.5.1. Parameters

+ +++++ + + + + + + + + + + + + + + +
TypeNameSchema

Path

run_id
+required

string

+
+
+

5.5.2. Responses

+ +++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
HTTP CodeDescriptionSchema

200

RunStatus

401

The request is unauthorized.

ErrorResponse

403

The requester is not authorized to perform this action.

ErrorResponse

404

The requested workflow run wasn’t found.

ErrorResponse

500

An unexpected error occurred.

ErrorResponse

+
+
+

5.5.3. Tags

+
+
    +
  • +

    WorkflowExecutionService

    +
  • +
+
+
+
+
+

5.6. Get information about Workflow Execution Service.

+
+
+
GET /service-info
+
+
+
+

5.6.1. Description

+
+

May include information related (but not limited to) the workflow descriptor formats, versions supported, the WES API versions supported, and information about general the service availability.
+x-swagger-router-controller: ga4gh.wes.server

+
+
+
+

5.6.2. Responses

+ +++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
HTTP CodeDescriptionSchema

200

ServiceInfo

400

The request is malformed.

ErrorResponse

401

The request is unauthorized.

ErrorResponse

403

The requester is not authorized to perform this action.

ErrorResponse

500

An unexpected error occurred.

ErrorResponse

+
+
+

5.6.3. Tags

+
+
    +
  • +

    WorkflowExecutionService

    +
  • +
+
+
+
+
+
+
+

6. Definitions

+
+
+

6.1. DefaultWorkflowEngineParameter

+
+

A message that allows one to describe default parameters for a workflow
+engine.

+
+ +++++ + + + + + + + + + + + + + + + + + + + +
NameDescriptionSchema

default_value
+optional

The stringified version of the default parameter. e.g. "2.45".

string

type
+optional

Describes the type of the parameter, e.g. float.

string

+
+
+

6.2. ErrorResponse

+
+

An object that can optionally include information about the error.

+
+ +++++ + + + + + + + + + + + + + + + + + + + +
NameDescriptionSchema

msg
+optional

A detailed error message.

string

status_code
+optional

The integer representing the HTTP status code (e.g. 200, 404).

integer

+
+
+

6.3. Log

+ ++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameSchema

cmd
+optional

< string > array

end_time
+optional

string

exit_code
+optional

integer (int32)

name
+optional

string

start_time
+optional

string

stderr
+optional

string

stdout
+optional

string

+
+
+

6.4. RunId

+ ++++ + + + + + + + + + + + + +
NameSchema

run_id
+optional

string

+
+
+

6.5. RunListResponse

+
+

The service will return a RunListResponse when receiving a successful RunListRequest.

+
+ +++++ + + + + + + + + + + + + + + + + + + + +
NameDescriptionSchema

next_page_token
+optional

A token which may be supplied as "page_token" in workflow run list request to get the next page
+of results. An empty string indicates there are no more items to return.

string

runs
+optional

A list of workflow runs that the service has executed or is executing.

< RunStatus > array

+
+
+

6.6. RunLog

+ +++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameDescriptionSchema

outputs
+optional

WesObject

request
+optional

The original request message used to initiate this execution.

RunRequest

run_id
+optional

string

run_log
+optional

Log

state
+optional

State

task_logs
+optional

< Log > array

+
+
+

6.7. RunRequest

+
+

To execute a workflow, send a run request including all the details needed to begin downloading
+and executing a given workflow.

+
+ +++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
NameDescriptionSchema

tags
+optional

< string, string > map

workflow_engine_parameters
+optional

OPTIONAL
+Additional parameters can be sent to the workflow engine using this field. Default values
+for these parameters are provided at the ServiceInfo endpoint.

< string, string > map

workflow_params
+optional

REQUIRED
+The workflow run parameterization document (typically a JSON file), includes all parameterizations for the run
+including input and output file locations.

WesObject

workflow_type
+optional

string

workflow_type_version
+optional

string

workflow_url
+optional

REQUIRED
+The workflow CWL or WDL document.
+When workflow attachments files are provided, the workflow_url may be a relative path
+corresponding to one of the attachments.

string

+
+
+

6.8. RunStatus

+
+

Small description of a workflow run, returned by server during listing

+
+ ++++ + + + + + + + + + + + + + + + + +
NameSchema

run_id
+required

string

state
+optional

State

+
+
+

6.9. ServiceInfo

A message containing useful information about the running service, including supported versions
and default settings.

auth_instructions_url (optional, string)
    A web page URL with information about how to get an authorization token necessary to use a
    specific endpoint.

contact_info (optional, string)
    An email address or web page URL with contact information for the operator of a specific WES
    endpoint. Users of the endpoint should use this to report problems or security
    vulnerabilities.

default_workflow_engine_parameters (optional, < DefaultWorkflowEngineParameter > array)
    Each workflow engine can present additional parameters that can be sent to the workflow
    engine. This message lists the default values, and their types, for each workflow engine.

supported_filesystem_protocols (optional, < string > array)
    The filesystem protocols supported by this service; currently these may include common
    protocols such as 'http', 'https', 'sftp', 's3', 'gs', 'file', 'synapse', or others as
    supported by this service.

supported_wes_versions (optional, < string > array)

system_state_counts (optional, < string, integer (int64) > map)
    The system statistics: the key is the statistic and the value is the count of runs in that
    state. See the State enum for the possible keys.

tags (optional, < string, string > map)

workflow_engine_versions (optional, < string, string > map)

workflow_type_versions (optional, < string, WorkflowTypeVersion > map)
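A sketch of what a ServiceInfo response could contain, with purely illustrative values:

    service_info_example = {
        "supported_wes_versions": ["0.3.0"],
        "supported_filesystem_protocols": ["http", "https", "s3"],
        "workflow_type_versions": {
            "WDL": {"workflow_type_version": ["1.0"]},
            "CWL": {"workflow_type_version": ["v1.0"]},
        },
        "workflow_engine_versions": {"cromwell": "36"},
        "default_workflow_engine_parameters": [],   # < DefaultWorkflowEngineParameter > array
        "system_state_counts": {"RUNNING": 4, "COMPLETE": 112},
        "auth_instructions_url": "https://example.org/auth",
        "contact_info": "mailto:support@example.org",
        "tags": {},
    }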

6.10. State

- UNKNOWN: The state of the task is unknown. This provides a safe default for messages where
  this field is missing, for example, so that a missing field does not accidentally imply that
  the state is QUEUED.
- QUEUED: The task is queued.
- INITIALIZING: The task has been assigned to a worker and is currently preparing to run.
  For example, the worker may be turning on, downloading input files, etc.
- RUNNING: The task is running. Input files are downloaded and the first Executor has been
  started.
- PAUSED: The task is paused. An implementation may have the ability to pause a task, but this
  is not required.
- COMPLETE: The task has completed running. Executors have exited without error and output files
  have been successfully uploaded.
- EXECUTOR_ERROR: The task encountered an error in one of the Executor processes. Generally,
  this means that an Executor exited with a non-zero exit code.
- SYSTEM_ERROR: The task was stopped due to a system error, but not from an Executor; for
  example, an upload failed due to network issues, the worker ran out of disk space, etc.
- CANCELED: The task was canceled by the user.

Type : enum (UNKNOWN, QUEUED, INITIALIZING, RUNNING, PAUSED, COMPLETE, EXECUTOR_ERROR, SYSTEM_ERROR, CANCELED)
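If it helps, here is one way a client might mirror this enum in Python; the terminal-state
grouping is an interpretation of the descriptions above rather than part of the schema:

    from enum import Enum

    class State(Enum):
        UNKNOWN = "UNKNOWN"
        QUEUED = "QUEUED"
        INITIALIZING = "INITIALIZING"
        RUNNING = "RUNNING"
        PAUSED = "PAUSED"
        COMPLETE = "COMPLETE"
        EXECUTOR_ERROR = "EXECUTOR_ERROR"
        SYSTEM_ERROR = "SYSTEM_ERROR"
        CANCELED = "CANCELED"

    # States after which no further transitions are expected, per the descriptions above.
    TERMINAL_STATES = {State.COMPLETE, State.EXECUTOR_ERROR, State.SYSTEM_ERROR, State.CANCELED}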

6.11. WesObject

An arbitrary structured object.

Type : object

6.12. WorkflowTypeVersion

Available workflow types supported by a given instance of the service.

workflow_type_version (optional, < string > array)
    An array of one or more acceptable types for the Workflow Type. For example, to send a
    base64-encoded WDL gzip, one could offer "base64_wdl1.0_gzip" here and set the workflow_url
    of the RunRequest to the path of the main WDL to be executed, e.g. "main.wdl".
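As a concrete, illustrative reading of that example, a service advertising base64-gzipped WDL
support might return the following inside ServiceInfo.workflow_type_versions (the version
strings are assumptions, not mandated values):

    workflow_type_versions_example = {
        "WDL": {"workflow_type_version": ["1.0", "base64_wdl1.0_gzip"]},
    }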
+ + + \ No newline at end of file diff --git a/docs/pdf/index.pdf b/docs/pdf/index.pdf new file mode 100644 index 000000000..b8c9b55eb Binary files /dev/null and b/docs/pdf/index.pdf differ diff --git a/docs/source/quickstart.rst b/docs/source/quickstart.rst index a602d1fbb..b1e2c29de 100644 --- a/docs/source/quickstart.rst +++ b/docs/source/quickstart.rst @@ -14,7 +14,7 @@ work in a virtualenv:: Then, install from PyPI:: - $ pip install ga4gh-dos-schemas + $ pip install ga4gh-drs-schemas Or, to install from source:: @@ -27,11 +27,11 @@ Running the client and server There's a handy command line hook for the server:: - $ ga4gh_dos_server + $ ga4gh_drs_server and for the client:: - $ ga4gh_dos_demo + $ ga4gh_drs_demo (The client doesn't do anything yet but will soon.) diff --git a/gradle/wrapper/gradle-wrapper.jar b/gradle/wrapper/gradle-wrapper.jar new file mode 100644 index 000000000..1eb19f159 Binary files /dev/null and b/gradle/wrapper/gradle-wrapper.jar differ diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties new file mode 100644 index 000000000..fc700a5dc --- /dev/null +++ b/gradle/wrapper/gradle-wrapper.properties @@ -0,0 +1,6 @@ +#Thu Jul 20 10:52:24 CEST 2017 +distributionBase=GRADLE_USER_HOME +distributionPath=wrapper/dists +zipStoreBase=GRADLE_USER_HOME +zipStorePath=wrapper/dists +distributionUrl=https\://services.gradle.org/distributions/gradle-3.5-bin.zip diff --git a/gradlew b/gradlew new file mode 100755 index 000000000..4453ccea3 --- /dev/null +++ b/gradlew @@ -0,0 +1,172 @@ +#!/usr/bin/env sh + +############################################################################## +## +## Gradle start up script for UN*X +## +############################################################################## + +# Attempt to set APP_HOME +# Resolve links: $0 may be a link +PRG="$0" +# Need this for relative symlinks. +while [ -h "$PRG" ] ; do + ls=`ls -ld "$PRG"` + link=`expr "$ls" : '.*-> \(.*\)$'` + if expr "$link" : '/.*' > /dev/null; then + PRG="$link" + else + PRG=`dirname "$PRG"`"/$link" + fi +done +SAVED="`pwd`" +cd "`dirname \"$PRG\"`/" >/dev/null +APP_HOME="`pwd -P`" +cd "$SAVED" >/dev/null + +APP_NAME="Gradle" +APP_BASE_NAME=`basename "$0"` + +# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. +DEFAULT_JVM_OPTS="" + +# Use the maximum available, or set MAX_FD != -1 to use that value. +MAX_FD="maximum" + +warn ( ) { + echo "$*" +} + +die ( ) { + echo + echo "$*" + echo + exit 1 +} + +# OS specific support (must be 'true' or 'false'). +cygwin=false +msys=false +darwin=false +nonstop=false +case "`uname`" in + CYGWIN* ) + cygwin=true + ;; + Darwin* ) + darwin=true + ;; + MINGW* ) + msys=true + ;; + NONSTOP* ) + nonstop=true + ;; +esac + +CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar + +# Determine the Java command to use to start the JVM. +if [ -n "$JAVA_HOME" ] ; then + if [ -x "$JAVA_HOME/jre/sh/java" ] ; then + # IBM's JDK on AIX uses strange locations for the executables + JAVACMD="$JAVA_HOME/jre/sh/java" + else + JAVACMD="$JAVA_HOME/bin/java" + fi + if [ ! -x "$JAVACMD" ] ; then + die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME + +Please set the JAVA_HOME variable in your environment to match the +location of your Java installation." + fi +else + JAVACMD="java" + which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 
+ +Please set the JAVA_HOME variable in your environment to match the +location of your Java installation." +fi + +# Increase the maximum file descriptors if we can. +if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then + MAX_FD_LIMIT=`ulimit -H -n` + if [ $? -eq 0 ] ; then + if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then + MAX_FD="$MAX_FD_LIMIT" + fi + ulimit -n $MAX_FD + if [ $? -ne 0 ] ; then + warn "Could not set maximum file descriptor limit: $MAX_FD" + fi + else + warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT" + fi +fi + +# For Darwin, add options to specify how the application appears in the dock +if $darwin; then + GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\"" +fi + +# For Cygwin, switch paths to Windows format before running java +if $cygwin ; then + APP_HOME=`cygpath --path --mixed "$APP_HOME"` + CLASSPATH=`cygpath --path --mixed "$CLASSPATH"` + JAVACMD=`cygpath --unix "$JAVACMD"` + + # We build the pattern for arguments to be converted via cygpath + ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null` + SEP="" + for dir in $ROOTDIRSRAW ; do + ROOTDIRS="$ROOTDIRS$SEP$dir" + SEP="|" + done + OURCYGPATTERN="(^($ROOTDIRS))" + # Add a user-defined pattern to the cygpath arguments + if [ "$GRADLE_CYGPATTERN" != "" ] ; then + OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)" + fi + # Now convert the arguments - kludge to limit ourselves to /bin/sh + i=0 + for arg in "$@" ; do + CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -` + CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option + + if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition + eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"` + else + eval `echo args$i`="\"$arg\"" + fi + i=$((i+1)) + done + case $i in + (0) set -- ;; + (1) set -- "$args0" ;; + (2) set -- "$args0" "$args1" ;; + (3) set -- "$args0" "$args1" "$args2" ;; + (4) set -- "$args0" "$args1" "$args2" "$args3" ;; + (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;; + (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;; + (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;; + (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;; + (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;; + esac +fi + +# Escape application args +save ( ) { + for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done + echo " " +} +APP_ARGS=$(save "$@") + +# Collect all arguments for the java command, following the shell quoting and substitution rules +eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS" + +# by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong +if [ "$(uname)" = "Darwin" ] && [ "$HOME" = "$PWD" ]; then + cd "$(dirname "$0")" +fi + +exec "$JAVACMD" "$@" diff --git a/gulpfile.js b/gulpfile.js new file mode 100644 index 000000000..5ddb1d034 --- /dev/null +++ b/gulpfile.js @@ -0,0 +1,50 @@ +var gulp = require('gulp'); +var util = require('gulp-util') +var gulpConnect = require('gulp-connect'); +var connect = require('connect'); +var cors = require('cors'); +var path = require('path'); +var exec = require('child_process').exec; +var portfinder = require('portfinder'); +var swaggerRepo = require('swagger-repo'); + +var 
DIST_DIR = 'web_deploy'; + +gulp.task('serve', ['build', 'watch', 'edit'], function() { + portfinder.getPort({port: 3000}, function (err, port) { + gulpConnect.server({ + root: [DIST_DIR], + livereload: true, + port: port, + middleware: function (gulpConnect, opt) { + return [ + cors() + ] + } + }); + }); +}); + +gulp.task('edit', function() { + portfinder.getPort({port: 5000}, function (err, port) { + var app = connect(); + app.use(swaggerRepo.swaggerEditorMiddleware()); + app.listen(port); + util.log(util.colors.green('swagger-editor started http://localhost:' + port)); + }); +}); + +gulp.task('build', function (cb) { + exec('npm run build', function (err, stdout, stderr) { + console.log(stderr); + cb(err); + }); +}); + +gulp.task('reload', ['build'], function () { + gulp.src(DIST_DIR).pipe(gulpConnect.reload()) +}); + +gulp.task('watch', function () { + gulp.watch(['spec/**/*', 'web/**/*'], ['reload']); +}); diff --git a/openapi/data_object_service.smartapi.yaml.part b/openapi/data_repository_service.smartapi.yaml.part similarity index 63% rename from openapi/data_object_service.smartapi.yaml.part rename to openapi/data_repository_service.smartapi.yaml.part index 926c6a829..04d8306a1 100644 --- a/openapi/data_object_service.smartapi.yaml.part +++ b/openapi/data_repository_service.smartapi.yaml.part @@ -1,13 +1,13 @@ # To generate the SmartAPI file, we use swagger2openapi to generate -# data_object_service.openapi.yaml, then merge that file with this file -# to create data_object_service.smartapi.yaml. -# (data_object_service.smartapi.yaml should not be edited directly.) +# data_repository_service.openapi.yaml, then merge that file with this file +# to create data_repository_service.smartapi.yaml. +# (data_repository_service.smartapi.yaml should not be edited directly.) servers: - url: https://{host}:{port}/{basePath} description: The production API server variables: host: - default: dos.example.org + default: drs.example.org description: The host the service is serving from. port: enum: @@ -15,9 +15,9 @@ servers: default: '443' basePath: # The default base includes a version in the path. 
- default: ga4gh/dos/v1 + default: ga4gh/drs/v1 info: x-implementationLanguage: en termsOfService: https://www.ga4gh.org/policies/termsandconditions.html tags: - - name: NIHdatacommons \ No newline at end of file + - name: NIHdatacommons diff --git a/openapi/data_object_service.swagger.yaml b/openapi/data_repository_service.swagger.yaml similarity index 82% rename from openapi/data_object_service.swagger.yaml rename to openapi/data_repository_service.swagger.yaml index 2b9fd3342..7e90821ab 100644 --- a/openapi/data_object_service.swagger.yaml +++ b/openapi/data_repository_service.swagger.yaml @@ -1,16 +1,15 @@ swagger: '2.0' -basePath: '/ga4gh/dos/v1' +basePath: '/ga4gh/drs/v1' info: - title: Data Object Service - version: 0.5.0 - description: https://github.com/ga4gh/data-object-service-schemas + title: Data Repository Service + version: 0.0.1 + description: https://github.com/ga4gh/data-repository-service-schemas contact: - name: David Steinberg - email: davidcs@ucsc.edu - x-role: responsible developer + name: GA4GH Cloud Work Stream + email: ga4gh-cloud@ga4gh.org license: name: Apache 2.0 - url: https://raw.githubusercontent.com/ga4gh/data-object-service-schemas/master/LICENSE + url: https://raw.githubusercontent.com/ga4gh/data-repository-service-schemas/master/LICENSE schemes: - https - http @@ -29,17 +28,17 @@ paths: schema: $ref: '#/definitions/ServiceInfoResponse' tags: - - DataObjectService - x-swagger-router-controller: ga4gh.dos.server - /databundles: + - DataRepositoryService + x-swagger-router-controller: ga4gh.drs.server + /bundles: post: summary: Create a new Data Bundle - operationId: CreateDataBundle + operationId: CreateBundle responses: '200': description: The Data Bundle was successfully created. schema: - $ref: '#/definitions/CreateDataBundleResponse' + $ref: '#/definitions/CreateBundleResponse' '400': description: The request is malformed. schema: @@ -61,18 +60,18 @@ paths: in: body required: true schema: - $ref: '#/definitions/CreateDataBundleRequest' + $ref: '#/definitions/CreateBundleRequest' tags: - - DataObjectService - x-swagger-router-controller: ga4gh.dos.server + - DataRepositoryService + x-swagger-router-controller: ga4gh.drs.server get: summary: List the Data Bundles - operationId: ListDataBundles + operationId: ListBundles responses: '200': description: 'Successfully listed Data Bundles.' schema: - $ref: '#/definitions/ListDataBundlesResponse' + $ref: '#/definitions/ListBundlesResponse' '400': description: The request is malformed. schema: @@ -133,17 +132,17 @@ paths: To get the next page of results, set this parameter to the value of `next_page_token` from the previous response. tags: - - DataObjectService - x-swagger-router-controller: ga4gh.dos.server - '/databundles/{data_bundle_id}': + - DataRepositoryService + x-swagger-router-controller: ga4gh.drs.server + '/bundles/{bundle_id}': get: summary: Retrieve a Data Bundle - operationId: GetDataBundle + operationId: GetBundle responses: '200': description: Successfully found the Data Bundle. schema: - $ref: '#/definitions/GetDataBundleResponse' + $ref: '#/definitions/GetBundleResponse' '400': description: The request is malformed. 
schema: @@ -165,7 +164,7 @@ paths: schema: $ref: '#/definitions/ErrorResponse' parameters: - - name: data_bundle_id + - name: bundle_id in: path required: true type: string @@ -177,32 +176,32 @@ paths: in: query type: string tags: - - DataObjectService - x-swagger-router-controller: ga4gh.dos.server + - DataRepositoryService + x-swagger-router-controller: ga4gh.drs.server delete: summary: Delete a Data Bundle - operationId: DeleteDataBundle + operationId: DeleteBundle responses: '200': description: '' schema: - $ref: '#/definitions/DeleteDataBundleResponse' + $ref: '#/definitions/DeleteBundleResponse' parameters: - - name: data_bundle_id + - name: bundle_id in: path required: true type: string tags: - - DataObjectService - x-swagger-router-controller: ga4gh.dos.server + - DataRepositoryService + x-swagger-router-controller: ga4gh.drs.server put: summary: Update a Data Bundle - operationId: UpdateDataBundle + operationId: UpdateBundle responses: '200': description: The Data Bundle was updated successfully. schema: - $ref: '#/definitions/UpdateDataBundleResponse' + $ref: '#/definitions/UpdateBundleResponse' '400': description: The request is malformed. schema: @@ -224,7 +223,7 @@ paths: schema: $ref: '#/definitions/ErrorResponse' parameters: - - name: data_bundle_id + - name: bundle_id in: path required: true type: string @@ -232,21 +231,21 @@ paths: - name: body in: body required: true - description: The new content for the Data Bundle identified by the given data_bundle_id. If the ID specified in the request body is different than that specified in the path, the Data Bundle's ID will be replaced with the one in the request body. + description: The new content for the Data Bundle identified by the given bundle_id. If the ID specified in the request body is different than that specified in the path, the Data Bundle's ID will be replaced with the one in the request body. schema: - $ref: '#/definitions/UpdateDataBundleRequest' + $ref: '#/definitions/UpdateBundleRequest' tags: - - DataObjectService - x-swagger-router-controller: ga4gh.dos.server - '/databundles/{data_bundle_id}/versions': + - DataRepositoryService + x-swagger-router-controller: ga4gh.drs.server + '/bundles/{bundle_id}/versions': get: - operationId: GetDataBundleVersions + operationId: GetBundleVersions summary: Retrieve all versions of a Data Bundle responses: '200': description: The versions for the Data Bundle were found successfully. schema: - $ref: '#/definitions/GetDataBundleVersionsResponse' + $ref: '#/definitions/GetBundleVersionsResponse' '400': description: The request is malformed. schema: @@ -268,22 +267,22 @@ paths: schema: $ref: '#/definitions/ErrorResponse' parameters: - - name: data_bundle_id + - name: bundle_id in: path required: true type: string tags: - - DataObjectService - x-swagger-router-controller: ga4gh.dos.server - /dataobjects: + - DataRepositoryService + x-swagger-router-controller: ga4gh.drs.server + /objects: post: summary: Make a new Data Object - operationId: CreateDataObject + operationId: CreateObject responses: '200': description: Successfully created the Data Object. schema: - $ref: '#/definitions/CreateDataObjectResponse' + $ref: '#/definitions/CreateObjectResponse' '400': description: The request is malformed. schema: @@ -308,18 +307,18 @@ paths: The Data Object to be created. The ID scheme is left up to the implementor but should be unique to the server instance. 
schema: - $ref: '#/definitions/CreateDataObjectRequest' + $ref: '#/definitions/CreateObjectRequest' tags: - - DataObjectService - x-swagger-router-controller: ga4gh.dos.server + - DataRepositoryService + x-swagger-router-controller: ga4gh.drs.server get: summary: List the Data Objects - operationId: ListDataObjects + operationId: ListObjects responses: '200': description: The Data Objects were listed successfully. schema: - $ref: '#/definitions/ListDataObjectsResponse' + $ref: '#/definitions/ListObjectsResponse' '400': description: The request is malformed. schema: @@ -386,17 +385,17 @@ paths: To get the next page of results, set this parameter to the value of `next_page_token` from the previous response. tags: - - DataObjectService - x-swagger-router-controller: ga4gh.dos.server - '/dataobjects/{data_object_id}': + - DataRepositoryService + x-swagger-router-controller: ga4gh.drs.server + '/objects/{object_id}': get: summary: Retrieve a Data Object - operationId: GetDataObject + operationId: GetObject responses: '200': description: The Data Object was found successfully. schema: - $ref: '#/definitions/GetDataObjectResponse' + $ref: '#/definitions/GetObjectResponse' '400': description: The request is malformed. schema: @@ -418,7 +417,7 @@ paths: schema: $ref: '#/definitions/ErrorResponse' parameters: - - name: data_object_id + - name: object_id in: path required: true type: string @@ -429,16 +428,16 @@ paths: in: query type: string tags: - - DataObjectService - x-swagger-router-controller: ga4gh.dos.server + - DataRepositoryService + x-swagger-router-controller: ga4gh.drs.server delete: summary: Delete a Data Object index entry - operationId: DeleteDataObject + operationId: DeleteObject responses: '200': description: 'The Data Object was deleted successfully.' schema: - $ref: '#/definitions/DeleteDataObjectResponse' + $ref: '#/definitions/DeleteObjectResponse' '400': description: The request is malformed. schema: @@ -460,21 +459,21 @@ paths: schema: $ref: '#/definitions/ErrorResponse' parameters: - - name: data_object_id + - name: object_id in: path required: true type: string tags: - - DataObjectService - x-swagger-router-controller: ga4gh.dos.server + - DataRepositoryService + x-swagger-router-controller: ga4gh.drs.server put: summary: Update a Data Object - operationId: UpdateDataObject + operationId: UpdateObject responses: '200': description: The Data Object was successfully updated. schema: - $ref: '#/definitions/UpdateDataObjectResponse' + $ref: '#/definitions/UpdateObjectResponse' '400': description: The request is malformed. schema: @@ -496,7 +495,7 @@ paths: schema: $ref: '#/definitions/ErrorResponse' parameters: - - name: data_object_id + - name: object_id in: path required: true type: string @@ -504,21 +503,21 @@ paths: - name: body in: body required: true - description: The new Data Object for the given data_object_id. If the ID specified in the request body is different than that specified in the path, the Data Object's ID will be replaced with the one in the request body. + description: The new Data Object for the given object_id. If the ID specified in the request body is different than that specified in the path, the Data Object's ID will be replaced with the one in the request body. 
schema: - $ref: '#/definitions/UpdateDataObjectRequest' + $ref: '#/definitions/UpdateObjectRequest' tags: - - DataObjectService - x-swagger-router-controller: ga4gh.dos.server - '/dataobjects/{data_object_id}/versions': + - DataRepositoryService + x-swagger-router-controller: ga4gh.drs.server + '/objects/{object_id}/versions': get: summary: Retrieve all versions of a Data Object - operationId: GetDataObjectVersions + operationId: GetObjectVersions responses: '200': description: The versions for the Data Object were returned successfully. schema: - $ref: '#/definitions/GetDataObjectVersionsResponse' + $ref: '#/definitions/GetObjectVersionsResponse' '400': description: The request is malformed. schema: @@ -540,13 +539,13 @@ paths: schema: $ref: '#/definitions/ErrorResponse' parameters: - - name: data_object_id + - name: object_id in: path required: true type: string tags: - - DataObjectService - x-swagger-router-controller: ga4gh.dos.server + - DataRepositoryService + x-swagger-router-controller: ga4gh.drs.server definitions: SystemMetadata: type: object @@ -570,13 +569,13 @@ definitions: The auth standard being used to make data available. For example, 'OAuth2.0'. auth_url: type: string - description: |- + description: |- The URL where the auth service is located, for example, a URL to get an OAuth token. additionalProperties: true description: |- OPTIONAL - A set of key-value pairs that represent sufficient metadata to be granted + A set of key-value pairs that represent sufficient metadata to be granted access to a resource. It may be helpful to provide details about a specific provider, for example. Checksum: @@ -598,45 +597,45 @@ definitions: multipart-md5 # multipart uploads provide a specialized tag in S3 sha256 sha512 - CreateDataBundleRequest: + CreateBundleRequest: type: object properties: - data_bundle: - $ref: '#/definitions/DataBundle' - CreateDataBundleResponse: + bundle: + $ref: '#/definitions/Bundle' + CreateBundleResponse: type: object - required: ['data_bundle_id'] + required: ['bundle_id'] properties: - data_bundle_id: + bundle_id: type: string description: |- The identifier of the Data Bundle created. - CreateDataObjectRequest: + CreateObjectRequest: type: object - required: ['data_object'] + required: ['object'] properties: - data_object: - $ref: '#/definitions/DataObject' + object: + $ref: '#/definitions/Object' description: |- The Data Object one would like to index. One must provide any aliases - and URLs to this file when sending the CreateDataObjectRequest. It is up + and URLs to this file when sending the CreateObjectRequest. It is up to implementations to validate that the Data Object is available from the provided URLs. - CreateDataObjectResponse: + CreateObjectResponse: type: object properties: - data_object_id: + object_id: type: string description: The ID of the created Data Object. 
- DataBundle: + Bundle: type: object - required: ['id', 'data_object_ids', 'created', 'updated', 'version', 'checksums'] + required: ['id', 'object_ids', 'created', 'updated', 'version', 'checksums'] properties: id: type: string description: |- An identifier, unique to this Data Bundle - data_object_ids: + object_ids: type: array items: type: string @@ -681,7 +680,7 @@ definitions: $ref: '#/definitions/SystemMetadata' user_metadata: $ref: '#/definitions/UserMetadata' - DataObject: + Object: type: object required: ['id', 'size', 'created', 'checksums'] properties: @@ -743,53 +742,53 @@ definitions: These aliases can be used to represent the Data Object's location in a directory (e.g. "bucket/folder/file.name") to make Data Objects more discoverable. They might also be used to represent - DeleteDataBundleResponse: + DeleteBundleResponse: type: object properties: - data_bundle_id: + bundle_id: type: string - DeleteDataObjectResponse: + DeleteObjectResponse: type: object - required: ['data_object_id'] + required: ['object_id'] properties: - data_object_id: + object_id: type: string description: |- The identifier of the Data Object deleted. - GetDataBundleResponse: + GetBundleResponse: type: object properties: - data_bundle: - $ref: '#/definitions/DataBundle' - GetDataBundleVersionsResponse: + bundle: + $ref: '#/definitions/Bundle' + GetBundleVersionsResponse: type: object - required: ['data_bundles'] + required: ['bundles'] properties: - data_bundles: + bundles: type: array items: - $ref: '#/definitions/DataBundle' + $ref: '#/definitions/Bundle' description: |- - All versions of the Data Bundles that match the GetDataBundleVersions + All versions of the Data Bundles that match the GetBundleVersions request. - GetDataObjectResponse: + GetObjectResponse: type: object - required: ['data_object'] + required: ['object'] properties: - data_object: - $ref: '#/definitions/DataObject' - GetDataObjectVersionsResponse: + object: + $ref: '#/definitions/Object' + GetObjectVersionsResponse: type: object - required: ['data_objects'] + required: ['objects'] properties: - data_objects: + objects: type: array items: - $ref: '#/definitions/DataObject' + $ref: '#/definitions/Object' description: |- - All versions of the Data Objects that match the GetDataObjectVersions + All versions of the Data Objects that match the GetObjectVersions request. - ListDataBundlesRequest: + ListBundlesRequest: description: |- Only return Data Bundles that match all of the request parameters. A page_size and page_token are provided for retrieving a large number of @@ -828,16 +827,16 @@ definitions: The continuation token, which is used to page through large result sets. To get the next page of results, set this parameter to the value of `next_page_token` from the previous response. - ListDataBundlesResponse: + ListBundlesResponse: type: object description: |- A list of Data Bundles matching the request parameters and a continuation token that can be used to retrieve more results. properties: - data_bundles: + bundles: type: array items: - $ref: '#/definitions/DataBundle' + $ref: '#/definitions/Bundle' description: The list of Data Bundles. next_page_token: type: string @@ -845,7 +844,7 @@ definitions: The continuation token, which is used to page through large result sets. Provide this value in a subsequent request to return the next page of results. This field will be empty if there aren't any additional results. 
- ListDataObjectsRequest: + ListObjectsRequest: type: object properties: alias: @@ -887,13 +886,13 @@ definitions: description: |- Allows a requester to list and filter Data Objects. Only Data Objects matching all of the requested parameters will be returned. - ListDataObjectsResponse: + ListObjectsResponse: type: object properties: - data_objects: + objects: type: array items: - $ref: '#/definitions/DataObject' + $ref: '#/definitions/Object' description: The list of Data Objects. next_page_token: type: string @@ -918,31 +917,31 @@ definitions: $ref: '#/definitions/UserMetadata' authorization_metadata: $ref: '#/definitions/AuthorizationMetadata' - UpdateDataBundleRequest: + UpdateBundleRequest: type: object - required: ['data_bundle'] + required: ['bundle'] properties: - data_bundle: - $ref: '#/definitions/DataBundle' - UpdateDataBundleResponse: + bundle: + $ref: '#/definitions/Bundle' + UpdateBundleResponse: type: object - required: ['data_bundle_id'] + required: ['bundle_id'] properties: - data_bundle_id: + bundle_id: type: string description: |- The identifier of the Data Bundle updated. - UpdateDataObjectRequest: + UpdateObjectRequest: type: object - required: ['data_object'] + required: ['object'] properties: - data_object: - $ref: '#/definitions/DataObject' - UpdateDataObjectResponse: + object: + $ref: '#/definitions/Object' + UpdateObjectResponse: type: object - required: ['data_object_id'] + required: ['object_id'] properties: - data_object_id: + object_id: type: string description: |- The identifier of the Data Object updated. diff --git a/package-lock.json b/package-lock.json new file mode 100644 index 000000000..2dc824893 --- /dev/null +++ b/package-lock.json @@ -0,0 +1,2489 @@ +{ + "name": "Data-Repository-Service-openapi-spec", + "version": "0.0.1", + "lockfileVersion": 1, + "requires": true, + "dependencies": { + "@types/babel-types": { + "version": "7.0.4", + "resolved": "https://registry.npmjs.org/@types/babel-types/-/babel-types-7.0.4.tgz", + "integrity": "sha512-WiZhq3SVJHFRgRYLXvpf65XnV6ipVHhnNaNvE8yCimejrGglkg38kEj0JcizqwSHxmPSjcTlig/6JouxLGEhGw==" + }, + "@types/babylon": { + "version": "6.16.3", + "resolved": "https://registry.npmjs.org/@types/babylon/-/babylon-6.16.3.tgz", + "integrity": "sha512-lyJ8sW1PbY3uwuvpOBZ9zMYKshMnQpXmeDHh8dj9j2nJm/xrW0FgB5gLSYOArj5X0IfaXnmhFoJnhS4KbqIMug==", + "requires": { + "@types/babel-types": "*" + } + }, + "acorn": { + "version": "3.3.0", + "resolved": "http://registry.npmjs.org/acorn/-/acorn-3.3.0.tgz", + "integrity": "sha1-ReN/s56No/JbruP/U2niu18iAXo=" + }, + "acorn-globals": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/acorn-globals/-/acorn-globals-3.1.0.tgz", + "integrity": "sha1-/YJw9x+7SZawBPqIDuXUZXOnMb8=", + "requires": { + "acorn": "^4.0.4" + }, + "dependencies": { + "acorn": { + "version": "4.0.13", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-4.0.13.tgz", + "integrity": "sha1-EFSVrlNh1pe9GVyCUZLhrX8lN4c=" + } + } + }, + "align-text": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/align-text/-/align-text-0.1.4.tgz", + "integrity": "sha1-DNkKVhCT810KmSVsIrcGlDP60Rc=", + "requires": { + "kind-of": "^3.0.2", + "longest": "^1.0.1", + "repeat-string": "^1.5.2" + }, + "dependencies": { + "kind-of": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", + "integrity": "sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=", + "requires": { + "is-buffer": "^1.1.5" + } + } + } + }, + "ansi-regex": { + "version": "2.1.1", + "resolved": 
"https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz", + "integrity": "sha1-w7M6te42DYbg5ijwRorn7yfWVN8=" + }, + "argparse": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", + "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", + "requires": { + "sprintf-js": "~1.0.2" + } + }, + "array-flatten": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz", + "integrity": "sha1-ml9pkFGx5wczKPKgCJaLZOopVdI=" + }, + "array-union": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/array-union/-/array-union-1.0.2.tgz", + "integrity": "sha1-mjRBDk9OPaI96jdb5b5w8kd47Dk=", + "requires": { + "array-uniq": "^1.0.1" + } + }, + "array-uniq": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/array-uniq/-/array-uniq-1.0.3.tgz", + "integrity": "sha1-r2rId6Jcx/dOBYiUdThY39sk/bY=" + }, + "asap": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/asap/-/asap-2.0.6.tgz", + "integrity": "sha1-5QNHYR1+aQlDIIu9r+vLwvuGbUY=" + }, + "babel-runtime": { + "version": "6.26.0", + "resolved": "https://registry.npmjs.org/babel-runtime/-/babel-runtime-6.26.0.tgz", + "integrity": "sha1-llxwWGaOgrVde/4E/yM3vItWR/4=", + "requires": { + "core-js": "^2.4.0", + "regenerator-runtime": "^0.11.0" + } + }, + "babel-types": { + "version": "6.26.0", + "resolved": "https://registry.npmjs.org/babel-types/-/babel-types-6.26.0.tgz", + "integrity": "sha1-o7Bz+Uq0nrb6Vc1lInozQ4BjJJc=", + "requires": { + "babel-runtime": "^6.26.0", + "esutils": "^2.0.2", + "lodash": "^4.17.4", + "to-fast-properties": "^1.0.3" + } + }, + "babylon": { + "version": "6.18.0", + "resolved": "https://registry.npmjs.org/babylon/-/babylon-6.18.0.tgz", + "integrity": "sha512-q/UEjfGJ2Cm3oKV71DJz9d25TPnq5rhBVL2Q4fA5wcC3jcrdn7+SssEybFIxwAvvP+YCsCYNKughoF33GxgycQ==" + }, + "balanced-match": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz", + "integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c=" + }, + "bower": { + "version": "1.8.4", + "resolved": "https://registry.npmjs.org/bower/-/bower-1.8.4.tgz", + "integrity": "sha1-54dqB23rgTf30GUl3F6MZtuC8oo=" + }, + "brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "requires": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "center-align": { + "version": "0.1.3", + "resolved": "https://registry.npmjs.org/center-align/-/center-align-0.1.3.tgz", + "integrity": "sha1-qg0yYptu6XIgBBHL1EYckHvCt60=", + "requires": { + "align-text": "^0.1.3", + "lazy-cache": "^1.0.3" + } + }, + "character-parser": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/character-parser/-/character-parser-2.2.0.tgz", + "integrity": "sha1-x84o821LzZdE5f/CxfzeHHMmH8A=", + "requires": { + "is-regex": "^1.0.3" + } + }, + "clean-css": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/clean-css/-/clean-css-4.2.1.tgz", + "integrity": "sha512-4ZxI6dy4lrY6FHzfiy1aEOXgu4LIsW2MhwG0VBKdcoGoH/XLFgaHSdLTGr4O8Be6A8r3MOphEiI8Gc1n0ecf3g==", + "requires": { + "source-map": "~0.6.0" + } + }, + "code-point-at": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/code-point-at/-/code-point-at-1.1.0.tgz", + "integrity": "sha1-DQcLTQQ6W+ozovGkDi7bPZpMz3c=" 
+ }, + "colors": { + "version": "0.5.1", + "resolved": "https://registry.npmjs.org/colors/-/colors-0.5.1.tgz", + "integrity": "sha1-fQAj6usVTo7p/Oddy5I9DtFmd3Q=" + }, + "commander": { + "version": "2.17.1", + "resolved": "https://registry.npmjs.org/commander/-/commander-2.17.1.tgz", + "integrity": "sha512-wPMUt6FnH2yzG95SA6mzjQOEKUU3aLaDEmzs1ti+1E9h+CsrZghRlqEM/EJ4KscsQVG8uNN4uVreUeT8+drlgg==" + }, + "component-emitter": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/component-emitter/-/component-emitter-1.2.1.tgz", + "integrity": "sha1-E3kY1teCg/ffemt8WmPhQOaUJeY=" + }, + "concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=" + }, + "connect": { + "version": "3.6.6", + "resolved": "https://registry.npmjs.org/connect/-/connect-3.6.6.tgz", + "integrity": "sha1-Ce/2xVr3I24TcTWnJXSFi2eG9SQ=", + "requires": { + "debug": "2.6.9", + "finalhandler": "1.1.0", + "parseurl": "~1.3.2", + "utils-merge": "1.0.1" + } + }, + "constantinople": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/constantinople/-/constantinople-3.1.2.tgz", + "integrity": "sha512-yePcBqEFhLOqSBtwYOGGS1exHo/s1xjekXiinh4itpNQGCu4KA1euPh1fg07N2wMITZXQkBz75Ntdt1ctGZouw==", + "requires": { + "@types/babel-types": "^7.0.0", + "@types/babylon": "^6.16.2", + "babel-types": "^6.26.0", + "babylon": "^6.18.0" + } + }, + "content-disposition": { + "version": "0.5.2", + "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.2.tgz", + "integrity": "sha1-DPaLud318r55YcOoUXjLhdunjLQ=" + }, + "content-type": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.4.tgz", + "integrity": "sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA==" + }, + "cookie-signature": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz", + "integrity": "sha1-4wOogrNCzD7oylE6eZmXNNqzriw=" + }, + "cookiejar": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/cookiejar/-/cookiejar-2.1.2.tgz", + "integrity": "sha512-Mw+adcfzPxcPeI+0WlvRrr/3lGVO0bD75SxX6811cxSh1Wbxx7xZBGK1eVtDf6si8rg2lhnUjsVLMFMfbRIuwA==" + }, + "core-js": { + "version": "2.5.7", + "resolved": "https://registry.npmjs.org/core-js/-/core-js-2.5.7.tgz", + "integrity": "sha512-RszJCAxg/PP6uzXVXL6BsxSXx/B05oJAQ2vkJRjyjrEcNVycaqOmNb5OTxZPE3xa5gwZduqza6L9JOCenh/Ecw==" + }, + "cors": { + "version": "2.8.4", + "resolved": "https://registry.npmjs.org/cors/-/cors-2.8.4.tgz", + "integrity": "sha1-K9OB8usgECAQXNUOpZ2mMJBpRoY=", + "requires": { + "object-assign": "^4", + "vary": "^1" + } + }, + "cross-spawn": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-5.1.0.tgz", + "integrity": "sha1-6L0O/uWPz/b4+UUQoKVUu/ojVEk=", + "requires": { + "lru-cache": "^4.0.1", + "shebang-command": "^1.2.0", + "which": "^1.2.9" + } + }, + "debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "requires": { + "ms": "2.0.0" + } + }, + "decamelize": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz", + "integrity": "sha1-9lNNFRSCabIDUue+4m9QH5oZEpA=" + }, + "deep-is": { + "version": "0.1.3", + "resolved": 
"https://registry.npmjs.org/deep-is/-/deep-is-0.1.3.tgz", + "integrity": "sha1-s2nW+128E+7PUk+RsHD+7cNXzzQ=" + }, + "destroy": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/destroy/-/destroy-1.0.4.tgz", + "integrity": "sha1-l4hXRCxEdJ5CBmE+N5RiBYJqvYA=" + }, + "doctypes": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/doctypes/-/doctypes-1.1.0.tgz", + "integrity": "sha1-6oCxBqh1OHdOijpKWv4pPeSJ4Kk=" + }, + "ebnf-parser": { + "version": "0.1.10", + "resolved": "https://registry.npmjs.org/ebnf-parser/-/ebnf-parser-0.1.10.tgz", + "integrity": "sha1-zR9rpHfFY4xAyX7ZtXLbW6tdgzE=" + }, + "ee-first": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", + "integrity": "sha1-WQxhFWsK4vTwJVcyoViyZrxWsh0=" + }, + "encodeurl": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz", + "integrity": "sha1-rT/0yG7C0CkyL1oCw6mmBslbP1k=" + }, + "entities": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/entities/-/entities-1.1.1.tgz", + "integrity": "sha1-blwtClYhtdra7O+AuQ7ftc13cvA=" + }, + "escape-html": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz", + "integrity": "sha1-Aljq5NPQwJdN4cFpGI7wBR0dGYg=" + }, + "estraverse": { + "version": "0.0.4", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-0.0.4.tgz", + "integrity": "sha1-AaCTLf7ldGhKWYr1pnw7+bZCjbI=" + }, + "esutils": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.2.tgz", + "integrity": "sha1-Cr9PHKpbyx96nYrMbepPqqBLrJs=" + }, + "execa": { + "version": "0.7.0", + "resolved": "https://registry.npmjs.org/execa/-/execa-0.7.0.tgz", + "integrity": "sha1-lEvs00zEHuMqY6n68nrVpl/Fl3c=", + "requires": { + "cross-spawn": "^5.0.1", + "get-stream": "^3.0.0", + "is-stream": "^1.1.0", + "npm-run-path": "^2.0.0", + "p-finally": "^1.0.0", + "signal-exit": "^3.0.0", + "strip-eof": "^1.0.0" + } + }, + "extend": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz", + "integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==" + }, + "fast-levenshtein": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", + "integrity": "sha1-PYpcZog6FqMMqGQ+hR8Zuqd5eRc=" + }, + "finalhandler": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.1.0.tgz", + "integrity": "sha1-zgtoVbRYU+eRsvzGgARtiCU91/U=", + "requires": { + "debug": "2.6.9", + "encodeurl": "~1.0.1", + "escape-html": "~1.0.3", + "on-finished": "~2.3.0", + "parseurl": "~1.3.2", + "statuses": "~1.3.1", + "unpipe": "~1.0.0" + } + }, + "foreach": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/foreach/-/foreach-2.0.5.tgz", + "integrity": "sha1-C+4AUBiusmDQo6865ljdATbsG5k=" + }, + "forwarded": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.1.2.tgz", + "integrity": "sha1-mMI9qxF1ZXuMBXPozszZGw/xjIQ=" + }, + "fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=" + }, + "function-bind": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", + "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==" + }, 
+ "get-caller-file": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-1.0.3.tgz", + "integrity": "sha512-3t6rVToeoZfYSGd8YoLFR2DJkiQrIiUrGcjvFX2mDw3bn6k2OtwHN0TNCLbBO+w8qTvimhDkv+LSscbJY1vE6w==" + }, + "get-stdin": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/get-stdin/-/get-stdin-5.0.1.tgz", + "integrity": "sha1-Ei4WFZHiH/TFJTAwVpPyDmOTo5g=" + }, + "get-stream": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-3.0.0.tgz", + "integrity": "sha1-jpQ9E1jcN1VQVOy+LtsFqhdO3hQ=" + }, + "github-markdown": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/github-markdown/-/github-markdown-3.2.0.tgz", + "integrity": "sha512-mtc8/f3DOCoxLi/lVgCpx5I71YxusLiFqgDJZWPcx+wd+fa3z6+hOTmuKhekawq+/4ftwp93EipxDIf5P7a7Vg==", + "requires": { + "get-stdin": "^5.0.1", + "globby": "^6.1.0", + "highlight.js": "^9.12.0", + "markdown-it": "^8.3.1", + "minimist": "^1.2.0", + "pify": "^3.0.0", + "primer-css": "^9.0.0", + "pug": "^2.0.0-rc.2" + }, + "dependencies": { + "pify": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz", + "integrity": "sha1-5aSs0sEB/fPZpNB/DbxNtJ3SgXY=" + } + } + }, + "github-markdown-css": { + "version": "2.10.0", + "resolved": "https://registry.npmjs.org/github-markdown-css/-/github-markdown-css-2.10.0.tgz", + "integrity": "sha512-RX5VUC54uX6Lvrm226M9kMzsNeOa81MnKyxb3J0G5KLjyoOySOZgwyKFkUpv6iUhooiUZdogk+OTwQPJ4WttYg==" + }, + "globby": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/globby/-/globby-6.1.0.tgz", + "integrity": "sha1-9abXDoOV4hyFj7BInWTfAkJNUGw=", + "requires": { + "array-union": "^1.0.1", + "glob": "^7.0.3", + "object-assign": "^4.0.1", + "pify": "^2.0.0", + "pinkie-promise": "^2.0.0" + }, + "dependencies": { + "glob": { + "version": "7.1.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.3.tgz", + "integrity": "sha512-vcfuiIxogLV4DlGBHIUOwI0IbrJ8HWPc4MU7HzviGeNho/UJDfi6B5p3sHeWIQ0KGIU0Jpxi5ZHxemQfLkkAwQ==", + "requires": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.0.4", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + } + }, + "minimatch": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", + "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==", + "requires": { + "brace-expansion": "^1.1.7" + } + } + } + }, + "has": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", + "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", + "requires": { + "function-bind": "^1.1.1" + } + }, + "help": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/help/-/help-3.0.2.tgz", + "integrity": "sha1-luGQ1KCkU7icLLSwWrOOOo+f2t0=" + }, + "highlight.js": { + "version": "9.12.0", + "resolved": "https://registry.npmjs.org/highlight.js/-/highlight.js-9.12.0.tgz", + "integrity": "sha1-5tnb5Xy+/mB1HwKvM2GVhwyQwB4=" + }, + "inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=", + "requires": { + "once": "^1.3.0", + "wrappy": "1" + } + }, + "inherits": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", + "integrity": "sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4=" + }, + "interpret": { + "version": "1.1.0", + 
"resolved": "https://registry.npmjs.org/interpret/-/interpret-1.1.0.tgz", + "integrity": "sha1-ftGxQQxqDg94z5XTuEQMY/eLhhQ=" + }, + "invert-kv": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/invert-kv/-/invert-kv-1.0.0.tgz", + "integrity": "sha1-EEqOSqym09jNFXqO+L+rLXo//bY=" + }, + "is-buffer": { + "version": "1.1.6", + "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-1.1.6.tgz", + "integrity": "sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w==" + }, + "is-expression": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-expression/-/is-expression-3.0.0.tgz", + "integrity": "sha1-Oayqa+f9HzRx3ELHQW5hwkMXrJ8=", + "requires": { + "acorn": "~4.0.2", + "object-assign": "^4.0.1" + }, + "dependencies": { + "acorn": { + "version": "4.0.13", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-4.0.13.tgz", + "integrity": "sha1-EFSVrlNh1pe9GVyCUZLhrX8lN4c=" + } + } + }, + "is-fullwidth-code-point": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz", + "integrity": "sha1-754xOG8DGn8NZDr4L95QxFfvAMs=", + "requires": { + "number-is-nan": "^1.0.0" + } + }, + "is-promise": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-promise/-/is-promise-2.1.0.tgz", + "integrity": "sha1-eaKp7OfwlugPNtKy87wWwf9L8/o=" + }, + "is-regex": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.0.4.tgz", + "integrity": "sha1-VRdIm1RwkbCTDglWVM7SXul+lJE=", + "requires": { + "has": "^1.0.1" + } + }, + "is-stream": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-1.1.0.tgz", + "integrity": "sha1-EtSj3U5o4Lec6428hBc66A2RykQ=" + }, + "isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha1-6PvzdNxVb/iUehDcsFctYz8s+hA=" + }, + "jison-lex": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/jison-lex/-/jison-lex-0.2.1.tgz", + "integrity": "sha1-rEuBXozOUTLrErXfz+jXB7iETf4=", + "requires": { + "lex-parser": "0.1.x", + "nomnom": "1.5.2" + } + }, + "js-base64": { + "version": "2.4.9", + "resolved": "https://registry.npmjs.org/js-base64/-/js-base64-2.4.9.tgz", + "integrity": "sha512-xcinL3AuDJk7VSzsHgb9DvvIXayBbadtMZ4HFPx8rUszbW1MuNMlwYVC4zzCZ6e1sqZpnNS5ZFYOhXqA39T7LQ==" + }, + "js-stringify": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/js-stringify/-/js-stringify-1.0.2.tgz", + "integrity": "sha1-Fzb939lyTyijaCrcYjCufk6Weds=" + }, + "json-pointer": { + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/json-pointer/-/json-pointer-0.6.0.tgz", + "integrity": "sha1-jlAFUKaqxUZKRzN32leqbMIoKNc=", + "requires": { + "foreach": "^2.0.4" + } + }, + "jstransformer": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/jstransformer/-/jstransformer-1.0.0.tgz", + "integrity": "sha1-7Yvwkh4vPx7U1cGkT2hwntJHIsM=", + "requires": { + "is-promise": "^2.0.0", + "promise": "^7.0.1" + } + }, + "lazy-cache": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/lazy-cache/-/lazy-cache-1.0.4.tgz", + "integrity": "sha1-odePw6UEdMuAhF07O24dpJpEbo4=" + }, + "lcid": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/lcid/-/lcid-1.0.0.tgz", + "integrity": "sha1-MIrMr6C8SDo4Z7S28rlQYlHRuDU=", + "requires": { + "invert-kv": "^1.0.0" + } + }, + "lex-parser": { + "version": "0.1.4", + "resolved": 
"https://registry.npmjs.org/lex-parser/-/lex-parser-0.1.4.tgz", + "integrity": "sha1-ZMTwJfF/1Tv7RXY/rrFvAVp0dVA=" + }, + "linkify-it": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/linkify-it/-/linkify-it-2.0.3.tgz", + "integrity": "sha1-2UpGSPmxwXnWT6lykSaL22zpQ08=", + "requires": { + "uc.micro": "^1.0.1" + } + }, + "locate-path": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-2.0.0.tgz", + "integrity": "sha1-K1aLJl7slExtnA3pw9u7ygNUzY4=", + "requires": { + "p-locate": "^2.0.0", + "path-exists": "^3.0.0" + }, + "dependencies": { + "path-exists": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-3.0.0.tgz", + "integrity": "sha1-zg6+ql94yxiSXqfYENe1mwEP1RU=" + } + } + }, + "lodash": { + "version": "4.17.11", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.11.tgz", + "integrity": "sha512-cQKh8igo5QUhZ7lg38DYWAxMvjSAKG0A8wGSVimP07SIUEK2UO+arSRKbRZWtelMtN5V0Hkwh5ryOto/SshYIg==" + }, + "lodash.get": { + "version": "4.4.2", + "resolved": "https://registry.npmjs.org/lodash.get/-/lodash.get-4.4.2.tgz", + "integrity": "sha1-LRd/ZS+jHpObRDjVNBSZ36OCXpk=" + }, + "lodash.isequal": { + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/lodash.isequal/-/lodash.isequal-4.5.0.tgz", + "integrity": "sha1-QVxEePK8wwEgwizhDtMib30+GOA=" + }, + "longest": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/longest/-/longest-1.0.1.tgz", + "integrity": "sha1-MKCy2jj3N3DoKUoNIuZiXtd9AJc=" + }, + "lru-cache": { + "version": "4.1.3", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-4.1.3.tgz", + "integrity": "sha512-fFEhvcgzuIoJVUF8fYr5KR0YqxD238zgObTps31YdADwPPAp82a4M8TrckkWyx7ekNlf9aBcVn81cFwwXngrJA==", + "requires": { + "pseudomap": "^1.0.2", + "yallist": "^2.1.2" + } + }, + "markdown-it": { + "version": "8.4.2", + "resolved": "https://registry.npmjs.org/markdown-it/-/markdown-it-8.4.2.tgz", + "integrity": "sha512-GcRz3AWTqSUphY3vsUqQSFMbgR38a4Lh3GWlHRh/7MRwz8mcu9n2IO7HOh+bXHrR9kOPDl5RNCaEsrneb+xhHQ==", + "requires": { + "argparse": "^1.0.7", + "entities": "~1.1.1", + "linkify-it": "^2.0.0", + "mdurl": "^1.0.1", + "uc.micro": "^1.0.5" + } + }, + "mdurl": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/mdurl/-/mdurl-1.0.1.tgz", + "integrity": "sha1-/oWy7HWlkDfyrf7BAP1sYBdhFS4=" + }, + "media-typer": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz", + "integrity": "sha1-hxDXrwqmJvj/+hzgAWhUUmMlV0g=" + }, + "mem": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/mem/-/mem-1.1.0.tgz", + "integrity": "sha1-Xt1StIXKHZAP5kiVUFOZoN+kX3Y=", + "requires": { + "mimic-fn": "^1.0.0" + } + }, + "merge-descriptors": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.1.tgz", + "integrity": "sha1-sAqqVW3YtEVoFQ7J0blT8/kMu2E=" + }, + "methods": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz", + "integrity": "sha1-VSmk1nZUE07cxSZmVoNbD4Ua/O4=" + }, + "mime-db": { + "version": "1.36.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.36.0.tgz", + "integrity": "sha512-L+xvyD9MkoYMXb1jAmzI/lWYAxAMCPvIBSWur0PZ5nOf5euahRLVqH//FKW9mWp2lkqUgYiXPgkzfMUFi4zVDw==" + }, + "mime-types": { + "version": "2.1.20", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.20.tgz", + "integrity": 
"sha512-HrkrPaP9vGuWbLK1B1FfgAkbqNjIuy4eHlIYnFi7kamZyLLrGlo2mpcx0bBmNpKqBtYtAfGbodDddIgddSJC2A==", + "requires": { + "mime-db": "~1.36.0" + } + }, + "mimic-fn": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-1.2.0.tgz", + "integrity": "sha512-jf84uxzwiuiIVKiOLpfYk7N46TSy8ubTonmneY9vrpHNAnp0QBt2BxWV9dO3/j+BoVAb+a5G6YDPW3M5HOdMWQ==" + }, + "minimist": { + "version": "1.2.0", + "resolved": "http://registry.npmjs.org/minimist/-/minimist-1.2.0.tgz", + "integrity": "sha1-o1AIsg9BOD7sH7kU9M1d95omQoQ=" + }, + "mkdirp": { + "version": "0.5.1", + "resolved": "http://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz", + "integrity": "sha1-MAV0OOrGz3+MR2fzhkjWaX11yQM=", + "requires": { + "minimist": "0.0.8" + }, + "dependencies": { + "minimist": { + "version": "0.0.8", + "resolved": "http://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz", + "integrity": "sha1-hX/Kv8M5fSYluCKCYuhqp6ARsF0=" + } + } + }, + "ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=" + }, + "nomnom": { + "version": "1.5.2", + "resolved": "https://registry.npmjs.org/nomnom/-/nomnom-1.5.2.tgz", + "integrity": "sha1-9DRUSKhTz71cDSYyDyR3qwUm/i8=", + "requires": { + "colors": "0.5.x", + "underscore": "1.1.x" + }, + "dependencies": { + "underscore": { + "version": "1.1.7", + "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.1.7.tgz", + "integrity": "sha1-QLq4S60Z0jAJbo1u9ii/8FXYPbA=" + } + } + }, + "npm-run-path": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-2.0.2.tgz", + "integrity": "sha1-NakjLfo11wZ7TLLd8jV7GHFTbF8=", + "requires": { + "path-key": "^2.0.0" + } + }, + "number-is-nan": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/number-is-nan/-/number-is-nan-1.0.1.tgz", + "integrity": "sha1-CXtgK1NCKlIsGvuHkDGDNpQaAR0=" + }, + "object-assign": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", + "integrity": "sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM=" + }, + "on-finished": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.3.0.tgz", + "integrity": "sha1-IPEzZIGwg811M3mSoWlxqi2QaUc=", + "requires": { + "ee-first": "1.1.1" + } + }, + "once": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/once/-/once-1.3.3.tgz", + "integrity": "sha1-suJhVXzkwxTsgwTz+oJmPkKXyiA=", + "requires": { + "wrappy": "1" + } + }, + "p-finally": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/p-finally/-/p-finally-1.0.0.tgz", + "integrity": "sha1-P7z7FbiZpEEjs0ttzBi3JDNqLK4=" + }, + "p-limit": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-1.3.0.tgz", + "integrity": "sha512-vvcXsLAJ9Dr5rQOPk7toZQZJApBl2K4J6dANSsEuh6QI41JYcsS/qhTGa9ErIUUgK3WNQoJYvylxvjqmiqEA9Q==", + "requires": { + "p-try": "^1.0.0" + } + }, + "p-locate": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-2.0.0.tgz", + "integrity": "sha1-IKAQOyIqcMj9OcwuWAaA893l7EM=", + "requires": { + "p-limit": "^1.1.0" + } + }, + "p-try": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-1.0.0.tgz", + "integrity": "sha1-y8ec26+P1CKOE/Yh8rGiN8GyB7M=" + }, + "parseurl": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.2.tgz", + "integrity": "sha1-/CidTtiZMRlGDBViUyYs3I3mW/M=" + }, + "path-is-absolute": { + "version": "1.0.1", + "resolved": 
"https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=" + }, + "path-key": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-2.0.1.tgz", + "integrity": "sha1-QRyttXTFoUDTpLGRDUDYDMn0C0A=" + }, + "path-parse": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.6.tgz", + "integrity": "sha512-GSmOT2EbHrINBf9SR7CDELwlJ8AENk3Qn7OikK4nFYAu3Ote2+JYNVvkpAEQm3/TLNEJFD/xZJjzyxg3KBWOzw==" + }, + "path-to-regexp": { + "version": "0.1.7", + "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz", + "integrity": "sha1-32BBeABfUi8V60SQ5yR6G/qmf4w=" + }, + "pify": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", + "integrity": "sha1-7RQaasBDqEnqWISY59yosVMw6Qw=" + }, + "pinkie": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/pinkie/-/pinkie-2.0.4.tgz", + "integrity": "sha1-clVrgM+g1IqXToDnckjoDtT3+HA=" + }, + "pinkie-promise": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/pinkie-promise/-/pinkie-promise-2.0.1.tgz", + "integrity": "sha1-ITXW36ejWMBprJsXh3YogihFD/o=", + "requires": { + "pinkie": "^2.0.0" + } + }, + "portfinder": { + "version": "1.0.17", + "resolved": "https://registry.npmjs.org/portfinder/-/portfinder-1.0.17.tgz", + "integrity": "sha512-syFcRIRzVI1BoEFOCaAiizwDolh1S1YXSodsVhncbhjzjZQulhczNRbqnUl9N31Q4dKGOXsNDqxC2BWBgSMqeQ==", + "requires": { + "async": "^1.5.2", + "debug": "^2.2.0", + "mkdirp": "0.5.x" + }, + "dependencies": { + "async": { + "version": "1.5.2", + "resolved": "https://registry.npmjs.org/async/-/async-1.5.2.tgz", + "integrity": "sha1-7GphrlZIDAw8skHJVhjiCJL5Zyo=" + } + } + }, + "primer-alerts": { + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/primer-alerts/-/primer-alerts-1.5.1.tgz", + "integrity": "sha512-2dyRO6ZgZF9ZR67gg+viCtsYV9CG+z6UARW8DAf4CEDkeiR4K46R6kKSC/WveEv8LPJ0MF+L4IXjqvn5kqK7zA==", + "requires": { + "primer-support": "4.4.1" + } + }, + "primer-avatars": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/primer-avatars/-/primer-avatars-1.4.1.tgz", + "integrity": "sha512-kd+GPMZqdXZ+N07CYeCWmRHCHUCeeCFNWldbgCg07NRfU+Ne9n01CLR/DcNuG27oP5A30ubvHnoEofvkgG3ynw==", + "requires": { + "primer-support": "4.4.1" + } + }, + "primer-base": { + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/primer-base/-/primer-base-1.5.1.tgz", + "integrity": "sha512-7tv0/V5sSRucp65NQXEyC8E2GP9EumLSKFDrjbl0liZTwSve8HUm7TmzgDXkO5pWd0VSfc7+5G1qrrdsGNmkPQ==", + "requires": { + "primer-support": "4.4.1" + } + }, + "primer-blankslate": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/primer-blankslate/-/primer-blankslate-1.4.1.tgz", + "integrity": "sha512-osgVtGY6UikfK1vuoBWijxT1C+SsD8RQxvYS8RFMDh6bReEf45//3n0NJVksRT8GdGUS8atRnrsWzRCtDcEwAg==", + "requires": { + "primer-support": "4.4.1" + } + }, + "primer-box": { + "version": "2.5.1", + "resolved": "https://registry.npmjs.org/primer-box/-/primer-box-2.5.1.tgz", + "integrity": "sha512-jfJjDLQlaM9e5pyzcJHALEb3Gml5uEoDDAoWKiKwvxEhl6da+5DB+HGbEt3/KUqt9B3e9Omy6IBvvRfgnqae7A==", + "requires": { + "primer-support": "4.4.1" + } + }, + "primer-breadcrumb": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/primer-breadcrumb/-/primer-breadcrumb-1.4.1.tgz", + "integrity": "sha512-cmDjIXSXClLQcrWDeJkZJWXkacfCluZU23mfsVM8K0oZyhiHZjbleOdsbwGXMKpFSEJ61wi4zvF9ZMkx1s8EdA==", + "requires": { + "primer-marketing-support": "1.3.1", + 
"primer-support": "4.4.1" + } + }, + "primer-buttons": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/primer-buttons/-/primer-buttons-2.4.1.tgz", + "integrity": "sha512-wOb0FMkRI/sWntorY9KXzY/OVxT5P/V+xlotbEC3+SPfzTLG+vJCs5rsamMu7S4TzhOx675/DTSpMo83iY4h3w==", + "requires": { + "primer-support": "4.4.1" + } + }, + "primer-cards": { + "version": "0.5.1", + "resolved": "https://registry.npmjs.org/primer-cards/-/primer-cards-0.5.1.tgz", + "integrity": "sha512-iXqvCgL311UBoRbC/ioFYSUunFnS8rUGkyALtN3/qAw3LoeyE1vjcooV0f1ja/xOblH0vGa32HjEKo2sBTwQ7g==", + "requires": { + "primer-marketing-support": "1.3.1", + "primer-support": "4.4.1" + } + }, + "primer-core": { + "version": "6.4.1", + "resolved": "https://registry.npmjs.org/primer-core/-/primer-core-6.4.1.tgz", + "integrity": "sha512-IxOLJ3BGLLFhDQ329zGgBMLaflqTn/WKAbRhl3XGIVh2vyRLBr8XPhzTfWfeFKt9WQ3ljzZkwip1GsMX9qqb1Q==", + "requires": { + "primer-base": "1.5.1", + "primer-box": "2.5.1", + "primer-buttons": "2.4.1", + "primer-forms": "1.4.1", + "primer-layout": "1.4.1", + "primer-navigation": "1.4.1", + "primer-support": "4.4.1", + "primer-table-object": "1.4.1", + "primer-tooltips": "1.4.1", + "primer-truncate": "1.4.1", + "primer-utilities": "4.8.1" + } + }, + "primer-css": { + "version": "9.6.0", + "resolved": "https://registry.npmjs.org/primer-css/-/primer-css-9.6.0.tgz", + "integrity": "sha512-qzTck5gvQevHvI3sUgP0D2QzLdmAqwd9h1rucMObOIbp8xQcM8zZGFNo71FBu7TxNu4A00McNvnadSNOgmnJnA==", + "requires": { + "primer-alerts": "1.5.1", + "primer-avatars": "1.4.1", + "primer-base": "1.5.1", + "primer-blankslate": "1.4.1", + "primer-box": "2.5.1", + "primer-breadcrumb": "1.4.1", + "primer-buttons": "2.4.1", + "primer-cards": "0.5.1", + "primer-core": "6.4.1", + "primer-forms": "1.4.1", + "primer-labels": "1.5.1", + "primer-layout": "1.4.1", + "primer-markdown": "3.7.1", + "primer-marketing": "5.4.1", + "primer-marketing-support": "1.3.1", + "primer-marketing-type": "1.4.1", + "primer-marketing-utilities": "1.4.1", + "primer-navigation": "1.4.1", + "primer-page-headers": "1.4.1", + "primer-page-sections": "1.4.1", + "primer-product": "5.4.1", + "primer-support": "4.4.1", + "primer-table-object": "1.4.1", + "primer-tables": "1.4.1", + "primer-tooltips": "1.4.1", + "primer-truncate": "1.4.1", + "primer-utilities": "4.8.1" + } + }, + "primer-forms": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/primer-forms/-/primer-forms-1.4.1.tgz", + "integrity": "sha512-wr7Ieoyy9iHPtR1zEaWEwPRA3PHo1eaOj3Q4VhYOBRv3rk+H3/Z49hey7PLyocPnvbF1GdG2s5/VkZUdgBEuqg==", + "requires": { + "primer-support": "4.4.1" + } + }, + "primer-labels": { + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/primer-labels/-/primer-labels-1.5.1.tgz", + "integrity": "sha512-dsX98awj7UYKvhed+j47ChldBX9sV5OSO3MhhVAHon7Kj5kxPCLFkTw/YlOxOHFX2VlbP/5jMM5W46xnSERy1g==", + "requires": { + "primer-support": "4.4.1" + } + }, + "primer-layout": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/primer-layout/-/primer-layout-1.4.1.tgz", + "integrity": "sha512-x4u8twf8XGFL3mKplh0XM7jUjTL9Is/97BmlehZMQE+740G/gywPo2CpOV2GMCxWzmhQVJhib8At1+UvN+qvZQ==", + "requires": { + "primer-support": "4.4.1" + } + }, + "primer-markdown": { + "version": "3.7.1", + "resolved": "https://registry.npmjs.org/primer-markdown/-/primer-markdown-3.7.1.tgz", + "integrity": "sha512-62I7tZaCCnOgjc2yE1cuu4WTwbym/eNIpEMB0CSWvFf8ZiTVKC5dNpFIwT1ipE35IjgOVopdHdqdPzMXoxKNAg==", + "requires": { + "primer-support": "4.4.1" + } + }, + "primer-marketing": { + "version": "5.4.1", + 
"resolved": "https://registry.npmjs.org/primer-marketing/-/primer-marketing-5.4.1.tgz", + "integrity": "sha512-ccadKuKA4kQDTaopHsj+lOujdxa8d14Ff8fq5HLvHlrpgKvdVwuKtIwNL2ryuas1FVeTddxa3lBnlAcuVTwWIQ==", + "requires": { + "primer-breadcrumb": "1.4.1", + "primer-cards": "0.5.1", + "primer-marketing-support": "1.3.1", + "primer-marketing-type": "1.4.1", + "primer-marketing-utilities": "1.4.1", + "primer-page-headers": "1.4.1", + "primer-page-sections": "1.4.1", + "primer-support": "4.4.1", + "primer-tables": "1.4.1" + } + }, + "primer-marketing-support": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/primer-marketing-support/-/primer-marketing-support-1.3.1.tgz", + "integrity": "sha512-GDwQ4TdZNS4p6UbSMxv7j7DlgegEDU43k2QKFJZ9EAtBN/rOKkf9gBa31yEiJQvgG7wZ84CvioObYtw885TL7g==" + }, + "primer-marketing-type": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/primer-marketing-type/-/primer-marketing-type-1.4.1.tgz", + "integrity": "sha512-cJGHvDkCy1bYiM2EDOwc+k7Y61DfvOIWambU5WmsGq7fUI92MRWqpyWZpYVOFEWqItRjLnxPv4myJCm0itKIRQ==", + "requires": { + "primer-marketing-support": "1.3.1", + "primer-support": "4.4.1" + } + }, + "primer-marketing-utilities": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/primer-marketing-utilities/-/primer-marketing-utilities-1.4.1.tgz", + "integrity": "sha512-dbic/+lYITBnZKujg6s3GF0Mo3jhuiq1ps0a3negkBsxKsNyU68hoYEl2bN6UI5L2BX9GyzCLd58N1jyjV4uCw==", + "requires": { + "primer-marketing-support": "1.3.1", + "primer-support": "4.4.1" + } + }, + "primer-navigation": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/primer-navigation/-/primer-navigation-1.4.1.tgz", + "integrity": "sha512-fMVrR8l/JtTXLzwf+8nHeBvoIQKysfbMLbU3VArSMaQp1/IwRS9eT4NijdpGoIeh2tmxD4nA+BM/dWHfjMucAw==", + "requires": { + "primer-support": "4.4.1" + } + }, + "primer-page-headers": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/primer-page-headers/-/primer-page-headers-1.4.1.tgz", + "integrity": "sha512-kmSi4Sys2dqt74sO1b5LcJq/EAnLe9p8t6oAs4PfkwgYXAdJPwHyTfe2+fueHYgqi07AlK3bnr1gw9rFen475Q==", + "requires": { + "primer-marketing-support": "1.3.1", + "primer-support": "4.4.1" + } + }, + "primer-page-sections": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/primer-page-sections/-/primer-page-sections-1.4.1.tgz", + "integrity": "sha512-WR5abovsjAKlbZjn4q7+eLCEA3gnwh/tuZDJnZ3l2V5O+IpHYVXI5Boi6QxbQM3mbHOL19NJhQEyfcHXBe7AQw==", + "requires": { + "primer-marketing-support": "1.3.1", + "primer-support": "4.4.1" + } + }, + "primer-product": { + "version": "5.4.1", + "resolved": "https://registry.npmjs.org/primer-product/-/primer-product-5.4.1.tgz", + "integrity": "sha512-W1sVne7TPc2FF+c8GHqWnWtQQOzkr7R6mT1wwafwsY8GiCCkUxOtn0JPORrHzx67FV1GSWVMJ49F7pQPcl1Zcw==", + "requires": { + "primer-alerts": "1.5.1", + "primer-avatars": "1.4.1", + "primer-blankslate": "1.4.1", + "primer-labels": "1.5.1", + "primer-markdown": "3.7.1", + "primer-support": "4.4.1" + } + }, + "primer-support": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/primer-support/-/primer-support-4.4.1.tgz", + "integrity": "sha512-stQEoF4NfWy8JOVASUHxGY+Ot1eBguPH8rWoeLQy16zKzcS16kRccfvGbBXyv0G/aA+DdL8ZmjlXB2ubJ+wBJg==" + }, + "primer-table-object": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/primer-table-object/-/primer-table-object-1.4.1.tgz", + "integrity": "sha512-OkE3knDjLlzSot0/Q9O/b5GuKWTaxFyB/2CcZttA3WizAkxlkV4ql/Xy8mFr6WxBQORkBrrbxWUZC+Ulj88ZIQ==", + "requires": { + "primer-support": "4.4.1" + } + }, + 
"primer-tables": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/primer-tables/-/primer-tables-1.4.1.tgz", + "integrity": "sha512-Dk9ttoxIDigcJQ0vhh3VDkOL+/spGdEJacRFvNsysS4IbDOUDilXLZFcUZB2wCbhXFHS/CObk+/3zoW39J/6tg==", + "requires": { + "primer-marketing-support": "1.3.1", + "primer-support": "4.4.1" + } + }, + "primer-tooltips": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/primer-tooltips/-/primer-tooltips-1.4.1.tgz", + "integrity": "sha512-Id0g033elSx7Sy7+HDzha4Tuv24QxPzVtT15IHSOXXa900NjZqR1HHQIyMd1EkVfPt2sZ3Z0/k0cTvdqXs6eJQ==", + "requires": { + "primer-support": "4.4.1" + } + }, + "primer-truncate": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/primer-truncate/-/primer-truncate-1.4.1.tgz", + "integrity": "sha512-spBAPx7944txGXLCNGv+WZnPG4MiMhqn3srKlpGL2nmfcLxI+geviD+GhY/b42GyCgxRhcucrt/jxKYfsaBdww==", + "requires": { + "primer-support": "4.4.1" + } + }, + "primer-utilities": { + "version": "4.8.1", + "resolved": "https://registry.npmjs.org/primer-utilities/-/primer-utilities-4.8.1.tgz", + "integrity": "sha512-3wu8GUsJVVa0IIUdIKP+ZVSFPmX8v2NQWjBSJK00GbSPwthztOnFEoE40Ru7wAuUDNXWu9zMnx2i0lcyDu+yBg==", + "requires": { + "primer-support": "4.4.1" + } + }, + "process-nextick-args": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.0.tgz", + "integrity": "sha512-MtEC1TqN0EU5nephaJ4rAtThHtC86dNN9qCuEhtshvpVBkAW5ZO7BASN9REnF9eoXGcRub+pFuKEpOHE+HbEMw==" + }, + "promise": { + "version": "7.3.1", + "resolved": "https://registry.npmjs.org/promise/-/promise-7.3.1.tgz", + "integrity": "sha512-nolQXZ/4L+bP/UGlkfaIujX9BKxGwmQ9OT4mOt5yvy8iK1h3wqTEJCijzGANTCCl9nWjY41juyAn2K3Q1hLLTg==", + "requires": { + "asap": "~2.0.3" + } + }, + "pseudomap": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/pseudomap/-/pseudomap-1.0.2.tgz", + "integrity": "sha1-8FKijacOYYkX7wqKw0wa5aaChrM=" + }, + "pug": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/pug/-/pug-2.0.3.tgz", + "integrity": "sha1-ccuoJTfJWl6rftBGluQiH1Oqh44=", + "requires": { + "pug-code-gen": "^2.0.1", + "pug-filters": "^3.1.0", + "pug-lexer": "^4.0.0", + "pug-linker": "^3.0.5", + "pug-load": "^2.0.11", + "pug-parser": "^5.0.0", + "pug-runtime": "^2.0.4", + "pug-strip-comments": "^1.0.3" + } + }, + "pug-attrs": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/pug-attrs/-/pug-attrs-2.0.3.tgz", + "integrity": "sha1-owlflw5kFR972tlX7vVftdeQXRU=", + "requires": { + "constantinople": "^3.0.1", + "js-stringify": "^1.0.1", + "pug-runtime": "^2.0.4" + } + }, + "pug-code-gen": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/pug-code-gen/-/pug-code-gen-2.0.1.tgz", + "integrity": "sha1-CVHsgyJddNjPxHan+Zolm199BQw=", + "requires": { + "constantinople": "^3.0.1", + "doctypes": "^1.1.0", + "js-stringify": "^1.0.1", + "pug-attrs": "^2.0.3", + "pug-error": "^1.3.2", + "pug-runtime": "^2.0.4", + "void-elements": "^2.0.1", + "with": "^5.0.0" + } + }, + "pug-error": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/pug-error/-/pug-error-1.3.2.tgz", + "integrity": "sha1-U659nSm7A89WRJOgJhCfVMR/XyY=" + }, + "pug-filters": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/pug-filters/-/pug-filters-3.1.0.tgz", + "integrity": "sha1-JxZVVbwEwjbkqisDZiRt+gIbYm4=", + "requires": { + "clean-css": "^4.1.11", + "constantinople": "^3.0.1", + "jstransformer": "1.0.0", + "pug-error": "^1.3.2", + "pug-walk": "^1.1.7", + "resolve": "^1.1.6", + "uglify-js": "^2.6.1" + 
} + }, + "pug-lexer": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/pug-lexer/-/pug-lexer-4.0.0.tgz", + "integrity": "sha1-IQwYRX7y4XYCQnQMXmR715TOwng=", + "requires": { + "character-parser": "^2.1.1", + "is-expression": "^3.0.0", + "pug-error": "^1.3.2" + } + }, + "pug-linker": { + "version": "3.0.5", + "resolved": "https://registry.npmjs.org/pug-linker/-/pug-linker-3.0.5.tgz", + "integrity": "sha1-npp65ABWgtAn3uuWsAD4juuDoC8=", + "requires": { + "pug-error": "^1.3.2", + "pug-walk": "^1.1.7" + } + }, + "pug-load": { + "version": "2.0.11", + "resolved": "https://registry.npmjs.org/pug-load/-/pug-load-2.0.11.tgz", + "integrity": "sha1-5kjlftET/iwfRdV4WOorrWvAFSc=", + "requires": { + "object-assign": "^4.1.0", + "pug-walk": "^1.1.7" + } + }, + "pug-parser": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/pug-parser/-/pug-parser-5.0.0.tgz", + "integrity": "sha1-45Stmz/KkxI5QK/4hcBuRKt+aOQ=", + "requires": { + "pug-error": "^1.3.2", + "token-stream": "0.0.1" + } + }, + "pug-runtime": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/pug-runtime/-/pug-runtime-2.0.4.tgz", + "integrity": "sha1-4XjhvaaKsujArPybztLFT9iM61g=" + }, + "pug-strip-comments": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/pug-strip-comments/-/pug-strip-comments-1.0.3.tgz", + "integrity": "sha1-8VWVkiBu3G+FMQ2s9K+0igJa9Z8=", + "requires": { + "pug-error": "^1.3.2" + } + }, + "pug-walk": { + "version": "1.1.7", + "resolved": "https://registry.npmjs.org/pug-walk/-/pug-walk-1.1.7.tgz", + "integrity": "sha1-wA1cUSi6xYBr7BXSt+fNq+QlMfM=" + }, + "punycode": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz", + "integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==" + }, + "regenerator-runtime": { + "version": "0.11.1", + "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.11.1.tgz", + "integrity": "sha512-MguG95oij0fC3QV3URf4V2SDYGJhJnJGqvIIgdECeODCT98wSWDAJ94SSuVpYQUoTcGUIL6L4yNB7j1DFFHSBg==" + }, + "repeat-string": { + "version": "1.6.1", + "resolved": "https://registry.npmjs.org/repeat-string/-/repeat-string-1.6.1.tgz", + "integrity": "sha1-jcrkcOHIirwtYA//Sndihtp15jc=" + }, + "require-dir": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/require-dir/-/require-dir-1.0.0.tgz", + "integrity": "sha512-PUJcQVTP4n6F8Un1GEEWhqnmBMfukVsL5gqwBxt7RF+nP+9hSOLJ/vSs5iUoXw1UWDgzqg9B/IIb15kfQKWsAQ==" + }, + "require-directory": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", + "integrity": "sha1-jGStX9MNqxyXbiNE/+f3kqam30I=" + }, + "require-main-filename": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/require-main-filename/-/require-main-filename-1.0.1.tgz", + "integrity": "sha1-l/cXtp1IeE9fUmpsWqj/3aBVpNE=" + }, + "resolve": { + "version": "1.8.1", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.8.1.tgz", + "integrity": "sha512-AicPrAC7Qu1JxPCZ9ZgCZlY35QgFnNqc+0LtbRNxnVw4TXvjQ72wnuL9JQcEBgXkI9JM8MsT9kaQoHcpCRJOYA==", + "requires": { + "path-parse": "^1.0.5" + } + }, + "right-align": { + "version": "0.1.3", + "resolved": "https://registry.npmjs.org/right-align/-/right-align-0.1.3.tgz", + "integrity": "sha1-YTObci/mo1FWiSENJOFMlhSGE+8=", + "requires": { + "align-text": "^0.1.1" + } + }, + "safer-buffer": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", + 
"integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==" + }, + "set-blocking": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz", + "integrity": "sha1-BF+XgtARrppoA93TgrJDkrPYkPc=" + }, + "setprototypeof": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.1.0.tgz", + "integrity": "sha512-BvE/TwpZX4FXExxOxZyRGQQv651MSwmWKZGqvmPcRIjDqWub67kTKuIMx43cZZrS/cBBzwBcNDWoFxt2XEFIpQ==" + }, + "shebang-command": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-1.2.0.tgz", + "integrity": "sha1-RKrGW2lbAzmJaMOfNj/uXer98eo=", + "requires": { + "shebang-regex": "^1.0.0" + } + }, + "shebang-regex": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-1.0.0.tgz", + "integrity": "sha1-2kL0l0DAtC2yypcoVxyxkMmO/qM=" + }, + "shelljs": { + "version": "0.7.8", + "resolved": "https://registry.npmjs.org/shelljs/-/shelljs-0.7.8.tgz", + "integrity": "sha1-3svPh0sNHl+3LhSxZKloMEjprLM=", + "requires": { + "glob": "^7.0.0", + "interpret": "^1.0.0", + "rechoir": "^0.6.2" + }, + "dependencies": { + "glob": { + "version": "7.1.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.3.tgz", + "integrity": "sha512-vcfuiIxogLV4DlGBHIUOwI0IbrJ8HWPc4MU7HzviGeNho/UJDfi6B5p3sHeWIQ0KGIU0Jpxi5ZHxemQfLkkAwQ==", + "requires": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.0.4", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + } + }, + "minimatch": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", + "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==", + "requires": { + "brace-expansion": "^1.1.7" + } + }, + "rechoir": { + "version": "0.6.2", + "resolved": "https://registry.npmjs.org/rechoir/-/rechoir-0.6.2.tgz", + "integrity": "sha1-hSBLVNuoLVdC4oyWdW70OvUOM4Q=", + "requires": { + "resolve": "^1.1.6" + } + }, + "resolve": { + "version": "1.8.1", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.8.1.tgz", + "integrity": "sha512-AicPrAC7Qu1JxPCZ9ZgCZlY35QgFnNqc+0LtbRNxnVw4TXvjQ72wnuL9JQcEBgXkI9JM8MsT9kaQoHcpCRJOYA==", + "requires": { + "path-parse": "^1.0.5" + } + } + } + }, + "showdown": { + "version": "1.8.6", + "resolved": "https://registry.npmjs.org/showdown/-/showdown-1.8.6.tgz", + "integrity": "sha1-kepO47elRIqspoIKTifmkMatdxw=", + "requires": { + "yargs": "^10.0.3" + } + }, + "signal-exit": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.2.tgz", + "integrity": "sha1-tf3AjxKH6hF4Yo5BXiUTK3NkbG0=" + }, + "slash": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-1.0.0.tgz", + "integrity": "sha1-xB8vbDn8FtHNF61LXYlhFK5HDVU=" + }, + "source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==" + }, + "sprintf-js": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", + "integrity": "sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw=" + }, + "statuses": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/statuses/-/statuses-1.3.1.tgz", + "integrity": "sha1-+vUbnrdKrvOzrPStX2Gr8ky3uT4=" + }, + "string-width": { + "version": 
"1.0.2", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-1.0.2.tgz", + "integrity": "sha1-EYvfW4zcUaKn5w0hHgfisLmxB9M=", + "requires": { + "code-point-at": "^1.0.0", + "is-fullwidth-code-point": "^1.0.0", + "strip-ansi": "^3.0.0" + } + }, + "strip-ansi": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", + "integrity": "sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8=", + "requires": { + "ansi-regex": "^2.0.0" + } + }, + "strip-eof": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/strip-eof/-/strip-eof-1.0.0.tgz", + "integrity": "sha1-u0P/VZim6wXYm1n80SnJgzE2Br8=" + }, + "swagger-repo": { + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/swagger-repo/-/swagger-repo-1.5.1.tgz", + "integrity": "sha512-ZZD0clWcQNfbomM3DKHuFMLLePxSMDBumgGnrY1iGvSfkTRFX7lIU4H8H1cQsZtY9oJpJdhuzv35xpiGRvAULw==", + "requires": { + "body-parser": "^1.15.2", + "commander": "^2.9.0", + "cors": "^2.7.1", + "express": "^4.13.4", + "glob": "^7.0.0", + "js-yaml": "^3.5.3", + "json-pointer": "^0.6.0", + "jsonpath": "^1.0.0", + "lodash": "^4.5.0", + "mkdirp": "^0.5.1", + "require-dir": "^1.0.0", + "swagger-editor": "^2.10.3", + "swagger-ui": "^2.2.0", + "sway": "^1.0.0" + }, + "dependencies": { + "JSONSelect": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/JSONSelect/-/JSONSelect-0.4.0.tgz", + "integrity": "sha1-oI7cxn6z/L6Z7WMIVTRKDPKCu40=" + }, + "accepts": { + "version": "1.3.5", + "resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.5.tgz", + "integrity": "sha1-63d99gEXI6OxTopywIBcjoZ0a9I=", + "requires": { + "mime-types": "~2.1.18", + "negotiator": "0.6.1" + } + }, + "argparse": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", + "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", + "requires": { + "sprintf-js": "~1.0.2" + } + }, + "asynckit": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", + "integrity": "sha1-x57Zf380y48robyXkLzDZkdLS3k=" + }, + "body-parser": { + "version": "1.18.3", + "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.18.3.tgz", + "integrity": "sha1-WykhmP/dVTs6DyDe0FkrlWlVyLQ=", + "requires": { + "bytes": "3.0.0", + "content-type": "~1.0.4", + "debug": "2.6.9", + "depd": "~1.1.2", + "http-errors": "~1.6.3", + "iconv-lite": "0.4.23", + "on-finished": "~2.3.0", + "qs": "6.5.2", + "raw-body": "2.3.3", + "type-is": "~1.6.16" + } + }, + "bytes": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.0.0.tgz", + "integrity": "sha1-0ygVQE1olpn4Wk6k+odV3ROpYEg=" + }, + "chance": { + "version": "1.0.16", + "resolved": "https://registry.npmjs.org/chance/-/chance-1.0.16.tgz", + "integrity": "sha512-2bgDHH5bVfAXH05SPtjqrsASzZ7h90yCuYT2z4mkYpxxYvJXiIydBFzVieVHZx7wLH1Ag2Azaaej2/zA1XUrNQ==" + }, + "cjson": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/cjson/-/cjson-0.2.1.tgz", + "integrity": "sha1-c82KrWXZ4VBfmvF0TTt5wVJ2gqU=" + }, + "combined-stream": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.6.tgz", + "integrity": "sha1-cj599ugBrFYTETp+RFqbactjKBg=", + "requires": { + "delayed-stream": "~1.0.0" + } + }, + "cookie": { + "version": "0.3.1", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.3.1.tgz", + "integrity": "sha1-5+Ch+e9DtMi6klxcWpboBtFoc7s=" + }, + "core-util-is": { + "version": "1.0.2", + 
"resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", + "integrity": "sha1-tf1UIgqivFq1eqtxQMlAdUUDwac=" + }, + "deep-extend": { + "version": "0.4.2", + "resolved": "https://registry.npmjs.org/deep-extend/-/deep-extend-0.4.2.tgz", + "integrity": "sha1-SLaZwn4zS/ifEIkr5DL25MfTSn8=" + }, + "delayed-stream": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", + "integrity": "sha1-3zrhmayt+31ECqrgsp4icrJOxhk=" + }, + "depd": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/depd/-/depd-1.1.2.tgz", + "integrity": "sha1-m81S4UwJd2PnSbJ0xDRu0uVgtak=" + }, + "deref": { + "version": "0.6.4", + "resolved": "http://registry.npmjs.org/deref/-/deref-0.6.4.tgz", + "integrity": "sha1-vVqW1F2+0wEbuBvfaN31S+jhvU4=", + "requires": { + "deep-extend": "^0.4.0" + } + }, + "drange": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/drange/-/drange-1.0.2.tgz", + "integrity": "sha512-bve7maXvfKW+vcsRpP8gzEDzkTg8O6AoCGvi/52pnllzhl/nmex8XLrHOUEQ42Z8GshcyftvG+E4s5vcd/qo0Q==" + }, + "escodegen": { + "version": "0.0.21", + "resolved": "https://registry.npmjs.org/escodegen/-/escodegen-0.0.21.tgz", + "integrity": "sha1-U9ZSz6EDA4gnlFilJmxf/HCcY8M=", + "requires": { + "esprima": "~1.0.2", + "estraverse": "~0.0.4", + "source-map": ">= 0.1.2" + }, + "dependencies": { + "esprima": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/esprima/-/esprima-1.0.4.tgz", + "integrity": "sha1-n1V+CPw7TSbs6d00+Pv0drYlha0=" + } + } + }, + "esprima": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz", + "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==" + }, + "esutils": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.2.tgz", + "integrity": "sha1-Cr9PHKpbyx96nYrMbepPqqBLrJs=" + }, + "etag": { + "version": "1.8.1", + "resolved": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz", + "integrity": "sha1-Qa4u62XvpiJorr/qg6x9eSmbCIc=" + }, + "express": { + "version": "4.16.3", + "resolved": "https://registry.npmjs.org/express/-/express-4.16.3.tgz", + "integrity": "sha1-avilAjUNsyRuzEvs9rWjTSL37VM=", + "requires": { + "accepts": "~1.3.5", + "array-flatten": "1.1.1", + "body-parser": "1.18.2", + "content-disposition": "0.5.2", + "content-type": "~1.0.4", + "cookie": "0.3.1", + "cookie-signature": "1.0.6", + "debug": "2.6.9", + "depd": "~1.1.2", + "encodeurl": "~1.0.2", + "escape-html": "~1.0.3", + "etag": "~1.8.1", + "finalhandler": "1.1.1", + "fresh": "0.5.2", + "merge-descriptors": "1.0.1", + "methods": "~1.1.2", + "on-finished": "~2.3.0", + "parseurl": "~1.3.2", + "path-to-regexp": "0.1.7", + "proxy-addr": "~2.0.3", + "qs": "6.5.1", + "range-parser": "~1.2.0", + "safe-buffer": "5.1.1", + "send": "0.16.2", + "serve-static": "1.13.2", + "setprototypeof": "1.1.0", + "statuses": "~1.4.0", + "type-is": "~1.6.16", + "utils-merge": "1.0.1", + "vary": "~1.1.2" + }, + "dependencies": { + "body-parser": { + "version": "1.18.2", + "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.18.2.tgz", + "integrity": "sha1-h2eKGdhLR9hZuDGZvVm84iKxBFQ=", + "requires": { + "bytes": "3.0.0", + "content-type": "~1.0.4", + "debug": "2.6.9", + "depd": "~1.1.1", + "http-errors": "~1.6.2", + "iconv-lite": "0.4.19", + "on-finished": "~2.3.0", + "qs": "6.5.1", + "raw-body": "2.3.2", + "type-is": "~1.6.15" + } + }, + "iconv-lite": { + "version": "0.4.19", + "resolved": 
"https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.19.tgz", + "integrity": "sha512-oTZqweIP51xaGPI4uPa56/Pri/480R+mo7SeU+YETByQNhDG55ycFyNLIgta9vXhILrxXDmF7ZGhqZIcuN0gJQ==" + }, + "qs": { + "version": "6.5.1", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.5.1.tgz", + "integrity": "sha512-eRzhrN1WSINYCDCbrz796z37LOe3m5tmW7RQf6oBntukAG1nmovJvhnwHHRMAfeoItc1m2Hk02WER2aQ/iqs+A==" + }, + "raw-body": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.3.2.tgz", + "integrity": "sha1-vNYMd9Prk83gBQKVw/N5OJvIj4k=", + "requires": { + "bytes": "3.0.0", + "http-errors": "1.6.2", + "iconv-lite": "0.4.19", + "unpipe": "1.0.0" + }, + "dependencies": { + "depd": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/depd/-/depd-1.1.1.tgz", + "integrity": "sha1-V4O04cRZ8G+lyif5kfPQbnoxA1k=" + }, + "http-errors": { + "version": "1.6.2", + "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-1.6.2.tgz", + "integrity": "sha1-CgAsyFcHGSp+eUbO7cERVfYOxzY=", + "requires": { + "depd": "1.1.1", + "inherits": "2.0.3", + "setprototypeof": "1.0.3", + "statuses": ">= 1.3.1 < 2" + } + }, + "setprototypeof": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.0.3.tgz", + "integrity": "sha1-ZlZ+NwQ+608E2RvWWMDL77VbjgQ=" + } + } + }, + "statuses": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/statuses/-/statuses-1.4.0.tgz", + "integrity": "sha512-zhSCtt8v2NDrRlPQpCNtw/heZLtfUDqxBM1udqikb/Hbk52LK4nQSwr10u77iopCW5LsyHpuXS0GnEc48mLeew==" + } + } + }, + "faker": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/faker/-/faker-3.1.0.tgz", + "integrity": "sha1-D5CPr05uwCUk5UpX5DLFwBPgjJ8=" + }, + "finalhandler": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.1.1.tgz", + "integrity": "sha512-Y1GUDo39ez4aHAw7MysnUD5JzYX+WaIj8I57kO3aEPT1fFRL4sr7mjei97FgnwhAyyzRYmQZaTHb2+9uZ1dPtg==", + "requires": { + "debug": "2.6.9", + "encodeurl": "~1.0.2", + "escape-html": "~1.0.3", + "on-finished": "~2.3.0", + "parseurl": "~1.3.2", + "statuses": "~1.4.0", + "unpipe": "~1.0.0" + }, + "dependencies": { + "statuses": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/statuses/-/statuses-1.4.0.tgz", + "integrity": "sha512-zhSCtt8v2NDrRlPQpCNtw/heZLtfUDqxBM1udqikb/Hbk52LK4nQSwr10u77iopCW5LsyHpuXS0GnEc48mLeew==" + } + } + }, + "form-data": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.3.2.tgz", + "integrity": "sha1-SXBJi+YEwgwAXU9cI67NIda0kJk=", + "requires": { + "asynckit": "^0.4.0", + "combined-stream": "1.0.6", + "mime-types": "^2.1.12" + } + }, + "formidable": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/formidable/-/formidable-1.2.1.tgz", + "integrity": "sha512-Fs9VRguL0gqGHkXS5GQiMCr1VhZBxz0JnJs4JmMp/2jL18Fmbzvv7vOFRU+U8TBkHEE/CX1qDXzJplVULgsLeg==" + }, + "fresh": { + "version": "0.5.2", + "resolved": "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz", + "integrity": "sha1-PYyt2Q2XZWn6g1qx+OSyOhBWBac=" + }, + "glob": { + "version": "7.1.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.3.tgz", + "integrity": "sha512-vcfuiIxogLV4DlGBHIUOwI0IbrJ8HWPc4MU7HzviGeNho/UJDfi6B5p3sHeWIQ0KGIU0Jpxi5ZHxemQfLkkAwQ==", + "requires": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.0.4", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + } + }, + "graphlib": { + "version": "2.1.5", + "resolved": 
"https://registry.npmjs.org/graphlib/-/graphlib-2.1.5.tgz", + "integrity": "sha512-XvtbqCcw+EM5SqQrIetIKKD+uZVNQtDPD1goIg7K73RuRZtVI5rYMdcCVSHm/AS1sCBZ7vt0p5WgXouucHQaOA==", + "requires": { + "lodash": "^4.11.1" + } + }, + "http-errors": { + "version": "1.6.3", + "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-1.6.3.tgz", + "integrity": "sha1-i1VoC7S+KDoLW/TqLjhYC+HZMg0=", + "requires": { + "depd": "~1.1.2", + "inherits": "2.0.3", + "setprototypeof": "1.1.0", + "statuses": ">= 1.4.0 < 2" + } + }, + "iconv-lite": { + "version": "0.4.23", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.23.tgz", + "integrity": "sha512-neyTUVFtahjf0mB3dZT77u+8O0QB89jFdnBkd5P1JgYPbPaia3gXXOVL2fq8VyU2gMMD7SaN7QukTB/pmXYvDA==", + "requires": { + "safer-buffer": ">= 2.1.2 < 3" + } + }, + "ipaddr.js": { + "version": "1.8.0", + "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.8.0.tgz", + "integrity": "sha1-6qM9bd16zo9/b+DJygRA5wZzix4=" + }, + "isarray": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", + "integrity": "sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE=" + }, + "jison": { + "version": "0.4.13", + "resolved": "https://registry.npmjs.org/jison/-/jison-0.4.13.tgz", + "integrity": "sha1-kEFwfWIkE2f1iDRTK58ZwsNvrHg=", + "requires": { + "JSONSelect": "0.4.0", + "cjson": "~0.2.1", + "ebnf-parser": "~0.1.9", + "escodegen": "0.0.21", + "esprima": "1.0.x", + "jison-lex": "0.2.x", + "lex-parser": "~0.1.3", + "nomnom": "1.5.2" + }, + "dependencies": { + "esprima": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/esprima/-/esprima-1.0.4.tgz", + "integrity": "sha1-n1V+CPw7TSbs6d00+Pv0drYlha0=" + } + } + }, + "js-yaml": { + "version": "3.12.0", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.12.0.tgz", + "integrity": "sha512-PIt2cnwmPfL4hKNwqeiuz4bKfnzHTBv6HyVgjahA6mPLwPDzjDWrplJBMjHUFxku/N3FlmrbyPclad+I+4mJ3A==", + "requires": { + "argparse": "^1.0.7", + "esprima": "^4.0.0" + } + }, + "json-refs": { + "version": "2.1.7", + "resolved": "https://registry.npmjs.org/json-refs/-/json-refs-2.1.7.tgz", + "integrity": "sha1-uesB/in16j6Sh48VrqEK04taz4k=", + "requires": { + "commander": "^2.9.0", + "graphlib": "^2.1.1", + "js-yaml": "^3.8.3", + "native-promise-only": "^0.8.1", + "path-loader": "^1.0.2", + "slash": "^1.0.0", + "uri-js": "^3.0.2" + } + }, + "json-schema-faker": { + "version": "0.2.16", + "resolved": "https://registry.npmjs.org/json-schema-faker/-/json-schema-faker-0.2.16.tgz", + "integrity": "sha1-UdPKSJVdj+c09ZHXR7ckU75aePI=", + "requires": { + "chance": "~1.0.1", + "deref": "~0.6.3", + "faker": "~3.1.0", + "randexp": "~0.4.2" + } + }, + "jsonpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/jsonpath/-/jsonpath-1.0.0.tgz", + "integrity": "sha1-Rc2dTE0NaCXZC9fkD4PxGCsT3Qc=", + "requires": { + "esprima": "1.2.2", + "jison": "0.4.13", + "static-eval": "2.0.0", + "underscore": "1.7.0" + }, + "dependencies": { + "esprima": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/esprima/-/esprima-1.2.2.tgz", + "integrity": "sha1-dqD9Zvz+FU/SkmZ9wmQBl1CxZXs=" + } + } + }, + "levn": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/levn/-/levn-0.3.0.tgz", + "integrity": "sha1-OwmSTt+fCDwEkP3UwLxEIeBHZO4=", + "requires": { + "prelude-ls": "~1.1.2", + "type-check": "~0.3.2" + } + }, + "lodash": { + "version": "4.17.10", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.10.tgz", + "integrity": 
"sha512-UejweD1pDoXu+AD825lWwp4ZGtSwgnpZxb3JDViD7StjQz+Nb/6l093lx4OQ0foGWNRoc19mWy7BzL+UAK2iVg==" + }, + "mime": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/mime/-/mime-1.4.1.tgz", + "integrity": "sha512-KI1+qOZu5DcW6wayYHSzR/tXKCDC5Om4s1z2QJjDULzLcmf3DvzS7oluY4HCTrc+9FiKmWUgeNLg7W3uIQvxtQ==" + }, + "minimatch": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", + "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==", + "requires": { + "brace-expansion": "^1.1.7" + } + }, + "native-promise-only": { + "version": "0.8.1", + "resolved": "https://registry.npmjs.org/native-promise-only/-/native-promise-only-0.8.1.tgz", + "integrity": "sha1-IKMYwwy0X3H+et+/eyHJnBRy7xE=" + }, + "negotiator": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.1.tgz", + "integrity": "sha1-KzJxhOiZIQEXeyhWP7XnECrNDKk=" + }, + "optionator": { + "version": "0.8.2", + "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.8.2.tgz", + "integrity": "sha1-NkxeQJ0/TWMB1sC0wFu6UBgK62Q=", + "requires": { + "deep-is": "~0.1.3", + "fast-levenshtein": "~2.0.4", + "levn": "~0.3.0", + "prelude-ls": "~1.1.2", + "type-check": "~0.3.2", + "wordwrap": "~1.0.0" + } + }, + "path-loader": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/path-loader/-/path-loader-1.0.8.tgz", + "integrity": "sha512-/JQCrTcrteaPB8IHefEAQbmBQReKj51A+yTyc745TBbO4FOySw+/l3Rh0zyad0Nrd87TMROlmFANQwCRsuvN4w==", + "requires": { + "native-promise-only": "^0.8.1", + "superagent": "^3.8.3" + } + }, + "prelude-ls": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.1.2.tgz", + "integrity": "sha1-IZMqVJ9eUv/ZqCf1cOBL5iqX2lQ=" + }, + "proxy-addr": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.4.tgz", + "integrity": "sha512-5erio2h9jp5CHGwcybmxmVqHmnCBZeewlfJ0pex+UW7Qny7OOZXTtH56TGNyBizkgiOwhJtMKrVzDTeKcySZwA==", + "requires": { + "forwarded": "~0.1.2", + "ipaddr.js": "1.8.0" + } + }, + "qs": { + "version": "6.5.2", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.5.2.tgz", + "integrity": "sha512-N5ZAX4/LxJmF+7wN74pUD6qAh9/wnvdQcjq9TZjevvXzSUo7bfmw91saqMjzGS2xq91/odN2dW/WOl7qQHNDGA==" + }, + "randexp": { + "version": "0.4.9", + "resolved": "https://registry.npmjs.org/randexp/-/randexp-0.4.9.tgz", + "integrity": "sha512-maAX1cnBkzIZ89O4tSQUOF098xjGMC8N+9vuY/WfHwg87THw6odD2Br35donlj5e6KnB1SB0QBHhTQhhDHuTPQ==", + "requires": { + "drange": "^1.0.0", + "ret": "^0.2.0" + } + }, + "range-parser": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.0.tgz", + "integrity": "sha1-9JvmtIeJTdxA3MlKMi9hEJLgDV4=" + }, + "raw-body": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.3.3.tgz", + "integrity": "sha512-9esiElv1BrZoI3rCDuOuKCBRbuApGGaDPQfjSflGxdy4oyzqghxu6klEkkVIvBje+FF0BX9coEv8KqW6X/7njw==", + "requires": { + "bytes": "3.0.0", + "http-errors": "1.6.3", + "iconv-lite": "0.4.23", + "unpipe": "1.0.0" + } + }, + "readable-stream": { + "version": "2.3.6", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.6.tgz", + "integrity": "sha512-tQtKA9WIAhBF3+VLAseyMqZeBjW0AHJoxOtYqSUZNJxauErmLbVm2FW1y+J/YA9dUrAC39ITejlZWhVIwawkKw==", + "requires": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + 
"string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + }, + "ret": { + "version": "0.2.2", + "resolved": "https://registry.npmjs.org/ret/-/ret-0.2.2.tgz", + "integrity": "sha512-M0b3YWQs7R3Z917WRQy1HHA7Ba7D8hvZg6UE5mLykJxQVE2ju0IXbGlaHPPlkY+WN7wFP+wUMXmBFA0aV6vYGQ==" + }, + "safe-buffer": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.1.tgz", + "integrity": "sha512-kKvNJn6Mm93gAczWVJg7wH+wGYWNrDHdWvpUmHyEsgCtIwwo3bqPtV4tR5tuPaUhTOo/kvhVwd8XwwOllGYkbg==" + }, + "send": { + "version": "0.16.2", + "resolved": "https://registry.npmjs.org/send/-/send-0.16.2.tgz", + "integrity": "sha512-E64YFPUssFHEFBvpbbjr44NCLtI1AohxQ8ZSiJjQLskAdKuriYEP6VyGEsRDH8ScozGpkaX1BGvhanqCwkcEZw==", + "requires": { + "debug": "2.6.9", + "depd": "~1.1.2", + "destroy": "~1.0.4", + "encodeurl": "~1.0.2", + "escape-html": "~1.0.3", + "etag": "~1.8.1", + "fresh": "0.5.2", + "http-errors": "~1.6.2", + "mime": "1.4.1", + "ms": "2.0.0", + "on-finished": "~2.3.0", + "range-parser": "~1.2.0", + "statuses": "~1.4.0" + }, + "dependencies": { + "statuses": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/statuses/-/statuses-1.4.0.tgz", + "integrity": "sha512-zhSCtt8v2NDrRlPQpCNtw/heZLtfUDqxBM1udqikb/Hbk52LK4nQSwr10u77iopCW5LsyHpuXS0GnEc48mLeew==" + } + } + }, + "serve-static": { + "version": "1.13.2", + "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.13.2.tgz", + "integrity": "sha512-p/tdJrO4U387R9oMjb1oj7qSMaMfmOyd4j9hOFoxZe2baQszgHcSWjuya/CiT5kgZZKRudHNOA0pYXOl8rQ5nw==", + "requires": { + "encodeurl": "~1.0.2", + "escape-html": "~1.0.3", + "parseurl": "~1.3.2", + "send": "0.16.2" + } + }, + "source-map": { + "version": "0.7.3", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.7.3.tgz", + "integrity": "sha512-CkCj6giN3S+n9qrYiBTX5gystlENnRW5jZeNLHpe6aue+SrHcG5VYwujhW9s4dY31mEGsxBDrHR6oI69fTXsaQ==", + "optional": true + }, + "sprintf-js": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", + "integrity": "sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw=" + }, + "static-eval": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/static-eval/-/static-eval-2.0.0.tgz", + "integrity": "sha512-6flshd3F1Gwm+Ksxq463LtFd1liC77N/PX1FVVc3OzL3hAmo2fwHFbuArkcfi7s9rTNsLEhcRmXGFZhlgy40uw==", + "requires": { + "escodegen": "^1.8.1" + }, + "dependencies": { + "escodegen": { + "version": "1.11.0", + "resolved": "https://registry.npmjs.org/escodegen/-/escodegen-1.11.0.tgz", + "integrity": "sha512-IeMV45ReixHS53K/OmfKAIztN/igDHzTJUhZM3k1jMhIZWjk45SMwAtBsEXiJp3vSPmTcu6CXn7mDvFHRN66fw==", + "requires": { + "esprima": "^3.1.3", + "estraverse": "^4.2.0", + "esutils": "^2.0.2", + "optionator": "^0.8.1", + "source-map": "~0.6.1" + } + }, + "esprima": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/esprima/-/esprima-3.1.3.tgz", + "integrity": "sha1-/cpRzuYTOJXjyI1TXOSdv/YqRjM=" + }, + "estraverse": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-4.2.0.tgz", + "integrity": "sha1-De4/7TH81GlhjOc0IJn8GvoL2xM=" + }, + "source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "optional": true + } + } + }, + "statuses": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/statuses/-/statuses-1.5.0.tgz", + "integrity": "sha1-Fhx9rBd2Wf2YEfQ3cfqZOBR4Yow=" + }, + 
"string_decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "requires": { + "safe-buffer": "~5.1.0" + } + }, + "superagent": { + "version": "3.8.3", + "resolved": "https://registry.npmjs.org/superagent/-/superagent-3.8.3.tgz", + "integrity": "sha512-GLQtLMCoEIK4eDv6OGtkOoSMt3D+oq0y3dsxMuYuDvaNUvuT8eFBuLmfR0iYYzHC1e8hpzC6ZsxbuP6DIalMFA==", + "requires": { + "component-emitter": "^1.2.0", + "cookiejar": "^2.1.0", + "debug": "^3.1.0", + "extend": "^3.0.0", + "form-data": "^2.3.1", + "formidable": "^1.2.0", + "methods": "^1.1.1", + "mime": "^1.4.1", + "qs": "^6.5.1", + "readable-stream": "^2.3.5" + }, + "dependencies": { + "debug": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.1.0.tgz", + "integrity": "sha512-OX8XqP7/1a9cqkxYw2yXss15f26NKWBpDXQd0/uK/KPqdQhxbPa994hnzjcE2VqQpDslf55723cKPUOGSmMY3g==", + "requires": { + "ms": "2.0.0" + } + } + } + }, + "swagger-editor": { + "version": "2.10.5", + "resolved": "https://registry.npmjs.org/swagger-editor/-/swagger-editor-2.10.5.tgz", + "integrity": "sha1-pDFsyw1Ap30w2t+R8PTbfkdflIo=" + }, + "swagger-methods": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/swagger-methods/-/swagger-methods-1.0.4.tgz", + "integrity": "sha512-xrKFLbrZ6VxRsg+M3uJozJtsEpNI/aPfZsOkoEjXw8vhAqdMIqwTYGj1f4dmUgvJvCdZhV5iArgtqXgs403ltg==" + }, + "sway": { + "version": "1.0.0", + "resolved": "http://registry.npmjs.org/sway/-/sway-1.0.0.tgz", + "integrity": "sha1-No/8Dpa9hCJu0bmzPWa+V9oE8Jo=", + "requires": { + "debug": "^2.2.0", + "js-base64": "^2.1.9", + "js-yaml": "^3.5.2", + "json-refs": "^2.1.5", + "json-schema-faker": "^0.2.8", + "lodash": "^4.2.0", + "native-promise-only": "^0.8.1", + "path-to-regexp": "^1.2.1", + "swagger-methods": "^1.0.0", + "swagger-schema-official": "2.0.0-bab6bed", + "z-schema": "^3.16.1" + }, + "dependencies": { + "isarray": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", + "integrity": "sha1-ihis/Kmo9Bd+Cav8YDiTmwXR7t8=" + }, + "path-to-regexp": { + "version": "1.7.0", + "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-1.7.0.tgz", + "integrity": "sha1-Wf3g9DW62suhA6hOnTvGTpa5k30=", + "requires": { + "isarray": "0.0.1" + } + } + } + }, + "type-check": { + "version": "0.3.2", + "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.3.2.tgz", + "integrity": "sha1-WITKtRLPHTVeP7eE8wgEsrUg23I=", + "requires": { + "prelude-ls": "~1.1.2" + } + }, + "uri-js": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-3.0.2.tgz", + "integrity": "sha1-+QuFhQf4HepNz7s8TD2/orVX+qo=", + "requires": { + "punycode": "^2.1.0" + } + }, + "validator": { + "version": "10.7.1", + "resolved": "https://registry.npmjs.org/validator/-/validator-10.7.1.tgz", + "integrity": "sha512-tbB5JrTczfeHKLw3PnFRzGFlF1xUAwSgXEDb66EuX1ffCirspYpDEZo3Vc9j38gPdL4JKrDc5UPFfgYiw1IWRQ==" + }, + "z-schema": { + "version": "3.23.0", + "resolved": "https://registry.npmjs.org/z-schema/-/z-schema-3.23.0.tgz", + "integrity": "sha512-D8XV0BiHuQbWNEgu68RpjFZJ0C7jt+WYoszXKOohe54TdoTTauUvBQx+lsYCdalGIjGTFdQs5dxKvCUonUERzQ==", + "requires": { + "commander": "^2.7.1", + "lodash.get": "^4.0.0", + "lodash.isequal": "^4.0.0", + "validator": "^10.0.0" + } + } + } + }, + "swagger-schema-official": { + "version": "2.0.0-bab6bed", + "resolved": 
"https://registry.npmjs.org/swagger-schema-official/-/swagger-schema-official-2.0.0-bab6bed.tgz", + "integrity": "sha1-cAcEaNbSl3ylI3suUZyn0Gouo/0=" + }, + "swagger-ui": { + "version": "2.2.10", + "resolved": "https://registry.npmjs.org/swagger-ui/-/swagger-ui-2.2.10.tgz", + "integrity": "sha1-sl56IWZOXZC/OR2zDbCN5B6FLXs=" + }, + "to-fast-properties": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/to-fast-properties/-/to-fast-properties-1.0.3.tgz", + "integrity": "sha1-uDVx+k2MJbguIxsG46MFXeTKGkc=" + }, + "token-stream": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/token-stream/-/token-stream-0.0.1.tgz", + "integrity": "sha1-zu78cXp2xDFvEm0LnbqlXX598Bo=" + }, + "type-is": { + "version": "1.6.16", + "resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.16.tgz", + "integrity": "sha512-HRkVv/5qY2G6I8iab9cI7v1bOIdhm94dVjQCPFElW9W+3GeDOSHmy2EBYe4VTApuzolPcmgFTN3ftVJRKR2J9Q==", + "requires": { + "media-typer": "0.3.0", + "mime-types": "~2.1.18" + } + }, + "uc.micro": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/uc.micro/-/uc.micro-1.0.5.tgz", + "integrity": "sha512-JoLI4g5zv5qNyT09f4YAvEZIIV1oOjqnewYg5D38dkQljIzpPT296dbIGvKro3digYI1bkb7W6EP1y4uDlmzLg==" + }, + "uglify-js": { + "version": "2.8.29", + "resolved": "https://registry.npmjs.org/uglify-js/-/uglify-js-2.8.29.tgz", + "integrity": "sha1-KcVzMUgFe7Th913zW3qcty5qWd0=", + "requires": { + "source-map": "~0.5.1", + "uglify-to-browserify": "~1.0.0", + "yargs": "~3.10.0" + }, + "dependencies": { + "camelcase": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-1.2.1.tgz", + "integrity": "sha1-m7UwTS4LVmmLLHWLCKPqqdqlijk=" + }, + "cliui": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-2.1.0.tgz", + "integrity": "sha1-S0dXYP+AJkx2LDoXGQMukcf+oNE=", + "requires": { + "center-align": "^0.1.1", + "right-align": "^0.1.1", + "wordwrap": "0.0.2" + } + }, + "source-map": { + "version": "0.5.7", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz", + "integrity": "sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w=" + }, + "window-size": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/window-size/-/window-size-0.1.0.tgz", + "integrity": "sha1-VDjNLqk7IC76Ohn+iIeu58lPnJ0=" + }, + "wordwrap": { + "version": "0.0.2", + "resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-0.0.2.tgz", + "integrity": "sha1-t5Zpu0LstAn4PVg8rVLKF+qhZD8=" + }, + "yargs": { + "version": "3.10.0", + "resolved": "http://registry.npmjs.org/yargs/-/yargs-3.10.0.tgz", + "integrity": "sha1-9+572FfdfB0tOMDnTvvWgdFDH9E=", + "requires": { + "camelcase": "^1.0.2", + "cliui": "^2.1.0", + "decamelize": "^1.0.0", + "window-size": "0.1.0" + } + } + } + }, + "uglify-to-browserify": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/uglify-to-browserify/-/uglify-to-browserify-1.0.2.tgz", + "integrity": "sha1-bgkk1r2mta/jSeOabWMoUKD4grc=", + "optional": true + }, + "underscore": { + "version": "1.7.0", + "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.7.0.tgz", + "integrity": "sha1-a7rwh3UA02vjTsqlhODbn+8DUgk=" + }, + "unpipe": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz", + "integrity": "sha1-sr9O6FFKrmFltIF4KdIbLvSZBOw=" + }, + "util-deprecate": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": "sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8=" + }, + "utils-merge": { + "version": "1.0.1", + 
"resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz", + "integrity": "sha1-n5VxD1CiZ5R7LMwSR0HBAoQn5xM=" + }, + "vary": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz", + "integrity": "sha1-IpnwLG3tMNSllhsLn3RSShj2NPw=" + }, + "void-elements": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/void-elements/-/void-elements-2.0.1.tgz", + "integrity": "sha1-wGavtYK7HLQSjWDqkjkulNXp2+w=" + }, + "which": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz", + "integrity": "sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==", + "requires": { + "isexe": "^2.0.0" + } + }, + "with": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/with/-/with-5.1.1.tgz", + "integrity": "sha1-+k2qktrzLE6pTtRTyB8EaGtXXf4=", + "requires": { + "acorn": "^3.1.0", + "acorn-globals": "^3.0.0" + } + }, + "wordwrap": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-1.0.0.tgz", + "integrity": "sha1-J1hIEIkUVqQXHI0CJkQa3pDLyus=" + }, + "wrap-ansi": { + "version": "2.1.0", + "resolved": "http://registry.npmjs.org/wrap-ansi/-/wrap-ansi-2.1.0.tgz", + "integrity": "sha1-2Pw9KE3QV5T+hJc8rs3Rz4JP3YU=", + "requires": { + "string-width": "^1.0.1", + "strip-ansi": "^3.0.1" + } + }, + "wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=" + }, + "y18n": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-3.2.1.tgz", + "integrity": "sha1-bRX7qITAhnnA136I53WegR4H+kE=" + }, + "yallist": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-2.1.2.tgz", + "integrity": "sha1-HBH5IY8HYImkfdUS+TxmmaaoHVI=" + }, + "yargs": { + "version": "10.1.2", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-10.1.2.tgz", + "integrity": "sha512-ivSoxqBGYOqQVruxD35+EyCFDYNEFL/Uo6FcOnz+9xZdZzK0Zzw4r4KhbrME1Oo2gOggwJod2MnsdamSG7H9ig==", + "requires": { + "cliui": "^4.0.0", + "decamelize": "^1.1.1", + "find-up": "^2.1.0", + "get-caller-file": "^1.0.1", + "os-locale": "^2.0.0", + "require-directory": "^2.1.1", + "require-main-filename": "^1.0.1", + "set-blocking": "^2.0.0", + "string-width": "^2.0.0", + "which-module": "^2.0.0", + "y18n": "^3.2.1", + "yargs-parser": "^8.1.0" + }, + "dependencies": { + "ansi-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-3.0.0.tgz", + "integrity": "sha1-7QMXwyIGT3lGbAKWa922Bas32Zg=" + }, + "cliui": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-4.1.0.tgz", + "integrity": "sha512-4FG+RSG9DL7uEwRUZXZn3SS34DiDPfzP0VOiEwtUWlE+AR2EIg+hSyvrIgUUfhdgR/UkAeW2QHgeP+hWrXs7jQ==", + "requires": { + "string-width": "^2.1.1", + "strip-ansi": "^4.0.0", + "wrap-ansi": "^2.0.0" + } + }, + "find-up": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-2.1.0.tgz", + "integrity": "sha1-RdG35QbHF93UgndaK3eSCjwMV6c=", + "requires": { + "locate-path": "^2.0.0" + } + }, + "is-fullwidth-code-point": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", + "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=" + }, + "os-locale": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/os-locale/-/os-locale-2.1.0.tgz", + "integrity": 
"sha512-3sslG3zJbEYcaC4YVAvDorjGxc7tv6KVATnLPZONiljsUncvihe9BQoVCEs0RZ1kmf4Hk9OBqlZfJZWI4GanKA==", + "requires": { + "execa": "^0.7.0", + "lcid": "^1.0.0", + "mem": "^1.1.0" + } + }, + "string-width": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-2.1.1.tgz", + "integrity": "sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw==", + "requires": { + "is-fullwidth-code-point": "^2.0.0", + "strip-ansi": "^4.0.0" + } + }, + "strip-ansi": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-4.0.0.tgz", + "integrity": "sha1-qEeQIusaw2iocTibY1JixQXuNo8=", + "requires": { + "ansi-regex": "^3.0.0" + } + }, + "which-module": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/which-module/-/which-module-2.0.0.tgz", + "integrity": "sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho=" + } + } + }, + "yargs-parser": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-8.1.0.tgz", + "integrity": "sha512-yP+6QqN8BmrgW2ggLtTbdrOyBNSI7zBa4IykmiV5R1wl1JWNxQvWhMfMdmzIYtKU7oP3OOInY/tl2ov3BDjnJQ==", + "requires": { + "camelcase": "^4.1.0" + }, + "dependencies": { + "camelcase": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-4.1.0.tgz", + "integrity": "sha1-1UVjW+HjPFQmScaRc+Xeas+uNN0=" + } + } + } + } +} diff --git a/package.json b/package.json new file mode 100644 index 000000000..b8873c585 --- /dev/null +++ b/package.json @@ -0,0 +1,22 @@ +{ + "name": "Data-Repository-Service-openapi-spec", + "version": "1.0.0", + "dependencies": { + "bower": "^1.7.7", + "connect": "^3.4.1", + "cors": "^2.7.1", + "github-markdown": "^3.2.0", + "github-markdown-css": "^2.10.0", + "help": "^3.0.2", + "portfinder": "^1.0.3", + "shelljs": "^0.7.0", + "showdown": "^1.8.6", + "swagger-repo": "^1.5.1", + "swagger-ui": "^2.1.4" + }, + "private": true, + "scripts": { + "build": "node ./scripts/buildui.js", + "swagger": "swagger-repo" + } +} diff --git a/python/ga4gh/__init__.py b/python/ga4gh/__init__.py index 4cb530822..3627ee094 100644 --- a/python/ga4gh/__init__.py +++ b/python/ga4gh/__init__.py @@ -2,6 +2,6 @@ """ This top-level package simply declares the ga4gh namespace so it can be used across modules. In this project it contains the single -:mod:`ga4gh.dos` module. +:mod:`ga4gh.drs` module. 
""" __import__('pkg_resources').declare_namespace(__name__) diff --git a/python/ga4gh/dos/data_object_service.swagger.yaml b/python/ga4gh/dos/data_object_service.swagger.yaml deleted file mode 120000 index 567135c76..000000000 --- a/python/ga4gh/dos/data_object_service.swagger.yaml +++ /dev/null @@ -1 +0,0 @@ -../../../openapi/data_object_service.swagger.yaml \ No newline at end of file diff --git a/python/ga4gh/dos/__init__.py b/python/ga4gh/drs/__init__.py similarity index 52% rename from python/ga4gh/dos/__init__.py rename to python/ga4gh/drs/__init__.py index 588460d9b..4439d007b 100644 --- a/python/ga4gh/dos/__init__.py +++ b/python/ga4gh/drs/__init__.py @@ -1,2 +1,2 @@ # -*- coding: utf-8 -*- -__version__ = "0.5.0" +__version__ = "0.0.1" diff --git a/python/ga4gh/dos/client.py b/python/ga4gh/drs/client.py similarity index 92% rename from python/ga4gh/dos/client.py rename to python/ga4gh/drs/client.py index a67341781..fda157350 100644 --- a/python/ga4gh/dos/client.py +++ b/python/ga4gh/drs/client.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- """ -This module exposes a single class :class:`ga4gh.dos.client.Client`, which +This module exposes a single class :class:`ga4gh.drs.client.Client`, which exposes the HTTP methods of the Data Object Service as named Python functions. This makes it easy to access resources that are described following these @@ -8,7 +8,7 @@ following the OpenAPI schema. It currently assumes that the service also hosts the swagger.json, in a style -similar to the demonstration server, :mod:`ga4gh.dos.server`. +similar to the demonstration server, :mod:`ga4gh.drs.server`. """ try: # for python3 compat import urlparse @@ -21,7 +21,7 @@ from bravado_core.exception import SwaggerValidationError from bravado_core.formatter import SwaggerFormat -import ga4gh.dos.schema +import ga4gh.drs.schema DEFAULT_CONFIG = { 'validate_requests': True, @@ -59,8 +59,8 @@ class Client: connects to the service to download the swagger.json and returns a client in the DataObjectService namespace:: - from ga4gh.dos.client import Client - client = Client(url='http://localhost:8000/ga4gh/dos/v1') + from ga4gh.drs.client import Client + client = Client(url='http://localhost:8000/ga4gh/drs/v1') models = client.models c = client.client @@ -80,7 +80,7 @@ class Client: If you want to use the client against a DOS implementation that does not present a ``swagger.json``, then you can use the local schema:: - client = Client(url='http://example.com/dos-base-path/', local=True) + client = Client(url='http://example.com/drs-base-path/', local=True) Note that since this uses the local schema, some operations that are not implemented by the implementation under test may fail. @@ -111,7 +111,7 @@ def __init__(self, url=None, config=DEFAULT_CONFIG, http_client=None, request_he point to the host and base path of the implementation under test:: - Client(url='https://example.com/ga4gh/dos/v1/', local=True) + Client(url='https://example.com/ga4gh/drs/v1/', local=True) If False, the ``url`` parameter should point to a Swagger specification (``swagger.json``). @@ -121,8 +121,8 @@ def __init__(self, url=None, config=DEFAULT_CONFIG, http_client=None, request_he if local: # :meth:`bravado.client.SwaggerClient.from_spec` takes a schema # as a Python dictionary, which we can conveniently expose - # via :func:`ga4gh.dos.schema.present_schema`. - schema = ga4gh.dos.schema.present_schema() + # via :func:`ga4gh.drs.schema.present_schema`. 
+ schema = ga4gh.drs.schema.present_schema() # Set schema['host'] and schema['basePath'] to the provided # values if specified, otherwise leave them as they are @@ -140,7 +140,7 @@ def __init__(self, url=None, config=DEFAULT_CONFIG, http_client=None, request_he config=config, http_client=http_client, request_headers=request_headers) - self.client = self.models.DataObjectService + self.client = self.models.DataRepositoryService @classmethod def config(cls, url, http_client=None, request_headers=None): diff --git a/python/ga4gh/dos/controllers.py b/python/ga4gh/drs/controllers.py similarity index 65% rename from python/ga4gh/dos/controllers.py rename to python/ga4gh/drs/controllers.py index 34db042bc..a8c20b4a9 100644 --- a/python/ga4gh/dos/controllers.py +++ b/python/ga4gh/drs/controllers.py @@ -1,12 +1,12 @@ # -*- coding: utf-8 -*- """ -Data Object Service Controller Functions +Data Repository Service Controller Functions These controller functions for the demo server implement an opinionated version -of DOS by providing uuid's to newly create objects, and using timestamp +of DRS by providing uuid's to newly create objects, and using timestamp versions. -Initializes an in-memory dictionary for storing Data Objects. +Initializes an in-memory dictionary for storing Objects. """ import uuid import datetime @@ -16,8 +16,8 @@ DEFAULT_PAGE_SIZE = 100 # Our in memory registry -data_objects = {} -data_bundles = {} +objects = {} +bundles = {} # Application logic @@ -32,17 +32,17 @@ def now(): def get_most_recent(key): """ - Gets the most recent Data Object for a key. + Gets the most recent Object for a key. :param key: :return: """ max = {'updated': '01-01-1965 00:00:00Z'} - if key not in data_objects: - raise KeyError("Data object not found!") - for version in data_objects[key].keys(): - data_object = data_objects[key][version] - if parse(data_object['updated']) > parse(max['updated']): - max = data_object + if key not in objects: + raise KeyError("object not found!") + for version in objects[key].keys(): + object = objects[key][version] + if parse(object['updated']) > parse(max['updated']): + max = object return max @@ -55,22 +55,22 @@ def get_most_recent_bundle(key): :return: """ max = {'updated': '01-01-1965 00:00:00Z'} - for version in data_bundles[key].keys(): - data_bundle = data_bundles[key][version] - if parse(data_bundle['updated']) > parse(max['updated']): - max = data_bundle + for version in bundles[key].keys(): + bundle = bundles[key][version] + if parse(bundle['updated']) > parse(max['updated']): + max = bundle return max -def filter_data_objects(predicate): +def filter_objects(predicate): """ Filters data objects according to a function that acts on each item returning either True or False per item. """ - return [get_most_recent(x[0]) for x in filter(predicate, data_objects.items())] + return [get_most_recent(x[0]) for x in filter(predicate, objects.items())] -def filter_data_bundles(predicate): +def filter_bundles(predicate): """ Filters data bundles according to a function that acts on each item returning either True or False per item. 
@@ -79,7 +79,7 @@ def filter_data_bundles(predicate): """ return [ get_most_recent_bundle(x[0]) for x in filter( - predicate, data_bundles.items())] + predicate, bundles.items())] def add_created_timestamps(doc): @@ -102,8 +102,8 @@ def add_updated_timestamps(doc): stores = { - 'data_objects': data_objects, - 'data_bundles': data_bundles + 'objects': objects, + 'bundles': bundles } @@ -135,7 +135,7 @@ def create(body, key): # Data Object Controllers -def CreateDataObject(**kwargs): +def CreateObject(**kwargs): """ Creates a new Data Object by issuing an identifier if it is not provided. @@ -144,94 +144,94 @@ def CreateDataObject(**kwargs): :return: """ # TODO Safely create - body = kwargs['body']['data_object'] - doc = create(body, 'data_objects') - return({"data_object_id": doc['id']}, 200) + body = kwargs['body']['object'] + doc = create(body, 'objects') + return({"object_id": doc['id']}, 200) -def GetDataObject(**kwargs): +def GetObject(**kwargs): """ - Get a Data Object by data_object_id. + Get a Data Object by object_id. :param kwargs: :return: """ - data_object_id = kwargs['data_object_id'] + object_id = kwargs['object_id'] version = kwargs.get('version', None) # Implementation detail, this server uses integer version numbers. # Get the Data Object from our dictionary - data_object_key = data_objects.get(data_object_id, None) - if data_object_key and not version: - data_object = get_most_recent(data_object_id) - return({"data_object": data_object}, 200) - elif data_object_key and data_objects[data_object_id].get(version, None): - data_object = data_objects[data_object_id][version] - return ({"data_object": data_object}, 200) + object_key = objects.get(object_id, None) + if object_key and not version: + object = get_most_recent(object_id) + return({"object": object}, 200) + elif object_key and objects[object_id].get(version, None): + object = objects[object_id][version] + return ({"object": object}, 200) else: return({'msg': "The requested Data " "Object wasn't found", 'status_code': 404}, 404) -def GetDataObjectVersions(**kwargs): +def GetObjectVersions(**kwargs): """ Returns all versions of a Data Object. :param kwargs: :return: """ - data_object_id = kwargs['data_object_id'] + object_id = kwargs['object_id'] # Implementation detail, this server uses integer version numbers. # Get the Data Object from our dictionary - data_object_versions_dict = data_objects.get(data_object_id, None) - data_object_versions = [x[1] for x in data_object_versions_dict.items()] - if data_object_versions: - return({"data_objects": data_object_versions}, 200) + object_versions_dict = objects.get(object_id, None) + object_versions = [x[1] for x in object_versions_dict.items()] + if object_versions: + return({"objects": object_versions}, 200) else: return({'msg': "The requested Data " "Object wasn't found", 'status_code': 404}, 404) -def UpdateDataObject(**kwargs): +def UpdateObject(**kwargs): """ Update a Data Object by creating a new version. :param kwargs: :return: """ - data_object_id = kwargs['data_object_id'] - body = kwargs['body']['data_object'] + object_id = kwargs['object_id'] + body = kwargs['body']['object'] # Check to make sure we are updating an existing document. 
try: - old_data_object = get_most_recent(data_object_id) + old_object = get_most_recent(object_id) except KeyError: return "Data object not found", 404 # Upsert the new body in place of the old document doc = add_updated_timestamps(body) - doc['created'] = old_data_object['created'] + doc['created'] = old_object['created'] # We need to safely set the version if they provided one that # collides we'll pad it. If they provided a good one, we will # accept it. If they don't provide one, we'll give one. new_version = doc.get('version', None) - if not new_version or new_version in data_objects[data_object_id].keys(): + if not new_version or new_version in objects[object_id].keys(): doc['version'] = now() - doc['id'] = old_data_object['id'] - data_objects[data_object_id][doc['version']] = doc - return({"data_object_id": data_object_id}, 200) + doc['id'] = old_object['id'] + objects[object_id][doc['version']] = doc + return({"object_id": object_id}, 200) -def DeleteDataObject(**kwargs): +def DeleteObject(**kwargs): """ - Delete a Data Object by data_object_id. + Delete a Data Object by object_id. :param kwargs: :return: """ - data_object_id = kwargs['data_object_id'] - del data_objects[data_object_id] - return({"data_object_id": data_object_id}, 200) + object_id = kwargs['object_id'] + del objects[object_id] + return({"object_id": object_id}, 200) -def ListDataObjects(**kwargs): +def ListObjects(**kwargs): """ - Returns a list of Data Objects matching a ListDataObjectsRequest. + Returns a list of Data Objects matching a ListObjectsRequest. :param kwargs: alias, url, checksum, checksum_type, page_size, page_token :return: @@ -262,7 +262,7 @@ def filterer(item): return True # Lazy since we're in memory - filtered = filter_data_objects(filterer) + filtered = filter_objects(filterer) page_size = int(kwargs.get('page_size', DEFAULT_PAGE_SIZE)) # We'll page if there's a provided token or if we have too many # objects. @@ -275,54 +275,54 @@ def filterer(item): # If there is more than one page left of results next_page_token = int(kwargs.get('page_token', 0)) + 1 return ( - {"data_objects": page, + {"objects": page, "next_page_token": str(next_page_token)}, 200) else: - return ({"data_objects": page}, 200) + return ({"objects": page}, 200) else: page = filtered - return({"data_objects": page}, 200) + return({"objects": page}, 200) # Data Bundle Controllers -def CreateDataBundle(**kwargs): +def CreateBundle(**kwargs): """ Create a Data Bundle, issuing a new identifier if one is not provided. :param kwargs: :return: """ - body = kwargs['body']['data_bundle'] - doc = create(body, 'data_bundles') - return({"data_bundle_id": doc['id']}, 200) + body = kwargs['body']['bundle'] + doc = create(body, 'bundles') + return({"bundle_id": doc['id']}, 200) -def GetDataBundle(**kwargs): +def GetBundle(**kwargs): """ Get a Data Bundle by identifier. :param kwargs: :return: """ - data_bundle_id = kwargs['data_bundle_id'] + bundle_id = kwargs['bundle_id'] version = kwargs.get('version', None) # Implementation detail, this server uses integer version numbers. 
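# Editorial sketch (not part of the patch): calling the renamed object
# controllers directly. In the demo server, connexion invokes these functions
# with the deserialized request as keyword arguments; the minimal object body
# below and the behaviour of create() are assumptions for illustration only.
from ga4gh.drs import controllers

created, status = controllers.CreateObject(body={'object': {'name': 'example.txt'}})
object_id = created['object_id']
fetched, status = controllers.GetObject(object_id=object_id)
# ListObjects pages its results: a 'next_page_token' is only present when more
# than page_size objects match the filter.
page, status = controllers.ListObjects(page_size=5)
if 'next_page_token' in page:
    next_page, _ = controllers.ListObjects(page_size=5,
                                           page_token=page['next_page_token'])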
# Get the Data Object from our dictionary - data_bundle_key = data_bundles.get(data_bundle_id, None) - if data_bundle_key and not version: - data_bundle = get_most_recent_bundle(data_bundle_id) - return({"data_bundle": data_bundle}, 200) - elif data_bundle_key and data_objects[data_bundle_id].get(version, None): - data_bundle = data_bundles[data_bundle_id][version] - return ({"data_bundle": data_bundle}, 200) + bundle_key = bundles.get(bundle_id, None) + if bundle_key and not version: + bundle = get_most_recent_bundle(bundle_id) + return({"bundle": bundle}, 200) + elif bundle_key and bundles[bundle_id].get(version, None): + bundle = bundles[bundle_id][version] + return ({"bundle": bundle}, 200) else: return({'msg': "The requested Data " "Bundle wasn't found", 'status_code': 404}, 404) -def UpdateDataBundle(**kwargs): +def UpdateBundle(**kwargs): """ Updates a Data Bundle to include new metadata by upserting the new bundle. @@ -330,59 +330,59 @@ def UpdateDataBundle(**kwargs): :param kwargs: :return: """ - data_bundle_id = kwargs['data_bundle_id'] - body = kwargs['body']['data_bundle'] + bundle_id = kwargs['bundle_id'] + body = kwargs['body']['bundle'] # Check to make sure we are updating an existing document. - old_data_bundle = get_most_recent_bundle(data_bundle_id) + old_bundle = get_most_recent_bundle(bundle_id) # Upsert the new body in place of the old document doc = add_updated_timestamps(body) - doc['created'] = old_data_bundle['created'] + doc['created'] = old_bundle['created'] # We need to safely set the version if they provided one that # collides we'll pad it. If they provided a good one, we will # accept it. If they don't provide one, we'll give one. new_version = doc.get('version', None) - if not new_version or new_version in data_bundles[data_bundle_id].keys(): + if not new_version or new_version in bundles[bundle_id].keys(): doc['version'] = now() - doc['id'] = old_data_bundle['id'] - data_bundles[data_bundle_id][doc['version']] = doc - return({"data_bundle_id": data_bundle_id}, 200) + doc['id'] = old_bundle['id'] + bundles[bundle_id][doc['version']] = doc + return({"bundle_id": bundle_id}, 200) -def GetDataBundleVersions(**kwargs): +def GetBundleVersions(**kwargs): """ Get all versions of a Data Bundle. :param kwargs: :return: """ - data_bundle_id = kwargs['data_bundle_id'] - data_bundle_versions_dict = data_bundles.get(data_bundle_id, None) - data_bundle_versions = [x[1] for x in data_bundle_versions_dict.items()] - if data_bundle_versions: - return({"data_bundles": data_bundle_versions}, 200) + bundle_id = kwargs['bundle_id'] + bundle_versions_dict = bundles.get(bundle_id, None) + bundle_versions = [x[1] for x in bundle_versions_dict.items()] + if bundle_versions: + return({"bundles": bundle_versions}, 200) else: return({'msg': "The requested Data " "Bundle wasn't found", 'status_code': 404}, 404) -def DeleteDataBundle(**kwargs): +def DeleteBundle(**kwargs): """ Deletes a Data Bundle by ID. :param kwargs: :return: """ - data_bundle_id = kwargs['data_bundle_id'] - del data_bundles[data_bundle_id] + bundle_id = kwargs['bundle_id'] + del bundles[bundle_id] return(kwargs, 200) -def ListDataBundles(**kwargs): +def ListBundles(**kwargs): """ - Takes a ListDataBundles request and returns the bundles that match + Takes a ListBundles request and returns the bundles that match that request. Possible kwargs: alias, url, checksum, checksum_type, page_size, page_token - :param kwargs: ListDataBundles request. + :param kwargs: ListBundles request.
:return: """ def filterer(item): @@ -406,7 +406,7 @@ def filterer(item): return False return True # Lazy since we're in memory - filtered = filter_data_bundles(filterer) + filtered = filter_bundles(filterer) page_size = int(kwargs.get('page_size', DEFAULT_PAGE_SIZE)) # We'll page if there's a provided token or if we have too many # objects. @@ -419,15 +419,15 @@ def filterer(item): # If there is more than one page left of results next_page_token = int(kwargs.get('page_token', 0)) + 1 return ( - {"data_bundles": page, + {"bundles": page, "next_page_token": str(next_page_token)}, 200) else: - return ({"data_bundles": page}, 200) + return ({"bundles": page}, 200) else: page = filtered - return({"data_bundles": page}, 200) + return({"bundles": page}, 200) def GetServiceInfo(**kwargs): - import ga4gh.dos.schema - return ga4gh.dos.schema.present_schema()['info'], 200 + import ga4gh.drs.schema + return ga4gh.drs.schema.present_schema()['info'], 200 diff --git a/python/ga4gh/drs/data_repository_service.swagger.yaml b/python/ga4gh/drs/data_repository_service.swagger.yaml new file mode 120000 index 000000000..9d18830c6 --- /dev/null +++ b/python/ga4gh/drs/data_repository_service.swagger.yaml @@ -0,0 +1 @@ +../../../openapi/data_repository_service.swagger.yaml \ No newline at end of file diff --git a/python/ga4gh/dos/schema.py b/python/ga4gh/drs/schema.py similarity index 93% rename from python/ga4gh/dos/schema.py rename to python/ga4gh/drs/schema.py index 01db1a52f..fbde3b28c 100644 --- a/python/ga4gh/dos/schema.py +++ b/python/ga4gh/drs/schema.py @@ -4,7 +4,7 @@ import swagger_spec_validator.common cd = os.path.dirname(os.path.realpath(__file__)) -SWAGGER_PATH = os.path.join(cd, 'data_object_service.swagger.yaml') +SWAGGER_PATH = os.path.join(cd, 'data_repository_service.swagger.yaml') def present_schema(): @@ -25,7 +25,7 @@ def from_chalice_routes(routes, base_path=''): :param str base_path: the base path of the endpoints listed in `routes`. This is only necessary if a base path is manually prepended to each endpoint your service exposes, - e.g. ``@app.route('/ga4gh/dos/v1/dataobjects')``. + e.g. ``@app.route('/ga4gh/drs/v1/dataobjects')``. This string will be stripped from the beginning of each path in the `routes` object if it is present. The schema will be updated with this value. diff --git a/python/ga4gh/dos/server.py b/python/ga4gh/drs/server.py similarity index 71% rename from python/ga4gh/dos/server.py rename to python/ga4gh/drs/server.py index 511316ea8..d04173625 100644 --- a/python/ga4gh/dos/server.py +++ b/python/ga4gh/drs/server.py @@ -1,27 +1,27 @@ # -*- coding: utf-8 -*- """ -DOS Demonstration Server +DRS Demonstration Server -Running this server will start an ephemeral Data Object Service (its registry +Running this server will start an ephemeral Data Repository Service (its registry contents won't be saved after exiting). It uses the connexion module to translate the OpenAPI schema into named controller functions. -These functions are described in :mod:`ga4gh.dos.controllers` and +These functions are described in :mod:`ga4gh.drs.controllers` and are meant to provide a simple implementation of DOS. """ import connexion from flask_cors import CORS # These are imported by name by connexion so we assert it here. -from ga4gh.dos.controllers import * # noqa -from ga4gh.dos.schema import SWAGGER_PATH +from ga4gh.drs.controllers import * # noqa +from ga4gh.drs.schema import SWAGGER_PATH def configure_app(): # The model name has to match what is in # tools/prepare_swagger.sh controller. 
app = connexion.App( - "ga4gh.dos.server", + "ga4gh.drs.server", swagger_ui=True, swagger_json=True) app.add_api(SWAGGER_PATH) diff --git a/python/ga4gh/dos/test/__init__.py b/python/ga4gh/drs/test/__init__.py similarity index 85% rename from python/ga4gh/dos/test/__init__.py rename to python/ga4gh/drs/test/__init__.py index 2094eb5fb..bee266244 100644 --- a/python/ga4gh/dos/test/__init__.py +++ b/python/ga4gh/drs/test/__init__.py @@ -11,23 +11,23 @@ def test_requires(*operations): """ This is a decorator that identifies what DOS operations a given test case uses (where each DOS operation is named by its `operationId` in - the schema, e.g. ListDataBundles, UpdateDataObject, GetServiceInfo, + the schema, e.g. ListBundles, UpdateObject, GetServiceInfo, etc.) and skips them if the operation is not supported by the implementation under test. For example, given this test setup:: class Test(AbstractComplianceTest): - supports = ['UpdateDataBundles'] + supports = ['UpdateBundles'] - @test_requires('UpdateDataBundles') + @test_requires('UpdateBundles') def test_update_data_bundles(self): - self.dos_request('PUT', '/databundles/1234') + self.drs_request('PUT', '/databundles/1234') - @test_requires('ListDataBundles', 'UpdateDataBundles') + @test_requires('ListBundles', 'UpdateBundles') def test_list_and_update_data_bundles(self): - self.dos_request('GET', '/databundles') - self.dos_request('PUT', '/databundles/1234') + self.drs_request('GET', '/databundles') + self.drs_request('PUT', '/databundles/1234') ``test_update_data_bundles`` would run and ``test_list_and_update_data_bundles`` would be skipped. @@ -46,9 +46,9 @@ def wrapper(self): return decorator -class DataObjectServiceTest(unittest.TestCase): +class DataRepositoryServiceTest(unittest.TestCase): @staticmethod - def generate_data_objects(amount): + def generate_objects(amount): """ Yields a specified number of data objects with random attributes. @@ -101,17 +101,17 @@ def generate_data_objects(amount): } @staticmethod - def generate_data_bundles(amount): + def generate_bundles(amount): """ Yields a specified number of data bundles with random attributes. :param int amount: the amount of data bundles to generate """ - for bdl in DataObjectServiceTest.generate_data_objects(amount): + for bdl in DataRepositoryServiceTest.generate_objects(amount): del bdl['name'] del bdl['size'] del bdl['mime_type'] del bdl['urls'] # See :var:`generate_data_objects.types` above - bdl['data_object_ids'] = [str(uuid.uuid4()), str(uuid.uuid4())] + bdl['object_ids'] = [str(uuid.uuid4()), str(uuid.uuid4())] yield bdl diff --git a/python/ga4gh/dos/test/compliance.py b/python/ga4gh/drs/test/compliance.py similarity index 57% rename from python/ga4gh/dos/test/compliance.py rename to python/ga4gh/drs/test/compliance.py index be5e87914..5a2975ad3 100644 --- a/python/ga4gh/dos/test/compliance.py +++ b/python/ga4gh/drs/test/compliance.py @@ -9,16 +9,16 @@ import urllib import uuid -import ga4gh.dos.schema -from ga4gh.dos.test import DataObjectServiceTest, test_requires +import ga4gh.drs.schema +from ga4gh.drs.test import DataRepositoryServiceTest, test_requires logging.basicConfig(level=logging.DEBUG) logger = logging.getLogger(__name__) -class AbstractComplianceTest(DataObjectServiceTest): +class AbstractComplianceTest(DataRepositoryServiceTest): """ - This class implements a number of compliance tests for Data Object Service + This class implements a number of compliance tests for Object Service implementations. 
It is meant to provide a single, standardized test harness to verify that a given DOS implementation acts in a manner consistent with the schema. @@ -37,7 +37,7 @@ class AbstractComplianceTest(DataObjectServiceTest): For a service built using Chalice, you would likely be able to write something similar to this:: - from ga4gh.dos.test.compliance import AbstractComplianceTest + from ga4gh.drs.test.compliance import AbstractComplianceTest from chalice import LocalGateway, Config from my_chalice_app import chalice_app @@ -49,7 +49,7 @@ def setUpClass(cls): @classmethod def _make_request(self, meth, path, headers=None, body=None) headers = headers or {} - r = self.lg.handle_request(method=meth, path='/ga4gh/dos/v1' + path, + r = self.lg.handle_request(method=meth, path='/ga4gh/drs/v1' + path, headers=headers, body=body) return r['body'], r['statusCode'] @@ -60,23 +60,23 @@ def _make_request(self, meth, path, headers=None, body=None) the list of all DOS operations, named by the `operationId` key in the schema:: - supports = ['GetServiceInfo', 'GetDataBundleVersions', - 'CreateDataBundle', 'ListDataBundles', - 'UpdateDataObject', 'GetDataObject', ...] + supports = ['GetServiceInfo', 'GetBundleVersions', + 'CreateBundle', 'ListBundles', + 'UpdateObject', 'GetObject', ...] Adding / removing operations from this list will adjust which tests are run. So, doing something like:: class Test(AbstractComplianceTest): - self.supports = ['ListDataObjects'] + self.supports = ['ListObjects'] - would skip all tests calling UpdateDataBundle, GetDataBundle, - and any other endpoint that is not ListDataObjects. + would skip all tests calling UpdateBundle, GetBundle, + and any other endpoint that is not ListObjects. """ # Populate :var:`supports` with the `operationId` of each DOS endpoint # specified in the schema. supports = [] - for path in ga4gh.dos.schema.present_schema()['paths'].values(): + for path in ga4gh.drs.schema.present_schema()['paths'].values(): for method in path.values(): supports.append(method['operationId']) @@ -86,10 +86,10 @@ def _make_request(cls, meth, path, headers=None, body=None): Method that makes requests to a DOS implementation under test given a method, path, request headers, and a request body. - The provided path is the path provided in the Data Object Service + The provided path is the path provided in the Object Service schema - this means that in your implementation of this method, you might need to prepend the provided path with your ``basePath``, - e.g. ``/ga4gh/dos/v1``. + e.g. ``/ga4gh/drs/v1``. This method should return a tuple of the raw request content as a string and the return code of the request as an int. @@ -97,7 +97,7 @@ def _make_request(cls, meth, path, headers=None, body=None): :param str meth: the HTTP method to use in the request (i.e. GET, PUT, etc.) :param str path: path to make a request to, sans hostname (e.g. - `/databundles`) + `/bundles`) :param dict headers: headers to include with the request :param dict body: data to be included in the request body (serialized as JSON) @@ -108,7 +108,7 @@ def _make_request(cls, meth, path, headers=None, body=None): raise NotImplementedError @classmethod - def dos_request(cls, meth, path, headers=None, body=None, expected_status=200): + def drs_request(cls, meth, path, headers=None, body=None, expected_status=200): """ Wrapper function around :meth:`AbstractComplianceTest._make_request`. 
Logs the request being made, makes the request with @@ -117,13 +117,13 @@ def dos_request(cls, meth, path, headers=None, body=None, expected_status=200): It is assumed that any request made through this function is a request made to the underlying DOS implementation - e.g., - ``self.dos_request('https://example.com/')`` should be expected + ``self.drs_request('https://example.com/')`` should be expected to fail. :param str meth: the HTTP method to use in the request (i.e. GET, PUT, etc.) :param str path: path to make a request to, sans hostname (e.g. - `/databundles`) + `/bundles`) :param dict headers: headers to include with the request :param dict body: data to be included in the request body (**not** serialized as JSON) @@ -147,10 +147,10 @@ def dos_request(cls, meth, path, headers=None, body=None, expected_status=200): # Check to make sure the return code is what we expect msg = "{meth} {path} returned {status}, expected {expected_status}: {request}" # We could use :meth:`assertEqual` here, but if we do, - # :meth:`dos_request` must be an instance method. Since the only + # :meth:`drs_request` must be an instance method. Since the only # advantage we really lose is a prettier error message, we can # be a little verbose this one time. - # It's preferable that :meth:`dos_request` be defined as a class method + # It's preferable that :meth:`drs_request` be defined as a class method # to allow one-time server setup to be performed in meth:`setUpClass`, # which must necessarily be a class method. if not status == expected_status: @@ -165,8 +165,8 @@ def get_query_url(path, **kwargs): Returns the given path with the provided kwargs concatenated as query parameters, e.g.:: - >>> self.get_query_url('/dataobjects', alias=123) - '/dataobjects?alias=123' + >>> self.get_query_url('/objects', alias=123) + '/objects?alias=123' :param str path: URL path without query parameters :param kwargs: query parameters @@ -174,136 +174,136 @@ def get_query_url(path, **kwargs): """ return path + '?' + urllib.urlencode(kwargs) - def get_random_data_object(self): + def get_random_object(self): """ - Retrieves a 'random' data object by performing a ListDataObjects + Retrieves a 'random' data object by performing a ListObjects request with a large page size then randomly selecting a data object from the response. - As this test utilizes the ListDataObjects operation, be sure to + As this test utilizes the ListObjects operation, be sure to specify that as a test requirement with :func:`test_requires` when using this context manager in a test case. Usage:: - obj, url = self.get_random_data_object() + obj, url = self.get_random_object() :returns: a random data object as a dict and its relative URL - (e.g. '/dataobjects/abcdefg-12345') as a string + (e.g. '/objects/abcdefg-12345') as a string :rtype: tuple """ - r = self.dos_request('GET', self.get_query_url('/dataobjects', page_size=100)) - data_obj = random.choice(r['data_objects']) - url = '/dataobjects/' + data_obj['id'] - return data_obj, url + r = self.drs_request('GET', self.get_query_url('/objects', page_size=100)) + obj = random.choice(r['objects']) + url = '/objects/' + obj['id'] + return obj, url - def get_random_data_bundle(self): + def get_random_bundle(self): """ - Retrieves a 'random' data bundle. Similar to :meth:`get_random_data_object` + Retrieves a 'random' data bundle. Similar to :meth:`get_random_object` but retrieves a data bundle instead. 
""" - r = self.dos_request('GET', self.get_query_url('/databundles', page_size=100)) - data_bdl = random.choice(r['data_bundles']) - url = '/databundles/' + data_bdl['id'] - return data_bdl, url + r = self.drs_request('GET', self.get_query_url('/bundles', page_size=100)) + bdl = random.choice(r['bundles']) + url = '/bundles/' + bdl['id'] + return bdl, url - # # ListDataObject tests - @test_requires('ListDataObjects') - def test_list_data_objects_simple(self): + # # ListObject tests + @test_requires('ListObjects') + def test_list_objects_simple(self): """ - Smoke test to verify that `GET /dataobjects` returns a response. + Smoke test to verify that `GET /objects` returns a response. """ - r = self.dos_request('GET', '/dataobjects') + r = self.drs_request('GET', '/objects') self.assertTrue(r) - @test_requires('ListDataObjects') - def test_list_data_objects_by_checksum(self): + @test_requires('ListObjects') + def test_list_objects_by_checksum(self): """ - Test that filtering by checksum in ListDataObjects works nicely. + Test that filtering by checksum in ListObjects works nicely. Since we can assume that checksums are unique between data objects, we can test this functionality by selecting a random - data object then using ListDataObjects with a checksum parameter + data object then using ListObjects with a checksum parameter and asserting that only one result is returned and that the result returned is the same as the one queried. """ - obj, _ = self.get_random_data_object() + obj, _ = self.get_random_object() for cs in obj['checksums']: - url = self.get_query_url('/dataobjects', checksum=cs['checksum'], checksum_type=cs['type']) - r = self.dos_request('GET', url) - self.assertEqual(len(r['data_objects']), 1) - self.assertEqual(r['data_objects'][0]['id'], obj['id']) + url = self.get_query_url('/objects', checksum=cs['checksum'], checksum_type=cs['type']) + r = self.drs_request('GET', url) + self.assertEqual(len(r['objects']), 1) + self.assertEqual(r['objects'][0]['id'], obj['id']) - @test_requires('ListDataObjects') - def test_list_data_objects_by_alias(self): + @test_requires('ListObjects') + def test_list_objects_by_alias(self): """ - Tests that filtering by alias in ListDataObjects works. We do - this by selecting a random data object with ListDataObjects - then performing another ListDataObjects query but filtering + Tests that filtering by alias in ListObjects works. We do + this by selecting a random data object with ListObjects + then performing another ListObjects query but filtering by the alias, then checking that every returned object contains the proper aliases. """ - reference_obj, _ = self.get_random_data_object() - url = self.get_query_url('/dataobjects', alias=reference_obj['aliases'][0]) - queried_objs = self.dos_request('GET', url)['data_objects'] + reference_obj, _ = self.get_random_object() + url = self.get_query_url('/objects', alias=reference_obj['aliases'][0]) + queried_objs = self.drs_request('GET', url)['objects'] for queried_obj in queried_objs: self.assertIn(reference_obj['aliases'][0], queried_obj['aliases']) - @test_requires('ListDataObjects') - def test_list_data_objects_with_nonexist_alias(self): + @test_requires('ListObjects') + def test_list_objects_with_nonexist_alias(self): """ Test to ensure that looking up a nonexistent alias returns an empty list. 
""" alias = str(uuid.uuid1()) # An alias that is unlikely to exist - body = self.dos_request('GET', self.get_query_url('/dataobjects', alias=alias)) - self.assertEqual(len(body['data_objects']), 0) + body = self.drs_request('GET', self.get_query_url('/objects', alias=alias)) + self.assertEqual(len(body['objects']), 0) - @test_requires('ListDataObjects') - def test_list_data_objects_paging(self): + @test_requires('ListObjects') + def test_list_objects_paging(self): """ Demonstrates basic paging features. """ # Test the page_size parameter - r = self.dos_request('GET', self.get_query_url('/dataobjects', page_size=3)) - self.assertEqual(len(r['data_objects']), 3) - r = self.dos_request('GET', self.get_query_url('/dataobjects', page_size=7)) - self.assertEqual(len(r['data_objects']), 7) + r = self.drs_request('GET', self.get_query_url('/objects', page_size=3)) + self.assertEqual(len(r['objects']), 3) + r = self.drs_request('GET', self.get_query_url('/objects', page_size=7)) + self.assertEqual(len(r['objects']), 7) # Next, given that the adjusting page_size works, we can test that paging - # works by making a ListDataObjects request with page_size=2, then making + # works by making a ListObjects request with page_size=2, then making # two requests with page_size=1, and comparing that the results are the same. - both = self.dos_request('GET', self.get_query_url('/dataobjects', page_size=2)) - self.assertEqual(len(both['data_objects']), 2) - first = self.dos_request('GET', self.get_query_url('/dataobjects', page_size=1)) - self.assertEqual(len(first['data_objects']), 1) - second = self.dos_request('GET', self.get_query_url('/dataobjects', page_size=1, + both = self.drs_request('GET', self.get_query_url('/objects', page_size=2)) + self.assertEqual(len(both['objects']), 2) + first = self.drs_request('GET', self.get_query_url('/objects', page_size=1)) + self.assertEqual(len(first['objects']), 1) + second = self.drs_request('GET', self.get_query_url('/objects', page_size=1, page_token=first['next_page_token'])) - self.assertEqual(len(second['data_objects']), 1) - self.assertEqual(first['data_objects'][0], both['data_objects'][0]) - self.assertEqual(second['data_objects'][0], both['data_objects'][1]) + self.assertEqual(len(second['objects']), 1) + self.assertEqual(first['objects'][0], both['objects'][0]) + self.assertEqual(second['objects'][0], both['objects'][1]) - @test_requires('ListDataObjects') - def test_list_data_object_querying(self): + @test_requires('ListObjects') + def test_list_object_querying(self): """ - Tests if ListDataObject handles multiple query parameters correctly. + Tests if ListObject handles multiple query parameters correctly. """ - # ListDataObjects supports querying by checksum, URL, and alias. + # ListObjects supports querying by checksum, URL, and alias. # To test this, let us take a data object with a unique checksum, # URL, and alias: - obj, _ = self.get_random_data_object() + obj, _ = self.get_random_object() def query(expected_results, expected_object=None, **kwargs): """ - Makes a ListDataObject query with parameters specifying + Makes a ListObject query with parameters specifying the checksum, URL, and alias of the ``obj`` data object above. 
:param int expected_results: the amount of results to expect - from the ListDataObjects request + from the ListObjects request :param dict expected_object: if expected_results is 1, then if only one object is returned from the query, assert that the returned object is this object - :param kwargs: query parameters for the ListDataObjects request + :param kwargs: query parameters for the ListObjects request """ args = { 'url': obj['urls'][0]['url'], @@ -312,16 +312,16 @@ def query(expected_results, expected_object=None, **kwargs): 'checksum_type': obj['checksums'][0]['type'] } args.update(kwargs) - url = self.get_query_url('/dataobjects', **args) - r = self.dos_request('GET', url) - self.assertEqual(len(r['data_objects']), expected_results) + url = self.get_query_url('/objects', **args) + r = self.drs_request('GET', url) + self.assertEqual(len(r['objects']), expected_results) if expected_object and expected_results == 1: - self.assertEqual(expected_object, r['data_objects'][0]) + self.assertEqual(expected_object, r['objects'][0]) rand = str(uuid.uuid1()) # If the data object we selected has a unique checksum, alias, and URL, - # then when we make a ListDataObjects requesting all three of those + # then when we make a ListObjects requesting all three of those # parameters, we should receive exactly one data object back - the one # we chose above. query(expected_results=1, expected_object=obj) @@ -338,99 +338,99 @@ def query(expected_results, expected_object=None, **kwargs): query(expected_results=0, alias=rand) query(expected_results=0, checksum=rand) - # # GetDataObject tests - @test_requires('ListDataObjects', 'GetDataObject') - def test_get_data_object(self): + # # GetObject tests + @test_requires('ListObjects', 'GetObject') + def test_get_object(self): """ - Lists Data Objects and then gets one by ID. + Lists Objects and then gets one by ID. """ - data_obj_1, url = self.get_random_data_object() - data_obj_2 = self.dos_request('GET', url)['data_object'] - # Test that the data object randomly chosen via `/dataobjects` - # can be retrieved via `/dataobjects/{data_object_id}` - self.assertEqual(data_obj_1, data_obj_2) + obj_1, url = self.get_random_object() + obj_2 = self.drs_request('GET', url)['object'] + # Test that the data object randomly chosen via `/objects` + # can be retrieved via `/objects/{object_id}` + self.assertEqual(obj_1, obj_2) - @test_requires('ListDataBundles', 'GetDataBundle') - def test_get_data_bundle(self): + @test_requires('ListBundles', 'GetBundle') + def test_get_bundle(self): """ Lists data bundles and then gets one by ID. """ - data_bdl_1, url = self.get_random_data_bundle() - data_bdl_2 = self.dos_request('GET', url)['data_bundle'] - # Test that the data object randomly chosen via `/databundles` - # can be retrieved via `/databundles/{data_bundle_id}` - self.assertEqual(data_bdl_1, data_bdl_2) + bdl_1, url = self.get_random_bundle() + bdl_2 = self.drs_request('GET', url)['bundle'] + # Test that the data object randomly chosen via `/bundles` + # can be retrieved via `/bundles/{bundle_id}` + self.assertEqual(bdl_1, bdl_2) - @test_requires('ListDataBundles') - def test_list_data_bundles_with_nonexist_alias(self): + @test_requires('ListBundles') + def test_list_bundles_with_nonexist_alias(self): """ Test to ensure that searching for data bundles with a nonexistent alias returns an empty list. 
""" alias = str(uuid.uuid1()) # An alias that is unlikely to exist - body = self.dos_request('GET', self.get_query_url('/databundles', alias=alias)) - self.assertEqual(len(body['data_bundles']), 0) + body = self.drs_request('GET', self.get_query_url('/bundles', alias=alias)) + self.assertEqual(len(body['bundles']), 0) - @test_requires('GetDataBundle') - def test_get_nonexistent_data_bundle(self): + @test_requires('GetBundle') + def test_get_nonexistent_bundle(self): """ Verifies that requesting a data bundle that doesn't exist results in HTTP 404 """ - bdl, url = self.get_random_data_bundle() - self.dos_request('GET', '/databundles/NonexistentDataBundle', - body={'data_bundle': bdl}, expected_status=404) + bdl, url = self.get_random_bundle() + self.drs_request('GET', '/bundles/NonexistentBundle', + body={'bundle': bdl}, expected_status=404) - @test_requires('UpdateDataObject') - def test_update_nonexistent_data_object(self): + @test_requires('UpdateObject') + def test_update_nonexistent_object(self): """ Verifies that trying to update a data object that doesn't exist returns HTTP 404 """ - obj, url = self.get_random_data_object() - self.dos_request('PUT', '/dataobjects/NonexistentObjID', expected_status=404, - body={'data_object': obj, 'data_object_id': obj['id']}) + obj, url = self.get_random_object() + self.drs_request('PUT', '/objects/NonexistentObjID', expected_status=404, + body={'object': obj, 'object_id': obj['id']}) - @test_requires('GetDataObject', 'ListDataObjects') - def test_update_data_object_with_bad_request(self): + @test_requires('GetObject', 'ListObjects') + def test_update_object_with_bad_request(self): """ Verifies that attempting to update a data object with a malformed request returns HTTP 400 """ - _, url = self.get_random_data_object() - self.dos_request('PUT', url, expected_status=400, body={'abc': ''}) + _, url = self.get_random_object() + self.drs_request('PUT', url, expected_status=400, body={'abc': ''}) - @test_requires('ListDataObjects', 'UpdateDataObject', 'GetDataObject') + @test_requires('ListObjects', 'UpdateObject', 'GetObject') def test_alias_update(self): """ Demonstrates updating a data object with a given alias. """ alias = 'daltest:' + str(uuid.uuid1()) # First, select a "random" object that we can test - data_object, url = self.get_random_data_object() + object, url = self.get_random_object() # Try and update with no changes. - self.dos_request('PUT', url, body={'data_object': data_object}) + self.drs_request('PUT', url, body={'object': object}) # We specify the Content-Type since Chalice looks for it when # deserializing the request body server-side # Test adding an alias (acceptably unique to try # retrieving the object by the alias) - data_object['aliases'].append(alias) + object['aliases'].append(alias) # Try and update, this time with a change. 
- update_response = self.dos_request('PUT', url, - body={'data_object': data_object}) - self.assertEqual(data_object['id'], update_response['data_object_id']) + update_response = self.drs_request('PUT', url, + body={'object': object}) + self.assertEqual(object['id'], update_response['object_id']) time.sleep(2) # Test and see if the update took place by retrieving the object # and checking its aliases - get_response = self.dos_request('GET', url) - self.assertEqual(update_response['data_object_id'], get_response['data_object']['id']) - self.assertIn(alias, get_response['data_object']['aliases']) + get_response = self.drs_request('GET', url) + self.assertEqual(update_response['object_id'], get_response['object']['id']) + self.assertIn(alias, get_response['object']['aliases']) - # Testing the update again by using a DOS ListDataObjectsRequest + # Testing the update again by using a DOS ListObjectsRequest # to locate the object by its new alias. list_request = { 'alias': alias, @@ -438,44 +438,44 @@ def test_alias_update(self): # we expect only one result. 'page_size': 10 } - list_url = self.get_query_url('/dataobjects', **list_request) - list_response = self.dos_request('GET', list_url) - self.assertEqual(1, len(list_response['data_objects'])) - self.assertIn(alias, list_response['data_objects'][0]['aliases']) + list_url = self.get_query_url('/objects', **list_request) + list_response = self.drs_request('GET', list_url) + self.assertEqual(1, len(list_response['objects'])) + self.assertIn(alias, list_response['objects'][0]['aliases']) # # Tear down and remove the test alias - # params['body']['data_object']['aliases'].remove(alias) - # self.dos_request('PUT', url, **params) + # params['body']['object']['aliases'].remove(alias) + # self.drs_request('PUT', url, **params) - @test_requires('ListDataObjects', 'UpdateDataObject') - def test_full_data_object_update(self): + @test_requires('ListObjects', 'UpdateObject') + def test_full_object_update(self): """ Demonstrates updating multiple fields of a data object at once. This incidentally also tests object conversion. 
""" # First, select a "random" object that we can test - data_object, url = self.get_random_data_object() + object, url = self.get_random_object() # Make a new data object that is different from the data object we retrieved attributes = { # 'name' and 'description' are optional fields and might not be present - 'name': data_object.get('name', '') + 'test-suffix', - # See DataBiosphere/dos-azul-lambda#87 - # 'description': data_object.get('description', '') + 'Change This', + 'name': object.get('name', '') + 'test-suffix', + # See Biosphere/drs-azul-lambda#87 + # 'description': object.get('description', '') + 'Change This', 'urls': [ {'url': 'https://cgl.genomics.ucsc.edu/'}, - {'url': 'https://github.com/DataBiosphere'} + {'url': 'https://github.com/Biosphere'} ] } - data_object.update(attributes) + object.update(attributes) # Now update the old data object with the new attributes we added - self.dos_request('PUT', url, body={'data_object': data_object}) + self.drs_request('PUT', url, body={'object': object}) time.sleep(2) # Give the server some time to catch up # Test and see if the update took place - get_response = self.dos_request('GET', url)['data_object'] + get_response = self.drs_request('GET', url)['object'] # We only compare the change attributes as DOS implementations # can update timestamps server-side - self.assertEqual(get_response['name'], data_object['name']) - self.assertEqual(get_response['urls'], data_object['urls']) + self.assertEqual(get_response['name'], object['name']) + self.assertEqual(get_response['urls'], object['urls']) diff --git a/python/test/test_compliance.py b/python/test/test_compliance.py index 1e55b37a5..3aad4c052 100644 --- a/python/test/test_compliance.py +++ b/python/test/test_compliance.py @@ -1,8 +1,8 @@ # -*- coding: utf-8 -*- import werkzeug.datastructures -import ga4gh.dos.server -from ga4gh.dos.test.compliance import AbstractComplianceTest +import ga4gh.drs.server +from ga4gh.drs.test.compliance import AbstractComplianceTest # We set this so that `nose` doesn't try and run the abstract tests. # (If that happens, all of the tests fail since :meth:`_make_request` @@ -12,8 +12,8 @@ class TestCompliance(AbstractComplianceTest): """ - Runs the :class:`~ga4gh.dos.test.compliance.AbstractComplianceTest` - against :mod:`ga4gh.dos.server`. + Runs the :class:`~ga4gh.drs.test.compliance.AbstractComplianceTest` + against :mod:`ga4gh.drs.server`. """ # See above - if we don't explicitly set :var:`__test__` here, # this test suite won't run as we adjust the value of the variable @@ -22,24 +22,24 @@ class TestCompliance(AbstractComplianceTest): @classmethod def setUpClass(cls): - # :mod:`ga4gh.dos.server` is built on top of :mod:`connexion`, + # :mod:`ga4gh.drs.server` is built on top of :mod:`connexion`, # which is built on top of :mod:`flask`, which is built on top # of :mod:`werkzeug`, which means we can do some cool nice # things with testing. 
- app = ga4gh.dos.server.configure_app().app + app = ga4gh.drs.server.configure_app().app cls.client = app.test_client() # Populate our new server with some test data objects and bundles - for data_obj in cls.generate_data_objects(250): - cls.dos_request('POST', '/dataobjects', body={'data_object': data_obj}) - for data_bdl in cls.generate_data_bundles(250): - cls.dos_request('POST', '/databundles', body={'data_bundle': data_bdl}) + for data_obj in cls.generate_objects(250): + cls.drs_request('POST', '/objects', body={'object': data_obj}) + for data_bdl in cls.generate_bundles(250): + cls.drs_request('POST', '/bundles', body={'bundle': data_bdl}) @classmethod def _make_request(cls, meth, path, headers=None, body=None): # For documentation on this function call, see # :class:`werkzeug.test.EnvironBuilder` and :meth:`werkzeug.test.Client.get`. headers = werkzeug.datastructures.Headers(headers) - r = cls.client.open(method=meth, path='/ga4gh/dos/v1' + path, + r = cls.client.open(method=meth, path='/ga4gh/drs/v1' + path, data=body, headers=headers) return r.data, r.status_code diff --git a/python/test/test_package.py b/python/test/test_package.py index 55e9b750d..a038f5384 100644 --- a/python/test/test_package.py +++ b/python/test/test_package.py @@ -13,9 +13,9 @@ class TestPackage(unittest.TestCase): def setUpClass(cls): cwd = os.path.dirname(os.path.realpath(__file__)) spec_dir = os.path.join(cwd, '../../openapi') - cls.swagger_path = os.path.join(spec_dir, 'data_object_service.swagger.yaml') - cls.smartapi_path = os.path.join(spec_dir, 'data_object_service.smartapi.yaml') - cls.openapi_path = os.path.join(spec_dir, 'data_object_service.openapi.yaml') + cls.swagger_path = os.path.join(spec_dir, 'data_repository_service.swagger.yaml') + cls.smartapi_path = os.path.join(spec_dir, 'data_repository_service.smartapi.yaml') + cls.openapi_path = os.path.join(spec_dir, 'data_repository_service.openapi.yaml') # The :func:`unittest.skipUnless` calls depend on class variables, # which means that we can't decorate the test cases conventionally @@ -26,7 +26,7 @@ def setUpClass(cls): cls.test_smartapi_schema_validity = smartapi_dec(cls.test_smartapi_schema_validity) def test_version_consensus(self): - from ga4gh.dos import __version__ + from ga4gh.drs import __version__ with open(self.swagger_path, 'r') as f: spec_version = yaml.safe_load(f)['info']['version'] assert __version__ == spec_version @@ -76,31 +76,31 @@ def test_smartapi_schema_validity(self): def test_chalice_schema_generation(self): """ - Validate that the schema generated by :func:`ga4gh.dos.schema.from_chalice_routes` + Validate that the schema generated by :func:`ga4gh.drs.schema.from_chalice_routes` is valid. 
""" - from ga4gh.dos.schema import from_chalice_routes + from ga4gh.drs.schema import from_chalice_routes routes = { # Test a path that does not exist in the schema '/PathThatDoesNotExist': {'GET': None}, # Test a valid path with a nonexistent method - '/ga4gh/dos/v1/databundles': {'MethodThatDoesNotExist': None}, + '/ga4gh/drs/v1/bundles': {'MethodThatDoesNotExist': None}, # Test a path with a different case than what is defined in the schema - '/GA4GH/DOS/V1/DATABUNDLES/{data_bundle_id}': {'GET': None}, + '/GA4GH/DRS/V1/BUNDLES/{bundle_id}': {'GET': None}, # Test multiple methods - '/ga4gh/dos/v1/dataobjects/{data_object_id}': {'GET': None, + '/ga4gh/drs/v1/objects/{object_id}': {'GET': None, 'PUT': None} } - schema = from_chalice_routes(routes, base_path='/ga4gh/dos/v1') + schema = from_chalice_routes(routes, base_path='/ga4gh/drs/v1') paths = schema['paths'] self.assertNotIn('/PathThatDoesNotExist', paths.keys()) # Test that base path is correctly stripped - self.assertNotIn('/ga4gh/dos/v1/databundles', paths.keys()) - self.assertIn('/databundles/{data_bundle_id}', paths.keys()) - self.assertNotIn('MethodThatDoesNotExist', paths['/databundles'].keys()) - self.assertIn('get', paths['/dataobjects/{data_object_id}'].keys()) - self.assertIn('put', paths['/dataobjects/{data_object_id}'].keys()) - self.assertNotIn('/dataobjects', paths.keys()) + self.assertNotIn('/ga4gh/drs/v1/bundles', paths.keys()) + self.assertIn('/bundles/{bundle_id}', paths.keys()) + self.assertNotIn('MethodThatDoesNotExist', paths['/bundles'].keys()) + self.assertIn('get', paths['/objects/{object_id}'].keys()) + self.assertIn('put', paths['/objects/{object_id}'].keys()) + self.assertNotIn('/objects', paths.keys()) # Make sure that the schema is intact - self.assertIn('200', paths['/dataobjects/{data_object_id}']['get']['responses'].keys()) + self.assertIn('200', paths['/objects/{object_id}']['get']['responses'].keys()) diff --git a/python/test/test_server.py b/python/test/test_server.py index 5986045f3..b550b193b 100644 --- a/python/test/test_server.py +++ b/python/test/test_server.py @@ -6,11 +6,11 @@ import bravado.exception import jsonschema.exceptions -import ga4gh.dos -import ga4gh.dos.test -import ga4gh.dos.client +import ga4gh.drs +import ga4gh.drs.test +import ga4gh.drs.client -SERVER_URL = 'http://localhost:8080/ga4gh/dos/v1' +SERVER_URL = 'http://localhost:8080/ga4gh/drs/v1' logger = logging.getLogger(__name__) logger.setLevel(logging.INFO) logging.captureWarnings(True) @@ -20,13 +20,13 @@ logging.getLogger('swagger_spec_validator.validator20').setLevel(logging.INFO) -class TestServer(ga4gh.dos.test.DataObjectServiceTest): +class TestServer(ga4gh.drs.test.DataRepositoryServiceTest): @classmethod def setUpClass(cls): - cls._server_process = subprocess.Popen(['ga4gh_dos_server'], stdout=subprocess.PIPE, + cls._server_process = subprocess.Popen(['ga4gh_drs_server'], stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=False) time.sleep(2) - local_client = ga4gh.dos.client.Client(SERVER_URL) + local_client = ga4gh.drs.client.Client(SERVER_URL) cls._models = local_client.models cls._client = local_client.client @@ -36,49 +36,49 @@ def tearDownClass(cls): cls._server_process.kill() cls._server_process.wait() - def generate_data_bundle(self, **kwargs): + def generate_bundle(self, **kwargs): """ - Generates a DataBundle with bravado. - Same arguments as :meth:`generate_data_object`. + Generates a Bundle with bravado. + Same arguments as :meth:`generate_object`. 
""" - data_bdl_model = self._models.get_model('DataBundle') - data_bdl = next(self.generate_data_bundles(1)) - data_bdl.update(kwargs) - return data_bdl_model.unmarshal(data_bdl) + bdl_model = self._models.get_model('Bundle') + bdl = next(self.generate_bundles(1)) + bdl.update(kwargs) + return bdl_model.unmarshal(bdl) - def generate_data_object(self, **kwargs): + def generate_object(self, **kwargs): """ - Generates a DataObject with bravado. + Generates a Object with bravado. :param kwargs: fields to set in the generated data object """ - data_obj_model = self._models.get_model('DataObject') - data_obj = next(self.generate_data_objects(1)) - data_obj.update(kwargs) - return data_obj_model.unmarshal(data_obj) + obj_model = self._models.get_model('Object') + obj = next(self.generate_objects(1)) + obj.update(kwargs) + return obj_model.unmarshal(obj) def request(self, operation_id, query={}, **params): """ - Make a request to the DOS server with :class:`ga4gh.dos.client.Client`. + Make a request to the DOS server with :class:`ga4gh.drs.client.Client`. :param str operation_id: the name of the operation ID to call (e.g. - ListDataBundles, DeleteDataObject, etc.) + ListBundles, DeleteObject, etc.) :param dict query: parameters to include in the query / path :param \*\*params: parameters to include in the request body (that would normally be provided to the Request model) :returns: response body of the request as a schema model (e.g. - ListDataBundlesResponse) + ListBundlesResponse) """ request_name = operation_id + 'Request' # These two in particular are special cases as they are the only # models that utilize query parameters - if request_name in ['ListDataBundlesRequest', 'ListDataObjectsRequest']: + if request_name in ['ListBundlesRequest', 'ListObjectsRequest']: params = self._models.get_model(request_name)(**params).marshal() elif request_name in self._models.swagger_spec.definitions: params = {'body': self._models.get_model(request_name)(**params)} params.update(query) return getattr(self._client, operation_id)(**params).result() - def assertSameDataObject(self, data_obj_1, data_obj_2, check_version=True): + def assertSameObject(self, obj_1, obj_2, check_version=True): """ Verifies that the two provided data objects are the same by comparing them key-by-key. @@ -96,86 +96,86 @@ def assertSameDataObject(self, data_obj_1, data_obj_2, check_version=True): ignored = ['created', 'updated'] if not check_version: ignored.append('version') - for k in data_obj_1.__dict__['_Model__dict'].keys(): + for k in obj_1.__dict__['_Model__dict'].keys(): if k in ignored: continue - error = "Mismatch on '%s': %s != %s" % (k, data_obj_1[k], data_obj_2[k]) - self.assertEqual(data_obj_1[k], data_obj_2[k], error) + error = "Mismatch on '%s': %s != %s" % (k, obj_1[k], obj_2[k]) + self.assertEqual(obj_1[k], obj_2[k], error) return True - def assertSameDataBundle(self, *args, **kwargs): + def assertSameBundle(self, *args, **kwargs): """ - Wrapper around :meth:`assertSameDataObject`. Has the exact same + Wrapper around :meth:`assertSameObject`. Has the exact same arguments and functionality, as the method by which data objects and data bundles are compared are similar. This method is provided so that the test code can be semantically correct. 
""" - return self.assertSameDataObject(*args, **kwargs) + return self.assertSameObject(*args, **kwargs) - def test_create_data_object(self): - """Smoke test to verify functionality of the CreateDataObject endpoint.""" + def test_create_object(self): + """Smoke test to verify functionality of the CreateObject endpoint.""" # First, create a data object. - data_obj = self.generate_data_object() - response = self.request('CreateDataObject', data_object=data_obj) + obj = self.generate_object() + response = self.request('CreateObject', object=obj) # Then, verify that the data object id returned by the server is the # same id that we sent to it. - self.assertEqual(response['data_object_id'], data_obj.id, + self.assertEqual(response['object_id'], obj.id, "Mismatch between data object ID in request and response") # Now that we know that things look fine at the surface level, # verify that we can retrieve the data object by its ID. - response = self.request('GetDataObject', data_object_id=data_obj.id) + response = self.request('GetObject', object_id=obj.id) # Finally, ensure that the returned data object is the same as the # one we sent. - self.assertSameDataObject(data_obj, response.data_object) + self.assertSameObject(obj, response.object) def test_duplicate_checksums(self): """ validate expected behavior of multiple creates of same checksum """ - # Create a data object (:var:`data_obj_1`) and save its checksum + # Create a data object (:var:`obj_1`) and save its checksum # for later. - data_obj_1 = self.generate_data_object() - # There's some bug that causes a RecursionError if :var:`data_obj_1_checksum` - # is passed to :meth:`self._client.ListDataObjects` without first being + obj_1 = self.generate_object() + # There's some bug that causes a RecursionError if :var:`obj_1_checksum` + # is passed to :meth:`self._client.ListObjects` without first being # casted to a string... - data_obj_1_checksum = str(data_obj_1.checksums[0].checksum) - data_obj_1_checksum_type = str(data_obj_1.checksums[0].type) - self.request('CreateDataObject', data_object=data_obj_1) - # Create another data object (:var:`data_obj_2`) but with the - # same checksum as :var:`data_obj_1`. - data_obj_2 = self.generate_data_object() - data_obj_2.checksums[0].checksum = data_obj_1_checksum - data_obj_2.checksums[0].type = data_obj_1_checksum_type - self.request('CreateDataObject', data_object=data_obj_2) + obj_1_checksum = str(obj_1.checksums[0].checksum) + obj_1_checksum_type = str(obj_1.checksums[0].type) + self.request('CreateObject', object=obj_1) + # Create another data object (:var:`obj_2`) but with the + # same checksum as :var:`obj_1`. + obj_2 = self.generate_object() + obj_2.checksums[0].checksum = obj_1_checksum + obj_2.checksums[0].type = obj_1_checksum_type + self.request('CreateObject', object=obj_2) # There are now two data objects with the same checksum on the - # server. We can retrieve them using a ListDataObjects request. + # server. We can retrieve them using a ListObjects request. # Even though we're only expecting two data objects to be # returned by this query, we specify a high page_size - that way, # if we receive more than two data objects in the response, we # know something is up. 
- response = self.request('ListDataObjects', page_size=100, - checksum=data_obj_1_checksum, - checksum_type=data_obj_1_checksum_type) - self.assertEqual(len(response.data_objects), 2) + response = self.request('ListObjects', page_size=100, + checksum=obj_1_checksum, + checksum_type=obj_1_checksum_type) + self.assertEqual(len(response.objects), 2) # Finally, confirm that the server returned both data objects # that we created, and that they're all intact. try: - self.assertSameDataObject(data_obj_1, response.data_objects[0]) + self.assertSameObject(obj_1, response.objects[0]) except AssertionError: - self.assertSameDataObject(data_obj_2, response.data_objects[0]) + self.assertSameObject(obj_2, response.objects[0]) try: - self.assertSameDataObject(data_obj_2, response.data_objects[1]) + self.assertSameObject(obj_2, response.objects[1]) except AssertionError: - self.assertSameDataObject(data_obj_1, response.data_objects[1]) - - def test_update_data_object(self): - # Create a data object using CreateDataObject, then retrieve it - # using GetDataObject to make sure it exists. - old_data_obj = self.generate_data_object() - self.request('CreateDataObject', data_object=old_data_obj) - response = self.request('GetDataObject', data_object_id=old_data_obj.id) - server_data_obj = response.data_object - self.assertSameDataObject(old_data_obj, server_data_obj) + self.assertSameObject(obj_1, response.objects[1]) + + def test_update_object(self): + # Create a data object using CreateObject, then retrieve it + # using GetObject to make sure it exists. + old_obj = self.generate_object() + self.request('CreateObject', object=old_obj) + response = self.request('GetObject', object_id=old_obj.id) + server_obj = response.object + self.assertSameObject(old_obj, server_obj) # Now that we have a shiny new data object, let's update all of # its attributes - we can do this quickly by generating a new # data object and updating all of the attributes of the old object @@ -183,184 +183,209 @@ def test_update_data_object(self): # need to be careful that the id of the data object we send in the # request body is the same as the original data object, or the data # object's id will be changed, rendering this exercise moot.) - new_data_obj = self.generate_data_object(id=old_data_obj.id) - self.request('UpdateDataObject', data_object=new_data_obj, - query={'data_object_id': old_data_obj.id}) - response = self.request('GetDataObject', data_object_id=old_data_obj.id) - server_data_obj = response.data_object - # The data object should now be updated. If we use the GetDataObject + new_obj = self.generate_object(id=old_obj.id) + self.request('UpdateObject', object=new_obj, + query={'object_id': old_obj.id}) + response = self.request('GetObject', object_id=old_obj.id) + server_obj = response.object + # The data object should now be updated. If we use the GetObject # endpoint to retrieve the updated data object from the server, # it should be the same as the one we have in memory. - response = self.request('GetDataObject', data_object_id=old_data_obj.id) - server_data_obj = response.data_object - self.assertSameDataObject(server_data_obj, new_data_obj, check_version=False) + response = self.request('GetObject', object_id=old_obj.id) + server_obj = response.object + self.assertSameObject(server_obj, new_obj, check_version=False) # TODO: DOS server currently does not support updating a data object id but # it should. 
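# Editorial sketch (not part of the patch): the renamed operations exercised by
# the tests above, issued through the bravado-based client against a locally
# running demo server (the same setup as TestServer.setUpClass). Field names
# mirror the renamed schema used in these tests and are otherwise assumptions.
from ga4gh.drs.client import Client

drs = Client(url='http://localhost:8080/ga4gh/drs/v1')
info = drs.client.GetServiceInfo().result()
listing = drs.client.ListObjects(page_size=10).result()
for obj in listing.objects:
    print(obj.id)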
- # def test_update_data_object_id(self): + # def test_update_object_id(self): # """ # Test that updating a data object's id works correctly # """ # # Create a data object - # data_obj_1 = self.generate_data_object() - # self.request('CreateDataObject', data_object=data_obj_1) + # obj_1 = self.generate_object() + # self.request('CreateObject', object=obj_1) # # Confirm that the data object we just created exists server-side - # response = self.request('GetDataObject', data_object_id=data_obj_1.id) - # self.assertSameDataObject(data_obj_1, response.data_object) + # response = self.request('GetObject', object_id=obj_1.id) + # self.assertSameObject(obj_1, response.object) # # Update the id of the data object we created to something different - # data_obj_2 = response.data_object - # data_obj_2.id = 'new-data-object-id' - # self.request('UpdateDataObject', data_object=data_obj_2, - # query={'data_object_id': data_obj_1.id}) + # obj_2 = response.object + # obj_2.id = 'new-data-object-id' + # self.request('UpdateObject', object=obj_2, + # query={'object_id': obj_1.id}) # # Try to retrieve the data object by its new id and its old id # # The former should succeed: - # response = self.request('GetDataObject', data_object_id=data_obj_2.id) - # self.assertSameDataObject(response.data_object, data_obj_2) + # response = self.request('GetObject', object_id=obj_2.id) + # self.assertSameObject(response.object, obj_2) # # And the latter should fail: # with self.assertRaises(bravado.exception.HTTPNotFound) as ctx: - # self.request('GetDataObject', data_object_id=data_obj_1.id) + # self.request('GetObject', object_id=obj_1.id) # self.assertEqual(ctx.exception.status_code, 404) - def test_data_object_long_serialization(self): + def test_object_long_serialization(self): # Specify `size` as an int gte 2^31 - 1 (int32 / Javascript's # maximum int size) but lte 2^63 - 1 (int64 / maximum int size # in schema) to test json serialization/casting (see #63) - data_obj = self.generate_data_object(size=2**63 - 1) - self.request('CreateDataObject', data_object=data_obj) + obj = self.generate_object(size=2**63 - 1) + self.request('CreateObject', object=obj) # Now check to make sure that nothing was lost in transit - retrieved_obj = self.request('GetDataObject', data_object_id=data_obj.id).data_object - self.assertEqual(data_obj.size, retrieved_obj.size) + retrieved_obj = self.request('GetObject', object_id=obj.id).object + self.assertEqual(obj.size, retrieved_obj.size) - def test_delete_data_object(self): + def test_delete_object(self): # Create a data object - data_obj = self.generate_data_object() - self.request('CreateDataObject', data_object=data_obj) + obj = self.generate_object() + self.request('CreateObject', object=obj) # Make sure it exists! 
-        response = self.request('GetDataObject', data_object_id=data_obj.id)
-        self.assertSameDataObject(data_obj, response.data_object)
+        response = self.request('GetObject', object_id=obj.id)
+        self.assertSameObject(obj, response.object)
         # Begone foul data object
-        self.request('DeleteDataObject', data_object_id=data_obj.id)
+        self.request('DeleteObject', object_id=obj.id)
         # Make sure it's gone
         with self.assertRaises(bravado.exception.HTTPNotFound) as ctx:
-            self.request('GetDataObject', data_object_id=data_obj.id)
+            self.request('GetObject', object_id=obj.id)
         self.assertEqual(ctx.exception.status_code, 404)

-    def test_list_data_object_querying(self):
-        data_obj = self.generate_data_object()
-        self.request('CreateDataObject', data_object=data_obj)
+    def test_list_object_querying(self):
+        obj = self.generate_object()
+        self.request('CreateObject', object=obj)
         # We should be able to retrieve the data object by a unique alias, ...
-        results = self.request('ListDataObjects', query={'alias': data_obj.aliases[0]})
-        self.assertEqual(len(results['data_objects']), 1)
-        results = self.request('ListDataObjects',  # by a unique checksum...
-                               query={'checksum': data_obj.checksums[0].checksum,
-                                      'checksum_type': data_obj.checksums[0].type})
-        self.assertEqual(len(results['data_objects']), 1)
-        results = self.request('ListDataObjects',  # and by a unique url..
-                               query={'url': data_obj.urls[0].url})
-        self.assertEqual(len(results['data_objects']), 1)
-        # The more advanced ListDataObjects testing is left to :meth:`ComplianceTest.test_list_data_object_querying`.
-
-    def test_data_object_versions(self):
-        data_obj = self.generate_data_object()
-        self.request('CreateDataObject', data_object=data_obj)
-        # Make a GetDataObjectVersions request to see retrieve all the
+        results = self.request('ListObjects', query={'alias': obj.aliases[0]})
+        self.assertEqual(len(results['objects']), 1)
+        results = self.request('ListObjects',  # by a unique checksum...
+                               query={'checksum': obj.checksums[0].checksum,
+                                      'checksum_type': obj.checksums[0].type})
+        self.assertEqual(len(results['objects']), 1)
+        results = self.request('ListObjects',  # and by a unique url...
+                               query={'url': obj.urls[0].url})
+        self.assertEqual(len(results['objects']), 1)
+        # The more advanced ListObjects testing is left to :meth:`ComplianceTest.test_list_object_querying`.
+
+    def test_object_versions(self):
+        obj = self.generate_object()
+        self.request('CreateObject', object=obj)
+        # Make a GetObjectVersions request to retrieve all the
         # stored versions of this data object. As we've just created it,
         # there should only be one version.
-        r = self.request('GetDataObjectVersions', data_object_id=data_obj.id)
-        self.assertEqual(len(r['data_objects']), 1)
-        data_obj.version = 'great-version'  # Now make a new version and upload it
-        data_obj.name = 'greatest-change'  # technically unnecessary, but just in case
-        self.request('UpdateDataObject', data_object=data_obj,
-                     query={'data_object_id': data_obj.id})
-        # Now that we've added another version, a GetDataObjectVersions
+        r = self.request('GetObjectVersions', object_id=obj.id)
+        self.assertEqual(len(r['objects']), 1)
+        obj.version = 'great-version'  # Now make a new version and upload it
+        obj.name = 'greatest-change'  # technically unnecessary, but just in case
+        self.request('UpdateObject', object=obj,
+                     query={'object_id': obj.id})
+        # Now that we've added another version, a GetObjectVersions
         # query should confirm that there are now two versions
-        r = self.request('GetDataObjectVersions', data_object_id=data_obj.id)
-        self.assertEqual(len(r['data_objects']), 2)
+        r = self.request('GetObjectVersions', object_id=obj.id)
+        self.assertEqual(len(r['objects']), 2)

-    def test_data_bundles(self):
+    def test_bundles(self):
         ids = []  # Create data objects to populate the data bundle with
         names = []
         aliases = []
         for i in range(10):
-            data_obj = self.generate_data_object()
-            ids.append(data_obj.id)
-            names.append(data_obj.name)
-            aliases.append(data_obj.aliases[0])
-            self.request('CreateDataObject', data_object=data_obj)
+            obj = self.generate_object()
+            ids.append(obj.id)
+            names.append(obj.name)
+            aliases.append(obj.aliases[0])
+            self.request('CreateObject', object=obj)
         # Make sure that the data objects we just created exist
         for id_ in ids:
-            self.request('GetDataObject', data_object_id=id_)
+            self.request('GetObject', object_id=id_)
         # Mint a data bundle with the data objects we just created then
         # check to verify its existence
-        data_bundle = self.generate_data_bundle(data_object_ids=ids)
-        self.request('CreateDataBundle', data_bundle=data_bundle)
-        server_bdl = self.request('GetDataBundle', data_bundle_id=data_bundle.id).data_bundle
-        self.assertSameDataBundle(server_bdl, data_bundle)
+        bundle = self.generate_bundle(object_ids=ids)
+        self.request('CreateBundle', bundle=bundle)
+        server_bdl = self.request('GetBundle', bundle_id=bundle.id).bundle
+        self.assertSameBundle(server_bdl, bundle)
         logger.info("..........Update that Bundle.................")
         server_bdl.aliases = ['ghi']
-        update_data_bundle = server_bdl
-        update_response = self.request('UpdateDataBundle', data_bundle=update_data_bundle,
-                                       query={'data_bundle_id': data_bundle.id})
+        update_bundle = server_bdl
+        update_response = self.request('UpdateBundle', bundle=update_bundle,
+                                       query={'bundle_id': bundle.id})
         logger.info("..........Get that Bundle.................")
-        updated_bundle = self.request('GetDataBundle', data_bundle_id=update_response['data_bundle_id']).data_bundle
+        updated_bundle = self.request('GetBundle', bundle_id=update_response['bundle_id']).bundle
         logger.info('updated_bundle.aliases: %r', updated_bundle.aliases)
         logger.info('updated_bundle.updated: %r', updated_bundle.updated)
-        logger.info('data_bundle.aliases: %r', data_bundle.aliases)
-        logger.info('data_bundle.updated: %r', data_bundle.updated)
+        logger.info('bundle.aliases: %r', bundle.aliases)
+        logger.info('bundle.updated: %r', bundle.updated)
         self.assertEqual(updated_bundle.aliases[0], 'ghi')
-        logger.info("..........List Data Bundles...............")
-        list_response = self.request('ListDataBundles')
-        logger.info(len(list_response.data_bundles))
+        logger.info("..........List Bundles...............")
+        list_response = self.request('ListBundles')
+        logger.info(len(list_response.bundles))
         logger.info("..........Get all Versions of a Bundle...............")
-        versions_response = self.request('GetDataBundleVersions', data_bundle_id=data_bundle.id)
-        logger.info(len(versions_response.data_bundles))
+        versions_response = self.request('GetBundleVersions', bundle_id=bundle.id)
+        logger.info(len(versions_response.bundles))
         logger.info("..........Get an Object in a Bundle..............")
-        data_bundle = self.request('GetDataBundle', data_bundle_id=data_bundle.id).data_bundle
-        data_object = self.request('GetDataObject', data_object_id=data_bundle.data_object_ids[0]).data_object
-        logger.info(data_object.urls)
+        bundle = self.request('GetBundle', bundle_id=bundle.id).bundle
+        object = self.request('GetObject', object_id=bundle.object_ids[0]).object
+        logger.info(object.urls)
         logger.info("..........Get all Objects in a Bundle..............")
-        data_bundle = self.request('GetDataBundle', data_bundle_id=data_bundle.id).data_bundle
+        bundle = self.request('GetBundle', bundle_id=bundle.id).bundle
         bundle_objects = []
-        for data_object_id in data_bundle.data_object_ids:
-            bundle_objects.append(self._client.GetDataObject(
-                data_object_id=data_object_id).result().data_object)
+        for object_id in bundle.object_ids:
+            bundle_objects.append(self._client.GetObject(
+                object_id=object_id).result().object)
         logger.info([x.name for x in bundle_objects])
         logger.info("..........Delete the Bundle...............")
-        delete_response = self.request('DeleteDataBundle', data_bundle_id=data_bundle.id)
-        logger.info(delete_response.data_bundle_id)
+        delete_response = self.request('DeleteBundle', bundle_id=bundle.id)
+        logger.info(delete_response.bundle_id)
         with self.assertRaises(bravado.exception.HTTPNotFound):
-            self.request('GetDataBundle', data_bundle_id=update_response['data_bundle_id'])
+            self.request('GetBundle', bundle_id=update_response['bundle_id'])

-        logger.info("..........Page through a listing of Data Bundles......")
+        logger.info("..........Page through a listing of Bundles......")
         for i in range(100):
             num = "BDL{}".format(i)
-            my_data_bundle = self.generate_data_bundle(name=num, aliases=[num], data_object_ids=data_bundle.data_object_ids)
-            self.request('CreateDataBundle', data_bundle=my_data_bundle)
-        list_response = self.request('ListDataBundles', page_size=10)
-        ids = [x['id'] for x in list_response.data_bundles]
+            my_bundle = self.generate_bundle(name=num, aliases=[num], object_ids=bundle.object_ids)
+            self.request('CreateBundle', bundle=my_bundle)
+        list_response = self.request('ListBundles', page_size=10)
+        ids = [x['id'] for x in list_response.bundles]
         logger.info(list_response.next_page_token)
         logger.info(ids)
-        list_response = self.request('ListDataBundles', page_size=10, page_token=list_response.next_page_token)
-        ids = [x['id'] for x in list_response.data_bundles]
+        list_response = self.request('ListBundles', page_size=10, page_token=list_response.next_page_token)
+        ids = [x['id'] for x in list_response.bundles]
         logger.info(ids)

-        logger.info("..........List Data Bundles by alias..............")
-        alias_list_response = self.request('ListDataBundles', alias=list_response.data_bundles[0].aliases[0])
-        logger.info(list_response.data_bundles[0].aliases[0])
-        logger.info(alias_list_response.data_bundles[0].aliases[0])
+        logger.info("..........List Bundles by alias..............")
+        alias_list_response = self.request('ListBundles', alias=list_response.bundles[0].aliases[0])
+        logger.info(list_response.bundles[0].aliases[0])
+        logger.info(alias_list_response.bundles[0].aliases[0])

-    def test_get_nonexistent_databundle(self):
-        """Test querying GetDataBundle with a nonexistent data bundle."""
+    def test_list_bundle_querying(self):
+        ids = []  # Create data objects to populate the data bundle with
+        names = []
+        aliases = []
+        for i in range(10):
+            obj = self.generate_object()
+            ids.append(obj.id)
+            names.append(obj.name)
+            aliases.append(obj.aliases[0])
+            self.request('CreateObject', object=obj)
+        # Make sure that the data objects we just created exist
+        for id_ in ids:
+            self.request('GetObject', object_id=id_)
+
+        # Mint a data bundle with the data objects we just created then
+        # check to verify its existence
+        bundle = self.generate_bundle(object_ids=ids)
+        self.request('CreateBundle', bundle=bundle)
+        results = self.request('ListBundles', query={'alias': bundle.aliases[0]})
+        self.assertEqual(len(results['bundles']), 1)
+        results = self.request('ListBundles',  # by a unique checksum...
+                               query={'checksum': bundle.checksums[0].checksum,
+                                      'checksum_type': bundle.checksums[0].type})
+        self.assertEqual(len(results['bundles']), 1)
+
+    def test_get_nonexistent_bundle(self):
+        """Test querying GetBundle with a nonexistent data bundle."""
         with self.assertRaises(bravado.exception.HTTPNotFound) as ctx:
-            self._client.GetDataBundle(data_bundle_id='nonexistent-key').result()
+            self._client.GetBundle(bundle_id='nonexistent-key').result()
         self.assertEqual(ctx.exception.status_code, 404)

     def test_schema_required(self):
@@ -368,24 +393,24 @@ def test_schema_required(self):
         Tests that the server properly rejects a request missing a parameter
         that is marked as required.
         """
-        CreateDataObjectRequest = self._models.get_model('CreateDataObjectRequest')
-        DataObject = self._models.get_model('CreateDataObjectRequest')
-        data_object = DataObject(name='random-name', size='1')  # Missing the `id` parameter
-        create_request = CreateDataObjectRequest(data_object=data_object)
+        CreateObjectRequest = self._models.get_model('CreateObjectRequest')
+        Object = self._models.get_model('CreateObjectRequest')
+        object = Object(name='random-name', size='1')  # Missing the `id` parameter
+        create_request = CreateObjectRequest(object=object)
         with self.assertRaises(jsonschema.exceptions.ValidationError) as ctx:
-            self._client.CreateDataObject(body=create_request)
+            self._client.CreateObject(body=create_request)
         self.assertIn('required property', ctx.exception.message)

     def test_service_info(self):
         r = self._client.GetServiceInfo().result()
-        self.assertEqual(ga4gh.dos.__version__, r.version)
+        self.assertEqual(ga4gh.drs.__version__, r.version)


 class TestServerWithLocalClient(TestServer):
     """
     Runs all of the test cases in the :class:`TestServer` test suite but
-    using :class:`ga4gh.dos.client.Client` when loaded locally. (In fact,
+    using :class:`ga4gh.drs.client.Client` when loaded locally. (In fact,
     this suite is exactly the same as :class:`TestServer` except with
     :meth:`setUpClass` modified to load the client locally.)

     Running all the same tests is a little overkill but they're fast enough that it
@@ -393,9 +418,9 @@ class TestServerWithLocalClient(TestServer):
     """
     @classmethod
     def setUpClass(cls):
-        cls._server_process = subprocess.Popen(['ga4gh_dos_server'], stdout=subprocess.PIPE,
+        cls._server_process = subprocess.Popen(['ga4gh_drs_server'], stdout=subprocess.PIPE,
                                                stderr=subprocess.PIPE, shell=False)
         time.sleep(2)
-        local_client = ga4gh.dos.client.Client(SERVER_URL, local=True)
+        local_client = ga4gh.drs.client.Client(SERVER_URL, local=True)
         cls._models = local_client.models
         cls._client = local_client.client
diff --git a/scripts/buildui.js b/scripts/buildui.js
new file mode 100755
index 000000000..ce0588f0a
--- /dev/null
+++ b/scripts/buildui.js
@@ -0,0 +1,21 @@
+#!/usr/bin/env node
+'use strict';
+var path = require('path');
+
+require('shelljs/global');
+set('-e');
+set('-v');
+
+mkdir('-p', 'spec')
+mkdir('-p', 'web_deploy')
+
+cp('openapi/data_repository_service.swagger.yaml', 'spec/swagger.yaml');
+
+exec('npm run swagger bundle -- -o web_deploy/swagger.json');
+exec('npm run swagger bundle -- --yaml -o web_deploy/swagger.yaml');
+
+var SWAGGER_UI_DIST = path.dirname(require.resolve('swagger-ui'));
+//rm('-rf', 'web_deploy/swagger-ui/')
+cp('-R', SWAGGER_UI_DIST, 'web_deploy/swagger-ui/')
+ls('web_deploy/swagger-ui')
+sed('-i', 'http://petstore.swagger.io/v2/swagger.json', '../swagger.json', 'web_deploy/swagger-ui/index.html')
diff --git a/scripts/fetchpages.sh b/scripts/fetchpages.sh
new file mode 100644
index 000000000..7ad867088
--- /dev/null
+++ b/scripts/fetchpages.sh
@@ -0,0 +1,19 @@
+#!/usr/bin/env bash
+
+set -e
+set -v
+
+REPO_URL="https://github.com/$TRAVIS_REPO_SLUG"
+rm -rf .ghpages-tmp
+mkdir -p .ghpages-tmp
+cd .ghpages-tmp
+git clone --depth=1 --branch=gh-pages $REPO_URL .
+if [ "$TRAVIS_BRANCH" == "master" ]; then
+cp -Rn . ../
+else
+# in case it doesn't exist
+mkdir -p preview
+cp -Rn preview ../preview/
+fi
+cd ..
+rm -rf .ghpages-tmp
diff --git a/scripts/stagepages.sh b/scripts/stagepages.sh
new file mode 100644
index 000000000..607c19ff8
--- /dev/null
+++ b/scripts/stagepages.sh
@@ -0,0 +1,27 @@
+#!/usr/bin/env bash
+
+set -e
+set -v
+
+if [ "$TRAVIS_BRANCH" != "gh-pages" ]; then
+  if [ "$TRAVIS_BRANCH" == "master" ]; then
+    if [ "$TRAVIS_PULL_REQUEST" == "false" ]; then
+      branchpath="."
+    else
+      branch=$(echo "$TRAVIS_PULL_REQUEST_BRANCH" | awk '{print tolower($0)}')
+      branchpath="preview/$branch"
+    fi
+  else
+    branch=$(echo "$TRAVIS_BRANCH" | awk '{print tolower($0)}')
+    branchpath="preview/$branch"
+  fi
+  mkdir -p "$branchpath/docs"
+  cp docs/html5/index.html "$branchpath/docs/"
+  cp docs/pdf/index.pdf "$branchpath/docs/"
+  cp docs/asciidoc/*.png "$branchpath/docs/"
+  cp openapi/data_repository_service.swagger.yaml "$branchpath/swagger.yaml"
+  cp -R web_deploy/* "$branchpath/"
+fi
+
+# do some cleanup, these cause the gh-pages deploy to break
+rm -rf node_modules
diff --git a/settings.gradle b/settings.gradle
new file mode 100644
index 000000000..a65a13387
--- /dev/null
+++ b/settings.gradle
@@ -0,0 +1 @@
+rootProject.name = 'data-repository-service-schemas'
diff --git a/setup.py b/setup.py
index aa141dd83..a1048f152 100644
--- a/setup.py
+++ b/setup.py
@@ -3,7 +3,7 @@
 
 # Get version
 sys.path.insert(0, 'python/')
-from ga4gh.dos import __version__  # noqa
+from ga4gh.drs import __version__  # noqa
 
 # First, we try to use setuptools. If it's not available locally,
 # we fall back on ez_setup.
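With the package namespace renamed from ga4gh.dos to ga4gh.drs, client code imports from the new module path. Below is a minimal, hedged sketch of a round trip against a running server, using only names that appear in the diffs above (Client and its .client/.models attributes, CreateObjectRequest, CreateObject, GetObject); the server URL, the 'Object' model name, and the field values are illustrative assumptions, not part of the patch.

```python
# Hedged sketch, not part of the patch: exercise the renamed DRS client.
from ga4gh.drs.client import Client

c = Client('http://localhost:8080/ga4gh/drs/v1')  # URL is an assumption
client, models = c.client, c.models

# 'Object' and the exact required fields are assumptions; the
# CreateObjectRequest wrapper and endpoint names come from the tests above.
Object = models.get_model('Object')
obj = Object(id='example-object', name='example.txt', size='12')

CreateObjectRequest = models.get_model('CreateObjectRequest')
client.CreateObject(body=CreateObjectRequest(object=obj)).result()

# Round trip: fetching by id should return the object we just created.
fetched = client.GetObject(object_id=obj.id).result().object
assert fetched.id == obj.id
```

This mirrors what test_create_object does, just outside the unittest harness.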
@@ -19,19 +19,19 @@
 setup(
-    name="ga4gh_dos_schemas",
-    description="GA4GH Data Object Service Schemas",
+    name="ga4gh_drs_schemas",
+    description="GA4GH Data Repository Service Schemas",
     packages=[
         "ga4gh",
-        "ga4gh.dos",
-        'ga4gh.dos.test'
+        "ga4gh.drs",
+        'ga4gh.drs.test'
     ],
     namespace_packages=["ga4gh"],
-    url="https://github.com/ga4gh/data-object-service-schemas",
+    url="https://github.com/ga4gh/data-repository-service-schemas",
     entry_points={
         'console_scripts': [
-            'ga4gh_dos_server=ga4gh.dos.server:main',
-            'ga4gh_dos_client=ga4gh.dos.client:main',
+            'ga4gh_drs_server=ga4gh.drs.server:main',
+            'ga4gh_drs_client=ga4gh.drs.client:main',
         ]
     },
     package_dir={'': 'python'},
@@ -56,8 +56,8 @@
     ],
     license='Apache License 2.0',
     package_data={
-        'ga4gh.dos': ['data_object_service.swagger.yaml'],
-        '': ['openapi/data_object_service.swagger.yaml']
+        'ga4gh.drs': ['data_repository_service.swagger.yaml'],
+        '': ['openapi/data_repository_service.swagger.yaml']
     },
     zip_safe=False,
     author="Global Alliance for Genomics and Health",
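The renamed console script registered under entry_points can be exercised end to end the same way the test suite's setUpClass does: start ga4gh_drs_server as a subprocess, then point a locally loaded client at it. A hedged sketch follows; the SERVER_URL value and the try/finally cleanup are illustrative, while the subprocess invocation, Client(SERVER_URL, local=True), and GetServiceInfo calls mirror lines in the diff above.

```python
# Hedged sketch, not part of the patch: run the dev server and query it.
import subprocess
import time

import ga4gh.drs.client

SERVER_URL = 'http://localhost:8080/ga4gh/drs/v1'  # assumed bind address

server = subprocess.Popen(['ga4gh_drs_server'], stdout=subprocess.PIPE,
                          stderr=subprocess.PIPE, shell=False)
time.sleep(2)  # give the development server a moment to start, as the tests do

try:
    # local=True mirrors TestServerWithLocalClient.setUpClass (load the schema locally).
    drs = ga4gh.drs.client.Client(SERVER_URL, local=True)
    info = drs.client.GetServiceInfo().result()
    print(info.version)
finally:
    server.terminate()
```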