Skip to content

Commit d850b0f

Browse files
authored
Merge pull request #1043 from scitran/swagger
Replace RAML with Swagger and remove ABAO tests
2 parents 1dab2a1 + d24537b commit d850b0f

File tree

387 files changed

+9012
-5379
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

387 files changed

+9012
-5379
lines changed

.github_deploy_key.enc

3.17 KB
Binary file not shown.

.gitignore

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -8,6 +8,7 @@ bootstrap.json
88
.cache
99
/.coverage*
1010
coverage.xml
11+
endpoints.json
1112
/htmlcov
1213
node_modules/
1314
/bin/accesslog.csv

.travis.yml

Lines changed: 19 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -15,12 +15,31 @@ script:
1515
- SCITRAN_PERSISTENT_DB_PORT=27017 tests/bin/run-tests-ubuntu.sh
1616

1717
after_success:
18+
- if [ "$TRAVIS_EVENT_TYPE" == "push" -o "$TRAVIS_TAG" ]; then
19+
SSH_KEY_FILE=$(mktemp -p $HOME/.ssh/);
20+
21+
openssl aes-256-cbc -K $encrypted_55750ae1fbc7_key -iv $encrypted_55750ae1fbc7_iv -in .github_deploy_key.enc -out "$SSH_KEY_FILE" -d;
22+
23+
chmod 600 "$SSH_KEY_FILE" && printf "%s\n" \
24+
"Host github.com" \
25+
" IdentityFile $SSH_KEY_FILE" \
26+
" LogLevel ERROR" >> ~/.ssh/config;
27+
28+
git config --global user.email "[email protected]";
29+
git config --global user.name "Travis CI";
30+
git config --global push.default simple;
31+
fi
1832
- if [ "$TRAVIS_BRANCH" == "master" -o "$TRAVIS_EVENT_TYPE" == "pull_request" ]; then
1933
bash <(curl -s https://codecov.io/bash) -cF python;
2034
fi
2135
- if [ "$TRAVIS_TAG" ]; then
2236
./docker/build-trigger.sh Tag "$TRAVIS_TAG" "$BUILD_TRIGGER_URL";
37+
./bin/push-docs.sh "$GIT_REMOTE" tags "$TRAVIS_TAG" "Travis Core Docs Build - ${TRAVIS_BUILD_NUMBER}";
2338
fi
2439
- if [ "$TRAVIS_EVENT_TYPE" == "push" -a "$TRAVIS_BRANCH" == "master" ]; then
2540
./docker/build-trigger.sh Branch "$TRAVIS_BRANCH" "$BUILD_TRIGGER_URL";
2641
fi
42+
- if [ "$TRAVIS_EVENT_TYPE" == "push" -a -z "$TRAVIS_TAG" ]; then
43+
./bin/push-docs.sh "$GIT_REMOTE" branches "$TRAVIS_BRANCH" "Travis Core Docs Build - ${TRAVIS_BUILD_NUMBER}";
44+
fi
45+

CONTRIBUTING.md

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -43,12 +43,12 @@ Ensure that `./tests/bin/run-tests-docker.sh -- -l` exits without errors.
4343
1. All API resources must have input validation
4444
1. New API resources should follow the error handling convention and raise webapp2.HTTPException
4545

46-
### Add RAML for API Endpoints
47-
1. Create a new resource file, called `<resource_name>.raml`, e.g., `files.raml`. Create this file in the `raml/resources` directory.
48-
1. In `api.raml`, add a line with the URL of your resource and an include directive for your resource raml file, e.g., `/files: !include resources/files.raml`.
46+
### Add Swagger for API Endpoints
47+
1. Create a new resource file, called `<resource_name>.yaml`, e.g., `files.yaml`. Create this file in the `swagger/paths` directory.
48+
1. In `index.yaml`, add an `$include` line for your resource file, e.g., `- paths/files.yaml`.
4949
1. In your resource file, define your resource. Begin by adding a `description` property with the description you wrote in step 1.
50-
1. Add example properties for both request and response. Examples should be stored in the `examples/` directory, e.g., `raml/examples/request/files.json`.
51-
1. Use [JSONSchema.net](http://jsonschema.net/) to generate a JSON schema for both request and response body. Edit the schema as necessary. Before generating your schema, scroll down and uncheck "allow additional properties". Schemas are stored in the `schemas/` directory, e.g., `raml/schemas/input/files.json`.
50+
1. Add example properties for both request and response. Examples should be stored in the `examples/` directory, e.g., `swagger/examples/request/files.json`.
51+
1. Use [JSONSchema.net](http://jsonschema.net/) to generate a JSON schema for both request and response body. Edit the schema as necessary. Before generating your schema, scroll down and uncheck "allow additional properties". Schemas are stored in the `schemas/` directory, e.g., `swagger/schemas/input/files.json`.
5252
1. Verify that the example properties pass schema validation by running the unit tests. New schemas and examples will be tested automatically. (See testing instructions below)
5353

5454
### Testing

README.md

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -11,6 +11,9 @@ SciTran Core is a RESTful HTTP API, written in Python and backed by MongoDB. It
1111

1212
### [Documentation](https://scitran.github.io/core)
1313

14+
API documentation for branches and tags can be found at `https://scitran.github.io/core/branches/<branchname>` and
15+
`https://scitran.github.io/core/tags/<tagname>`.
16+
1417
### [Contributing](https://github.com/scitran/core/blob/master/CONTRIBUTING.md)
1518

1619
### [Testing](https://github.com/scitran/core/blob/master/TESTING.md)

TESTING.md

Lines changed: 0 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -13,7 +13,6 @@ All tests are executed by default. Subsets can be run using the filtering option
1313
* To run linting, use `--lint` (`-l`)
1414
* To run unit tests, use `--unit` (`-u`)
1515
* To run integration tests, use `--integ` (`-i`)
16-
* To run abao tests, use `--abao` (`-a`)
1716
* To pass any arguments to `py.test`, use `-- PYTEST_ARGS`
1817

1918
See [py.test usage](https://docs.pytest.org/en/latest/usage.html) for more.
@@ -34,11 +33,3 @@ Without rebuilding the image, run only integration tests matching `foo`, use the
3433
```
3534

3635
**NOTE:** The mongodb version is pinned via the `MONGO_VERSION` variable in `tests/bin/run-tests-docker.sh`.
37-
38-
### Tools
39-
- [abao](https://github.com/cybertk/abao/)
40-
41-
### Testing API against RAML with Abao
42-
Abao is one of the testing tools run during our TravisCI build. It tests the API implementation against what’s defined in the RAML spec. Adding a new resource / url to the RAML spec will cause Abao to verify that resource during integration tests. Sometimes abao cannot properly test a resource (file field uploads) or a test may require chaining variable. Abao has before and after hooks for tests, written in javascript. These can be used to skip a test, inject variables into the request, or make extra assertions about the response. See tests/integration/abao in the repo for the hooks file. See [abao github readme](https://github.com/cybertk/abao/blob/master/README.md) for more information on how to use hooks.
43-
44-
Abao tests can depend on specific resources (eg. group, project, session, etc.) pre-existing in the DB. That resource loading should be maintained within `tests/integration_tests/abao/load_fixture.py` and is executed automatically via the integration test scripts at `test/bin`.

api/config.py

Lines changed: 2 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -130,7 +130,7 @@ def apply_env_variables(config):
130130
es = elasticsearch.Elasticsearch([__config['persistent']['elasticsearch_host']])
131131

132132
# validate the lists of json schemas
133-
schema_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), '../raml/schemas')
133+
schema_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), '../swagger/schemas')
134134

135135
expected_mongo_schemas = set([
136136
'acquisition.json',
@@ -157,7 +157,6 @@ def apply_env_variables(config):
157157
'avatars.json',
158158
'collection.json',
159159
'collection-update.json',
160-
'container.json',
161160
'device.json',
162161
'file.json',
163162
'file-update.json',
@@ -182,8 +181,7 @@ def apply_env_variables(config):
182181
'enginemetadata.json',
183182
'labelupload.json',
184183
'uidupload.json',
185-
'uidmatchupload.json',
186-
'search.json'
184+
'uidmatchupload.json'
187185
])
188186
mongo_schemas = set()
189187
input_schemas = set()

api/web/start.py

Lines changed: 29 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -22,6 +22,33 @@ def start_coverage():
2222

2323
start_coverage()
2424

25+
# Enable collecting endpoints for checking documentation coverage
26+
if os.environ.get("SCITRAN_COLLECT_ENDPOINTS") == "true": #pragma no cover
27+
ENDPOINTS = set()
28+
29+
def save_endpoints():
30+
print('Saving endpoints')
31+
try:
32+
results = list(sorted(ENDPOINTS))
33+
with open('endpoints.json', 'w') as f:
34+
json.dump(results, f)
35+
36+
except: #pylint: disable=bare-except
37+
print('Could not save endpoints.json: {0}'.format(traceback.format_exc()))
38+
39+
def start_collecting_endpoints():
40+
print('Collecting endpoints...')
41+
atexit.register(save_endpoints)
42+
43+
def collect_endpoint(request):
44+
ENDPOINTS.add('{0} {1}'.format(request.method, request.path))
45+
46+
start_collecting_endpoints()
47+
48+
else:
49+
def collect_endpoint(request):
50+
#pylint: disable=unused-argument
51+
pass
2552

2653
from ..api import endpoints
2754
from .. import config
@@ -43,6 +70,8 @@ def dispatcher(router, request, response):
4370
except: # pylint: disable=bare-except
4471
request.logger.error("Error setting request_id log var", exc_info=True)
4572

73+
collect_endpoint(request)
74+
4675
try:
4776
rv = router.default_dispatcher(request, response)
4877
if rv is not None:

bin/install-ubuntu.sh

Lines changed: 5 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -5,19 +5,21 @@ set -eu
55
unset CDPATH
66
cd "$( dirname "${BASH_SOURCE[0]}" )/.."
77

8-
sudo apt-get update
8+
# Add the apt repo for modern node-js, this will run apt-get update
9+
curl -sL https://deb.nodesource.com/setup_8.x | sudo bash -
10+
911
sudo apt-get install -y \
1012
build-essential \
1113
ca-certificates \
12-
curl \
1314
libatlas3-base \
1415
numactl \
1516
python-dev \
1617
libffi-dev \
1718
libssl-dev \
1819
libpcre3 \
1920
libpcre3-dev \
20-
git
21+
git \
22+
nodejs
2123

2224
sudo pip install -U pip
2325

bin/push-docs.sh

Lines changed: 160 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,160 @@
1+
#!/usr/bin/env bash
2+
# This script will build the swagger documentation and push it to the gh-pages doc
3+
# For now this should only be run on the master branch
4+
5+
set -eu
6+
7+
unset CDPATH
8+
cd "$( dirname "${BASH_SOURCE[0]}" )/.."
9+
10+
# Environment Variables:
11+
# Args: <remote> (branches|tags) <branch_or_tag_name> <commit_message>
12+
# remote: The git remote url
13+
# branch_or_tag: Whether docs should go into the branches or tags subdirectory
14+
# branch_or_tag_name: The name of the tag or branch
15+
# commit_message: The commit message
16+
17+
main() {
18+
if [ "$#" -ne 4 -o "$1" == "-h" ]; then
19+
print_usage
20+
fi
21+
22+
GIT_REMOTE=$1
23+
DOCS_SUBDIR=$2
24+
BRANCH_NAME=$3
25+
COMMIT_MESSAGE=$4
26+
27+
# Determine version string
28+
if [ "$DOCS_SUBDIR" == "branches" ]; then
29+
COMMIT_REF="$(git rev-parse --short HEAD)"
30+
DOC_VERSION="$BRANCH_NAME/$COMMIT_REF"
31+
elif [ "$DOCS_SUBDIR" == "tags" ]; then
32+
DOC_VERSION="$BRANCH_NAME"
33+
else
34+
print_usage
35+
fi
36+
37+
# Build documentation
38+
(
39+
cd swagger
40+
npm install
41+
npm run build -- "--docs-version=$DOC_VERSION"
42+
)
43+
44+
# Copy documentation
45+
if [ "$BRANCH_NAME" == "master" ]; then
46+
checkin_master
47+
else
48+
checkin_branch "$DOCS_SUBDIR/$BRANCH_NAME"
49+
fi
50+
51+
# Cleanup subdirectory
52+
rm -rf gh-pages/
53+
}
54+
55+
# Print usage and exit
56+
print_usage() {
57+
echo "Usage: $0 <remote> Branch|Tag <branch_or_tag_name> <commit_message>"
58+
exit 1
59+
}
60+
61+
# Prune branches in a subdirectory of gh-pages
62+
# subdir: The subdirectory name (branches|tags)
63+
# remote_types: The remote type for ls-remote (head|tags)
64+
prune_branches() {
65+
subdir=$1
66+
remote_types=$2
67+
if [ -d "gh-pages/${subdir}/" ]; then
68+
(
69+
cd gh-pages
70+
for branch_dir in ${subdir}/*; do
71+
branch_name="$(basename ${branch_dir})"
72+
branch_exists="$(git ls-remote --${remote_types} ${GIT_REMOTE} ${branch_name} | wc -l)"
73+
if [ "$branch_exists" -eq 0 ]; then
74+
echo "Pruning branch: ${branch_name}"
75+
git rm --quiet -rf "${branch_dir}"
76+
fi
77+
done
78+
)
79+
fi
80+
}
81+
82+
# Checkin documentation for a single branche
83+
# target_dir: The destination directory (e.g. branches/<branch_name>)
84+
checkin_branch() {
85+
target_dir=$1
86+
# We try up to 3 times, sleeping for 3 or 7 seconds between attempts
87+
for i in 3 7 100; do
88+
# Allow capture of exit code of subshell
89+
set +e
90+
(
91+
set -e
92+
93+
# Checkout gh-pages
94+
rm -rf gh-pages/
95+
git clone ${GIT_REMOTE} --branch gh-pages --single-branch gh-pages
96+
97+
# Create target directory and copy files
98+
mkdir -p "gh-pages/${target_dir}"
99+
cp -R swagger/build/swagger-ui/* "gh-pages/${target_dir}"
100+
101+
cd gh-pages
102+
if [ "$(git status --porcelain)" ]; then
103+
# Add files
104+
git add "${target_dir}*"
105+
106+
# Add any modified files, and push
107+
git commit --message "$COMMIT_MESSAGE"
108+
109+
# Push to remote repo
110+
git push --quiet
111+
else
112+
echo "No changes to commit"
113+
fi
114+
)
115+
if [ "$?" -eq "0" ]; then
116+
# Success case
117+
break
118+
elif [ "$i" -lt 100 ]; then
119+
# Failure case, sleep and retry
120+
echo "Error pushing branch docs, retrying in $i seconds."
121+
sleep $i
122+
else
123+
# Final failure case
124+
echo "Could not push branch docs, exiting"
125+
exit 1
126+
fi
127+
done
128+
129+
set -e
130+
}
131+
132+
# Prune non-existing branches and tags, and check-in master documentation, doing a force-push
133+
checkin_master() {
134+
(
135+
# Clone the gh-pages branch and prune any branches that don't exist in remotes
136+
git clone ${GIT_REMOTE} --branch gh-pages --single-branch gh-pages
137+
prune_branches branches heads
138+
prune_branches tags tags
139+
140+
# Copy currently generated documentation into gh-pages
141+
cp -R swagger/build/swagger-ui/* gh-pages/
142+
cd gh-pages/
143+
144+
if [ "$(git status --porcelain)" ]; then
145+
# Checkout a new orphan branch
146+
git checkout --quiet --orphan gh-pages-new
147+
# Add everything that still exists in this folder
148+
git add *
149+
# Commit
150+
git commit --quiet --message "$COMMIT_MESSAGE"
151+
# Force push to gh-pages
152+
git push --quiet --force --set-upstream origin gh-pages-new:gh-pages
153+
else
154+
echo "No changes to commit"
155+
fi
156+
)
157+
}
158+
159+
main "$@"
160+

0 commit comments

Comments
 (0)