Commit 99bcd32d authored by Pablo Panero's avatar Pablo Panero
Browse files

Merge branch 'dev' into 'master'

Version 0.3

See merge request webservices/cern-search/cern-search-rest-api!33
parents 6d388737 b536ffb0
......@@ -6,7 +6,7 @@
# Python compiled files
*pyc
cern_search_rest.egg-info/
cern_search_rest_api.egg-info/
# OpenShift Secrets
......
### Based on ``discourse-cern`` CI.
###
### This GitLab-CI script provides a template to publish an OpenShift resource (a Docker image and/or a template)
### shared with other users.
###
### Requirements:
### 1. An `externally managed resource` must be created before you can push the image to OpenShift.
### This step must be done by OpenShift admins, so contact them before anything else.
### 2. After step 1, you will receive the credentials of a service account with rights to publish an image and update
### a template with name '$RESOURCE'.
### 3. Adapt this .gitlab-ci.yml definition to match your deployment. In most cases, only the `variables` section needs
### to be adapted
###
### In this template, we use three different environments to represent the status of the Continuous integration build.
### 1. `dev` represents development on a custom git branch. Once there is something new pushed to a custom branch
### The CI build will create a new Image, store it in the GitLab registry with tag `latest`, import it to openshift-dev
### and run some tests on it.
### The template can be deployed to the development cluster (i.e. openshift-dev.cern.ch) with a manual trigger
### so it can be tested.
### 2. `staging` represents deployment to the development OpenShift cluster (i.e. openshift-dev.cern.ch).
### This environment runs when something is pushed with a tag and tries to replicate a deployment
### to production. The image gets built and pushed to the GitLab registry using the git tag as the Docker tag.
### Whenever this happens, the template is automatically updated and a manual trigger
### is enabled to tag the image as `stable`, all of this in the development cluster (i.e. openshift-dev.cern.ch).
### NOTE: tagging the image as `stable` will trigger a re-deploy of all the applications using it in
### `openshift-dev.cern.ch`, so do it with care!
### 3. `production` represents deployed to the production OpenShift cluster (i.e. openshift.cern.ch).
### This environment also runs when a change is pushed with a tag to master. The template is automatically
### updated on production (e.g `openshift.cern.ch`) but the image requires a manual trigger before
### it is tagged as `stable`.
### NOTE: tagging the image as `stable` will trigger a re-deploy of all the applications
### using it in `openshift.cern.ch`, so do it with care!
###
variables:
### Disable cache in Docker builds, as this has occasionally resulted in images not containing what was
### expected when multiple MRs were being built/retried.
NO_CACHE: 'true'
### Replace RESOURCE with the name of the image you want to build and publish in OpenShift
### Important! In order for this template to work, the name of the gitlab repo must match
### also the variable name
RESOURCE: cern-search-rest-api
###
### You shouldn't change the following variables
NAMESPACE: openshift
OPENSHIFT_SERVER: https://openshift-dev.cern.ch
### By default, there are 6 stages that we may use:
### Feel free to adapt this to your specific case.
stages:
- build
- tag_image
- import_image # This stage is only used when the built image is stored in the GitLab Registry
- update_template
- deploy
### 'Build' stage
### Build the image and store it in the registry. It is important that this step
......@@ -17,10 +66,10 @@ build_dev_version:
stage: build
except:
- tags
environment: master
environment: dev
tags:
- docker-image-build
script: 'echo "Building Docker Master image..."'
script: 'echo "Building Docker image..."'
### When building tags, use the git tag as the docker tag of the image
build_tagged_version:
......@@ -29,6 +78,118 @@ build_tagged_version:
- tags
tags:
- docker-image-build
script: 'echo "Building Docker Tag image..."'
script: 'echo "Building Docker image..."'
variables:
TO: ${CI_REGISTRY_IMAGE}:${CI_COMMIT_TAG}
### If a new tag is pushed it needs to be referenced into the imagestream
tag_image_dev: &tag_image
stage: tag_image
only:
- tags
environment: staging
image: gitlab-registry.cern.ch/paas-tools/openshift-client:latest
script:
- oc tag --source=docker ${CI_REGISTRY_IMAGE}:${CI_COMMIT_TAG} ${RESOURCE}:${CI_COMMIT_TAG} --token=${TOKEN} --server=${OPENSHIFT_SERVER} -n ${NAMESPACE}
variables:
TOKEN: ${SERVICE_ACCOUNT_TOKEN_DEV}
tag_image_prod:
<<: *tag_image
variables:
OPENSHIFT_SERVER: https://openshift.cern.ch
TOKEN: ${SERVICE_ACCOUNT_TOKEN_PROD}
### Import image into OpenShift. Import $CI_COMMIT_TAG if present or 'latest' if not.
import_image_dev:
stage: import_image
environment: staging
image: gitlab-registry.cern.ch/paas-tools/openshift-client:latest
script:
- oc import-image ${RESOURCE}:${CI_COMMIT_TAG:-latest} --token=${TOKEN} --server=${OPENSHIFT_SERVER} -n ${NAMESPACE}
variables:
TOKEN: ${SERVICE_ACCOUNT_TOKEN_DEV}
import_image_prod:
stage: import_image
environment: production
only:
- tags
image: gitlab-registry.cern.ch/paas-tools/openshift-client:latest
script:
- oc import-image ${RESOURCE}:${CI_COMMIT_TAG:-latest} --token=${TOKEN} --server=${OPENSHIFT_SERVER} -n ${NAMESPACE}
variables:
OPENSHIFT_SERVER: https://openshift.cern.ch
TOKEN: ${SERVICE_ACCOUNT_TOKEN_PROD}
### 'update_template' stage
### Uploads a new version of the template to OpenShift.
### For development purposes, there is a manual trigger that allows publishing a new
### template to the development cluster (i.e openshift-dev.cern.ch) even if building
### the image fails
update_template_dev:
stage: update_template
environment: dev
when: manual
image: gitlab-registry.cern.ch/paas-tools/openshift-client:latest
script:
- oc replace template --token=${TOKEN} --server=${OPENSHIFT_SERVER} -n ${NAMESPACE} -f templates/${RESOURCE}.yaml
variables:
TOKEN: ${SERVICE_ACCOUNT_TOKEN_DEV}
update_template_staging:
stage: update_template
environment: staging
only:
- tags
when: always # This will allow us to deploy the template even if building the image fails
image: gitlab-registry.cern.ch/paas-tools/openshift-client:latest
script:
- oc replace template --token=${TOKEN} --server=${OPENSHIFT_SERVER} -n ${NAMESPACE} -f templates/${RESOURCE}.yaml
variables:
TOKEN: ${SERVICE_ACCOUNT_TOKEN_DEV}
update_template_production:
stage: update_template
environment: production
only:
- tags
when: always # This will allow us to deploy the template even if building the image fails
image: gitlab-registry.cern.ch/paas-tools/openshift-client:latest
script:
- oc replace template --token=${TOKEN} --server=${OPENSHIFT_SERVER} -n ${NAMESPACE} -f templates/${RESOURCE}.yaml
variables:
OPENSHIFT_SERVER: https://openshift.cern.ch
TOKEN: ${SERVICE_ACCOUNT_TOKEN_PROD}
### 'Deploy' stage
### Publish the image with tag `stable`. NOTE: this will re-deploy all the applications using
### the `stable` tag (by default, all of them) so do it with care. In the `production` environment,
### tagging with `stable` requires launching a manual trigger
deploy_staging:
stage: deploy
environment: staging
only:
- tags
when: manual
image: gitlab-registry.cern.ch/paas-tools/openshift-client:latest
script:
- oc --token=${TOKEN} --server=${OPENSHIFT_SERVER} -n ${NAMESPACE} tag ${RESOURCE}:${CI_COMMIT_TAG} ${RESOURCE}:stable
variables:
OPENSHIFT_SERVER: https://openshift-dev.cern.ch
TOKEN: ${SERVICE_ACCOUNT_TOKEN_DEV}
GIT_STRATEGY: none
deploy_production:
stage: deploy
environment: production
only:
- tags
when: manual
image: gitlab-registry.cern.ch/paas-tools/openshift-client:latest
script:
- oc --token=${TOKEN} --server=${OPENSHIFT_SERVER} -n ${NAMESPACE} tag ${RESOURCE}:${CI_COMMIT_TAG} ${RESOURCE}:stable
variables:
OPENSHIFT_SERVER: https://openshift.cern.ch
TOKEN: ${SERVICE_ACCOUNT_TOKEN_PROD}
GIT_STRATEGY: none
......@@ -40,8 +40,8 @@ RUN chmod g=u /etc/passwd && \
chown -R invenio:root /code
# uWSGI configuration
ARG UWSGI_WSGI_MODULE=cern_search_rest.wsgi:application
ENV UWSGI_WSGI_MODULE ${UWSGI_WSGI_MODULE:-cern_search_rest.wsgi:application}
ARG UWSGI_WSGI_MODULE=cern_search_rest_api.wsgi:application
ENV UWSGI_WSGI_MODULE ${UWSGI_WSGI_MODULE:-cern_search_rest_api.wsgi:application}
ARG UWSGI_PORT=5000
ENV UWSGI_PORT ${UWSGI_PORT:-5000}
ARG UWSGI_PROCESSES=2
......
......@@ -457,7 +457,7 @@ stringData:
Starting command throw ssl:
```bash
gunicorn -b :5000 --certfile=ssl.crt --keyfile=ssl.key cern_search_rest.wsgi
gunicorn -b :5000 --certfile=ssl.crt --keyfile=ssl.key cern_search_rest_api.wsgi
```
## Configuration
......@@ -471,11 +471,11 @@ your server, which if it is deployed in OpenShift would be like ``you-project-na
- DEFAULT_DOC_TYPE: The value of the default document type. It must be part of the default index,
defined in the above variable.
- SEARCH_INSTANCE: The name of the instance. A folder with this name must exist in
``cern_search_rest/modules/cernsearch/jsonschemas/``, therefore, upon index creation an alias will be set for all the
``cern_search_rest_api/modules/cernsearch/jsonschemas/``, therefore, upon index creation an alias will be set for all the
indexes (mappings existing in this folder). These indexes will be the ones over which searches will be performed.
- ADMIN_USER: Superuser's email account. If it is a non-CERN account, it should go without a domain
(``@cern.ch``).
The rest of the configuration comes from parameters that are configurable through the Invenio Framework or Flask.
The full list of the overwritten ones can be found in ``cern_search_rest/config.py``, nonetheless, if needed
The full list of the overwritten ones can be found in ``cern_search_rest_api/config.py``, nonetheless, if needed
others can be overwritten (check documentation of the corresponding project in the
[invenio repository](www.github.com/inveniosoftware)).
\ No newline at end of file
[invenio repository](www.github.com/inveniosoftware)).
{
"title": "Indico Event schema v0.0.1",
"id": "http://localhost:5000/schemas/indico/event_v0.0.1.json",
"$schema": "http://localhost:5000/schemas/indico/event_v0.0.1.json",
"type": "object",
"properties": {
"_access": {
"type": "object",
"properties": {
"owner":{
"type": "array",
"items": {
"type": "string"
}
},
"read":{
"type": "array",
"items": {
"type": "string"
}
},
"update":{
"type": "array",
"items": {
"type": "string"
}
},
"delete":{
"type": "array",
"items": {
"type": "string"
}
}
}
},
"id": {
"type": "string",
"description": "Event id."
},
"category_path": {
"type": "array",
"items": {
"type": "string"
},
"description": "Event category path (ordered array)."
},
"title": {
"type": "string",
"description": "Event title."
},
"start_date": {
"type": "string",
"format": "date-time",
"description": "Event start date."
},
"creation_date": {
"type": "string",
"format": "date-time",
"description": "Event creation date."
},
"end_date": {
"type": "string",
"format": "date-time",
"description": "Event end date."
},
"location": {
"type": "string",
"description": "Event location."
},
"description": {
"type": "string",
"description": "Event description."
},
"speakers_chairs": {
"type": "array",
"items": {
"type": "object",
"properties": {
"name": {
"type": "string",
"description": "Event speaker/chair name."
},
"affiliation": {
"type": "string",
"description": "Event speaker/chair affiliation."
}
}
}
},
"event_type": {
"type": "string",
"description": "Event type."
},
"custom_pid": {
"type": "string"
},
"$schema": {
"type": "string"
}
}
}
\ No newline at end of file
{
"settings": {
"index.percolator.map_unmapped_fields_as_string": true,
"index.mapping.total_fields.limit": 30
},
"mappings": {
"event_v0.0.1": {
"numeric_detection": true,
"_meta": {
"_owner": "indico@cern.ch"
},
"properties": {
"_access": {
"type": "nested",
"properties": {
"owner":{
"type": "keyword"
},
"read": {
"type": "keyword"
},
"update": {
"type": "keyword"
},
"delete": {
"type": "keyword"
}
}
},
"id": {
"type": "keyword",
"analyzer": "not_analyzed"
},
"event_type": {
"type": "keyword",
"analyzer": "not_analyzed"
},
"category_path": {
"type": "keyword",
"analyzer": "not_analyzed"
},
"title": {
"type": "text",
"fields": {
"english": {
"type": "text",
"analyzer": "english"
},
"french": {
"type": "text",
"analyzer": "french"
}
}
},
"start_date": {
"type": "date",
"analyzer": "not_analyzed"
},
"creation_date": {
"type": "date",
"analyzer": "not_analyzed"
},
"end_date": {
"type": "date",
"analyzer": "not_analyzed"
},
"location": {
"type": "text"
},
"description": {
"type": "text",
"fields": {
"english": {
"type": "text",
"analyzer": "english"
},
"french": {
"type": "text",
"analyzer": "french"
}
}
},
"speakers_chairs": {
"type": "nested",
"properties": {
"name": {
"type": "text",
"fields": {
"keyword": {
"type": "keyword",
"analyzer": "not_analyzed"
}
}
},
"affiliation": {
"type": "text"
}
}
},
"custom_pid": {
"type": "text",
"index": "not_analyzed"
},
"$schema": {
"type": "text",
"index": "not_analyzed"
}
}
}
}
}
\ No newline at end of file
......@@ -10,7 +10,8 @@ from invenio_oauthclient.contrib import cern
from .modules.cernsearch.permissions import (record_read_permission_factory,
record_create_permission_factory,
record_update_permission_factory,
record_delete_permission_factory)
record_delete_permission_factory,
record_read_list_permission_factory)
def _(x):
......@@ -31,7 +32,7 @@ CERN_REMOTE_APP["params"].update(dict(request_token_params={
"scope": "Name Email Bio Groups",
}))
CERN_REMOTE_APP["authorized_handler"] = 'cern_search_rest.modules.cernsearch.handlers:cern_authorized_signup_handler'
CERN_REMOTE_APP["authorized_handler"] = 'cern_search_rest_api.modules.cernsearch.handlers:cern_authorized_signup_handler'
OAUTHCLIENT_REMOTE_APPS = dict(
cern=CERN_REMOTE_APP,
......@@ -46,7 +47,7 @@ ACCOUNTS_SESSION_ACTIVITY_ENABLED = False
# Admin
# =====
ADMIN_PERMISSION_FACTORY = 'cern_search_rest.modules.cernsearch.permissions:admin_permission_factory'
ADMIN_PERMISSION_FACTORY = 'cern_search_rest_api.modules.cernsearch.permissions:admin_permission_factory'
# JSON Schemas configuration
# ==========================
......@@ -74,7 +75,7 @@ SEARCH_MAPPINGS = [os.getenv('CERN_SEARCH_INSTANCE', 'cernsearch-test')]
#: Records REST API configuration
_Record_PID = 'pid(recid, record_class="cern_search_rest.modules.cernsearch.api:CernSearchRecord")' # TODO
_Record_PID = 'pid(recid, record_class="cern_search_rest_api.modules.cernsearch.api:CernSearchRecord")' # TODO
RECORDS_REST_ENDPOINTS = dict(
docid=dict(
......@@ -86,12 +87,12 @@ RECORDS_REST_ENDPOINTS = dict(
item_route='/record/<{0}:pid_value>'.format(_Record_PID),
list_route='/records/',
links_factory_imp='invenio_records_rest.links:default_links_factory',
record_class='cern_search_rest.modules.cernsearch.api:CernSearchRecord',
record_class='cern_search_rest_api.modules.cernsearch.api:CernSearchRecord',
record_serializers={
'application/json': ('invenio_records_rest.serializers'
':json_v1_response'),
},
search_class='cern_search_rest.modules.cernsearch.search.RecordCERNSearch',
search_class='cern_search_rest_api.modules.cernsearch.search.RecordCERNSearch',
search_index='cernsearch-test', # TODO: Parametrize this, along with the rest of the config file
search_serializers={
'application/json': ('invenio_records_rest.serializers'
......@@ -99,6 +100,7 @@ RECORDS_REST_ENDPOINTS = dict(
},
max_result_window=10000,
read_permission_factory_imp=record_read_permission_factory,
read_list_permission_factory_imp=record_read_list_permission_factory,
create_permission_factory_imp=record_create_permission_factory,
update_permission_factory_imp=record_update_permission_factory,
delete_permission_factory_imp=record_delete_permission_factory,
......
......@@ -21,7 +21,7 @@ from invenio_oauthclient.handlers import oauth_error_handler, token_session_key,
token_getter, get_session_next_url
from cern_search_rest.modules.cernsearch.utils import get_user_provides
from cern_search_rest_api.modules.cernsearch.utils import get_user_provides
@oauth_error_handler
......
Supports Markdown
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment